diff --git a/Darwin/bin/pip3 b/Darwin/bin/pip3 index cda228a..4e2a61e 100644 --- a/Darwin/bin/pip3 +++ b/Darwin/bin/pip3 @@ -1,11 +1,10 @@ #!/usr/bin/env python3 - -# -*- coding: utf-8 -*- -import re +# EASY-INSTALL-ENTRY-SCRIPT: 'pip==7.1.2','console_scripts','pip3' +__requires__ = 'pip==7.1.2' import sys - -from pip import main +from pkg_resources import load_entry_point if __name__ == '__main__': - sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) - sys.exit(main()) + sys.exit( + load_entry_point('pip==7.1.2', 'console_scripts', 'pip3')() + ) diff --git a/Darwin/bin/python3 b/Darwin/bin/python3 index bdceba4..f549cea 120000 --- a/Darwin/bin/python3 +++ b/Darwin/bin/python3 @@ -1 +1 @@ -python3.4 \ No newline at end of file +python3.5 \ No newline at end of file diff --git a/Darwin/bin/python3.4 b/Darwin/bin/python3.4 deleted file mode 100755 index c797052..0000000 Binary files a/Darwin/bin/python3.4 and /dev/null differ diff --git a/Darwin/bin/python3.5 b/Darwin/bin/python3.5 new file mode 100755 index 0000000..acddb14 Binary files /dev/null and b/Darwin/bin/python3.5 differ diff --git a/Darwin/include/python3.4m/fileutils.h b/Darwin/include/python3.4m/fileutils.h deleted file mode 100644 index e9bad80..0000000 --- a/Darwin/include/python3.4m/fileutils.h +++ /dev/null @@ -1,79 +0,0 @@ -#ifndef Py_FILEUTILS_H -#define Py_FILEUTILS_H - -#ifdef __cplusplus -extern "C" { -#endif - -PyAPI_FUNC(PyObject *) _Py_device_encoding(int); - -PyAPI_FUNC(wchar_t *) _Py_char2wchar( - const char *arg, - size_t *size); - -PyAPI_FUNC(char*) _Py_wchar2char( - const wchar_t *text, - size_t *error_pos); - -#if defined(HAVE_STAT) && !defined(MS_WINDOWS) -PyAPI_FUNC(int) _Py_wstat( - const wchar_t* path, - struct stat *buf); -#endif - -#ifdef HAVE_STAT -PyAPI_FUNC(int) _Py_stat( - PyObject *path, - struct stat *statbuf); -#endif - -#ifndef Py_LIMITED_API -PyAPI_FUNC(int) _Py_open( - const char *pathname, - int flags); -#endif - -PyAPI_FUNC(FILE *) _Py_wfopen( - const wchar_t *path, - const wchar_t *mode); - -PyAPI_FUNC(FILE*) _Py_fopen( - const char *pathname, - const char *mode); - -PyAPI_FUNC(FILE*) _Py_fopen_obj( - PyObject *path, - const char *mode); - -#ifdef HAVE_READLINK -PyAPI_FUNC(int) _Py_wreadlink( - const wchar_t *path, - wchar_t *buf, - size_t bufsiz); -#endif - -#ifdef HAVE_REALPATH -PyAPI_FUNC(wchar_t*) _Py_wrealpath( - const wchar_t *path, - wchar_t *resolved_path, - size_t resolved_path_size); -#endif - -PyAPI_FUNC(wchar_t*) _Py_wgetcwd( - wchar_t *buf, - size_t size); - -#ifndef Py_LIMITED_API -PyAPI_FUNC(int) _Py_get_inheritable(int fd); - -PyAPI_FUNC(int) _Py_set_inheritable(int fd, int inheritable, - int *atomic_flag_works); - -PyAPI_FUNC(int) _Py_dup(int fd); -#endif - -#ifdef __cplusplus -} -#endif - -#endif /* !Py_FILEUTILS_H */ diff --git a/Darwin/include/python3.4m/genobject.h b/Darwin/include/python3.4m/genobject.h deleted file mode 100644 index 65f1ecf..0000000 --- a/Darwin/include/python3.4m/genobject.h +++ /dev/null @@ -1,46 +0,0 @@ - -/* Generator object interface */ - -#ifndef Py_LIMITED_API -#ifndef Py_GENOBJECT_H -#define Py_GENOBJECT_H -#ifdef __cplusplus -extern "C" { -#endif - -struct _frame; /* Avoid including frameobject.h */ - -typedef struct { - PyObject_HEAD - /* The gi_ prefix is intended to remind of generator-iterator. */ - - /* Note: gi_frame can be NULL if the generator is "finished" */ - struct _frame *gi_frame; - - /* True if generator is being executed. 
*/ - char gi_running; - - /* The code object backing the generator */ - PyObject *gi_code; - - /* List of weak reference. */ - PyObject *gi_weakreflist; -} PyGenObject; - -PyAPI_DATA(PyTypeObject) PyGen_Type; - -#define PyGen_Check(op) PyObject_TypeCheck(op, &PyGen_Type) -#define PyGen_CheckExact(op) (Py_TYPE(op) == &PyGen_Type) - -PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *); -PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *); -PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); -PyObject *_PyGen_Send(PyGenObject *, PyObject *); -PyAPI_FUNC(void) _PyGen_Finalize(PyObject *self); - - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GENOBJECT_H */ -#endif /* Py_LIMITED_API */ diff --git a/Darwin/include/python3.4m/graminit.h b/Darwin/include/python3.4m/graminit.h deleted file mode 100644 index 3ec949a..0000000 --- a/Darwin/include/python3.4m/graminit.h +++ /dev/null @@ -1,84 +0,0 @@ -/* Generated by Parser/pgen */ - -#define single_input 256 -#define file_input 257 -#define eval_input 258 -#define decorator 259 -#define decorators 260 -#define decorated 261 -#define funcdef 262 -#define parameters 263 -#define typedargslist 264 -#define tfpdef 265 -#define varargslist 266 -#define vfpdef 267 -#define stmt 268 -#define simple_stmt 269 -#define small_stmt 270 -#define expr_stmt 271 -#define testlist_star_expr 272 -#define augassign 273 -#define del_stmt 274 -#define pass_stmt 275 -#define flow_stmt 276 -#define break_stmt 277 -#define continue_stmt 278 -#define return_stmt 279 -#define yield_stmt 280 -#define raise_stmt 281 -#define import_stmt 282 -#define import_name 283 -#define import_from 284 -#define import_as_name 285 -#define dotted_as_name 286 -#define import_as_names 287 -#define dotted_as_names 288 -#define dotted_name 289 -#define global_stmt 290 -#define nonlocal_stmt 291 -#define assert_stmt 292 -#define compound_stmt 293 -#define if_stmt 294 -#define while_stmt 295 -#define for_stmt 296 -#define try_stmt 297 -#define with_stmt 298 -#define with_item 299 -#define except_clause 300 -#define suite 301 -#define test 302 -#define test_nocond 303 -#define lambdef 304 -#define lambdef_nocond 305 -#define or_test 306 -#define and_test 307 -#define not_test 308 -#define comparison 309 -#define comp_op 310 -#define star_expr 311 -#define expr 312 -#define xor_expr 313 -#define and_expr 314 -#define shift_expr 315 -#define arith_expr 316 -#define term 317 -#define factor 318 -#define power 319 -#define atom 320 -#define testlist_comp 321 -#define trailer 322 -#define subscriptlist 323 -#define subscript 324 -#define sliceop 325 -#define exprlist 326 -#define testlist 327 -#define dictorsetmaker 328 -#define classdef 329 -#define arglist 330 -#define argument 331 -#define comp_iter 332 -#define comp_for 333 -#define comp_if 334 -#define encoding_decl 335 -#define yield_expr 336 -#define yield_arg 337 diff --git a/Darwin/include/python3.4m/opcode.h b/Darwin/include/python3.4m/opcode.h deleted file mode 100644 index 0936f2d..0000000 --- a/Darwin/include/python3.4m/opcode.h +++ /dev/null @@ -1,159 +0,0 @@ -#ifndef Py_OPCODE_H -#define Py_OPCODE_H -#ifdef __cplusplus -extern "C" { -#endif - - -/* Instruction opcodes for compiled code */ - -#define POP_TOP 1 -#define ROT_TWO 2 -#define ROT_THREE 3 -#define DUP_TOP 4 -#define DUP_TOP_TWO 5 -#define NOP 9 - -#define UNARY_POSITIVE 10 -#define UNARY_NEGATIVE 11 -#define UNARY_NOT 12 - -#define UNARY_INVERT 15 - -#define BINARY_POWER 19 - -#define BINARY_MULTIPLY 20 - -#define BINARY_MODULO 22 -#define BINARY_ADD 23 -#define 
BINARY_SUBTRACT 24 -#define BINARY_SUBSCR 25 -#define BINARY_FLOOR_DIVIDE 26 -#define BINARY_TRUE_DIVIDE 27 -#define INPLACE_FLOOR_DIVIDE 28 -#define INPLACE_TRUE_DIVIDE 29 - -#define STORE_MAP 54 -#define INPLACE_ADD 55 -#define INPLACE_SUBTRACT 56 -#define INPLACE_MULTIPLY 57 - -#define INPLACE_MODULO 59 -#define STORE_SUBSCR 60 -#define DELETE_SUBSCR 61 - -#define BINARY_LSHIFT 62 -#define BINARY_RSHIFT 63 -#define BINARY_AND 64 -#define BINARY_XOR 65 -#define BINARY_OR 66 -#define INPLACE_POWER 67 -#define GET_ITER 68 -#define PRINT_EXPR 70 -#define LOAD_BUILD_CLASS 71 -#define YIELD_FROM 72 - -#define INPLACE_LSHIFT 75 -#define INPLACE_RSHIFT 76 -#define INPLACE_AND 77 -#define INPLACE_XOR 78 -#define INPLACE_OR 79 -#define BREAK_LOOP 80 -#define WITH_CLEANUP 81 - -#define RETURN_VALUE 83 -#define IMPORT_STAR 84 - -#define YIELD_VALUE 86 -#define POP_BLOCK 87 -#define END_FINALLY 88 -#define POP_EXCEPT 89 - -#define HAVE_ARGUMENT 90 /* Opcodes from here have an argument: */ - -#define STORE_NAME 90 /* Index in name list */ -#define DELETE_NAME 91 /* "" */ -#define UNPACK_SEQUENCE 92 /* Number of sequence items */ -#define FOR_ITER 93 -#define UNPACK_EX 94 /* Num items before variable part + - (Num items after variable part << 8) */ - -#define STORE_ATTR 95 /* Index in name list */ -#define DELETE_ATTR 96 /* "" */ -#define STORE_GLOBAL 97 /* "" */ -#define DELETE_GLOBAL 98 /* "" */ - -#define LOAD_CONST 100 /* Index in const list */ -#define LOAD_NAME 101 /* Index in name list */ -#define BUILD_TUPLE 102 /* Number of tuple items */ -#define BUILD_LIST 103 /* Number of list items */ -#define BUILD_SET 104 /* Number of set items */ -#define BUILD_MAP 105 /* Always zero for now */ -#define LOAD_ATTR 106 /* Index in name list */ -#define COMPARE_OP 107 /* Comparison operator */ -#define IMPORT_NAME 108 /* Index in name list */ -#define IMPORT_FROM 109 /* Index in name list */ - -#define JUMP_FORWARD 110 /* Number of bytes to skip */ -#define JUMP_IF_FALSE_OR_POP 111 /* Target byte offset from beginning of code */ -#define JUMP_IF_TRUE_OR_POP 112 /* "" */ -#define JUMP_ABSOLUTE 113 /* "" */ -#define POP_JUMP_IF_FALSE 114 /* "" */ -#define POP_JUMP_IF_TRUE 115 /* "" */ - -#define LOAD_GLOBAL 116 /* Index in name list */ - -#define CONTINUE_LOOP 119 /* Start of loop (absolute) */ -#define SETUP_LOOP 120 /* Target address (relative) */ -#define SETUP_EXCEPT 121 /* "" */ -#define SETUP_FINALLY 122 /* "" */ - -#define LOAD_FAST 124 /* Local variable number */ -#define STORE_FAST 125 /* Local variable number */ -#define DELETE_FAST 126 /* Local variable number */ - -#define RAISE_VARARGS 130 /* Number of raise arguments (1, 2 or 3) */ -/* CALL_FUNCTION_XXX opcodes defined below depend on this definition */ -#define CALL_FUNCTION 131 /* #args + (#kwargs<<8) */ -#define MAKE_FUNCTION 132 /* #defaults + #kwdefaults<<8 + #annotations<<16 */ -#define BUILD_SLICE 133 /* Number of items */ - -#define MAKE_CLOSURE 134 /* same as MAKE_FUNCTION */ -#define LOAD_CLOSURE 135 /* Load free variable from closure */ -#define LOAD_DEREF 136 /* Load and dereference from closure cell */ -#define STORE_DEREF 137 /* Store into cell */ -#define DELETE_DEREF 138 /* Delete closure cell */ - -/* The next 3 opcodes must be contiguous and satisfy - (CALL_FUNCTION_VAR - CALL_FUNCTION) & 3 == 1 */ -#define CALL_FUNCTION_VAR 140 /* #args + (#kwargs<<8) */ -#define CALL_FUNCTION_KW 141 /* #args + (#kwargs<<8) */ -#define CALL_FUNCTION_VAR_KW 142 /* #args + (#kwargs<<8) */ - -#define SETUP_WITH 143 - -/* Support for opargs more 
than 16 bits long */ -#define EXTENDED_ARG 144 - -#define LIST_APPEND 145 -#define SET_ADD 146 -#define MAP_ADD 147 - -#define LOAD_CLASSDEREF 148 - -/* EXCEPT_HANDLER is a special, implicit block type which is created when - entering an except handler. It is not an opcode but we define it here - as we want it to be available to both frameobject.c and ceval.c, while - remaining private.*/ -#define EXCEPT_HANDLER 257 - - -enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE, - PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD}; - -#define HAS_ARG(op) ((op) >= HAVE_ARGUMENT) - -#ifdef __cplusplus -} -#endif -#endif /* !Py_OPCODE_H */ diff --git a/Darwin/include/python3.4m/pytime.h b/Darwin/include/python3.4m/pytime.h deleted file mode 100644 index b0fc6d0..0000000 --- a/Darwin/include/python3.4m/pytime.h +++ /dev/null @@ -1,105 +0,0 @@ -#ifndef Py_LIMITED_API -#ifndef Py_PYTIME_H -#define Py_PYTIME_H - -#include "pyconfig.h" /* include for defines */ -#include "object.h" - -/************************************************************************** -Symbols and macros to supply platform-independent interfaces to time related -functions and constants -**************************************************************************/ -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef HAVE_GETTIMEOFDAY -typedef struct timeval _PyTime_timeval; -#else -typedef struct { - time_t tv_sec; /* seconds since Jan. 1, 1970 */ - long tv_usec; /* and microseconds */ -} _PyTime_timeval; -#endif - -/* Structure used by time.get_clock_info() */ -typedef struct { - const char *implementation; - int monotonic; - int adjustable; - double resolution; -} _Py_clock_info_t; - -/* Similar to POSIX gettimeofday but cannot fail. If system gettimeofday - * fails or is not available, fall back to lower resolution clocks. - */ -PyAPI_FUNC(void) _PyTime_gettimeofday(_PyTime_timeval *tp); - -/* Similar to _PyTime_gettimeofday() but retrieve also information on the - * clock used to get the current time. */ -PyAPI_FUNC(void) _PyTime_gettimeofday_info( - _PyTime_timeval *tp, - _Py_clock_info_t *info); - -#define _PyTime_ADD_SECONDS(tv, interval) \ -do { \ - tv.tv_usec += (long) (((long) interval - interval) * 1000000); \ - tv.tv_sec += (time_t) interval + (time_t) (tv.tv_usec / 1000000); \ - tv.tv_usec %= 1000000; \ -} while (0) - -#define _PyTime_INTERVAL(tv_start, tv_end) \ - ((tv_end.tv_sec - tv_start.tv_sec) + \ - (tv_end.tv_usec - tv_start.tv_usec) * 0.000001) - -#ifndef Py_LIMITED_API - -typedef enum { - /* Round towards zero. */ - _PyTime_ROUND_DOWN=0, - /* Round away from zero. */ - _PyTime_ROUND_UP -} _PyTime_round_t; - -/* Convert a number of seconds, int or float, to time_t. */ -PyAPI_FUNC(int) _PyTime_ObjectToTime_t( - PyObject *obj, - time_t *sec, - _PyTime_round_t); - -/* Convert a time_t to a PyLong. */ -PyAPI_FUNC(PyObject *) _PyLong_FromTime_t( - time_t sec); - -/* Convert a PyLong to a time_t. */ -PyAPI_FUNC(time_t) _PyLong_AsTime_t( - PyObject *obj); - -/* Convert a number of seconds, int or float, to a timeval structure. - usec is in the range [0; 999999] and rounded towards zero. - For example, -1.2 is converted to (-2, 800000). */ -PyAPI_FUNC(int) _PyTime_ObjectToTimeval( - PyObject *obj, - time_t *sec, - long *usec, - _PyTime_round_t); - -/* Convert a number of seconds, int or float, to a timespec structure. - nsec is in the range [0; 999999999] and rounded towards zero. - For example, -1.2 is converted to (-2, 800000000). 
*/ -PyAPI_FUNC(int) _PyTime_ObjectToTimespec( - PyObject *obj, - time_t *sec, - long *nsec, - _PyTime_round_t); -#endif - -/* Dummy to force linking. */ -PyAPI_FUNC(void) _PyTime_Init(void); - -#ifdef __cplusplus -} -#endif - -#endif /* Py_PYTIME_H */ -#endif /* Py_LIMITED_API */ diff --git a/Darwin/include/python3.4m/Python-ast.h b/Darwin/include/python3.5m/Python-ast.h similarity index 86% rename from Darwin/include/python3.4m/Python-ast.h rename to Darwin/include/python3.5m/Python-ast.h index 67d677b..3bc015f 100644 --- a/Darwin/include/python3.4m/Python-ast.h +++ b/Darwin/include/python3.5m/Python-ast.h @@ -15,9 +15,9 @@ typedef struct _slice *slice_ty; typedef enum _boolop { And=1, Or=2 } boolop_ty; -typedef enum _operator { Add=1, Sub=2, Mult=3, Div=4, Mod=5, Pow=6, LShift=7, - RShift=8, BitOr=9, BitXor=10, BitAnd=11, FloorDiv=12 } - operator_ty; +typedef enum _operator { Add=1, Sub=2, Mult=3, MatMult=4, Div=5, Mod=6, Pow=7, + LShift=8, RShift=9, BitOr=10, BitXor=11, BitAnd=12, + FloorDiv=13 } operator_ty; typedef enum _unaryop { Invert=1, Not=2, UAdd=3, USub=4 } unaryop_ty; @@ -63,12 +63,13 @@ struct _mod { } v; }; -enum _stmt_kind {FunctionDef_kind=1, ClassDef_kind=2, Return_kind=3, - Delete_kind=4, Assign_kind=5, AugAssign_kind=6, For_kind=7, - While_kind=8, If_kind=9, With_kind=10, Raise_kind=11, - Try_kind=12, Assert_kind=13, Import_kind=14, - ImportFrom_kind=15, Global_kind=16, Nonlocal_kind=17, - Expr_kind=18, Pass_kind=19, Break_kind=20, Continue_kind=21}; +enum _stmt_kind {FunctionDef_kind=1, AsyncFunctionDef_kind=2, ClassDef_kind=3, + Return_kind=4, Delete_kind=5, Assign_kind=6, + AugAssign_kind=7, For_kind=8, AsyncFor_kind=9, While_kind=10, + If_kind=11, With_kind=12, AsyncWith_kind=13, Raise_kind=14, + Try_kind=15, Assert_kind=16, Import_kind=17, + ImportFrom_kind=18, Global_kind=19, Nonlocal_kind=20, + Expr_kind=21, Pass_kind=22, Break_kind=23, Continue_kind=24}; struct _stmt { enum _stmt_kind kind; union { @@ -80,12 +81,18 @@ struct _stmt { expr_ty returns; } FunctionDef; + struct { + identifier name; + arguments_ty args; + asdl_seq *body; + asdl_seq *decorator_list; + expr_ty returns; + } AsyncFunctionDef; + struct { identifier name; asdl_seq *bases; asdl_seq *keywords; - expr_ty starargs; - expr_ty kwargs; asdl_seq *body; asdl_seq *decorator_list; } ClassDef; @@ -116,6 +123,13 @@ struct _stmt { asdl_seq *orelse; } For; + struct { + expr_ty target; + expr_ty iter; + asdl_seq *body; + asdl_seq *orelse; + } AsyncFor; + struct { expr_ty test; asdl_seq *body; @@ -133,6 +147,11 @@ struct _stmt { asdl_seq *body; } With; + struct { + asdl_seq *items; + asdl_seq *body; + } AsyncWith; + struct { expr_ty exc; expr_ty cause; @@ -180,11 +199,11 @@ struct _stmt { enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4, IfExp_kind=5, Dict_kind=6, Set_kind=7, ListComp_kind=8, SetComp_kind=9, DictComp_kind=10, GeneratorExp_kind=11, - Yield_kind=12, YieldFrom_kind=13, Compare_kind=14, - Call_kind=15, Num_kind=16, Str_kind=17, Bytes_kind=18, - NameConstant_kind=19, Ellipsis_kind=20, Attribute_kind=21, - Subscript_kind=22, Starred_kind=23, Name_kind=24, - List_kind=25, Tuple_kind=26}; + Await_kind=12, Yield_kind=13, YieldFrom_kind=14, + Compare_kind=15, Call_kind=16, Num_kind=17, Str_kind=18, + Bytes_kind=19, NameConstant_kind=20, Ellipsis_kind=21, + Attribute_kind=22, Subscript_kind=23, Starred_kind=24, + Name_kind=25, List_kind=26, Tuple_kind=27}; struct _expr { enum _expr_kind kind; union { @@ -245,6 +264,10 @@ struct _expr { asdl_seq *generators; } GeneratorExp; + 
struct { + expr_ty value; + } Await; + struct { expr_ty value; } Yield; @@ -263,8 +286,6 @@ struct _expr { expr_ty func; asdl_seq *args; asdl_seq *keywords; - expr_ty starargs; - expr_ty kwargs; } Call; struct { @@ -406,11 +427,14 @@ mod_ty _Py_Suite(asdl_seq * body, PyArena *arena); stmt_ty _Py_FunctionDef(identifier name, arguments_ty args, asdl_seq * body, asdl_seq * decorator_list, expr_ty returns, int lineno, int col_offset, PyArena *arena); -#define ClassDef(a0, a1, a2, a3, a4, a5, a6, a7, a8, a9) _Py_ClassDef(a0, a1, a2, a3, a4, a5, a6, a7, a8, a9) +#define AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7) _Py_AsyncFunctionDef(a0, a1, a2, a3, a4, a5, a6, a7) +stmt_ty _Py_AsyncFunctionDef(identifier name, arguments_ty args, asdl_seq * + body, asdl_seq * decorator_list, expr_ty returns, + int lineno, int col_offset, PyArena *arena); +#define ClassDef(a0, a1, a2, a3, a4, a5, a6, a7) _Py_ClassDef(a0, a1, a2, a3, a4, a5, a6, a7) stmt_ty _Py_ClassDef(identifier name, asdl_seq * bases, asdl_seq * keywords, - expr_ty starargs, expr_ty kwargs, asdl_seq * body, - asdl_seq * decorator_list, int lineno, int col_offset, - PyArena *arena); + asdl_seq * body, asdl_seq * decorator_list, int lineno, + int col_offset, PyArena *arena); #define Return(a0, a1, a2, a3) _Py_Return(a0, a1, a2, a3) stmt_ty _Py_Return(expr_ty value, int lineno, int col_offset, PyArena *arena); #define Delete(a0, a1, a2, a3) _Py_Delete(a0, a1, a2, a3) @@ -425,6 +449,9 @@ stmt_ty _Py_AugAssign(expr_ty target, operator_ty op, expr_ty value, int #define For(a0, a1, a2, a3, a4, a5, a6) _Py_For(a0, a1, a2, a3, a4, a5, a6) stmt_ty _Py_For(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * orelse, int lineno, int col_offset, PyArena *arena); +#define AsyncFor(a0, a1, a2, a3, a4, a5, a6) _Py_AsyncFor(a0, a1, a2, a3, a4, a5, a6) +stmt_ty _Py_AsyncFor(expr_ty target, expr_ty iter, asdl_seq * body, asdl_seq * + orelse, int lineno, int col_offset, PyArena *arena); #define While(a0, a1, a2, a3, a4, a5) _Py_While(a0, a1, a2, a3, a4, a5) stmt_ty _Py_While(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, int col_offset, PyArena *arena); @@ -434,6 +461,9 @@ stmt_ty _Py_If(expr_ty test, asdl_seq * body, asdl_seq * orelse, int lineno, #define With(a0, a1, a2, a3, a4) _Py_With(a0, a1, a2, a3, a4) stmt_ty _Py_With(asdl_seq * items, asdl_seq * body, int lineno, int col_offset, PyArena *arena); +#define AsyncWith(a0, a1, a2, a3, a4) _Py_AsyncWith(a0, a1, a2, a3, a4) +stmt_ty _Py_AsyncWith(asdl_seq * items, asdl_seq * body, int lineno, int + col_offset, PyArena *arena); #define Raise(a0, a1, a2, a3, a4) _Py_Raise(a0, a1, a2, a3, a4) stmt_ty _Py_Raise(expr_ty exc, expr_ty cause, int lineno, int col_offset, PyArena *arena); @@ -496,6 +526,8 @@ expr_ty _Py_DictComp(expr_ty key, expr_ty value, asdl_seq * generators, int #define GeneratorExp(a0, a1, a2, a3, a4) _Py_GeneratorExp(a0, a1, a2, a3, a4) expr_ty _Py_GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int col_offset, PyArena *arena); +#define Await(a0, a1, a2, a3) _Py_Await(a0, a1, a2, a3) +expr_ty _Py_Await(expr_ty value, int lineno, int col_offset, PyArena *arena); #define Yield(a0, a1, a2, a3) _Py_Yield(a0, a1, a2, a3) expr_ty _Py_Yield(expr_ty value, int lineno, int col_offset, PyArena *arena); #define YieldFrom(a0, a1, a2, a3) _Py_YieldFrom(a0, a1, a2, a3) @@ -504,10 +536,9 @@ expr_ty _Py_YieldFrom(expr_ty value, int lineno, int col_offset, PyArena #define Compare(a0, a1, a2, a3, a4, a5) _Py_Compare(a0, a1, a2, a3, a4, a5) expr_ty _Py_Compare(expr_ty left, 
asdl_int_seq * ops, asdl_seq * comparators, int lineno, int col_offset, PyArena *arena); -#define Call(a0, a1, a2, a3, a4, a5, a6, a7) _Py_Call(a0, a1, a2, a3, a4, a5, a6, a7) -expr_ty _Py_Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, expr_ty - starargs, expr_ty kwargs, int lineno, int col_offset, PyArena - *arena); +#define Call(a0, a1, a2, a3, a4, a5) _Py_Call(a0, a1, a2, a3, a4, a5) +expr_ty _Py_Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, int + lineno, int col_offset, PyArena *arena); #define Num(a0, a1, a2, a3) _Py_Num(a0, a1, a2, a3) expr_ty _Py_Num(object n, int lineno, int col_offset, PyArena *arena); #define Str(a0, a1, a2, a3) _Py_Str(a0, a1, a2, a3) diff --git a/Darwin/include/python3.4m/Python.h b/Darwin/include/python3.5m/Python.h similarity index 98% rename from Darwin/include/python3.4m/Python.h rename to Darwin/include/python3.5m/Python.h index 2dd8290..858dbd1 100644 --- a/Darwin/include/python3.4m/Python.h +++ b/Darwin/include/python3.5m/Python.h @@ -85,6 +85,7 @@ #include "tupleobject.h" #include "listobject.h" #include "dictobject.h" +#include "odictobject.h" #include "enumobject.h" #include "setobject.h" #include "methodobject.h" @@ -112,6 +113,7 @@ #include "pyarena.h" #include "modsupport.h" #include "pythonrun.h" +#include "pylifecycle.h" #include "ceval.h" #include "sysmodule.h" #include "intrcheck.h" diff --git a/Darwin/include/python3.4m/abstract.h b/Darwin/include/python3.5m/abstract.h similarity index 98% rename from Darwin/include/python3.4m/abstract.h rename to Darwin/include/python3.5m/abstract.h index 6e850b8..83dbf94 100644 --- a/Darwin/include/python3.4m/abstract.h +++ b/Darwin/include/python3.5m/abstract.h @@ -266,6 +266,12 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/ PyAPI_FUNC(PyObject *) PyObject_Call(PyObject *callable_object, PyObject *args, PyObject *kw); +#ifndef Py_LIMITED_API + PyAPI_FUNC(PyObject *) _Py_CheckFunctionResult(PyObject *func, + PyObject *result, + const char *where); +#endif + /* Call a callable Python object, callable_object, with arguments and keywords arguments. The 'args' argument can not be @@ -658,6 +664,12 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/ o1*o2. */ + PyAPI_FUNC(PyObject *) PyNumber_MatrixMultiply(PyObject *o1, PyObject *o2); + + /* + This is the equivalent of the Python expression: o1 @ o2. + */ + PyAPI_FUNC(PyObject *) PyNumber_FloorDivide(PyObject *o1, PyObject *o2); /* @@ -832,6 +844,12 @@ xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/ o1 *= o2. */ + PyAPI_FUNC(PyObject *) PyNumber_InPlaceMatrixMultiply(PyObject *o1, PyObject *o2); + + /* + This is the equivalent of the Python expression: o1 @= o2. 
+ */ + PyAPI_FUNC(PyObject *) PyNumber_InPlaceFloorDivide(PyObject *o1, PyObject *o2); diff --git a/Darwin/include/python3.4m/accu.h b/Darwin/include/python3.5m/accu.h similarity index 100% rename from Darwin/include/python3.4m/accu.h rename to Darwin/include/python3.5m/accu.h diff --git a/Darwin/include/python3.4m/asdl.h b/Darwin/include/python3.5m/asdl.h similarity index 100% rename from Darwin/include/python3.4m/asdl.h rename to Darwin/include/python3.5m/asdl.h diff --git a/Darwin/include/python3.4m/ast.h b/Darwin/include/python3.5m/ast.h similarity index 100% rename from Darwin/include/python3.4m/ast.h rename to Darwin/include/python3.5m/ast.h diff --git a/Darwin/include/python3.4m/bitset.h b/Darwin/include/python3.5m/bitset.h similarity index 100% rename from Darwin/include/python3.4m/bitset.h rename to Darwin/include/python3.5m/bitset.h diff --git a/Darwin/include/python3.4m/bltinmodule.h b/Darwin/include/python3.5m/bltinmodule.h similarity index 100% rename from Darwin/include/python3.4m/bltinmodule.h rename to Darwin/include/python3.5m/bltinmodule.h diff --git a/Darwin/include/python3.4m/boolobject.h b/Darwin/include/python3.5m/boolobject.h similarity index 100% rename from Darwin/include/python3.4m/boolobject.h rename to Darwin/include/python3.5m/boolobject.h diff --git a/Darwin/include/python3.4m/bytearrayobject.h b/Darwin/include/python3.5m/bytearrayobject.h similarity index 100% rename from Darwin/include/python3.4m/bytearrayobject.h rename to Darwin/include/python3.5m/bytearrayobject.h diff --git a/Darwin/include/python3.4m/bytes_methods.h b/Darwin/include/python3.5m/bytes_methods.h similarity index 94% rename from Darwin/include/python3.4m/bytes_methods.h rename to Darwin/include/python3.5m/bytes_methods.h index 1498b8f..11d5f42 100644 --- a/Darwin/include/python3.4m/bytes_methods.h +++ b/Darwin/include/python3.5m/bytes_methods.h @@ -21,8 +21,8 @@ extern void _Py_bytes_title(char *result, char *s, Py_ssize_t len); extern void _Py_bytes_capitalize(char *result, char *s, Py_ssize_t len); extern void _Py_bytes_swapcase(char *result, char *s, Py_ssize_t len); -/* This one gets the raw argument list. */ -extern PyObject* _Py_bytes_maketrans(PyObject *args); +/* The maketrans() static method. */ +extern PyObject* _Py_bytes_maketrans(Py_buffer *frm, Py_buffer *to); /* Shared __doc__ strings. 
*/ extern const char _Py_isspace__doc__[]; diff --git a/Darwin/include/python3.4m/bytesobject.h b/Darwin/include/python3.5m/bytesobject.h similarity index 98% rename from Darwin/include/python3.4m/bytesobject.h rename to Darwin/include/python3.5m/bytesobject.h index 0ee8d36..e379bac 100644 --- a/Darwin/include/python3.4m/bytesobject.h +++ b/Darwin/include/python3.5m/bytesobject.h @@ -62,6 +62,7 @@ PyAPI_FUNC(void) PyBytes_Concat(PyObject **, PyObject *); PyAPI_FUNC(void) PyBytes_ConcatAndDel(PyObject **, PyObject *); #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyBytes_Resize(PyObject **, Py_ssize_t); +PyAPI_FUNC(PyObject *) _PyBytes_Format(PyObject *, PyObject *); #endif PyAPI_FUNC(PyObject *) PyBytes_DecodeEscape(const char *, Py_ssize_t, const char *, Py_ssize_t, diff --git a/Darwin/include/python3.4m/cellobject.h b/Darwin/include/python3.5m/cellobject.h similarity index 100% rename from Darwin/include/python3.4m/cellobject.h rename to Darwin/include/python3.5m/cellobject.h diff --git a/Darwin/include/python3.4m/ceval.h b/Darwin/include/python3.5m/ceval.h similarity index 95% rename from Darwin/include/python3.4m/ceval.h rename to Darwin/include/python3.5m/ceval.h index 6811367..eb1ee43 100644 --- a/Darwin/include/python3.4m/ceval.h +++ b/Darwin/include/python3.5m/ceval.h @@ -23,6 +23,8 @@ PyAPI_FUNC(PyObject *) PyEval_CallMethod(PyObject *obj, #ifndef Py_LIMITED_API PyAPI_FUNC(void) PyEval_SetProfile(Py_tracefunc, PyObject *); PyAPI_FUNC(void) PyEval_SetTrace(Py_tracefunc, PyObject *); +PyAPI_FUNC(void) _PyEval_SetCoroutineWrapper(PyObject *); +PyAPI_FUNC(PyObject *) _PyEval_GetCoroutineWrapper(void); #endif struct _frame; /* Avoid including frameobject.h */ @@ -46,16 +48,16 @@ PyAPI_FUNC(int) Py_MakePendingCalls(void); In Python 3.0, this protection has two levels: * normal anti-recursion protection is triggered when the recursion level - exceeds the current recursion limit. It raises a RuntimeError, and sets + exceeds the current recursion limit. It raises a RecursionError, and sets the "overflowed" flag in the thread state structure. This flag temporarily *disables* the normal protection; this allows cleanup code to potentially outgrow the recursion limit while processing the - RuntimeError. + RecursionError. * "last chance" anti-recursion protection is triggered when the recursion level exceeds "current recursion limit + 50". By construction, this protection can only be triggered when the "overflowed" flag is set. It means the cleanup code has itself gone into an infinite loop, or the - RuntimeError has been mistakingly ignored. When this protection is + RecursionError has been mistakingly ignored. When this protection is triggered, the interpreter aborts with a Fatal Error. 
In addition, the "overflowed" flag is automatically reset when the @@ -77,7 +79,7 @@ PyAPI_FUNC(int) Py_GetRecursionLimit(void); do{ if(_Py_MakeEndRecCheck(PyThreadState_GET()->recursion_depth)) \ PyThreadState_GET()->overflowed = 0; \ } while(0) -PyAPI_FUNC(int) _Py_CheckRecursiveCall(char *where); +PyAPI_FUNC(int) _Py_CheckRecursiveCall(const char *where); PyAPI_DATA(int) _Py_CheckRecursionLimit; #ifdef USE_STACKCHECK diff --git a/Darwin/include/python3.4m/classobject.h b/Darwin/include/python3.5m/classobject.h similarity index 100% rename from Darwin/include/python3.4m/classobject.h rename to Darwin/include/python3.5m/classobject.h diff --git a/Darwin/include/python3.4m/code.h b/Darwin/include/python3.5m/code.h similarity index 87% rename from Darwin/include/python3.4m/code.h rename to Darwin/include/python3.5m/code.h index 7c7e5bf..56e6ec1 100644 --- a/Darwin/include/python3.4m/code.h +++ b/Darwin/include/python3.5m/code.h @@ -21,7 +21,12 @@ typedef struct { PyObject *co_varnames; /* tuple of strings (local variable names) */ PyObject *co_freevars; /* tuple of strings (free variable names) */ PyObject *co_cellvars; /* tuple of strings (cell variable names) */ - /* The rest doesn't count for hash or comparisons */ + /* The rest aren't used in either hash or comparisons, except for + co_name (used in both) and co_firstlineno (used only in + comparisons). This is done to preserve the name and line number + for tracebacks and debuggers; otherwise, constant de-duplication + would collapse identical functions/lambdas defined on different lines. + */ unsigned char *co_cell2arg; /* Maps cell vars which are arguments. */ PyObject *co_filename; /* unicode (where it was loaded from) */ PyObject *co_name; /* unicode (name, for reference) */ @@ -46,6 +51,11 @@ typedef struct { */ #define CO_NOFREE 0x0040 +/* The CO_COROUTINE flag is set for coroutine functions (defined with + ``async def`` keywords) */ +#define CO_COROUTINE 0x0080 +#define CO_ITERABLE_COROUTINE 0x0100 + /* These are no longer used. */ #if 0 #define CO_GENERATOR_ALLOWED 0x1000 @@ -57,6 +67,7 @@ typedef struct { #define CO_FUTURE_UNICODE_LITERALS 0x20000 #define CO_FUTURE_BARRY_AS_BDFL 0x40000 +#define CO_FUTURE_GENERATOR_STOP 0x80000 /* This value is found in the co_cell2arg array when the associated cell variable does not correspond to an argument. The maximum number of diff --git a/Darwin/include/python3.4m/codecs.h b/Darwin/include/python3.5m/codecs.h similarity index 96% rename from Darwin/include/python3.4m/codecs.h rename to Darwin/include/python3.5m/codecs.h index 611964c..9e4f305 100644 --- a/Darwin/include/python3.4m/codecs.h +++ b/Darwin/include/python3.5m/codecs.h @@ -49,6 +49,10 @@ PyAPI_FUNC(int) PyCodec_Register( PyAPI_FUNC(PyObject *) _PyCodec_Lookup( const char *encoding ); + +PyAPI_FUNC(int) _PyCodec_Forget( + const char *encoding + ); #endif /* Codec registry encoding check API. @@ -67,7 +71,7 @@ PyAPI_FUNC(int) PyCodec_KnownEncoding( object is passed through the encoder function found for the given encoding using the error handling method defined by errors. errors may be NULL to use the default method defined for the codec. - + Raises a LookupError in case no encoder can be found. */ @@ -83,7 +87,7 @@ PyAPI_FUNC(PyObject *) PyCodec_Encode( object is passed through the decoder function found for the given encoding using the error handling method defined by errors. errors may be NULL to use the default method defined for the codec. - + Raises a LookupError in case no encoder can be found. 
*/ @@ -141,7 +145,7 @@ PyAPI_FUNC(PyObject *) _PyCodecInfo_GetIncrementalEncoder( -/* --- Codec Lookup APIs -------------------------------------------------- +/* --- Codec Lookup APIs -------------------------------------------------- All APIs return a codec object with incremented refcount and are based on _PyCodec_Lookup(). The same comments w/r to the encoding @@ -221,6 +225,9 @@ PyAPI_FUNC(PyObject *) PyCodec_XMLCharRefReplaceErrors(PyObject *exc); /* replace the unicode encode error with backslash escapes (\x, \u and \U) */ PyAPI_FUNC(PyObject *) PyCodec_BackslashReplaceErrors(PyObject *exc); +/* replace the unicode encode error with backslash escapes (\N, \x, \u and \U) */ +PyAPI_FUNC(PyObject *) PyCodec_NameReplaceErrors(PyObject *exc); + PyAPI_DATA(const char *) Py_hexdigits; #ifdef __cplusplus diff --git a/Darwin/include/python3.4m/compile.h b/Darwin/include/python3.5m/compile.h similarity index 97% rename from Darwin/include/python3.4m/compile.h rename to Darwin/include/python3.5m/compile.h index c6650d7..ecd8dc1 100644 --- a/Darwin/include/python3.4m/compile.h +++ b/Darwin/include/python3.5m/compile.h @@ -27,6 +27,7 @@ typedef struct { #define FUTURE_PRINT_FUNCTION "print_function" #define FUTURE_UNICODE_LITERALS "unicode_literals" #define FUTURE_BARRY_AS_BDFL "barry_as_FLUFL" +#define FUTURE_GENERATOR_STOP "generator_stop" struct _mod; /* Declare the existence of this type */ #define PyAST_Compile(mod, s, f, ar) PyAST_CompileEx(mod, s, f, -1, ar) diff --git a/Darwin/include/python3.4m/complexobject.h b/Darwin/include/python3.5m/complexobject.h similarity index 72% rename from Darwin/include/python3.4m/complexobject.h rename to Darwin/include/python3.5m/complexobject.h index 1934f3b..cb8c52c 100644 --- a/Darwin/include/python3.4m/complexobject.h +++ b/Darwin/include/python3.5m/complexobject.h @@ -14,21 +14,13 @@ typedef struct { /* Operations on complex numbers from complexmodule.c */ -#define c_sum _Py_c_sum -#define c_diff _Py_c_diff -#define c_neg _Py_c_neg -#define c_prod _Py_c_prod -#define c_quot _Py_c_quot -#define c_pow _Py_c_pow -#define c_abs _Py_c_abs - -PyAPI_FUNC(Py_complex) c_sum(Py_complex, Py_complex); -PyAPI_FUNC(Py_complex) c_diff(Py_complex, Py_complex); -PyAPI_FUNC(Py_complex) c_neg(Py_complex); -PyAPI_FUNC(Py_complex) c_prod(Py_complex, Py_complex); -PyAPI_FUNC(Py_complex) c_quot(Py_complex, Py_complex); -PyAPI_FUNC(Py_complex) c_pow(Py_complex, Py_complex); -PyAPI_FUNC(double) c_abs(Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_sum(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_diff(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_neg(Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_prod(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_quot(Py_complex, Py_complex); +PyAPI_FUNC(Py_complex) _Py_c_pow(Py_complex, Py_complex); +PyAPI_FUNC(double) _Py_c_abs(Py_complex); #endif /* Complex object interface */ diff --git a/Darwin/include/python3.4m/datetime.h b/Darwin/include/python3.5m/datetime.h similarity index 100% rename from Darwin/include/python3.4m/datetime.h rename to Darwin/include/python3.5m/datetime.h diff --git a/Darwin/include/python3.4m/descrobject.h b/Darwin/include/python3.5m/descrobject.h similarity index 100% rename from Darwin/include/python3.4m/descrobject.h rename to Darwin/include/python3.5m/descrobject.h diff --git a/Darwin/include/python3.4m/dictobject.h b/Darwin/include/python3.5m/dictobject.h similarity index 83% rename from Darwin/include/python3.4m/dictobject.h rename to Darwin/include/python3.5m/dictobject.h index 
ef122bd..320f9ec 100644 --- a/Darwin/include/python3.4m/dictobject.h +++ b/Darwin/include/python3.5m/dictobject.h @@ -27,6 +27,11 @@ typedef struct { PyObject **ma_values; } PyDictObject; +typedef struct { + PyObject_HEAD + PyDictObject *dv_dict; +} _PyDictViewObject; + #endif /* Py_LIMITED_API */ PyAPI_DATA(PyTypeObject) PyDict_Type; @@ -40,9 +45,9 @@ PyAPI_DATA(PyTypeObject) PyDictValues_Type; #define PyDict_Check(op) \ PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_DICT_SUBCLASS) #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type) -#define PyDictKeys_Check(op) (Py_TYPE(op) == &PyDictKeys_Type) -#define PyDictItems_Check(op) (Py_TYPE(op) == &PyDictItems_Type) -#define PyDictValues_Check(op) (Py_TYPE(op) == &PyDictValues_Type) +#define PyDictKeys_Check(op) PyObject_TypeCheck(op, &PyDictKeys_Type) +#define PyDictItems_Check(op) PyObject_TypeCheck(op, &PyDictItems_Type) +#define PyDictValues_Check(op) PyObject_TypeCheck(op, &PyDictValues_Type) /* This excludes Values, since they are not sets. */ # define PyDictViewSet_Check(op) \ (PyDictKeys_Check(op) || PyDictItems_Check(op)) @@ -50,6 +55,10 @@ PyAPI_DATA(PyTypeObject) PyDictValues_Type; PyAPI_FUNC(PyObject *) PyDict_New(void); PyAPI_FUNC(PyObject *) PyDict_GetItem(PyObject *mp, PyObject *key); +#ifndef Py_LIMITED_API +PyAPI_FUNC(PyObject *) _PyDict_GetItem_KnownHash(PyObject *mp, PyObject *key, + Py_hash_t hash); +#endif PyAPI_FUNC(PyObject *) PyDict_GetItemWithError(PyObject *mp, PyObject *key); PyAPI_FUNC(PyObject *) _PyDict_GetItemIdWithError(PyObject *dp, struct _Py_Identifier *key); @@ -58,6 +67,10 @@ PyAPI_FUNC(PyObject *) PyDict_SetDefault( PyObject *mp, PyObject *key, PyObject *defaultobj); #endif PyAPI_FUNC(int) PyDict_SetItem(PyObject *mp, PyObject *key, PyObject *item); +#ifndef Py_LIMITED_API +PyAPI_FUNC(int) _PyDict_SetItem_KnownHash(PyObject *mp, PyObject *key, + PyObject *item, Py_hash_t hash); +#endif PyAPI_FUNC(int) PyDict_DelItem(PyObject *mp, PyObject *key); PyAPI_FUNC(void) PyDict_Clear(PyObject *mp); PyAPI_FUNC(int) PyDict_Next( @@ -67,6 +80,7 @@ PyDictKeysObject *_PyDict_NewKeysForClass(void); PyAPI_FUNC(PyObject *) PyObject_GenericGetDict(PyObject *, void *); PyAPI_FUNC(int) _PyDict_Next( PyObject *mp, Py_ssize_t *pos, PyObject **key, PyObject **value, Py_hash_t *hash); +PyObject *_PyDictView_New(PyObject *, PyTypeObject *); #endif PyAPI_FUNC(PyObject *) PyDict_Keys(PyObject *mp); PyAPI_FUNC(PyObject *) PyDict_Values(PyObject *mp); @@ -80,6 +94,9 @@ PyAPI_FUNC(PyObject *) _PyDict_NewPresized(Py_ssize_t minused); PyAPI_FUNC(void) _PyDict_MaybeUntrack(PyObject *mp); PyAPI_FUNC(int) _PyDict_HasOnlyStringKeys(PyObject *mp); Py_ssize_t _PyDict_KeysSize(PyDictKeysObject *keys); +PyObject *_PyDict_SizeOf(PyDictObject *); +PyObject *_PyDict_Pop(PyDictObject *, PyObject *, PyObject *); +PyObject *_PyDict_FromKeys(PyObject *, PyObject *, PyObject *); #define _PyDict_HasSplitTable(d) ((d)->ma_values != NULL) PyAPI_FUNC(int) PyDict_ClearFreeList(void); @@ -97,6 +114,10 @@ PyAPI_FUNC(int) PyDict_Merge(PyObject *mp, PyObject *other, int override); +#ifndef Py_LIMITED_API +PyAPI_FUNC(PyObject *) _PyDictView_Intersect(PyObject* self, PyObject *other); +#endif + /* PyDict_MergeFromSeq2 updates/merges from an iterable object producing iterable objects of length 2. If override is true, the last occurrence of a key wins, else the first. 
The Python dict constructor dict(seq2) diff --git a/Darwin/include/python3.4m/dtoa.h b/Darwin/include/python3.5m/dtoa.h similarity index 100% rename from Darwin/include/python3.4m/dtoa.h rename to Darwin/include/python3.5m/dtoa.h diff --git a/Darwin/include/python3.4m/dynamic_annotations.h b/Darwin/include/python3.5m/dynamic_annotations.h similarity index 99% rename from Darwin/include/python3.4m/dynamic_annotations.h rename to Darwin/include/python3.5m/dynamic_annotations.h index d63c5db..0bd1a83 100644 --- a/Darwin/include/python3.4m/dynamic_annotations.h +++ b/Darwin/include/python3.5m/dynamic_annotations.h @@ -150,7 +150,7 @@ /* Report that a new memory at "address" of size "size" has been allocated. This might be used when the memory has been retrieved from a free list and - is about to be reused, or when a the locking discipline for a variable + is about to be reused, or when the locking discipline for a variable changes. */ #define _Py_ANNOTATE_NEW_MEMORY(address, size) \ AnnotateNewMemory(__FILE__, __LINE__, address, size) diff --git a/Darwin/include/python3.4m/enumobject.h b/Darwin/include/python3.5m/enumobject.h similarity index 100% rename from Darwin/include/python3.4m/enumobject.h rename to Darwin/include/python3.5m/enumobject.h diff --git a/Darwin/include/python3.4m/errcode.h b/Darwin/include/python3.5m/errcode.h similarity index 100% rename from Darwin/include/python3.4m/errcode.h rename to Darwin/include/python3.5m/errcode.h diff --git a/Darwin/include/python3.4m/eval.h b/Darwin/include/python3.5m/eval.h similarity index 100% rename from Darwin/include/python3.4m/eval.h rename to Darwin/include/python3.5m/eval.h diff --git a/Darwin/include/python3.4m/fileobject.h b/Darwin/include/python3.5m/fileobject.h similarity index 81% rename from Darwin/include/python3.4m/fileobject.h rename to Darwin/include/python3.5m/fileobject.h index 0939744..03155d3 100644 --- a/Darwin/include/python3.4m/fileobject.h +++ b/Darwin/include/python3.5m/fileobject.h @@ -32,17 +32,6 @@ PyAPI_DATA(int) Py_HasFileSystemDefaultEncoding; #ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyFile_NewStdPrinter(int); PyAPI_DATA(PyTypeObject) PyStdPrinter_Type; - -#if defined _MSC_VER && _MSC_VER >= 1400 -/* A routine to check if a file descriptor is valid on Windows. Returns 0 - * and sets errno to EBADF if it isn't. This is to avoid Assertions - * from various functions in the Windows CRT beginning with - * Visual Studio 2005 - */ -int _PyVerify_fd(int fd); -#else -#define _PyVerify_fd(A) (1) /* dummy */ -#endif #endif /* Py_LIMITED_API */ /* A routine to check if a file descriptor can be select()-ed. 
*/ diff --git a/Darwin/include/python3.5m/fileutils.h b/Darwin/include/python3.5m/fileutils.h new file mode 100644 index 0000000..b4a683c --- /dev/null +++ b/Darwin/include/python3.5m/fileutils.h @@ -0,0 +1,142 @@ +#ifndef Py_FILEUTILS_H +#define Py_FILEUTILS_H + +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_FUNC(PyObject *) _Py_device_encoding(int); + +PyAPI_FUNC(wchar_t *) Py_DecodeLocale( + const char *arg, + size_t *size); + +PyAPI_FUNC(char*) Py_EncodeLocale( + const wchar_t *text, + size_t *error_pos); + +#ifndef Py_LIMITED_API + +#ifdef MS_WINDOWS +struct _Py_stat_struct { + unsigned long st_dev; + __int64 st_ino; + unsigned short st_mode; + int st_nlink; + int st_uid; + int st_gid; + unsigned long st_rdev; + __int64 st_size; + time_t st_atime; + int st_atime_nsec; + time_t st_mtime; + int st_mtime_nsec; + time_t st_ctime; + int st_ctime_nsec; + unsigned long st_file_attributes; +}; +#else +# define _Py_stat_struct stat +#endif + +PyAPI_FUNC(int) _Py_fstat( + int fd, + struct _Py_stat_struct *status); + +PyAPI_FUNC(int) _Py_fstat_noraise( + int fd, + struct _Py_stat_struct *status); +#endif /* Py_LIMITED_API */ + +PyAPI_FUNC(int) _Py_stat( + PyObject *path, + struct stat *status); + +#ifndef Py_LIMITED_API +PyAPI_FUNC(int) _Py_open( + const char *pathname, + int flags); + +PyAPI_FUNC(int) _Py_open_noraise( + const char *pathname, + int flags); +#endif + +PyAPI_FUNC(FILE *) _Py_wfopen( + const wchar_t *path, + const wchar_t *mode); + +PyAPI_FUNC(FILE*) _Py_fopen( + const char *pathname, + const char *mode); + +PyAPI_FUNC(FILE*) _Py_fopen_obj( + PyObject *path, + const char *mode); + +PyAPI_FUNC(Py_ssize_t) _Py_read( + int fd, + void *buf, + size_t count); + +PyAPI_FUNC(Py_ssize_t) _Py_write( + int fd, + const void *buf, + size_t count); + +PyAPI_FUNC(Py_ssize_t) _Py_write_noraise( + int fd, + const void *buf, + size_t count); + +#ifdef HAVE_READLINK +PyAPI_FUNC(int) _Py_wreadlink( + const wchar_t *path, + wchar_t *buf, + size_t bufsiz); +#endif + +#ifdef HAVE_REALPATH +PyAPI_FUNC(wchar_t*) _Py_wrealpath( + const wchar_t *path, + wchar_t *resolved_path, + size_t resolved_path_size); +#endif + +PyAPI_FUNC(wchar_t*) _Py_wgetcwd( + wchar_t *buf, + size_t size); + +#ifndef Py_LIMITED_API +PyAPI_FUNC(int) _Py_get_inheritable(int fd); + +PyAPI_FUNC(int) _Py_set_inheritable(int fd, int inheritable, + int *atomic_flag_works); + +PyAPI_FUNC(int) _Py_dup(int fd); + +#ifndef MS_WINDOWS +PyAPI_FUNC(int) _Py_get_blocking(int fd); + +PyAPI_FUNC(int) _Py_set_blocking(int fd, int blocking); +#endif /* !MS_WINDOWS */ + +#if defined _MSC_VER && _MSC_VER >= 1400 && _MSC_VER < 1900 +/* A routine to check if a file descriptor is valid on Windows. Returns 0 + * and sets errno to EBADF if it isn't. 
This is to avoid Assertions + * from various functions in the Windows CRT beginning with + * Visual Studio 2005 + */ +int _PyVerify_fd(int fd); + +#else +#define _PyVerify_fd(A) (1) /* dummy */ +#endif + +#endif /* Py_LIMITED_API */ + +#ifdef __cplusplus +} +#endif + +#endif /* !Py_FILEUTILS_H */ diff --git a/Darwin/include/python3.4m/floatobject.h b/Darwin/include/python3.5m/floatobject.h similarity index 100% rename from Darwin/include/python3.4m/floatobject.h rename to Darwin/include/python3.5m/floatobject.h diff --git a/Darwin/include/python3.4m/frameobject.h b/Darwin/include/python3.5m/frameobject.h similarity index 100% rename from Darwin/include/python3.4m/frameobject.h rename to Darwin/include/python3.5m/frameobject.h diff --git a/Darwin/include/python3.4m/funcobject.h b/Darwin/include/python3.5m/funcobject.h similarity index 100% rename from Darwin/include/python3.4m/funcobject.h rename to Darwin/include/python3.5m/funcobject.h diff --git a/Darwin/include/python3.5m/genobject.h b/Darwin/include/python3.5m/genobject.h new file mode 100644 index 0000000..4c71861 --- /dev/null +++ b/Darwin/include/python3.5m/genobject.h @@ -0,0 +1,68 @@ + +/* Generator object interface */ + +#ifndef Py_LIMITED_API +#ifndef Py_GENOBJECT_H +#define Py_GENOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + +struct _frame; /* Avoid including frameobject.h */ + +/* _PyGenObject_HEAD defines the initial segment of generator + and coroutine objects. */ +#define _PyGenObject_HEAD(prefix) \ + PyObject_HEAD \ + /* Note: gi_frame can be NULL if the generator is "finished" */ \ + struct _frame *prefix##_frame; \ + /* True if generator is being executed. */ \ + char prefix##_running; \ + /* The code object backing the generator */ \ + PyObject *prefix##_code; \ + /* List of weak reference. */ \ + PyObject *prefix##_weakreflist; \ + /* Name of the generator. */ \ + PyObject *prefix##_name; \ + /* Qualified name of the generator. */ \ + PyObject *prefix##_qualname; + +typedef struct { + /* The gi_ prefix is intended to remind of generator-iterator. 
*/ + _PyGenObject_HEAD(gi) +} PyGenObject; + +PyAPI_DATA(PyTypeObject) PyGen_Type; + +#define PyGen_Check(op) PyObject_TypeCheck(op, &PyGen_Type) +#define PyGen_CheckExact(op) (Py_TYPE(op) == &PyGen_Type) + +PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *); +PyAPI_FUNC(PyObject *) PyGen_NewWithQualName(struct _frame *, + PyObject *name, PyObject *qualname); +PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *); +PyAPI_FUNC(int) _PyGen_FetchStopIterationValue(PyObject **); +PyObject *_PyGen_Send(PyGenObject *, PyObject *); +PyAPI_FUNC(void) _PyGen_Finalize(PyObject *self); + +#ifndef Py_LIMITED_API +typedef struct { + _PyGenObject_HEAD(cr) +} PyCoroObject; + +PyAPI_DATA(PyTypeObject) PyCoro_Type; +PyAPI_DATA(PyTypeObject) _PyCoroWrapper_Type; + +#define PyCoro_CheckExact(op) (Py_TYPE(op) == &PyCoro_Type) +PyObject *_PyCoro_GetAwaitableIter(PyObject *o); +PyAPI_FUNC(PyObject *) PyCoro_New(struct _frame *, + PyObject *name, PyObject *qualname); +#endif + +#undef _PyGenObject_HEAD + +#ifdef __cplusplus +} +#endif +#endif /* !Py_GENOBJECT_H */ +#endif /* Py_LIMITED_API */ diff --git a/Darwin/include/python3.5m/graminit.h b/Darwin/include/python3.5m/graminit.h new file mode 100644 index 0000000..d030bc3 --- /dev/null +++ b/Darwin/include/python3.5m/graminit.h @@ -0,0 +1,87 @@ +/* Generated by Parser/pgen */ + +#define single_input 256 +#define file_input 257 +#define eval_input 258 +#define decorator 259 +#define decorators 260 +#define decorated 261 +#define async_funcdef 262 +#define funcdef 263 +#define parameters 264 +#define typedargslist 265 +#define tfpdef 266 +#define varargslist 267 +#define vfpdef 268 +#define stmt 269 +#define simple_stmt 270 +#define small_stmt 271 +#define expr_stmt 272 +#define testlist_star_expr 273 +#define augassign 274 +#define del_stmt 275 +#define pass_stmt 276 +#define flow_stmt 277 +#define break_stmt 278 +#define continue_stmt 279 +#define return_stmt 280 +#define yield_stmt 281 +#define raise_stmt 282 +#define import_stmt 283 +#define import_name 284 +#define import_from 285 +#define import_as_name 286 +#define dotted_as_name 287 +#define import_as_names 288 +#define dotted_as_names 289 +#define dotted_name 290 +#define global_stmt 291 +#define nonlocal_stmt 292 +#define assert_stmt 293 +#define compound_stmt 294 +#define async_stmt 295 +#define if_stmt 296 +#define while_stmt 297 +#define for_stmt 298 +#define try_stmt 299 +#define with_stmt 300 +#define with_item 301 +#define except_clause 302 +#define suite 303 +#define test 304 +#define test_nocond 305 +#define lambdef 306 +#define lambdef_nocond 307 +#define or_test 308 +#define and_test 309 +#define not_test 310 +#define comparison 311 +#define comp_op 312 +#define star_expr 313 +#define expr 314 +#define xor_expr 315 +#define and_expr 316 +#define shift_expr 317 +#define arith_expr 318 +#define term 319 +#define factor 320 +#define power 321 +#define atom_expr 322 +#define atom 323 +#define testlist_comp 324 +#define trailer 325 +#define subscriptlist 326 +#define subscript 327 +#define sliceop 328 +#define exprlist 329 +#define testlist 330 +#define dictorsetmaker 331 +#define classdef 332 +#define arglist 333 +#define argument 334 +#define comp_iter 335 +#define comp_for 336 +#define comp_if 337 +#define encoding_decl 338 +#define yield_expr 339 +#define yield_arg 340 diff --git a/Darwin/include/python3.4m/grammar.h b/Darwin/include/python3.5m/grammar.h similarity index 99% rename from Darwin/include/python3.4m/grammar.h rename to Darwin/include/python3.5m/grammar.h index ba7d19d..85120b9 100644 
--- a/Darwin/include/python3.4m/grammar.h +++ b/Darwin/include/python3.5m/grammar.h @@ -37,7 +37,7 @@ typedef struct { typedef struct { int s_narcs; arc *s_arc; /* Array of arcs */ - + /* Optional accelerators */ int s_lower; /* Lowest label index */ int s_upper; /* Highest label index */ diff --git a/Darwin/include/python3.4m/import.h b/Darwin/include/python3.5m/import.h similarity index 100% rename from Darwin/include/python3.4m/import.h rename to Darwin/include/python3.5m/import.h diff --git a/Darwin/include/python3.4m/intrcheck.h b/Darwin/include/python3.5m/intrcheck.h similarity index 100% rename from Darwin/include/python3.4m/intrcheck.h rename to Darwin/include/python3.5m/intrcheck.h diff --git a/Darwin/include/python3.4m/iterobject.h b/Darwin/include/python3.5m/iterobject.h similarity index 100% rename from Darwin/include/python3.4m/iterobject.h rename to Darwin/include/python3.5m/iterobject.h diff --git a/Darwin/include/python3.4m/listobject.h b/Darwin/include/python3.5m/listobject.h similarity index 95% rename from Darwin/include/python3.4m/listobject.h rename to Darwin/include/python3.5m/listobject.h index dc62aee..daa513f 100644 --- a/Darwin/include/python3.4m/listobject.h +++ b/Darwin/include/python3.5m/listobject.h @@ -46,7 +46,7 @@ PyAPI_DATA(PyTypeObject) PyListRevIter_Type; PyAPI_DATA(PyTypeObject) PySortWrapper_Type; #define PyList_Check(op) \ - PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LIST_SUBCLASS) + PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LIST_SUBCLASS) #define PyList_CheckExact(op) (Py_TYPE(op) == &PyList_Type) PyAPI_FUNC(PyObject *) PyList_New(Py_ssize_t size); @@ -72,6 +72,7 @@ PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #define PyList_GET_ITEM(op, i) (((PyListObject *)(op))->ob_item[i]) #define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v)) #define PyList_GET_SIZE(op) Py_SIZE(op) +#define _PyList_ITEMS(op) (((PyListObject *)(op))->ob_item) #endif #ifdef __cplusplus diff --git a/Darwin/include/python3.4m/longintrepr.h b/Darwin/include/python3.5m/longintrepr.h similarity index 100% rename from Darwin/include/python3.4m/longintrepr.h rename to Darwin/include/python3.5m/longintrepr.h diff --git a/Darwin/include/python3.4m/longobject.h b/Darwin/include/python3.5m/longobject.h similarity index 98% rename from Darwin/include/python3.4m/longobject.h rename to Darwin/include/python3.5m/longobject.h index ff43309..aed59ce 100644 --- a/Darwin/include/python3.4m/longobject.h +++ b/Darwin/include/python3.5m/longobject.h @@ -198,6 +198,9 @@ PyAPI_FUNC(int) _PyLong_FormatAdvancedWriter( PyAPI_FUNC(unsigned long) PyOS_strtoul(const char *, char **, int); PyAPI_FUNC(long) PyOS_strtol(const char *, char **, int); +/* For use by the gcd function in mathmodule.c */ +PyAPI_FUNC(PyObject *) _PyLong_GCD(PyObject *, PyObject *); + #ifdef __cplusplus } #endif diff --git a/Darwin/include/python3.4m/marshal.h b/Darwin/include/python3.5m/marshal.h similarity index 100% rename from Darwin/include/python3.4m/marshal.h rename to Darwin/include/python3.5m/marshal.h diff --git a/Darwin/include/python3.4m/memoryobject.h b/Darwin/include/python3.5m/memoryobject.h similarity index 93% rename from Darwin/include/python3.4m/memoryobject.h rename to Darwin/include/python3.5m/memoryobject.h index c2e1194..ab5ee09 100644 --- a/Darwin/include/python3.4m/memoryobject.h +++ b/Darwin/include/python3.5m/memoryobject.h @@ -45,9 +45,6 @@ typedef struct { } _PyManagedBufferObject; -/* static storage used for casting between formats */ -#define _Py_MEMORYVIEW_MAX_FORMAT 3 /* 
must be >= 3 */ - /* memoryview state flags */ #define _Py_MEMORYVIEW_RELEASED 0x001 /* access to master buffer blocked */ #define _Py_MEMORYVIEW_C 0x002 /* C-contiguous layout */ @@ -62,7 +59,6 @@ typedef struct { int flags; /* state flags */ Py_ssize_t exports; /* number of buffer re-exports */ Py_buffer view; /* private copy of the exporter's view */ - char format[_Py_MEMORYVIEW_MAX_FORMAT]; /* used for casting */ PyObject *weakreflist; Py_ssize_t ob_array[1]; /* shape, strides, suboffsets */ } PyMemoryViewObject; diff --git a/Darwin/include/python3.4m/metagrammar.h b/Darwin/include/python3.5m/metagrammar.h similarity index 100% rename from Darwin/include/python3.4m/metagrammar.h rename to Darwin/include/python3.5m/metagrammar.h diff --git a/Darwin/include/python3.4m/methodobject.h b/Darwin/include/python3.5m/methodobject.h similarity index 98% rename from Darwin/include/python3.4m/methodobject.h rename to Darwin/include/python3.5m/methodobject.h index 3cc2ea9..e2ad804 100644 --- a/Darwin/include/python3.4m/methodobject.h +++ b/Darwin/include/python3.5m/methodobject.h @@ -47,7 +47,7 @@ struct PyMethodDef { typedef struct PyMethodDef PyMethodDef; #define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL) -PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, +PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, PyObject *); /* Flag passed to newmethodobject */ @@ -66,7 +66,7 @@ PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, /* METH_COEXIST allows a method to be entered even though a slot has already filled the entry. When defined, the flag allows a separate - method, "__contains__" for example, to coexist with a defined + method, "__contains__" for example, to coexist with a defined slot like sq_contains. 
*/ #define METH_COEXIST 0x0040 @@ -77,6 +77,7 @@ typedef struct { PyMethodDef *m_ml; /* Description of the C function to call */ PyObject *m_self; /* Passed as 'self' arg to the C func, can be NULL */ PyObject *m_module; /* The __module__ attribute, can be anything */ + PyObject *m_weakreflist; /* List of weak references */ } PyCFunctionObject; #endif diff --git a/Darwin/include/python3.4m/modsupport.h b/Darwin/include/python3.5m/modsupport.h similarity index 59% rename from Darwin/include/python3.4m/modsupport.h rename to Darwin/include/python3.5m/modsupport.h index 5de0458..829aaf8 100644 --- a/Darwin/include/python3.4m/modsupport.h +++ b/Darwin/include/python3.5m/modsupport.h @@ -12,13 +12,13 @@ extern "C" { /* If PY_SSIZE_T_CLEAN is defined, each functions treats #-specifier to mean Py_ssize_t */ #ifdef PY_SSIZE_T_CLEAN -#define PyArg_Parse _PyArg_Parse_SizeT -#define PyArg_ParseTuple _PyArg_ParseTuple_SizeT -#define PyArg_ParseTupleAndKeywords _PyArg_ParseTupleAndKeywords_SizeT -#define PyArg_VaParse _PyArg_VaParse_SizeT -#define PyArg_VaParseTupleAndKeywords _PyArg_VaParseTupleAndKeywords_SizeT -#define Py_BuildValue _Py_BuildValue_SizeT -#define Py_VaBuildValue _Py_VaBuildValue_SizeT +#define PyArg_Parse _PyArg_Parse_SizeT +#define PyArg_ParseTuple _PyArg_ParseTuple_SizeT +#define PyArg_ParseTupleAndKeywords _PyArg_ParseTupleAndKeywords_SizeT +#define PyArg_VaParse _PyArg_VaParse_SizeT +#define PyArg_VaParseTupleAndKeywords _PyArg_VaParseTupleAndKeywords_SizeT +#define Py_BuildValue _Py_BuildValue_SizeT +#define Py_VaBuildValue _Py_VaBuildValue_SizeT #else PyAPI_FUNC(PyObject *) _Py_VaBuildValue_SizeT(const char *, va_list); #endif @@ -50,6 +50,13 @@ PyAPI_FUNC(int) PyModule_AddStringConstant(PyObject *, const char *, const char #define PyModule_AddIntMacro(m, c) PyModule_AddIntConstant(m, #c, c) #define PyModule_AddStringMacro(m, c) PyModule_AddStringConstant(m, #c, c) +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +/* New in 3.5 */ +PyAPI_FUNC(int) PyModule_SetDocString(PyObject *, const char *); +PyAPI_FUNC(int) PyModule_AddFunctions(PyObject *, PyMethodDef *); +PyAPI_FUNC(int) PyModule_ExecDef(PyObject *module, PyModuleDef *def); +#endif + #define Py_CLEANUP_SUPPORTED 0x20000 #define PYTHON_API_VERSION 1013 @@ -67,35 +74,35 @@ PyAPI_FUNC(int) PyModule_AddStringConstant(PyObject *, const char *, const char Please add a line or two to the top of this log for each API version change: - 22-Feb-2006 MvL 1013 PEP 353 - long indices for sequence lengths + 22-Feb-2006 MvL 1013 PEP 353 - long indices for sequence lengths - 19-Aug-2002 GvR 1012 Changes to string object struct for - interning changes, saving 3 bytes. + 19-Aug-2002 GvR 1012 Changes to string object struct for + interning changes, saving 3 bytes. - 17-Jul-2001 GvR 1011 Descr-branch, just to be on the safe side + 17-Jul-2001 GvR 1011 Descr-branch, just to be on the safe side 25-Jan-2001 FLD 1010 Parameters added to PyCode_New() and PyFrame_New(); Python 2.1a2 14-Mar-2000 GvR 1009 Unicode API added - 3-Jan-1999 GvR 1007 Decided to change back! (Don't reuse 1008!) + 3-Jan-1999 GvR 1007 Decided to change back! (Don't reuse 1008!) 
- 3-Dec-1998 GvR 1008 Python 1.5.2b1 + 3-Dec-1998 GvR 1008 Python 1.5.2b1 - 18-Jan-1997 GvR 1007 string interning and other speedups + 18-Jan-1997 GvR 1007 string interning and other speedups - 11-Oct-1996 GvR renamed Py_Ellipses to Py_Ellipsis :-( + 11-Oct-1996 GvR renamed Py_Ellipses to Py_Ellipsis :-( - 30-Jul-1996 GvR Slice and ellipses syntax added + 30-Jul-1996 GvR Slice and ellipses syntax added - 23-Jul-1996 GvR For 1.4 -- better safe than sorry this time :-) + 23-Jul-1996 GvR For 1.4 -- better safe than sorry this time :-) - 7-Nov-1995 GvR Keyword arguments (should've been done at 1.3 :-( ) + 7-Nov-1995 GvR Keyword arguments (should've been done at 1.3 :-( ) - 10-Jan-1995 GvR Renamed globals to new naming scheme + 10-Jan-1995 GvR Renamed globals to new naming scheme - 9-Jan-1995 GvR Initial version (incompatible with older API) + 9-Jan-1995 GvR Initial version (incompatible with older API) */ /* The PYTHON_ABI_VERSION is introduced in PEP 384. For the lifetime of @@ -105,10 +112,11 @@ PyAPI_FUNC(int) PyModule_AddStringConstant(PyObject *, const char *, const char #define PYTHON_ABI_STRING "3" #ifdef Py_TRACE_REFS - /* When we are tracing reference counts, rename PyModule_Create2 so + /* When we are tracing reference counts, rename module creation functions so modules compiled with incompatible settings will generate a link-time error. */ #define PyModule_Create2 PyModule_Create2TraceRefs + #define PyModule_FromDefAndSpec2 PyModule_FromDefAndSpec2TraceRefs #endif PyAPI_FUNC(PyObject *) PyModule_Create2(struct PyModuleDef*, @@ -116,12 +124,27 @@ PyAPI_FUNC(PyObject *) PyModule_Create2(struct PyModuleDef*, #ifdef Py_LIMITED_API #define PyModule_Create(module) \ - PyModule_Create2(module, PYTHON_ABI_VERSION) + PyModule_Create2(module, PYTHON_ABI_VERSION) #else #define PyModule_Create(module) \ - PyModule_Create2(module, PYTHON_API_VERSION) + PyModule_Create2(module, PYTHON_API_VERSION) #endif +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +/* New in 3.5 */ +PyAPI_FUNC(PyObject *) PyModule_FromDefAndSpec2(PyModuleDef *def, + PyObject *spec, + int module_api_version); + +#ifdef Py_LIMITED_API +#define PyModule_FromDefAndSpec(module, spec) \ + PyModule_FromDefAndSpec2(module, spec, PYTHON_ABI_VERSION) +#else +#define PyModule_FromDefAndSpec(module, spec) \ + PyModule_FromDefAndSpec2(module, spec, PYTHON_API_VERSION) +#endif /* Py_LIMITED_API */ +#endif /* New in 3.5 */ + #ifndef Py_LIMITED_API PyAPI_DATA(char *) _Py_PackageContext; #endif diff --git a/Darwin/include/python3.4m/moduleobject.h b/Darwin/include/python3.5m/moduleobject.h similarity index 74% rename from Darwin/include/python3.4m/moduleobject.h rename to Darwin/include/python3.5m/moduleobject.h index f119364..229d7fa 100644 --- a/Darwin/include/python3.4m/moduleobject.h +++ b/Darwin/include/python3.5m/moduleobject.h @@ -30,6 +30,12 @@ PyAPI_FUNC(void) _PyModule_ClearDict(PyObject *); PyAPI_FUNC(struct PyModuleDef*) PyModule_GetDef(PyObject*); PyAPI_FUNC(void*) PyModule_GetState(PyObject*); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +/* New in 3.5 */ +PyAPI_FUNC(PyObject *) PyModuleDef_Init(struct PyModuleDef*); +PyAPI_DATA(PyTypeObject) PyModuleDef_Type; +#endif + typedef struct PyModuleDef_Base { PyObject_HEAD PyObject* (*m_init)(void); @@ -44,19 +50,35 @@ typedef struct PyModuleDef_Base { NULL, /* m_copy */ \ } +struct PyModuleDef_Slot; +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +/* New in 3.5 */ +typedef struct PyModuleDef_Slot{ + int slot; + void *value; +} 
PyModuleDef_Slot; + +#define Py_mod_create 1 +#define Py_mod_exec 2 + +#ifndef Py_LIMITED_API +#define _Py_mod_LAST_SLOT 2 +#endif + +#endif /* New in 3.5 */ + typedef struct PyModuleDef{ PyModuleDef_Base m_base; const char* m_name; const char* m_doc; Py_ssize_t m_size; PyMethodDef *m_methods; - inquiry m_reload; + struct PyModuleDef_Slot* m_slots; traverseproc m_traverse; inquiry m_clear; freefunc m_free; }PyModuleDef; - #ifdef __cplusplus } #endif diff --git a/Darwin/include/python3.4m/namespaceobject.h b/Darwin/include/python3.5m/namespaceobject.h similarity index 100% rename from Darwin/include/python3.4m/namespaceobject.h rename to Darwin/include/python3.5m/namespaceobject.h diff --git a/Darwin/include/python3.4m/node.h b/Darwin/include/python3.5m/node.h similarity index 95% rename from Darwin/include/python3.4m/node.h rename to Darwin/include/python3.5m/node.h index 99c13f7..654ad85 100644 --- a/Darwin/include/python3.4m/node.h +++ b/Darwin/include/python3.5m/node.h @@ -21,12 +21,12 @@ PyAPI_FUNC(int) PyNode_AddChild(node *n, int type, char *str, int lineno, int col_offset); PyAPI_FUNC(void) PyNode_Free(node *n); #ifndef Py_LIMITED_API -Py_ssize_t _PyNode_SizeOf(node *n); +PyAPI_FUNC(Py_ssize_t) _PyNode_SizeOf(node *n); #endif /* Node access functions */ #define NCH(n) ((n)->n_nchildren) - + #define CHILD(n, i) (&(n)->n_child[i]) #define RCHILD(n, i) (CHILD(n, NCH(n) + i)) #define TYPE(n) ((n)->n_type) diff --git a/Darwin/include/python3.4m/object.h b/Darwin/include/python3.5m/object.h similarity index 98% rename from Darwin/include/python3.4m/object.h rename to Darwin/include/python3.5m/object.h index 7584d4c..8afcbe9 100644 --- a/Darwin/include/python3.4m/object.h +++ b/Darwin/include/python3.5m/object.h @@ -65,6 +65,7 @@ whose size is determined when the object is allocated. #error Py_LIMITED_API is incompatible with Py_DEBUG, Py_TRACE_REFS, and Py_REF_DEBUG #endif + #ifdef Py_TRACE_REFS /* Define pointers to support a doubly-linked list of all live heap objects. */ #define _PyObject_HEAD_EXTRA \ @@ -275,6 +276,9 @@ typedef struct { binaryfunc nb_inplace_true_divide; unaryfunc nb_index; + + binaryfunc nb_matrix_multiply; + binaryfunc nb_inplace_matrix_multiply; } PyNumberMethods; typedef struct { @@ -297,6 +301,11 @@ typedef struct { objobjargproc mp_ass_subscript; } PyMappingMethods; +typedef struct { + unaryfunc am_await; + unaryfunc am_aiter; + unaryfunc am_anext; +} PyAsyncMethods; typedef struct { getbufferproc bf_getbuffer; @@ -342,7 +351,7 @@ typedef struct _typeobject { printfunc tp_print; getattrfunc tp_getattr; setattrfunc tp_setattr; - void *tp_reserved; /* formerly known as tp_compare */ + PyAsyncMethods *tp_as_async; /* formerly known as tp_compare or tp_reserved */ reprfunc tp_repr; /* Method suites for standard classes */ @@ -449,6 +458,7 @@ typedef struct _heaptypeobject { /* Note: there's a dependency on the order of these members in slotptr() in typeobject.c . */ PyTypeObject ht_type; + PyAsyncMethods as_async; PyNumberMethods as_number; PyMappingMethods as_mapping; PySequenceMethods as_sequence; /* as_sequence comes after as_mapping, @@ -572,9 +582,6 @@ PyAPI_FUNC(PyObject *) PyObject_Dir(PyObject *); PyAPI_FUNC(int) Py_ReprEnter(PyObject *); PyAPI_FUNC(void) Py_ReprLeave(PyObject *); -/* Helper for passing objects to printf and the like */ -#define PyObject_REPR(obj) _PyUnicode_AsString(PyObject_Repr(obj)) - /* Flag bits for printing: */ #define Py_PRINT_RAW 1 /* No string quotes etc. 
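/* Illustrative sketch of the multi-phase initialization pattern (PEP 489)
   enabled by the new m_slots member and the Py_mod_create/Py_mod_exec slots
   above.  The module name "example" and the functions below are invented;
   the import machinery ends up calling PyModule_FromDefAndSpec(). */
#include <Python.h>

static int
example_exec(PyObject *module)
{
    /* Runs while the module is being executed, like top-level module code. */
    return PyModule_AddIntConstant(module, "answer", 42);
}

static PyModuleDef_Slot example_slots[] = {
    {Py_mod_exec, example_exec},
    {0, NULL}                               /* slot list is 0-terminated */
};

static struct PyModuleDef example_def = {
    PyModuleDef_HEAD_INIT,
    "example",                              /* m_name */
    NULL,                                   /* m_doc */
    0,                                      /* m_size */
    NULL,                                   /* m_methods */
    example_slots,                          /* m_slots (replaces m_reload) */
    NULL, NULL, NULL                        /* m_traverse, m_clear, m_free */
};

PyMODINIT_FUNC
PyInit_example(void)
{
    /* Return the definition instead of a ready module; creation and
       execution are driven later by the import system. */
    return PyModuleDef_Init(&example_def);
}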
*/ @@ -710,11 +717,17 @@ PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); _Py_NegativeRefcount(__FILE__, __LINE__, \ (PyObject *)(OP)); \ } +/* Py_REF_DEBUG also controls the display of refcounts and memory block + * allocations at the interactive prompt and at interpreter shutdown + */ +PyAPI_FUNC(void) _PyDebug_PrintTotalRefs(void); +#define _PY_DEBUG_PRINT_TOTAL_REFS() _PyDebug_PrintTotalRefs() #else #define _Py_INC_REFTOTAL #define _Py_DEC_REFTOTAL #define _Py_REF_DEBUG_COMMA #define _Py_CHECK_REFCNT(OP) /* a semicolon */; +#define _PY_DEBUG_PRINT_TOTAL_REFS() #endif /* Py_REF_DEBUG */ #ifdef COUNT_ALLOCS @@ -775,7 +788,7 @@ PyAPI_FUNC(void) _Py_Dealloc(PyObject *); } while (0) /* Safely decref `op` and set `op` to NULL, especially useful in tp_clear - * and tp_dealloc implementatons. + * and tp_dealloc implementations. * * Note that "the obvious" code can be deadly: * diff --git a/Darwin/include/python3.4m/objimpl.h b/Darwin/include/python3.5m/objimpl.h similarity index 98% rename from Darwin/include/python3.4m/objimpl.h rename to Darwin/include/python3.5m/objimpl.h index 3f21b70..65b6d91 100644 --- a/Darwin/include/python3.4m/objimpl.h +++ b/Darwin/include/python3.5m/objimpl.h @@ -95,6 +95,7 @@ PyObject_{New, NewVar, Del}. the raw memory. */ PyAPI_FUNC(void *) PyObject_Malloc(size_t size); +PyAPI_FUNC(void *) PyObject_Calloc(size_t nelem, size_t elsize); PyAPI_FUNC(void *) PyObject_Realloc(void *ptr, size_t new_size); PyAPI_FUNC(void) PyObject_Free(void *ptr); @@ -321,7 +322,8 @@ extern PyGC_Head *_PyGC_generation0; (!PyTuple_CheckExact(obj) || _PyObject_GC_IS_TRACKED(obj))) #endif /* Py_LIMITED_API */ -PyAPI_FUNC(PyObject *) _PyObject_GC_Malloc(size_t); +PyAPI_FUNC(PyObject *) _PyObject_GC_Malloc(size_t size); +PyAPI_FUNC(PyObject *) _PyObject_GC_Calloc(size_t size); PyAPI_FUNC(PyObject *) _PyObject_GC_New(PyTypeObject *); PyAPI_FUNC(PyVarObject *) _PyObject_GC_NewVar(PyTypeObject *, Py_ssize_t); PyAPI_FUNC(void) PyObject_GC_Track(void *); diff --git a/Darwin/include/python3.5m/odictobject.h b/Darwin/include/python3.5m/odictobject.h new file mode 100644 index 0000000..c1d9592 --- /dev/null +++ b/Darwin/include/python3.5m/odictobject.h @@ -0,0 +1,43 @@ +#ifndef Py_ODICTOBJECT_H +#define Py_ODICTOBJECT_H +#ifdef __cplusplus +extern "C" { +#endif + + +/* OrderedDict */ + +#ifndef Py_LIMITED_API + +typedef struct _odictobject PyODictObject; + +PyAPI_DATA(PyTypeObject) PyODict_Type; +PyAPI_DATA(PyTypeObject) PyODictIter_Type; +PyAPI_DATA(PyTypeObject) PyODictKeys_Type; +PyAPI_DATA(PyTypeObject) PyODictItems_Type; +PyAPI_DATA(PyTypeObject) PyODictValues_Type; + +#endif /* Py_LIMITED_API */ + +#define PyODict_Check(op) PyObject_TypeCheck(op, &PyODict_Type) +#define PyODict_CheckExact(op) (Py_TYPE(op) == &PyODict_Type) +#define PyODict_SIZE(op) ((PyDictObject *)op)->ma_used +#define PyODict_HasKey(od, key) (PyMapping_HasKey(PyObject *)od, key) + +PyAPI_FUNC(PyObject *) PyODict_New(void); +PyAPI_FUNC(int) PyODict_SetItem(PyObject *od, PyObject *key, PyObject *item); +PyAPI_FUNC(int) PyODict_DelItem(PyObject *od, PyObject *key); + +/* wrappers around PyDict* functions */ +#define PyODict_GetItem(od, key) PyDict_GetItem((PyObject *)od, key) +#define PyODict_GetItemWithError(od, key) \ + PyDict_GetItemWithError((PyObject *)od, key) +#define PyODict_Contains(od, key) PyDict_Contains((PyObject *)od, key) +#define PyODict_Size(od) PyDict_Size((PyObject *)od) +#define PyODict_GetItemString(od, key) \ + PyDict_GetItemString((PyObject *)od, key) + +#ifdef __cplusplus +} +#endif +#endif /* 
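/* Illustrative sketch of the new OrderedDict C API declared in odictobject.h;
   make_ordered_pair() is an invented helper, not part of the header. */
#include <Python.h>

static PyObject *
make_ordered_pair(void)
{
    PyObject *od = PyODict_New();           /* like collections.OrderedDict() */
    PyObject *key = NULL, *val = NULL;

    if (od == NULL)
        return NULL;
    key = PyUnicode_FromString("first");
    val = PyLong_FromLong(1);
    if (key == NULL || val == NULL || PyODict_SetItem(od, key, val) < 0)
        goto error;
    Py_DECREF(key);
    Py_DECREF(val);
    /* Insertion order is preserved; lookups go through the PyDict wrappers
       shown above, e.g. PyODict_GetItem(od, key). */
    return od;

error:
    Py_XDECREF(key);
    Py_XDECREF(val);
    Py_DECREF(od);
    return NULL;
}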
!Py_ODICTOBJECT_H */ diff --git a/Darwin/include/python3.5m/opcode.h b/Darwin/include/python3.5m/opcode.h new file mode 100644 index 0000000..3f917fb --- /dev/null +++ b/Darwin/include/python3.5m/opcode.h @@ -0,0 +1,142 @@ +/* Auto-generated by Tools/scripts/generate_opcode_h.py */ +#ifndef Py_OPCODE_H +#define Py_OPCODE_H +#ifdef __cplusplus +extern "C" { +#endif + + + /* Instruction opcodes for compiled code */ +#define POP_TOP 1 +#define ROT_TWO 2 +#define ROT_THREE 3 +#define DUP_TOP 4 +#define DUP_TOP_TWO 5 +#define NOP 9 +#define UNARY_POSITIVE 10 +#define UNARY_NEGATIVE 11 +#define UNARY_NOT 12 +#define UNARY_INVERT 15 +#define BINARY_MATRIX_MULTIPLY 16 +#define INPLACE_MATRIX_MULTIPLY 17 +#define BINARY_POWER 19 +#define BINARY_MULTIPLY 20 +#define BINARY_MODULO 22 +#define BINARY_ADD 23 +#define BINARY_SUBTRACT 24 +#define BINARY_SUBSCR 25 +#define BINARY_FLOOR_DIVIDE 26 +#define BINARY_TRUE_DIVIDE 27 +#define INPLACE_FLOOR_DIVIDE 28 +#define INPLACE_TRUE_DIVIDE 29 +#define GET_AITER 50 +#define GET_ANEXT 51 +#define BEFORE_ASYNC_WITH 52 +#define INPLACE_ADD 55 +#define INPLACE_SUBTRACT 56 +#define INPLACE_MULTIPLY 57 +#define INPLACE_MODULO 59 +#define STORE_SUBSCR 60 +#define DELETE_SUBSCR 61 +#define BINARY_LSHIFT 62 +#define BINARY_RSHIFT 63 +#define BINARY_AND 64 +#define BINARY_XOR 65 +#define BINARY_OR 66 +#define INPLACE_POWER 67 +#define GET_ITER 68 +#define GET_YIELD_FROM_ITER 69 +#define PRINT_EXPR 70 +#define LOAD_BUILD_CLASS 71 +#define YIELD_FROM 72 +#define GET_AWAITABLE 73 +#define INPLACE_LSHIFT 75 +#define INPLACE_RSHIFT 76 +#define INPLACE_AND 77 +#define INPLACE_XOR 78 +#define INPLACE_OR 79 +#define BREAK_LOOP 80 +#define WITH_CLEANUP_START 81 +#define WITH_CLEANUP_FINISH 82 +#define RETURN_VALUE 83 +#define IMPORT_STAR 84 +#define YIELD_VALUE 86 +#define POP_BLOCK 87 +#define END_FINALLY 88 +#define POP_EXCEPT 89 +#define HAVE_ARGUMENT 90 +#define STORE_NAME 90 +#define DELETE_NAME 91 +#define UNPACK_SEQUENCE 92 +#define FOR_ITER 93 +#define UNPACK_EX 94 +#define STORE_ATTR 95 +#define DELETE_ATTR 96 +#define STORE_GLOBAL 97 +#define DELETE_GLOBAL 98 +#define LOAD_CONST 100 +#define LOAD_NAME 101 +#define BUILD_TUPLE 102 +#define BUILD_LIST 103 +#define BUILD_SET 104 +#define BUILD_MAP 105 +#define LOAD_ATTR 106 +#define COMPARE_OP 107 +#define IMPORT_NAME 108 +#define IMPORT_FROM 109 +#define JUMP_FORWARD 110 +#define JUMP_IF_FALSE_OR_POP 111 +#define JUMP_IF_TRUE_OR_POP 112 +#define JUMP_ABSOLUTE 113 +#define POP_JUMP_IF_FALSE 114 +#define POP_JUMP_IF_TRUE 115 +#define LOAD_GLOBAL 116 +#define CONTINUE_LOOP 119 +#define SETUP_LOOP 120 +#define SETUP_EXCEPT 121 +#define SETUP_FINALLY 122 +#define LOAD_FAST 124 +#define STORE_FAST 125 +#define DELETE_FAST 126 +#define RAISE_VARARGS 130 +#define CALL_FUNCTION 131 +#define MAKE_FUNCTION 132 +#define BUILD_SLICE 133 +#define MAKE_CLOSURE 134 +#define LOAD_CLOSURE 135 +#define LOAD_DEREF 136 +#define STORE_DEREF 137 +#define DELETE_DEREF 138 +#define CALL_FUNCTION_VAR 140 +#define CALL_FUNCTION_KW 141 +#define CALL_FUNCTION_VAR_KW 142 +#define SETUP_WITH 143 +#define EXTENDED_ARG 144 +#define LIST_APPEND 145 +#define SET_ADD 146 +#define MAP_ADD 147 +#define LOAD_CLASSDEREF 148 +#define BUILD_LIST_UNPACK 149 +#define BUILD_MAP_UNPACK 150 +#define BUILD_MAP_UNPACK_WITH_CALL 151 +#define BUILD_TUPLE_UNPACK 152 +#define BUILD_SET_UNPACK 153 +#define SETUP_ASYNC_WITH 154 + +/* EXCEPT_HANDLER is a special, implicit block type which is created when + entering an except handler. 
It is not an opcode but we define it here + as we want it to be available to both frameobject.c and ceval.c, while + remaining private.*/ +#define EXCEPT_HANDLER 257 + + +enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, + PyCmp_GT=Py_GT, PyCmp_GE=Py_GE, PyCmp_IN, PyCmp_NOT_IN, + PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD}; + +#define HAS_ARG(op) ((op) >= HAVE_ARGUMENT) + +#ifdef __cplusplus +} +#endif +#endif /* !Py_OPCODE_H */ diff --git a/Darwin/include/python3.4m/osdefs.h b/Darwin/include/python3.5m/osdefs.h similarity index 79% rename from Darwin/include/python3.4m/osdefs.h rename to Darwin/include/python3.5m/osdefs.h index 0c2e34b..bd84c1c 100644 --- a/Darwin/include/python3.4m/osdefs.h +++ b/Darwin/include/python3.5m/osdefs.h @@ -7,15 +7,12 @@ extern "C" { /* Operating system dependencies */ -/* Mod by chrish: QNX has WATCOM, but isn't DOS */ -#if !defined(__QNX__) -#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__) +#ifdef MS_WINDOWS #define SEP L'\\' #define ALTSEP L'/' #define MAXPATHLEN 256 #define DELIM L';' #endif -#endif /* Filename separator */ #ifndef SEP diff --git a/Darwin/include/python3.4m/parsetok.h b/Darwin/include/python3.5m/parsetok.h similarity index 100% rename from Darwin/include/python3.4m/parsetok.h rename to Darwin/include/python3.5m/parsetok.h diff --git a/Darwin/include/python3.4m/patchlevel.h b/Darwin/include/python3.5m/patchlevel.h similarity index 92% rename from Darwin/include/python3.4m/patchlevel.h rename to Darwin/include/python3.5m/patchlevel.h index 960d7d0..226f9cf 100644 --- a/Darwin/include/python3.4m/patchlevel.h +++ b/Darwin/include/python3.5m/patchlevel.h @@ -17,13 +17,13 @@ /* Version parsed out into numeric values */ /*--start constants--*/ #define PY_MAJOR_VERSION 3 -#define PY_MINOR_VERSION 4 -#define PY_MICRO_VERSION 1 +#define PY_MINOR_VERSION 5 +#define PY_MICRO_VERSION 0 #define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL #define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.4.1" +#define PY_VERSION "3.5.0" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. 
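/* Illustrative sketch: the 4-byte hex form of the version described above is
   what extension code typically tests to guard 3.5-only features such as the
   matrix-multiplication slots or PyModule_FromDefAndSpec(). */
#include <Python.h>

#if PY_VERSION_HEX >= 0x03050000            /* Python 3.5 or newer */
#  define HAVE_PY35_FEATURES 1
#else
#  define HAVE_PY35_FEATURES 0
#endif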
diff --git a/Darwin/include/python3.4m/pgen.h b/Darwin/include/python3.5m/pgen.h similarity index 100% rename from Darwin/include/python3.4m/pgen.h rename to Darwin/include/python3.5m/pgen.h diff --git a/Darwin/include/python3.4m/pgenheaders.h b/Darwin/include/python3.5m/pgenheaders.h similarity index 100% rename from Darwin/include/python3.4m/pgenheaders.h rename to Darwin/include/python3.5m/pgenheaders.h diff --git a/Darwin/include/python3.4m/py_curses.h b/Darwin/include/python3.5m/py_curses.h similarity index 100% rename from Darwin/include/python3.4m/py_curses.h rename to Darwin/include/python3.5m/py_curses.h diff --git a/Darwin/include/python3.4m/pyarena.h b/Darwin/include/python3.5m/pyarena.h similarity index 100% rename from Darwin/include/python3.4m/pyarena.h rename to Darwin/include/python3.5m/pyarena.h diff --git a/Darwin/include/python3.4m/pyatomic.h b/Darwin/include/python3.5m/pyatomic.h similarity index 68% rename from Darwin/include/python3.4m/pyatomic.h rename to Darwin/include/python3.5m/pyatomic.h index d4e19e0..99816a5 100644 --- a/Darwin/include/python3.4m/pyatomic.h +++ b/Darwin/include/python3.5m/pyatomic.h @@ -1,14 +1,15 @@ -#ifndef Py_LIMITED_API +/* Issue #23644: is incompatible with C++, see: + https://gcc.gnu.org/bugzilla/show_bug.cgi?id=60932 */ +#if !defined(Py_LIMITED_API) && !defined(__cplusplus) #ifndef Py_ATOMIC_H #define Py_ATOMIC_H -/* XXX: When compilers start offering a stdatomic.h with lock-free - atomic_int and atomic_address types, include that here and rewrite - the atomic operations in terms of it. */ #include "dynamic_annotations.h" -#ifdef __cplusplus -extern "C" { +#include "pyconfig.h" + +#if defined(HAVE_STD_ATOMIC) +#include #endif /* This is modeled after the atomics interface from C1x, according to @@ -20,6 +21,76 @@ extern "C" { * Beware, the implementations here are deep magic. 
*/ +#if defined(HAVE_STD_ATOMIC) + +typedef enum _Py_memory_order { + _Py_memory_order_relaxed = memory_order_relaxed, + _Py_memory_order_acquire = memory_order_acquire, + _Py_memory_order_release = memory_order_release, + _Py_memory_order_acq_rel = memory_order_acq_rel, + _Py_memory_order_seq_cst = memory_order_seq_cst +} _Py_memory_order; + +typedef struct _Py_atomic_address { + _Atomic void *_value; +} _Py_atomic_address; + +typedef struct _Py_atomic_int { + atomic_int _value; +} _Py_atomic_int; + +#define _Py_atomic_signal_fence(/*memory_order*/ ORDER) \ + atomic_signal_fence(ORDER) + +#define _Py_atomic_thread_fence(/*memory_order*/ ORDER) \ + atomic_thread_fence(ORDER) + +#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \ + atomic_store_explicit(&(ATOMIC_VAL)->_value, NEW_VAL, ORDER) + +#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \ + atomic_load_explicit(&(ATOMIC_VAL)->_value, ORDER) + +/* Use builtin atomic operations in GCC >= 4.7 */ +#elif defined(HAVE_BUILTIN_ATOMIC) + +typedef enum _Py_memory_order { + _Py_memory_order_relaxed = __ATOMIC_RELAXED, + _Py_memory_order_acquire = __ATOMIC_ACQUIRE, + _Py_memory_order_release = __ATOMIC_RELEASE, + _Py_memory_order_acq_rel = __ATOMIC_ACQ_REL, + _Py_memory_order_seq_cst = __ATOMIC_SEQ_CST +} _Py_memory_order; + +typedef struct _Py_atomic_address { + void *_value; +} _Py_atomic_address; + +typedef struct _Py_atomic_int { + int _value; +} _Py_atomic_int; + +#define _Py_atomic_signal_fence(/*memory_order*/ ORDER) \ + __atomic_signal_fence(ORDER) + +#define _Py_atomic_thread_fence(/*memory_order*/ ORDER) \ + __atomic_thread_fence(ORDER) + +#define _Py_atomic_store_explicit(ATOMIC_VAL, NEW_VAL, ORDER) \ + (assert((ORDER) == __ATOMIC_RELAXED \ + || (ORDER) == __ATOMIC_SEQ_CST \ + || (ORDER) == __ATOMIC_RELEASE), \ + __atomic_store_n(&(ATOMIC_VAL)->_value, NEW_VAL, ORDER)) + +#define _Py_atomic_load_explicit(ATOMIC_VAL, ORDER) \ + (assert((ORDER) == __ATOMIC_RELAXED \ + || (ORDER) == __ATOMIC_SEQ_CST \ + || (ORDER) == __ATOMIC_ACQUIRE \ + || (ORDER) == __ATOMIC_CONSUME), \ + __atomic_load_n(&(ATOMIC_VAL)->_value, ORDER)) + +#else + typedef enum _Py_memory_order { _Py_memory_order_relaxed, _Py_memory_order_acquire, @@ -162,6 +233,7 @@ _Py_ANNOTATE_MEMORY_ORDER(const volatile void *address, _Py_memory_order order) ((ATOMIC_VAL)->_value) #endif /* !gcc x86 */ +#endif /* Standardized shortcuts. */ #define _Py_atomic_store(ATOMIC_VAL, NEW_VAL) \ @@ -176,9 +248,5 @@ _Py_ANNOTATE_MEMORY_ORDER(const volatile void *address, _Py_memory_order order) #define _Py_atomic_load_relaxed(ATOMIC_VAL) \ _Py_atomic_load_explicit(ATOMIC_VAL, _Py_memory_order_relaxed) -#ifdef __cplusplus -} -#endif - #endif /* Py_ATOMIC_H */ #endif /* Py_LIMITED_API */ diff --git a/Darwin/include/python3.4m/pycapsule.h b/Darwin/include/python3.5m/pycapsule.h similarity index 100% rename from Darwin/include/python3.4m/pycapsule.h rename to Darwin/include/python3.5m/pycapsule.h diff --git a/Darwin/include/python3.4m/pyconfig.h b/Darwin/include/python3.5m/pyconfig.h similarity index 96% rename from Darwin/include/python3.4m/pyconfig.h rename to Darwin/include/python3.5m/pyconfig.h index 34a68be..80474e6 100644 --- a/Darwin/include/python3.4m/pyconfig.h +++ b/Darwin/include/python3.5m/pyconfig.h @@ -102,6 +102,9 @@ /* Define if `unsetenv` does not return an int. */ /* #undef HAVE_BROKEN_UNSETENV */ +/* Has builtin atomics */ +#define HAVE_BUILTIN_ATOMIC 1 + /* Define this if you have the type _Bool. 
*/ #define HAVE_C99_BOOL 1 @@ -181,6 +184,9 @@ /* Define to 1 if you have the header file. */ /* #undef HAVE_DIRECT_H */ +/* Define to 1 if the dirent structure has a d_type field */ +#define HAVE_DIRENT_D_TYPE 1 + /* Define to 1 if you have the header file, and it defines `DIR'. */ #define HAVE_DIRENT_H 1 @@ -228,7 +234,7 @@ #define HAVE_EXPM1 1 /* Define to 1 if you have the `faccessat' function. */ -/* #undef HAVE_FACCESSAT */ +#define HAVE_FACCESSAT 1 /* Define if you have the 'fchdir' function. */ #define HAVE_FCHDIR 1 @@ -237,13 +243,13 @@ #define HAVE_FCHMOD 1 /* Define to 1 if you have the `fchmodat' function. */ -/* #undef HAVE_FCHMODAT */ +#define HAVE_FCHMODAT 1 /* Define to 1 if you have the `fchown' function. */ #define HAVE_FCHOWN 1 /* Define to 1 if you have the `fchownat' function. */ -/* #undef HAVE_FCHOWNAT */ +#define HAVE_FCHOWNAT 1 /* Define to 1 if you have the header file. */ #define HAVE_FCNTL_H 1 @@ -252,7 +258,7 @@ /* #undef HAVE_FDATASYNC */ /* Define to 1 if you have the `fdopendir' function. */ -/* #undef HAVE_FDOPENDIR */ +#define HAVE_FDOPENDIR 1 /* Define to 1 if you have the `fexecve' function. */ /* #undef HAVE_FEXECVE */ @@ -279,7 +285,7 @@ #define HAVE_FSEEKO 1 /* Define to 1 if you have the `fstatat' function. */ -/* #undef HAVE_FSTATAT */ +#define HAVE_FSTATAT 1 /* Define to 1 if you have the `fstatvfs' function. */ #define HAVE_FSTATVFS 1 @@ -314,15 +320,18 @@ /* Define to 1 if you have the `gamma' function. */ #define HAVE_GAMMA 1 +/* Define if we can use gcc inline assembler to get and set mc68881 fpcr */ +/* #undef HAVE_GCC_ASM_FOR_MC68881 */ + /* Define if we can use x64 gcc inline assembler */ -/* #undef HAVE_GCC_ASM_FOR_X64 */ +#define HAVE_GCC_ASM_FOR_X64 1 /* Define if we can use gcc inline assembler to get and set x87 control word */ #define HAVE_GCC_ASM_FOR_X87 1 /* Define if your compiler provides __uint128_t */ -/* #undef HAVE_GCC_UINT128_T */ +#define HAVE_GCC_UINT128_T 1 /* Define if you have the getaddrinfo function. */ #define HAVE_GETADDRINFO 1 @@ -330,6 +339,9 @@ /* Define this if you have flockfile(), getc_unlocked(), and funlockfile() */ #define HAVE_GETC_UNLOCKED 1 +/* Define to 1 if you have the `getentropy' function. */ +/* #undef HAVE_GETENTROPY */ + /* Define to 1 if you have the `getgrouplist' function. */ #define HAVE_GETGROUPLIST 1 @@ -384,6 +396,9 @@ /* Define to 1 if you have the `getpwent' function. */ #define HAVE_GETPWENT 1 +/* Define to 1 if the Linux getrandom() syscall is available */ +/* #undef HAVE_GETRANDOM_SYSCALL */ + /* Define to 1 if you have the `getresgid' function. */ /* #undef HAVE_GETRESGID */ @@ -467,7 +482,7 @@ and long long is available and at least as big as an off_t. You may need to add some flags for configuration and compilation to enable this mode. (For Solaris and Linux, the necessary defines are already defined.) */ -#define HAVE_LARGEFILE_SUPPORT 1 +/* #undef HAVE_LARGEFILE_SUPPORT */ /* Define to 1 if you have the 'lchflags' function. */ #define HAVE_LCHFLAGS 1 @@ -509,7 +524,7 @@ #define HAVE_LINK 1 /* Define to 1 if you have the `linkat' function. */ -/* #undef HAVE_LINKAT */ +#define HAVE_LINKAT 1 /* Define to 1 if you have the header file. */ /* #undef HAVE_LINUX_CAN_BCM_H */ @@ -517,6 +532,9 @@ /* Define to 1 if you have the header file. */ /* #undef HAVE_LINUX_CAN_H */ +/* Define if compiling using Linux 3.6 or later. */ +/* #undef HAVE_LINUX_CAN_RAW_FD_FRAMES */ + /* Define to 1 if you have the header file. 
*/ /* #undef HAVE_LINUX_CAN_RAW_H */ @@ -563,7 +581,7 @@ /* #undef HAVE_MEMRCHR */ /* Define to 1 if you have the `mkdirat' function. */ -/* #undef HAVE_MKDIRAT */ +#define HAVE_MKDIRAT 1 /* Define to 1 if you have the `mkfifo' function. */ #define HAVE_MKFIFO 1 @@ -602,7 +620,7 @@ #define HAVE_NICE 1 /* Define to 1 if you have the `openat' function. */ -/* #undef HAVE_OPENAT */ +#define HAVE_OPENAT 1 /* Define to 1 if you have the `openpty' function. */ #define HAVE_OPENPTY 1 @@ -673,11 +691,14 @@ /* Define to 1 if you have the `pwrite' function. */ #define HAVE_PWRITE 1 +/* Define if the libcrypto has RAND_egd */ +#define HAVE_RAND_EGD 1 + /* Define to 1 if you have the `readlink' function. */ #define HAVE_READLINK 1 /* Define to 1 if you have the `readlinkat' function. */ -/* #undef HAVE_READLINKAT */ +#define HAVE_READLINKAT 1 /* Define to 1 if you have the `readv' function. */ #define HAVE_READV 1 @@ -686,13 +707,16 @@ #define HAVE_REALPATH 1 /* Define to 1 if you have the `renameat' function. */ -/* #undef HAVE_RENAMEAT */ +#define HAVE_RENAMEAT 1 + +/* Define if readline supports append_history */ +#define HAVE_RL_APPEND_HISTORY 1 /* Define if you have readline 2.1 */ #define HAVE_RL_CALLBACK 1 /* Define if you can turn off readline's signal handling. */ -/* #undef HAVE_RL_CATCH_SIGNAL */ +#define HAVE_RL_CATCH_SIGNAL 1 /* Define if you have readline 2.2 */ #define HAVE_RL_COMPLETION_APPEND_CHARACTER 1 @@ -704,7 +728,7 @@ #define HAVE_RL_COMPLETION_MATCHES 1 /* Define if you have rl_completion_suppress_append */ -/* #undef HAVE_RL_COMPLETION_SUPPRESS_APPEND */ +#define HAVE_RL_COMPLETION_SUPPRESS_APPEND 1 /* Define if you have readline 4.0 */ #define HAVE_RL_PRE_INPUT_HOOK 1 @@ -866,6 +890,9 @@ /* Define to 1 if you have the header file. */ #define HAVE_STDLIB_H 1 +/* Has stdatomic.h, atomic_int and _Atomic void* types work */ +/* #undef HAVE_STD_ATOMIC */ + /* Define to 1 if you have the `strdup' function. */ #define HAVE_STRDUP 1 @@ -913,7 +940,7 @@ #define HAVE_SYMLINK 1 /* Define to 1 if you have the `symlinkat' function. */ -/* #undef HAVE_SYMLINKAT */ +#define HAVE_SYMLINKAT 1 /* Define to 1 if you have the `sync' function. */ #define HAVE_SYNC 1 @@ -1091,7 +1118,7 @@ #define HAVE_UNISTD_H 1 /* Define to 1 if you have the `unlinkat' function. */ -/* #undef HAVE_UNLINKAT */ +#define HAVE_UNLINKAT 1 /* Define to 1 if you have the `unsetenv' function. */ #define HAVE_UNSETENV 1 @@ -1232,7 +1259,7 @@ #define SIZEOF_INT 4 /* The size of `long', as computed by sizeof. */ -#define SIZEOF_LONG 4 +#define SIZEOF_LONG 8 /* The size of `long double', as computed by sizeof. */ #define SIZEOF_LONG_DOUBLE 16 @@ -1247,22 +1274,22 @@ #define SIZEOF_PID_T 4 /* The size of `pthread_t', as computed by sizeof. */ -#define SIZEOF_PTHREAD_T 4 +#define SIZEOF_PTHREAD_T 8 /* The size of `short', as computed by sizeof. */ #define SIZEOF_SHORT 2 /* The size of `size_t', as computed by sizeof. */ -#define SIZEOF_SIZE_T 4 +#define SIZEOF_SIZE_T 8 /* The size of `time_t', as computed by sizeof. */ -#define SIZEOF_TIME_T 4 +#define SIZEOF_TIME_T 8 /* The size of `uintptr_t', as computed by sizeof. */ -#define SIZEOF_UINTPTR_T 4 +#define SIZEOF_UINTPTR_T 8 /* The size of `void *', as computed by sizeof. */ -#define SIZEOF_VOID_P 4 +#define SIZEOF_VOID_P 8 /* The size of `wchar_t', as computed by sizeof. 
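/* Illustrative sketch of how these configure results are consumed: code picks
   an *at() call when pyconfig.h says it exists and falls back otherwise.
   open_relative() is an invented helper, not something defined by CPython. */
#include <Python.h>                         /* brings in pyconfig.h */
#include <fcntl.h>

static int
open_relative(int dirfd, const char *name)
{
#ifdef HAVE_OPENAT                          /* newly defined for this build */
    return openat(dirfd, name, O_RDONLY);
#else
    (void)dirfd;
    return open(name, O_RDONLY);            /* best-effort fallback */
#endif
}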
*/ #define SIZEOF_WCHAR_T 4 @@ -1318,7 +1345,7 @@ /* Define if a va_list is an array of some kind */ -/* #undef VA_LIST_IS_ARRAY */ +#define VA_LIST_IS_ARRAY 1 /* Define if you want SIGFPE handled (see Include/pyfpe.h). */ /* #undef WANT_SIGFPE_HANDLER */ @@ -1339,7 +1366,7 @@ /* Define if you want to produce an OpenStep/Rhapsody framework (shared library plus accessory files). */ -/* #undef WITH_NEXT_FRAMEWORK */ +#define WITH_NEXT_FRAMEWORK 1 /* Define if you want to compile in Python-specific mallocs */ #define WITH_PYMALLOC 1 diff --git a/Darwin/include/python3.4m/pyctype.h b/Darwin/include/python3.5m/pyctype.h similarity index 100% rename from Darwin/include/python3.4m/pyctype.h rename to Darwin/include/python3.5m/pyctype.h diff --git a/Darwin/include/python3.4m/pydebug.h b/Darwin/include/python3.5m/pydebug.h similarity index 89% rename from Darwin/include/python3.4m/pydebug.h rename to Darwin/include/python3.5m/pydebug.h index 8fe9818..19bec2b 100644 --- a/Darwin/include/python3.4m/pydebug.h +++ b/Darwin/include/python3.5m/pydebug.h @@ -5,6 +5,8 @@ extern "C" { #endif +/* These global variable are defined in pylifecycle.c */ +/* XXX (ncoghlan): move these declarations to pylifecycle.h? */ PyAPI_DATA(int) Py_DebugFlag; PyAPI_DATA(int) Py_VerboseFlag; PyAPI_DATA(int) Py_QuietFlag; diff --git a/Darwin/include/python3.4m/pyerrors.h b/Darwin/include/python3.5m/pyerrors.h similarity index 97% rename from Darwin/include/python3.4m/pyerrors.h rename to Darwin/include/python3.5m/pyerrors.h index e44fb5f..35aedb7 100644 --- a/Darwin/include/python3.4m/pyerrors.h +++ b/Darwin/include/python3.5m/pyerrors.h @@ -99,6 +99,7 @@ PyAPI_FUNC(void) PyErr_SetExcInfo(PyObject *, PyObject *, PyObject *); #define _Py_NO_RETURN #endif +/* Defined in Python/pylifecycle.c */ PyAPI_FUNC(void) Py_FatalError(const char *message) _Py_NO_RETURN; #if defined(Py_DEBUG) || defined(Py_LIMITED_API) @@ -123,7 +124,9 @@ PyAPI_FUNC(void) PyException_SetCause(PyObject *, PyObject *); /* Context manipulation (PEP 3134) */ PyAPI_FUNC(PyObject *) PyException_GetContext(PyObject *); PyAPI_FUNC(void) PyException_SetContext(PyObject *, PyObject *); - +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyErr_ChainExceptions(PyObject *, PyObject *, PyObject *); +#endif /* */ @@ -144,6 +147,7 @@ PyAPI_FUNC(void) PyException_SetContext(PyObject *, PyObject *); PyAPI_DATA(PyObject *) PyExc_BaseException; PyAPI_DATA(PyObject *) PyExc_Exception; +PyAPI_DATA(PyObject *) PyExc_StopAsyncIteration; PyAPI_DATA(PyObject *) PyExc_StopIteration; PyAPI_DATA(PyObject *) PyExc_GeneratorExit; PyAPI_DATA(PyObject *) PyExc_ArithmeticError; @@ -163,6 +167,7 @@ PyAPI_DATA(PyObject *) PyExc_MemoryError; PyAPI_DATA(PyObject *) PyExc_NameError; PyAPI_DATA(PyObject *) PyExc_OverflowError; PyAPI_DATA(PyObject *) PyExc_RuntimeError; +PyAPI_DATA(PyObject *) PyExc_RecursionError; PyAPI_DATA(PyObject *) PyExc_NotImplementedError; PyAPI_DATA(PyObject *) PyExc_SyntaxError; PyAPI_DATA(PyObject *) PyExc_IndentationError; @@ -242,6 +247,12 @@ PyAPI_FUNC(PyObject *) PyErr_Format( const char *format, /* ASCII-encoded string */ ... 
); +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +PyAPI_FUNC(PyObject *) PyErr_FormatV( + PyObject *exception, + const char *format, + va_list vargs); +#endif #ifdef MS_WINDOWS PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithFilename( diff --git a/Darwin/include/python3.4m/pyexpat.h b/Darwin/include/python3.5m/pyexpat.h similarity index 100% rename from Darwin/include/python3.4m/pyexpat.h rename to Darwin/include/python3.5m/pyexpat.h diff --git a/Darwin/include/python3.4m/pyfpe.h b/Darwin/include/python3.5m/pyfpe.h similarity index 100% rename from Darwin/include/python3.4m/pyfpe.h rename to Darwin/include/python3.5m/pyfpe.h diff --git a/Darwin/include/python3.4m/pygetopt.h b/Darwin/include/python3.5m/pygetopt.h similarity index 100% rename from Darwin/include/python3.4m/pygetopt.h rename to Darwin/include/python3.5m/pygetopt.h diff --git a/Darwin/include/python3.4m/pyhash.h b/Darwin/include/python3.5m/pyhash.h similarity index 100% rename from Darwin/include/python3.4m/pyhash.h rename to Darwin/include/python3.5m/pyhash.h diff --git a/Darwin/include/python3.5m/pylifecycle.h b/Darwin/include/python3.5m/pylifecycle.h new file mode 100644 index 0000000..ccdebe2 --- /dev/null +++ b/Darwin/include/python3.5m/pylifecycle.h @@ -0,0 +1,124 @@ + +/* Interfaces to configure, query, create & destroy the Python runtime */ + +#ifndef Py_PYLIFECYCLE_H +#define Py_PYLIFECYCLE_H +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_FUNC(void) Py_SetProgramName(wchar_t *); +PyAPI_FUNC(wchar_t *) Py_GetProgramName(void); + +PyAPI_FUNC(void) Py_SetPythonHome(wchar_t *); +PyAPI_FUNC(wchar_t *) Py_GetPythonHome(void); + +#ifndef Py_LIMITED_API +/* Only used by applications that embed the interpreter and need to + * override the standard encoding determination mechanism + */ +PyAPI_FUNC(int) Py_SetStandardStreamEncoding(const char *encoding, + const char *errors); +#endif + +PyAPI_FUNC(void) Py_Initialize(void); +PyAPI_FUNC(void) Py_InitializeEx(int); +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _Py_InitializeEx_Private(int, int); +#endif +PyAPI_FUNC(void) Py_Finalize(void); +PyAPI_FUNC(int) Py_IsInitialized(void); +PyAPI_FUNC(PyThreadState *) Py_NewInterpreter(void); +PyAPI_FUNC(void) Py_EndInterpreter(PyThreadState *); + + +/* Py_PyAtExit is for the atexit module, Py_AtExit is for low-level + * exit functions. + */ +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _Py_PyAtExit(void (*func)(void)); +#endif +PyAPI_FUNC(int) Py_AtExit(void (*func)(void)); + +PyAPI_FUNC(void) Py_Exit(int); + +/* Restore signals that the interpreter has called SIG_IGN on to SIG_DFL. 
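/* Illustrative sketch of PyErr_FormatV(), declared above and new in 3.5: it
   lets a helper that already owns a va_list raise a formatted exception.
   fail_with() is an invented name. */
#include <Python.h>
#include <stdarg.h>

static PyObject *
fail_with(PyObject *exc_type, const char *format, ...)
{
    va_list vargs;

    va_start(vargs, format);
    PyErr_FormatV(exc_type, format, vargs); /* always returns NULL */
    va_end(vargs);
    return NULL;
}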
*/ +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _Py_RestoreSignals(void); + +PyAPI_FUNC(int) Py_FdIsInteractive(FILE *, const char *); +#endif + +/* Bootstrap __main__ (defined in Modules/main.c) */ +PyAPI_FUNC(int) Py_Main(int argc, wchar_t **argv); + +/* In getpath.c */ +PyAPI_FUNC(wchar_t *) Py_GetProgramFullPath(void); +PyAPI_FUNC(wchar_t *) Py_GetPrefix(void); +PyAPI_FUNC(wchar_t *) Py_GetExecPrefix(void); +PyAPI_FUNC(wchar_t *) Py_GetPath(void); +PyAPI_FUNC(void) Py_SetPath(const wchar_t *); +#ifdef MS_WINDOWS +int _Py_CheckPython3(); +#endif + +/* In their own files */ +PyAPI_FUNC(const char *) Py_GetVersion(void); +PyAPI_FUNC(const char *) Py_GetPlatform(void); +PyAPI_FUNC(const char *) Py_GetCopyright(void); +PyAPI_FUNC(const char *) Py_GetCompiler(void); +PyAPI_FUNC(const char *) Py_GetBuildInfo(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(const char *) _Py_hgidentifier(void); +PyAPI_FUNC(const char *) _Py_hgversion(void); +#endif + +/* Internal -- various one-time initializations */ +#ifndef Py_LIMITED_API +PyAPI_FUNC(PyObject *) _PyBuiltin_Init(void); +PyAPI_FUNC(PyObject *) _PySys_Init(void); +PyAPI_FUNC(void) _PyImport_Init(void); +PyAPI_FUNC(void) _PyExc_Init(PyObject * bltinmod); +PyAPI_FUNC(void) _PyImportHooks_Init(void); +PyAPI_FUNC(int) _PyFrame_Init(void); +PyAPI_FUNC(int) _PyFloat_Init(void); +PyAPI_FUNC(int) PyByteArray_Init(void); +PyAPI_FUNC(void) _PyRandom_Init(void); +#endif + +/* Various internal finalizers */ +#ifndef Py_LIMITED_API +PyAPI_FUNC(void) _PyExc_Fini(void); +PyAPI_FUNC(void) _PyImport_Fini(void); +PyAPI_FUNC(void) PyMethod_Fini(void); +PyAPI_FUNC(void) PyFrame_Fini(void); +PyAPI_FUNC(void) PyCFunction_Fini(void); +PyAPI_FUNC(void) PyDict_Fini(void); +PyAPI_FUNC(void) PyTuple_Fini(void); +PyAPI_FUNC(void) PyList_Fini(void); +PyAPI_FUNC(void) PySet_Fini(void); +PyAPI_FUNC(void) PyBytes_Fini(void); +PyAPI_FUNC(void) PyByteArray_Fini(void); +PyAPI_FUNC(void) PyFloat_Fini(void); +PyAPI_FUNC(void) PyOS_FiniInterrupts(void); +PyAPI_FUNC(void) _PyGC_DumpShutdownStats(void); +PyAPI_FUNC(void) _PyGC_Fini(void); +PyAPI_FUNC(void) PySlice_Fini(void); +PyAPI_FUNC(void) _PyType_Fini(void); +PyAPI_FUNC(void) _PyRandom_Fini(void); + +PyAPI_DATA(PyThreadState *) _Py_Finalizing; +#endif + +/* Signals */ +typedef void (*PyOS_sighandler_t)(int); +PyAPI_FUNC(PyOS_sighandler_t) PyOS_getsig(int); +PyAPI_FUNC(PyOS_sighandler_t) PyOS_setsig(int, PyOS_sighandler_t); + +/* Random */ +PyAPI_FUNC(int) _PyOS_URandom (void *buffer, Py_ssize_t size); + +#ifdef __cplusplus +} +#endif +#endif /* !Py_PYLIFECYCLE_H */ diff --git a/Darwin/include/python3.4m/pymacconfig.h b/Darwin/include/python3.5m/pymacconfig.h similarity index 100% rename from Darwin/include/python3.4m/pymacconfig.h rename to Darwin/include/python3.5m/pymacconfig.h diff --git a/Darwin/include/python3.4m/pymacro.h b/Darwin/include/python3.5m/pymacro.h similarity index 85% rename from Darwin/include/python3.4m/pymacro.h rename to Darwin/include/python3.5m/pymacro.h index 7997c55..3f6f5dc 100644 --- a/Darwin/include/python3.4m/pymacro.h +++ b/Darwin/include/python3.5m/pymacro.h @@ -1,13 +1,26 @@ #ifndef Py_PYMACRO_H #define Py_PYMACRO_H +/* Minimum value between x and y */ #define Py_MIN(x, y) (((x) > (y)) ? (y) : (x)) + +/* Maximum value between x and y */ #define Py_MAX(x, y) (((x) > (y)) ? (x) : (y)) +/* Absolute value of the number x */ +#define Py_ABS(x) ((x) < 0 ? -(x) : (x)) + +#define _Py_XSTRINGIFY(x) #x + +/* Convert the argument to a string. 
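/* Illustrative sketch of the embedding entry points now grouped in
   pylifecycle.h: set the program name, initialize, run some code, finalize.
   Error handling and argv decoding are omitted. */
#include <Python.h>

int
main(void)
{
    Py_SetProgramName(L"embedding-demo");   /* optional; before Py_Initialize() */
    Py_Initialize();
    PyRun_SimpleString("import sys; print(sys.version.split()[0])");
    Py_Finalize();
    return 0;
}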
For example, Py_STRINGIFY(123) is replaced + with "123" by the preprocessor. Defines are also replaced by their value. + For example Py_STRINGIFY(__LINE__) is replaced by the line number, not + by "__LINE__". */ +#define Py_STRINGIFY(x) _Py_XSTRINGIFY(x) + /* Argument must be a char or an int in [-128, 127] or [0, 255]. */ #define Py_CHARMASK(c) ((unsigned char)((c) & 0xff)) - /* Assert a build-time dependency, as an expression. Your compile will fail if the condition isn't true, or can't be evaluated diff --git a/Darwin/include/python3.4m/pymath.h b/Darwin/include/python3.5m/pymath.h similarity index 88% rename from Darwin/include/python3.4m/pymath.h rename to Darwin/include/python3.5m/pymath.h index 62a6c42..1ea9ac1 100644 --- a/Darwin/include/python3.4m/pymath.h +++ b/Darwin/include/python3.5m/pymath.h @@ -150,7 +150,29 @@ PyAPI_FUNC(void) _Py_set_387controlword(unsigned short); * doesn't support NaNs. */ #if !defined(Py_NAN) && !defined(Py_NO_NAN) -#define Py_NAN (Py_HUGE_VAL * 0.) +#if !defined(__INTEL_COMPILER) + #define Py_NAN (Py_HUGE_VAL * 0.) +#else /* __INTEL_COMPILER */ + #if defined(ICC_NAN_STRICT) + #pragma float_control(push) + #pragma float_control(precise, on) + #pragma float_control(except, on) + #if defined(_MSC_VER) + __declspec(noinline) + #else /* Linux */ + __attribute__((noinline)) + #endif /* _MSC_VER */ + static double __icc_nan() + { + return sqrt(-1.0); + } + #pragma float_control (pop) + #define Py_NAN __icc_nan() + #else /* ICC_NAN_RELAXED as default for Intel Compiler */ + static union { unsigned char buf[8]; double __icc_nan; } __nan_store = {0,0,0,0,0,0,0xf8,0x7f}; + #define Py_NAN (__nan_store.__icc_nan) + #endif /* ICC_NAN_STRICT */ +#endif /* __INTEL_COMPILER */ #endif /* Py_OVERFLOWED(X) diff --git a/Darwin/include/python3.4m/pymem.h b/Darwin/include/python3.5m/pymem.h similarity index 93% rename from Darwin/include/python3.4m/pymem.h rename to Darwin/include/python3.5m/pymem.h index 2372b86..043db64 100644 --- a/Darwin/include/python3.4m/pymem.h +++ b/Darwin/include/python3.5m/pymem.h @@ -13,6 +13,7 @@ extern "C" { #ifndef Py_LIMITED_API PyAPI_FUNC(void *) PyMem_RawMalloc(size_t size); +PyAPI_FUNC(void *) PyMem_RawCalloc(size_t nelem, size_t elsize); PyAPI_FUNC(void *) PyMem_RawRealloc(void *ptr, size_t new_size); PyAPI_FUNC(void) PyMem_RawFree(void *ptr); #endif @@ -57,6 +58,7 @@ PyAPI_FUNC(void) PyMem_RawFree(void *ptr); */ PyAPI_FUNC(void *) PyMem_Malloc(size_t size); +PyAPI_FUNC(void *) PyMem_Calloc(size_t nelem, size_t elsize); PyAPI_FUNC(void *) PyMem_Realloc(void *ptr, size_t new_size); PyAPI_FUNC(void) PyMem_Free(void *ptr); @@ -126,22 +128,25 @@ typedef enum { } PyMemAllocatorDomain; typedef struct { - /* user context passed as the first argument to the 3 functions */ + /* user context passed as the first argument to the 4 functions */ void *ctx; /* allocate a memory block */ void* (*malloc) (void *ctx, size_t size); + /* allocate a memory block initialized by zeros */ + void* (*calloc) (void *ctx, size_t nelem, size_t elsize); + /* allocate or resize a memory block */ void* (*realloc) (void *ctx, void *ptr, size_t new_size); /* release a memory block */ void (*free) (void *ctx, void *ptr); -} PyMemAllocator; +} PyMemAllocatorEx; /* Get the memory block allocator of the specified domain. */ PyAPI_FUNC(void) PyMem_GetAllocator(PyMemAllocatorDomain domain, - PyMemAllocator *allocator); + PyMemAllocatorEx *allocator); /* Set the memory block allocator of the specified domain. 
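/* Illustrative sketch of the renamed PyMemAllocatorEx structure: wrap the raw
   domain with counting hooks, including the calloc slot that 3.5 adds.  The
   names below are invented; install before Py_Initialize(). */
#include <Python.h>

static PyMemAllocatorEx base_raw;           /* saved original allocator */
static size_t raw_alloc_count;

static void *
counting_malloc(void *ctx, size_t size)
{
    PyMemAllocatorEx *base = (PyMemAllocatorEx *)ctx;
    raw_alloc_count++;
    return base->malloc(base->ctx, size);
}

static void *
counting_calloc(void *ctx, size_t nelem, size_t elsize)
{
    PyMemAllocatorEx *base = (PyMemAllocatorEx *)ctx;
    raw_alloc_count++;
    return base->calloc(base->ctx, nelem, elsize);
}

static void *
counting_realloc(void *ctx, void *ptr, size_t new_size)
{
    PyMemAllocatorEx *base = (PyMemAllocatorEx *)ctx;
    return base->realloc(base->ctx, ptr, new_size);
}

static void
counting_free(void *ctx, void *ptr)
{
    PyMemAllocatorEx *base = (PyMemAllocatorEx *)ctx;
    base->free(base->ctx, ptr);
}

static void
install_counting_raw_allocator(void)
{
    PyMemAllocatorEx hooks = {&base_raw, counting_malloc, counting_calloc,
                              counting_realloc, counting_free};
    PyMem_GetAllocator(PYMEM_DOMAIN_RAW, &base_raw);
    PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &hooks);   /* contents are copied */
}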
@@ -155,7 +160,7 @@ PyAPI_FUNC(void) PyMem_GetAllocator(PyMemAllocatorDomain domain, PyMem_SetupDebugHooks() function must be called to reinstall the debug hooks on top on the new allocator. */ PyAPI_FUNC(void) PyMem_SetAllocator(PyMemAllocatorDomain domain, - PyMemAllocator *allocator); + PyMemAllocatorEx *allocator); /* Setup hooks to detect bugs in the following Python memory allocator functions: diff --git a/Darwin/include/python3.4m/pyport.h b/Darwin/include/python3.5m/pyport.h similarity index 95% rename from Darwin/include/python3.4m/pyport.h rename to Darwin/include/python3.5m/pyport.h index c706213..66e00d4 100644 --- a/Darwin/include/python3.4m/pyport.h +++ b/Darwin/include/python3.5m/pyport.h @@ -270,7 +270,7 @@ typedef int Py_ssize_clean_t; * for platforms that support that. * * If PY_LOCAL_AGGRESSIVE is defined before python.h is included, more - * "aggressive" inlining/optimizaion is enabled for the entire module. This + * "aggressive" inlining/optimization is enabled for the entire module. This * may lead to code bloat, and may slow things down for those reasons. It may * also lead to errors, if the code relies on pointer aliasing. Use with * care. @@ -357,28 +357,6 @@ typedef int Py_ssize_clean_t; * stat() and fstat() fiddling * *******************************/ -/* We expect that stat and fstat exist on most systems. - * It's confirmed on Unix, Mac and Windows. - * If you don't have them, add - * #define DONT_HAVE_STAT - * and/or - * #define DONT_HAVE_FSTAT - * to your pyconfig.h. Python code beyond this should check HAVE_STAT and - * HAVE_FSTAT instead. - * Also - * #define HAVE_SYS_STAT_H - * if exists on your platform, and - * #define HAVE_STAT_H - * if does. - */ -#ifndef DONT_HAVE_STAT -#define HAVE_STAT -#endif - -#ifndef DONT_HAVE_FSTAT -#define HAVE_FSTAT -#endif - #ifdef HAVE_SYS_STAT_H #include #elif defined(HAVE_STAT_H) @@ -588,6 +566,25 @@ extern "C" { } while (0) #endif +#ifdef HAVE_GCC_ASM_FOR_MC68881 +#define HAVE_PY_SET_53BIT_PRECISION 1 +#define _Py_SET_53BIT_PRECISION_HEADER \ + unsigned int old_fpcr, new_fpcr +#define _Py_SET_53BIT_PRECISION_START \ + do { \ + __asm__ ("fmove.l %%fpcr,%0" : "=g" (old_fpcr)); \ + /* Set double precision / round to nearest. */ \ + new_fpcr = (old_fpcr & ~0xf0) | 0x80; \ + if (new_fpcr != old_fpcr) \ + __asm__ volatile ("fmove.l %0,%%fpcr" : : "g" (new_fpcr)); \ + } while (0) +#define _Py_SET_53BIT_PRECISION_END \ + do { \ + if (new_fpcr != old_fpcr) \ + __asm__ volatile ("fmove.l %0,%%fpcr" : : "g" (old_fpcr)); \ + } while (0) +#endif + /* default definitions are empty */ #ifndef HAVE_PY_SET_53BIT_PRECISION #define _Py_SET_53BIT_PRECISION_HEADER @@ -880,4 +877,24 @@ extern pid_t forkpty(int *, char *, struct termios *, struct winsize *); #define PY_LITTLE_ENDIAN 1 #endif +#ifdef Py_BUILD_CORE +/* + * Macros to protect CRT calls against instant termination when passed an + * invalid parameter (issue23524). 
+ */ +#if defined _MSC_VER && _MSC_VER >= 1900 + +extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler; +#define _Py_BEGIN_SUPPRESS_IPH { _invalid_parameter_handler _Py_old_handler = \ + _set_thread_local_invalid_parameter_handler(_Py_silent_invalid_parameter_handler); +#define _Py_END_SUPPRESS_IPH _set_thread_local_invalid_parameter_handler(_Py_old_handler); } + +#else + +#define _Py_BEGIN_SUPPRESS_IPH +#define _Py_END_SUPPRESS_IPH + +#endif /* _MSC_VER >= 1900 */ +#endif /* Py_BUILD_CORE */ + #endif /* Py_PYPORT_H */ diff --git a/Darwin/include/python3.4m/pystate.h b/Darwin/include/python3.5m/pystate.h similarity index 95% rename from Darwin/include/python3.4m/pystate.h rename to Darwin/include/python3.5m/pystate.h index 4992c22..a2fd803 100644 --- a/Darwin/include/python3.4m/pystate.h +++ b/Darwin/include/python3.5m/pystate.h @@ -134,6 +134,9 @@ typedef struct _ts { void (*on_delete)(void *); void *on_delete_data; + PyObject *coroutine_wrapper; + int in_coroutine_wrapper; + /* XXX signal handlers should also be here */ } PyThreadState; @@ -174,12 +177,16 @@ PyAPI_FUNC(int) PyThreadState_SetAsyncExc(long, PyObject *); /* Variable and macro for in-line access to current thread state */ /* Assuming the current thread holds the GIL, this is the - PyThreadState for the current thread. */ -#ifndef Py_LIMITED_API + PyThreadState for the current thread. + + Issue #23644: pyatomic.h is incompatible with C++ (yet). Disable + PyThreadState_GET() optimization: declare it as an alias to + PyThreadState_Get(), as done for limited API. */ +#if !defined(Py_LIMITED_API) && !defined(__cplusplus) PyAPI_DATA(_Py_atomic_address) _PyThreadState_Current; #endif -#if defined(Py_DEBUG) || defined(Py_LIMITED_API) +#if defined(Py_DEBUG) || defined(Py_LIMITED_API) || defined(__cplusplus) #define PyThreadState_GET() PyThreadState_Get() #else #define PyThreadState_GET() \ diff --git a/Darwin/include/python3.4m/pystrcmp.h b/Darwin/include/python3.5m/pystrcmp.h similarity index 100% rename from Darwin/include/python3.4m/pystrcmp.h rename to Darwin/include/python3.5m/pystrcmp.h diff --git a/Darwin/include/python3.5m/pystrhex.h b/Darwin/include/python3.5m/pystrhex.h new file mode 100644 index 0000000..1dc1255 --- /dev/null +++ b/Darwin/include/python3.5m/pystrhex.h @@ -0,0 +1,17 @@ +#ifndef Py_STRHEX_H +#define Py_STRHEX_H + +#ifdef __cplusplus +extern "C" { +#endif + +/* Returns a str() containing the hex representation of argbuf. */ +PyAPI_FUNC(PyObject*) _Py_strhex(const char* argbuf, const Py_ssize_t arglen); +/* Returns a bytes() containing the ASCII hex representation of argbuf. 
*/ +PyAPI_FUNC(PyObject*) _Py_strhex_bytes(const char* argbuf, const Py_ssize_t arglen); + +#ifdef __cplusplus +} +#endif + +#endif /* !Py_STRHEX_H */ diff --git a/Darwin/include/python3.4m/pystrtod.h b/Darwin/include/python3.5m/pystrtod.h similarity index 100% rename from Darwin/include/python3.4m/pystrtod.h rename to Darwin/include/python3.5m/pystrtod.h diff --git a/Darwin/include/python3.4m/pythonrun.h b/Darwin/include/python3.5m/pythonrun.h similarity index 65% rename from Darwin/include/python3.4m/pythonrun.h rename to Darwin/include/python3.5m/pythonrun.h index 2fc5578..f92148d 100644 --- a/Darwin/include/python3.4m/pythonrun.h +++ b/Darwin/include/python3.5m/pythonrun.h @@ -9,7 +9,8 @@ extern "C" { #define PyCF_MASK (CO_FUTURE_DIVISION | CO_FUTURE_ABSOLUTE_IMPORT | \ CO_FUTURE_WITH_STATEMENT | CO_FUTURE_PRINT_FUNCTION | \ - CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL) + CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL | \ + CO_FUTURE_GENERATOR_STOP) #define PyCF_MASK_OBSOLETE (CO_NESTED) #define PyCF_SOURCE_IS_UTF8 0x0100 #define PyCF_DONT_IMPLY_DEDENT 0x0200 @@ -22,30 +23,6 @@ typedef struct { } PyCompilerFlags; #endif -PyAPI_FUNC(void) Py_SetProgramName(wchar_t *); -PyAPI_FUNC(wchar_t *) Py_GetProgramName(void); - -PyAPI_FUNC(void) Py_SetPythonHome(wchar_t *); -PyAPI_FUNC(wchar_t *) Py_GetPythonHome(void); - -#ifndef Py_LIMITED_API -/* Only used by applications that embed the interpreter and need to - * override the standard encoding determination mechanism - */ -PyAPI_FUNC(int) Py_SetStandardStreamEncoding(const char *encoding, - const char *errors); -#endif - -PyAPI_FUNC(void) Py_Initialize(void); -PyAPI_FUNC(void) Py_InitializeEx(int); -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _Py_InitializeEx_Private(int, int); -#endif -PyAPI_FUNC(void) Py_Finalize(void); -PyAPI_FUNC(int) Py_IsInitialized(void); -PyAPI_FUNC(PyThreadState *) Py_NewInterpreter(void); -PyAPI_FUNC(void) Py_EndInterpreter(PyThreadState *); - #ifndef Py_LIMITED_API PyAPI_FUNC(int) PyRun_SimpleStringFlags(const char *, PyCompilerFlags *); PyAPI_FUNC(int) PyRun_AnyFileFlags(FILE *, const char *, PyCompilerFlags *); @@ -166,26 +143,6 @@ PyAPI_FUNC(void) PyErr_Print(void); PyAPI_FUNC(void) PyErr_PrintEx(int); PyAPI_FUNC(void) PyErr_Display(PyObject *, PyObject *, PyObject *); -/* Py_PyAtExit is for the atexit module, Py_AtExit is for low-level - * exit functions. - */ -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _Py_PyAtExit(void (*func)(void)); -#endif -PyAPI_FUNC(int) Py_AtExit(void (*func)(void)); - -PyAPI_FUNC(void) Py_Exit(int); - -/* Restore signals that the interpreter has called SIG_IGN on to SIG_DFL. 
*/ -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _Py_RestoreSignals(void); - -PyAPI_FUNC(int) Py_FdIsInteractive(FILE *, const char *); -#endif - -/* Bootstrap */ -PyAPI_FUNC(int) Py_Main(int argc, wchar_t **argv); - #ifndef Py_LIMITED_API /* Use macros for a bunch of old variants */ #define PyRun_String(str, s, g, l) PyRun_StringFlags(str, s, g, l, NULL) @@ -207,64 +164,6 @@ PyAPI_FUNC(int) Py_Main(int argc, wchar_t **argv); PyRun_FileExFlags(fp, p, s, g, l, 0, flags) #endif -/* In getpath.c */ -PyAPI_FUNC(wchar_t *) Py_GetProgramFullPath(void); -PyAPI_FUNC(wchar_t *) Py_GetPrefix(void); -PyAPI_FUNC(wchar_t *) Py_GetExecPrefix(void); -PyAPI_FUNC(wchar_t *) Py_GetPath(void); -PyAPI_FUNC(void) Py_SetPath(const wchar_t *); -#ifdef MS_WINDOWS -int _Py_CheckPython3(); -#endif - -/* In their own files */ -PyAPI_FUNC(const char *) Py_GetVersion(void); -PyAPI_FUNC(const char *) Py_GetPlatform(void); -PyAPI_FUNC(const char *) Py_GetCopyright(void); -PyAPI_FUNC(const char *) Py_GetCompiler(void); -PyAPI_FUNC(const char *) Py_GetBuildInfo(void); -#ifndef Py_LIMITED_API -PyAPI_FUNC(const char *) _Py_hgidentifier(void); -PyAPI_FUNC(const char *) _Py_hgversion(void); -#endif - -/* Internal -- various one-time initializations */ -#ifndef Py_LIMITED_API -PyAPI_FUNC(PyObject *) _PyBuiltin_Init(void); -PyAPI_FUNC(PyObject *) _PySys_Init(void); -PyAPI_FUNC(void) _PyImport_Init(void); -PyAPI_FUNC(void) _PyExc_Init(PyObject * bltinmod); -PyAPI_FUNC(void) _PyImportHooks_Init(void); -PyAPI_FUNC(int) _PyFrame_Init(void); -PyAPI_FUNC(int) _PyFloat_Init(void); -PyAPI_FUNC(int) PyByteArray_Init(void); -PyAPI_FUNC(void) _PyRandom_Init(void); -#endif - -/* Various internal finalizers */ -#ifndef Py_LIMITED_API -PyAPI_FUNC(void) _PyExc_Fini(void); -PyAPI_FUNC(void) _PyImport_Fini(void); -PyAPI_FUNC(void) PyMethod_Fini(void); -PyAPI_FUNC(void) PyFrame_Fini(void); -PyAPI_FUNC(void) PyCFunction_Fini(void); -PyAPI_FUNC(void) PyDict_Fini(void); -PyAPI_FUNC(void) PyTuple_Fini(void); -PyAPI_FUNC(void) PyList_Fini(void); -PyAPI_FUNC(void) PySet_Fini(void); -PyAPI_FUNC(void) PyBytes_Fini(void); -PyAPI_FUNC(void) PyByteArray_Fini(void); -PyAPI_FUNC(void) PyFloat_Fini(void); -PyAPI_FUNC(void) PyOS_FiniInterrupts(void); -PyAPI_FUNC(void) _PyGC_DumpShutdownStats(void); -PyAPI_FUNC(void) _PyGC_Fini(void); -PyAPI_FUNC(void) PySlice_Fini(void); -PyAPI_FUNC(void) _PyType_Fini(void); -PyAPI_FUNC(void) _PyRandom_Fini(void); - -PyAPI_DATA(PyThreadState *) _Py_Finalizing; -#endif - /* Stuff with no proper home (yet) */ #ifndef Py_LIMITED_API PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, const char *); @@ -290,14 +189,6 @@ PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState; PyAPI_FUNC(int) PyOS_CheckStack(void); #endif -/* Signals */ -typedef void (*PyOS_sighandler_t)(int); -PyAPI_FUNC(PyOS_sighandler_t) PyOS_getsig(int); -PyAPI_FUNC(PyOS_sighandler_t) PyOS_setsig(int, PyOS_sighandler_t); - -/* Random */ -PyAPI_FUNC(int) _PyOS_URandom (void *buffer, Py_ssize_t size); - #ifdef __cplusplus } #endif diff --git a/Darwin/include/python3.4m/pythread.h b/Darwin/include/python3.5m/pythread.h similarity index 100% rename from Darwin/include/python3.4m/pythread.h rename to Darwin/include/python3.5m/pythread.h diff --git a/Darwin/include/python3.5m/pytime.h b/Darwin/include/python3.5m/pytime.h new file mode 100644 index 0000000..027c3d8 --- /dev/null +++ b/Darwin/include/python3.5m/pytime.h @@ -0,0 +1,181 @@ +#ifndef Py_LIMITED_API +#ifndef Py_PYTIME_H +#define Py_PYTIME_H + +#include "pyconfig.h" /* include for defines */ +#include "object.h" + 
+/************************************************************************** +Symbols and macros to supply platform-independent interfaces to time related +functions and constants +**************************************************************************/ +#ifdef __cplusplus +extern "C" { +#endif + +#ifdef PY_INT64_T +/* _PyTime_t: Python timestamp with subsecond precision. It can be used to + store a duration, and so indirectly a date (related to another date, like + UNIX epoch). */ +typedef PY_INT64_T _PyTime_t; +#define _PyTime_MIN PY_LLONG_MIN +#define _PyTime_MAX PY_LLONG_MAX +#else +# error "_PyTime_t need signed 64-bit integer type" +#endif + +typedef enum { + /* Round towards minus infinity (-inf). + For example, used to read a clock. */ + _PyTime_ROUND_FLOOR=0, + /* Round towards infinity (+inf). + For example, used for timeout to wait "at least" N seconds. */ + _PyTime_ROUND_CEILING +} _PyTime_round_t; + +/* Convert a time_t to a PyLong. */ +PyAPI_FUNC(PyObject *) _PyLong_FromTime_t( + time_t sec); + +/* Convert a PyLong to a time_t. */ +PyAPI_FUNC(time_t) _PyLong_AsTime_t( + PyObject *obj); + +/* Convert a number of seconds, int or float, to time_t. */ +PyAPI_FUNC(int) _PyTime_ObjectToTime_t( + PyObject *obj, + time_t *sec, + _PyTime_round_t); + +/* Convert a number of seconds, int or float, to a timeval structure. + usec is in the range [0; 999999] and rounded towards zero. + For example, -1.2 is converted to (-2, 800000). */ +PyAPI_FUNC(int) _PyTime_ObjectToTimeval( + PyObject *obj, + time_t *sec, + long *usec, + _PyTime_round_t); + +/* Convert a number of seconds, int or float, to a timespec structure. + nsec is in the range [0; 999999999] and rounded towards zero. + For example, -1.2 is converted to (-2, 800000000). */ +PyAPI_FUNC(int) _PyTime_ObjectToTimespec( + PyObject *obj, + time_t *sec, + long *nsec, + _PyTime_round_t); + + +/* Create a timestamp from a number of seconds. */ +PyAPI_FUNC(_PyTime_t) _PyTime_FromSeconds(int seconds); + +/* Macro to create a timestamp from a number of seconds, no integer overflow. + Only use the macro for small values, prefer _PyTime_FromSeconds(). */ +#define _PYTIME_FROMSECONDS(seconds) \ + ((_PyTime_t)(seconds) * (1000 * 1000 * 1000)) + +/* Create a timestamp from a number of nanoseconds. */ +PyAPI_FUNC(_PyTime_t) _PyTime_FromNanoseconds(PY_LONG_LONG ns); + +/* Convert a number of seconds (Python float or int) to a timetamp. + Raise an exception and return -1 on error, return 0 on success. */ +PyAPI_FUNC(int) _PyTime_FromSecondsObject(_PyTime_t *t, + PyObject *obj, + _PyTime_round_t round); + +/* Convert a number of milliseconds (Python float or int, 10^-3) to a timetamp. + Raise an exception and return -1 on error, return 0 on success. */ +PyAPI_FUNC(int) _PyTime_FromMillisecondsObject(_PyTime_t *t, + PyObject *obj, + _PyTime_round_t round); + +/* Convert a timestamp to a number of seconds as a C double. */ +PyAPI_FUNC(double) _PyTime_AsSecondsDouble(_PyTime_t t); + +/* Convert timestamp to a number of milliseconds (10^-3 seconds). */ +PyAPI_FUNC(_PyTime_t) _PyTime_AsMilliseconds(_PyTime_t t, + _PyTime_round_t round); + +/* Convert timestamp to a number of microseconds (10^-6 seconds). */ +PyAPI_FUNC(_PyTime_t) _PyTime_AsMicroseconds(_PyTime_t t, + _PyTime_round_t round); + +/* Convert timestamp to a number of nanoseconds (10^-9 seconds) as a Python int + object. */ +PyAPI_FUNC(PyObject *) _PyTime_AsNanosecondsObject(_PyTime_t t); + +/* Convert a timestamp to a timeval structure (microsecond resolution). + tv_usec is always positive. 
+ Raise an exception and return -1 if the conversion overflowed, + return 0 on success. */ +PyAPI_FUNC(int) _PyTime_AsTimeval(_PyTime_t t, + struct timeval *tv, + _PyTime_round_t round); + +/* Similar to _PyTime_AsTimeval(), but don't raise an exception on error. */ +PyAPI_FUNC(int) _PyTime_AsTimeval_noraise(_PyTime_t t, + struct timeval *tv, + _PyTime_round_t round); + +#if defined(HAVE_CLOCK_GETTIME) || defined(HAVE_KQUEUE) +/* Convert a timestamp to a timespec structure (nanosecond resolution). + tv_nsec is always positive. + Raise an exception and return -1 on error, return 0 on success. */ +PyAPI_FUNC(int) _PyTime_AsTimespec(_PyTime_t t, struct timespec *ts); +#endif + +/* Get the current time from the system clock. + + The function cannot fail. _PyTime_Init() ensures that the system clock + works. */ +PyAPI_FUNC(_PyTime_t) _PyTime_GetSystemClock(void); + +/* Get the time of a monotonic clock, i.e. a clock that cannot go backwards. + The clock is not affected by system clock updates. The reference point of + the returned value is undefined, so that only the difference between the + results of consecutive calls is valid. + + The function cannot fail. _PyTime_Init() ensures that a monotonic clock + is available and works. */ +PyAPI_FUNC(_PyTime_t) _PyTime_GetMonotonicClock(void); + + +/* Structure used by time.get_clock_info() */ +typedef struct { + const char *implementation; + int monotonic; + int adjustable; + double resolution; +} _Py_clock_info_t; + +/* Get the current time from the system clock. + * Fill clock information if info is not NULL. + * Raise an exception and return -1 on error, return 0 on success. + */ +PyAPI_FUNC(int) _PyTime_GetSystemClockWithInfo( + _PyTime_t *t, + _Py_clock_info_t *info); + +/* Get the time of a monotonic clock, i.e. a clock that cannot go backwards. + The clock is not affected by system clock updates. The reference point of + the returned value is undefined, so that only the difference between the + results of consecutive calls is valid. + + Fill info (if set) with information of the function used to get the time. + + Return 0 on success, raise an exception and return -1 on error. */ +PyAPI_FUNC(int) _PyTime_GetMonotonicClockWithInfo( + _PyTime_t *t, + _Py_clock_info_t *info); + + +/* Initialize time. + Return 0 on success, raise an exception and return -1 on error. */ +PyAPI_FUNC(int) _PyTime_Init(void); + +#ifdef __cplusplus +} +#endif + +#endif /* Py_PYTIME_H */ +#endif /* Py_LIMITED_API */ diff --git a/Darwin/include/python3.4m/rangeobject.h b/Darwin/include/python3.5m/rangeobject.h similarity index 100% rename from Darwin/include/python3.4m/rangeobject.h rename to Darwin/include/python3.5m/rangeobject.h diff --git a/Darwin/include/python3.4m/setobject.h b/Darwin/include/python3.5m/setobject.h similarity index 66% rename from Darwin/include/python3.4m/setobject.h rename to Darwin/include/python3.5m/setobject.h index ae3f556..f17bc1b 100644 --- a/Darwin/include/python3.4m/setobject.h +++ b/Darwin/include/python3.5m/setobject.h @@ -6,38 +6,43 @@ extern "C" { #endif +#ifndef Py_LIMITED_API -/* -There are three kinds of slots in the table: +/* There are three kinds of entries in the table: 1. Unused: key == NULL 2. Active: key != NULL and key != dummy 3. Dummy: key == dummy -Note: .pop() abuses the hash field of an Unused or Dummy slot to -hold a search finger. The hash field of Unused or Dummy slots has -no meaning otherwise. +The hash field of Unused slots have no meaning. 
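/* Illustrative sketch only -- the _PyTime_* functions are private CPython API,
   not intended for extension modules.  It times a callback with the monotonic
   clock declared above; time_call() is an invented helper. */
#include <Python.h>

static void
time_call(void (*fn)(void))
{
    _PyTime_t t0, dt;

    t0 = _PyTime_GetMonotonicClock();
    fn();
    dt = _PyTime_GetMonotonicClock() - t0;
    /* Round up so very short intervals do not print as 0 ms. */
    fprintf(stderr, "took %lld ms\n",
            (long long)_PyTime_AsMilliseconds(dt, _PyTime_ROUND_CEILING));
}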
+The hash field of Dummny slots are set to -1 +meaning that dummy entries can be detected by +either entry->key==dummy or by entry->hash==-1. */ -#ifndef Py_LIMITED_API + #define PySet_MINSIZE 8 typedef struct { - /* Cached hash code of the key. */ PyObject *key; - Py_hash_t hash; + Py_hash_t hash; /* Cached hash code of the key */ } setentry; +/* The SetObject data structure is shared by set and frozenset objects. + +Invariant for sets: + - hash is -1 + +Invariants for frozensets: + - data is immutable. + - hash is the hash of the frozenset or -1 if not computed yet. -/* -This data structure is shared by set and frozenset objects. */ -typedef struct _setobject PySetObject; -struct _setobject { +typedef struct { PyObject_HEAD - Py_ssize_t fill; /* # Active + # Dummy */ - Py_ssize_t used; /* # Active */ + Py_ssize_t fill; /* Number active and dummy entries*/ + Py_ssize_t used; /* Number active entries */ /* The table contains mask + 1 slots, and that's a power of 2. * We store the mask instead of the size because the mask is more @@ -45,33 +50,42 @@ struct _setobject { */ Py_ssize_t mask; - /* table points to smalltable for small tables, else to - * additional malloc'ed memory. table is never NULL! This rule - * saves repeated runtime null-tests. + /* The table points to a fixed-size smalltable for small tables + * or to additional malloc'ed memory for bigger tables. + * The table pointer is never NULL which saves us from repeated + * runtime null-tests. */ setentry *table; - setentry *(*lookup)(PySetObject *so, PyObject *key, Py_hash_t hash); - Py_hash_t hash; /* only used by frozenset objects */ - setentry smalltable[PySet_MINSIZE]; + Py_hash_t hash; /* Only used by frozenset objects */ + Py_ssize_t finger; /* Search finger for pop() */ + setentry smalltable[PySet_MINSIZE]; PyObject *weakreflist; /* List of weak references */ -}; -#endif /* Py_LIMITED_API */ +} PySetObject; + +#define PySet_GET_SIZE(so) (((PySetObject *)(so))->used) + +PyAPI_DATA(PyObject *) _PySet_Dummy; + +PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, Py_hash_t *hash); +PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); +PyAPI_FUNC(int) PySet_ClearFreeList(void); + +#endif /* Section excluded by Py_LIMITED_API */ PyAPI_DATA(PyTypeObject) PySet_Type; PyAPI_DATA(PyTypeObject) PyFrozenSet_Type; PyAPI_DATA(PyTypeObject) PySetIter_Type; -#ifndef Py_LIMITED_API -PyAPI_DATA(PyObject *) _PySet_Dummy; -#endif +PyAPI_FUNC(PyObject *) PySet_New(PyObject *); +PyAPI_FUNC(PyObject *) PyFrozenSet_New(PyObject *); -/* Invariants for frozensets: - * data is immutable. - * hash is the hash of the frozenset or -1 if not computed yet. 
- * Invariants for sets: - * hash is -1 - */ +PyAPI_FUNC(int) PySet_Add(PyObject *set, PyObject *key); +PyAPI_FUNC(int) PySet_Clear(PyObject *set); +PyAPI_FUNC(int) PySet_Contains(PyObject *anyset, PyObject *key); +PyAPI_FUNC(int) PySet_Discard(PyObject *set, PyObject *key); +PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set); +PyAPI_FUNC(Py_ssize_t) PySet_Size(PyObject *anyset); #define PyFrozenSet_CheckExact(ob) (Py_TYPE(ob) == &PyFrozenSet_Type) #define PyAnySet_CheckExact(ob) \ @@ -87,26 +101,6 @@ PyAPI_DATA(PyObject *) _PySet_Dummy; (Py_TYPE(ob) == &PyFrozenSet_Type || \ PyType_IsSubtype(Py_TYPE(ob), &PyFrozenSet_Type)) -PyAPI_FUNC(PyObject *) PySet_New(PyObject *); -PyAPI_FUNC(PyObject *) PyFrozenSet_New(PyObject *); -PyAPI_FUNC(Py_ssize_t) PySet_Size(PyObject *anyset); -#ifndef Py_LIMITED_API -#define PySet_GET_SIZE(so) (((PySetObject *)(so))->used) -#endif -PyAPI_FUNC(int) PySet_Clear(PyObject *set); -PyAPI_FUNC(int) PySet_Contains(PyObject *anyset, PyObject *key); -PyAPI_FUNC(int) PySet_Discard(PyObject *set, PyObject *key); -PyAPI_FUNC(int) PySet_Add(PyObject *set, PyObject *key); -#ifndef Py_LIMITED_API -PyAPI_FUNC(int) _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, Py_hash_t *hash); -#endif -PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set); -#ifndef Py_LIMITED_API -PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable); - -PyAPI_FUNC(int) PySet_ClearFreeList(void); -#endif - #ifdef __cplusplus } #endif diff --git a/Darwin/include/python3.4m/sliceobject.h b/Darwin/include/python3.5m/sliceobject.h similarity index 97% rename from Darwin/include/python3.4m/sliceobject.h rename to Darwin/include/python3.5m/sliceobject.h index f7ee90c..26370e0 100644 --- a/Darwin/include/python3.4m/sliceobject.h +++ b/Darwin/include/python3.5m/sliceobject.h @@ -41,7 +41,7 @@ PyAPI_FUNC(int) _PySlice_GetLongIndices(PySliceObject *self, PyObject *length, PyAPI_FUNC(int) PySlice_GetIndices(PyObject *r, Py_ssize_t length, Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step); PyAPI_FUNC(int) PySlice_GetIndicesEx(PyObject *r, Py_ssize_t length, - Py_ssize_t *start, Py_ssize_t *stop, + Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step, Py_ssize_t *slicelength); #ifdef __cplusplus diff --git a/Darwin/include/python3.4m/structmember.h b/Darwin/include/python3.5m/structmember.h similarity index 100% rename from Darwin/include/python3.4m/structmember.h rename to Darwin/include/python3.5m/structmember.h diff --git a/Darwin/include/python3.4m/structseq.h b/Darwin/include/python3.5m/structseq.h similarity index 100% rename from Darwin/include/python3.4m/structseq.h rename to Darwin/include/python3.5m/structseq.h diff --git a/Darwin/include/python3.4m/symtable.h b/Darwin/include/python3.5m/symtable.h similarity index 95% rename from Darwin/include/python3.4m/symtable.h rename to Darwin/include/python3.5m/symtable.h index 1cfd884..1409cd9 100644 --- a/Darwin/include/python3.4m/symtable.h +++ b/Darwin/include/python3.5m/symtable.h @@ -43,7 +43,6 @@ typedef struct _symtable_entry { PyObject *ste_children; /* list of child blocks */ PyObject *ste_directives;/* locations of global and nonlocal statements */ _Py_block_ty ste_type; /* module, class, or function */ - int ste_unoptimized; /* false if namespace is optimized */ int ste_nested; /* true if block is nested */ unsigned ste_free : 1; /* true if block has free variables */ unsigned ste_child_free : 1; /* true if a child block has free vars, @@ -108,10 +107,6 @@ PyAPI_FUNC(void) PySymtable_Free(struct symtable *); #define FREE 4 
#define CELL 5 -/* The following two names are used for the ste_unoptimized bit field */ -#define OPT_IMPORT_STAR 1 -#define OPT_TOPLEVEL 2 /* top-level names, including eval and exec */ - #define GENERATOR 1 #define GENERATOR_EXPRESSION 2 diff --git a/Darwin/include/python3.4m/sysmodule.h b/Darwin/include/python3.5m/sysmodule.h similarity index 94% rename from Darwin/include/python3.4m/sysmodule.h rename to Darwin/include/python3.5m/sysmodule.h index 79e52a3..cde10ac 100644 --- a/Darwin/include/python3.4m/sysmodule.h +++ b/Darwin/include/python3.5m/sysmodule.h @@ -33,6 +33,10 @@ PyAPI_FUNC(int) PySys_HasWarnOptions(void); PyAPI_FUNC(void) PySys_AddXOption(const wchar_t *); PyAPI_FUNC(PyObject *) PySys_GetXOptions(void); +#ifndef Py_LIMITED_API +PyAPI_FUNC(size_t) _PySys_GetSizeOf(PyObject *); +#endif + #ifdef __cplusplus } #endif diff --git a/Darwin/include/python3.4m/token.h b/Darwin/include/python3.5m/token.h similarity index 90% rename from Darwin/include/python3.4m/token.h rename to Darwin/include/python3.5m/token.h index 905022b..595afa0 100644 --- a/Darwin/include/python3.4m/token.h +++ b/Darwin/include/python3.5m/token.h @@ -58,13 +58,16 @@ extern "C" { #define DOUBLESTAREQUAL 46 #define DOUBLESLASH 47 #define DOUBLESLASHEQUAL 48 -#define AT 49 -#define RARROW 50 -#define ELLIPSIS 51 +#define AT 49 +#define ATEQUAL 50 +#define RARROW 51 +#define ELLIPSIS 52 /* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */ -#define OP 52 -#define ERRORTOKEN 53 -#define N_TOKENS 54 +#define OP 53 +#define AWAIT 54 +#define ASYNC 55 +#define ERRORTOKEN 56 +#define N_TOKENS 57 /* Special definitions for cooperation with parser */ diff --git a/Darwin/include/python3.4m/traceback.h b/Darwin/include/python3.5m/traceback.h similarity index 96% rename from Darwin/include/python3.4m/traceback.h rename to Darwin/include/python3.5m/traceback.h index 7734707..891000c 100644 --- a/Darwin/include/python3.4m/traceback.h +++ b/Darwin/include/python3.5m/traceback.h @@ -24,6 +24,7 @@ PyAPI_FUNC(int) PyTraceBack_Here(struct _frame *); PyAPI_FUNC(int) PyTraceBack_Print(PyObject *, PyObject *); #ifndef Py_LIMITED_API PyAPI_FUNC(int) _Py_DisplaySourceLine(PyObject *, PyObject *, int, int); +PyAPI_FUNC(void) _PyTraceback_Add(const char *, const char *, int); #endif /* Reveal traceback type so we can typecheck traceback objects */ diff --git a/Darwin/include/python3.4m/tupleobject.h b/Darwin/include/python3.5m/tupleobject.h similarity index 100% rename from Darwin/include/python3.4m/tupleobject.h rename to Darwin/include/python3.5m/tupleobject.h diff --git a/Darwin/include/python3.4m/typeslots.h b/Darwin/include/python3.5m/typeslots.h similarity index 88% rename from Darwin/include/python3.4m/typeslots.h rename to Darwin/include/python3.5m/typeslots.h index ad3cdfb..0ce6a37 100644 --- a/Darwin/include/python3.4m/typeslots.h +++ b/Darwin/include/python3.5m/typeslots.h @@ -74,3 +74,12 @@ #define Py_tp_members 72 #define Py_tp_getset 73 #define Py_tp_free 74 +#define Py_nb_matrix_multiply 75 +#define Py_nb_inplace_matrix_multiply 76 +#define Py_am_await 77 +#define Py_am_aiter 78 +#define Py_am_anext 79 +#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03050000 +/* New in 3.5 */ +#define Py_tp_finalize 80 +#endif diff --git a/Darwin/include/python3.4m/ucnhash.h b/Darwin/include/python3.5m/ucnhash.h similarity index 94% rename from Darwin/include/python3.4m/ucnhash.h rename to Darwin/include/python3.5m/ucnhash.h index 8de9ba0..45362e9 100644 --- a/Darwin/include/python3.4m/ucnhash.h +++ 
b/Darwin/include/python3.5m/ucnhash.h @@ -16,7 +16,7 @@ typedef struct { int size; /* Get name for a given character code. Returns non-zero if - success, zero if not. Does not set Python exceptions. + success, zero if not. Does not set Python exceptions. If self is NULL, data come from the default version of the database. If it is not NULL, it should be a unicodedata.ucd_X_Y_Z object */ int (*getname)(PyObject *self, Py_UCS4 code, char* buffer, int buflen, diff --git a/Darwin/include/python3.4m/unicodeobject.h b/Darwin/include/python3.5m/unicodeobject.h similarity index 99% rename from Darwin/include/python3.4m/unicodeobject.h rename to Darwin/include/python3.5m/unicodeobject.h index faa53d6..4ba6328 100644 --- a/Darwin/include/python3.4m/unicodeobject.h +++ b/Darwin/include/python3.5m/unicodeobject.h @@ -605,7 +605,7 @@ PyAPI_FUNC(PyObject*) PyUnicode_New( ); #endif -/* Initializes the canonical string representation from a the deprecated +/* Initializes the canonical string representation from the deprecated wstr/Py_UNICODE representation. This function is used to convert Unicode objects which were created using the old API to the new flexible format introduced with PEP 393. @@ -849,7 +849,7 @@ PyAPI_FUNC(int) PyUnicode_Resize( Coercion is done in the following way: - 1. bytes, bytearray and other char buffer compatible objects are decoded + 1. bytes, bytearray and other bytes-like objects are decoded under the assumptions that they contain data using the UTF-8 encoding. Decoding is done in "strict" mode. @@ -1052,7 +1052,7 @@ PyAPI_FUNC(Py_ssize_t) PyUnicode_AsWideChar( always ends with a nul character. If size is not NULL, write the number of wide characters (excluding the null character) into *size. - Returns a buffer allocated by PyMem_Alloc() (use PyMem_Free() to free it) + Returns a buffer allocated by PyMem_Malloc() (use PyMem_Free() to free it) on success. On error, returns NULL, *size is undefined and raises a MemoryError. */ @@ -2060,12 +2060,6 @@ PyAPI_FUNC(int) PyUnicode_Contains( PyObject *element /* Element string */ ); -/* Checks whether the string contains any NUL characters. */ - -#ifndef Py_LIMITED_API -PyAPI_FUNC(int) _PyUnicode_HasNULChars(PyObject *); -#endif - /* Checks whether argument is a valid identifier. */ PyAPI_FUNC(int) PyUnicode_IsIdentifier(PyObject *s); @@ -2245,6 +2239,8 @@ PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strrchr( Py_UNICODE c ); +PyAPI_FUNC(PyObject*) _PyUnicode_FormatLong(PyObject *, int, int, int); + /* Create a copy of a unicode string ending with a nul character. Return NULL and raise a MemoryError exception on memory allocation failure, otherwise return a new allocated buffer (use PyMem_Free() to free the buffer). 
*/ diff --git a/Darwin/include/python3.4m/warnings.h b/Darwin/include/python3.5m/warnings.h similarity index 100% rename from Darwin/include/python3.4m/warnings.h rename to Darwin/include/python3.5m/warnings.h diff --git a/Darwin/include/python3.4m/weakrefobject.h b/Darwin/include/python3.5m/weakrefobject.h similarity index 100% rename from Darwin/include/python3.4m/weakrefobject.h rename to Darwin/include/python3.5m/weakrefobject.h diff --git a/Darwin/lib/libcrypto.1.0.0.dylib b/Darwin/lib/libcrypto.1.0.0.dylib new file mode 100644 index 0000000..99205eb Binary files /dev/null and b/Darwin/lib/libcrypto.1.0.0.dylib differ diff --git a/Darwin/lib/libexslt.0.dylib b/Darwin/lib/libexslt.0.dylib new file mode 100644 index 0000000..d569d1b Binary files /dev/null and b/Darwin/lib/libexslt.0.dylib differ diff --git a/Darwin/lib/libfreetype.6.dylib b/Darwin/lib/libfreetype.6.dylib deleted file mode 100755 index f914ca4..0000000 Binary files a/Darwin/lib/libfreetype.6.dylib and /dev/null differ diff --git a/Darwin/lib/libjpeg.62.dylib b/Darwin/lib/libjpeg.62.dylib deleted file mode 100755 index b96740d..0000000 Binary files a/Darwin/lib/libjpeg.62.dylib and /dev/null differ diff --git a/Darwin/lib/liblzma.5.dylib b/Darwin/lib/liblzma.5.dylib new file mode 100644 index 0000000..0dc756f Binary files /dev/null and b/Darwin/lib/liblzma.5.dylib differ diff --git a/Darwin/lib/libpng12.0.dylib b/Darwin/lib/libpng12.0.dylib deleted file mode 100755 index 8cb0155..0000000 Binary files a/Darwin/lib/libpng12.0.dylib and /dev/null differ diff --git a/Darwin/lib/libreadline.6.dylib b/Darwin/lib/libreadline.6.dylib new file mode 120000 index 0000000..da772ad --- /dev/null +++ b/Darwin/lib/libreadline.6.dylib @@ -0,0 +1 @@ +libreadline.6.3.dylib \ No newline at end of file diff --git a/Darwin/lib/libsqlite3.0.dylib b/Darwin/lib/libsqlite3.0.dylib new file mode 100644 index 0000000..7a71ac0 Binary files /dev/null and b/Darwin/lib/libsqlite3.0.dylib differ diff --git a/Darwin/lib/libssl.1.0.0.dylib b/Darwin/lib/libssl.1.0.0.dylib new file mode 100644 index 0000000..7c2b076 Binary files /dev/null and b/Darwin/lib/libssl.1.0.0.dylib differ diff --git a/Darwin/lib/libxml2.2.dylib b/Darwin/lib/libxml2.2.dylib new file mode 100644 index 0000000..f69719d Binary files /dev/null and b/Darwin/lib/libxml2.2.dylib differ diff --git a/Darwin/lib/libxslt.1.dylib b/Darwin/lib/libxslt.1.dylib new file mode 100644 index 0000000..b768347 Binary files /dev/null and b/Darwin/lib/libxslt.1.dylib differ diff --git a/Darwin/lib/libz.1.2.8.dylib b/Darwin/lib/libz.1.2.8.dylib deleted file mode 100755 index 4010902..0000000 Binary files a/Darwin/lib/libz.1.2.8.dylib and /dev/null differ diff --git a/Darwin/lib/libz.1.dylib b/Darwin/lib/libz.1.dylib deleted file mode 120000 index 3871806..0000000 --- a/Darwin/lib/libz.1.dylib +++ /dev/null @@ -1 +0,0 @@ -libz.1.2.8.dylib \ No newline at end of file diff --git a/Darwin/lib/python3.4/_compat_pickle.py b/Darwin/lib/python3.4/_compat_pickle.py deleted file mode 100644 index 978c01e..0000000 --- a/Darwin/lib/python3.4/_compat_pickle.py +++ /dev/null @@ -1,137 +0,0 @@ -# This module is used to map the old Python 2 names to the new names used in -# Python 3 for the pickle module. This needed to make pickle streams -# generated with Python 2 loadable by Python 3. - -# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import -# lib2to3 and use the mapping defined there, because lib2to3 uses pickle. -# Thus, this could cause the module to be imported recursively. 
-IMPORT_MAPPING = { - 'StringIO': 'io', - 'cStringIO': 'io', - 'cPickle': 'pickle', - '__builtin__' : 'builtins', - 'copy_reg': 'copyreg', - 'Queue': 'queue', - 'SocketServer': 'socketserver', - 'ConfigParser': 'configparser', - 'repr': 'reprlib', - 'FileDialog': 'tkinter.filedialog', - 'tkFileDialog': 'tkinter.filedialog', - 'SimpleDialog': 'tkinter.simpledialog', - 'tkSimpleDialog': 'tkinter.simpledialog', - 'tkColorChooser': 'tkinter.colorchooser', - 'tkCommonDialog': 'tkinter.commondialog', - 'Dialog': 'tkinter.dialog', - 'Tkdnd': 'tkinter.dnd', - 'tkFont': 'tkinter.font', - 'tkMessageBox': 'tkinter.messagebox', - 'ScrolledText': 'tkinter.scrolledtext', - 'Tkconstants': 'tkinter.constants', - 'Tix': 'tkinter.tix', - 'ttk': 'tkinter.ttk', - 'Tkinter': 'tkinter', - 'markupbase': '_markupbase', - '_winreg': 'winreg', - 'thread': '_thread', - 'dummy_thread': '_dummy_thread', - 'dbhash': 'dbm.bsd', - 'dumbdbm': 'dbm.dumb', - 'dbm': 'dbm.ndbm', - 'gdbm': 'dbm.gnu', - 'xmlrpclib': 'xmlrpc.client', - 'DocXMLRPCServer': 'xmlrpc.server', - 'SimpleXMLRPCServer': 'xmlrpc.server', - 'httplib': 'http.client', - 'htmlentitydefs' : 'html.entities', - 'HTMLParser' : 'html.parser', - 'Cookie': 'http.cookies', - 'cookielib': 'http.cookiejar', - 'BaseHTTPServer': 'http.server', - 'SimpleHTTPServer': 'http.server', - 'CGIHTTPServer': 'http.server', - 'test.test_support': 'test.support', - 'commands': 'subprocess', - 'UserString' : 'collections', - 'UserList' : 'collections', - 'urlparse' : 'urllib.parse', - 'robotparser' : 'urllib.robotparser', - 'whichdb': 'dbm', - 'anydbm': 'dbm' -} - - -# This contains rename rules that are easy to handle. We ignore the more -# complex stuff (e.g. mapping the names in the urllib and types modules). -# These rules should be run before import names are fixed. 
-NAME_MAPPING = { - ('__builtin__', 'xrange'): ('builtins', 'range'), - ('__builtin__', 'reduce'): ('functools', 'reduce'), - ('__builtin__', 'intern'): ('sys', 'intern'), - ('__builtin__', 'unichr'): ('builtins', 'chr'), - ('__builtin__', 'basestring'): ('builtins', 'str'), - ('__builtin__', 'long'): ('builtins', 'int'), - ('itertools', 'izip'): ('builtins', 'zip'), - ('itertools', 'imap'): ('builtins', 'map'), - ('itertools', 'ifilter'): ('builtins', 'filter'), - ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'), -} - -PYTHON2_EXCEPTIONS = ( - "ArithmeticError", - "AssertionError", - "AttributeError", - "BaseException", - "BufferError", - "BytesWarning", - "DeprecationWarning", - "EOFError", - "EnvironmentError", - "Exception", - "FloatingPointError", - "FutureWarning", - "GeneratorExit", - "IOError", - "ImportError", - "ImportWarning", - "IndentationError", - "IndexError", - "KeyError", - "KeyboardInterrupt", - "LookupError", - "MemoryError", - "NameError", - "NotImplementedError", - "OSError", - "OverflowError", - "PendingDeprecationWarning", - "ReferenceError", - "RuntimeError", - "RuntimeWarning", - # StandardError is gone in Python 3, so we map it to Exception - "StopIteration", - "SyntaxError", - "SyntaxWarning", - "SystemError", - "SystemExit", - "TabError", - "TypeError", - "UnboundLocalError", - "UnicodeDecodeError", - "UnicodeEncodeError", - "UnicodeError", - "UnicodeTranslateError", - "UnicodeWarning", - "UserWarning", - "ValueError", - "Warning", - "ZeroDivisionError", -) - -for excname in PYTHON2_EXCEPTIONS: - NAME_MAPPING[("exceptions", excname)] = ("builtins", excname) - -NAME_MAPPING[("exceptions", "StandardError")] = ("builtins", "Exception") - -# Same, but for 3.x to 2.x -REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items()) -REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items()) diff --git a/Darwin/lib/python3.4/asyncio/base_subprocess.py b/Darwin/lib/python3.4/asyncio/base_subprocess.py deleted file mode 100644 index b78f816..0000000 --- a/Darwin/lib/python3.4/asyncio/base_subprocess.py +++ /dev/null @@ -1,159 +0,0 @@ -import collections -import subprocess - -from . import protocols -from . import tasks -from . 
import transports - - -class BaseSubprocessTransport(transports.SubprocessTransport): - - def __init__(self, loop, protocol, args, shell, - stdin, stdout, stderr, bufsize, - extra=None, **kwargs): - super().__init__(extra) - self._protocol = protocol - self._loop = loop - - self._pipes = {} - if stdin == subprocess.PIPE: - self._pipes[0] = None - if stdout == subprocess.PIPE: - self._pipes[1] = None - if stderr == subprocess.PIPE: - self._pipes[2] = None - self._pending_calls = collections.deque() - self._finished = False - self._returncode = None - self._start(args=args, shell=shell, stdin=stdin, stdout=stdout, - stderr=stderr, bufsize=bufsize, **kwargs) - self._extra['subprocess'] = self._proc - - def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs): - raise NotImplementedError - - def _make_write_subprocess_pipe_proto(self, fd): - raise NotImplementedError - - def _make_read_subprocess_pipe_proto(self, fd): - raise NotImplementedError - - def close(self): - for proto in self._pipes.values(): - proto.pipe.close() - if self._returncode is None: - self.terminate() - - def get_pid(self): - return self._proc.pid - - def get_returncode(self): - return self._returncode - - def get_pipe_transport(self, fd): - if fd in self._pipes: - return self._pipes[fd].pipe - else: - return None - - def send_signal(self, signal): - self._proc.send_signal(signal) - - def terminate(self): - self._proc.terminate() - - def kill(self): - self._proc.kill() - - @tasks.coroutine - def _post_init(self): - proc = self._proc - loop = self._loop - if proc.stdin is not None: - _, pipe = yield from loop.connect_write_pipe( - lambda: WriteSubprocessPipeProto(self, 0), - proc.stdin) - self._pipes[0] = pipe - if proc.stdout is not None: - _, pipe = yield from loop.connect_read_pipe( - lambda: ReadSubprocessPipeProto(self, 1), - proc.stdout) - self._pipes[1] = pipe - if proc.stderr is not None: - _, pipe = yield from loop.connect_read_pipe( - lambda: ReadSubprocessPipeProto(self, 2), - proc.stderr) - self._pipes[2] = pipe - - assert self._pending_calls is not None - - self._loop.call_soon(self._protocol.connection_made, self) - for callback, data in self._pending_calls: - self._loop.call_soon(callback, *data) - self._pending_calls = None - - def _call(self, cb, *data): - if self._pending_calls is not None: - self._pending_calls.append((cb, data)) - else: - self._loop.call_soon(cb, *data) - - def _pipe_connection_lost(self, fd, exc): - self._call(self._protocol.pipe_connection_lost, fd, exc) - self._try_finish() - - def _pipe_data_received(self, fd, data): - self._call(self._protocol.pipe_data_received, fd, data) - - def _process_exited(self, returncode): - assert returncode is not None, returncode - assert self._returncode is None, self._returncode - self._returncode = returncode - self._call(self._protocol.process_exited) - self._try_finish() - - def _try_finish(self): - assert not self._finished - if self._returncode is None: - return - if all(p is not None and p.disconnected - for p in self._pipes.values()): - self._finished = True - self._loop.call_soon(self._call_connection_lost, None) - - def _call_connection_lost(self, exc): - try: - self._protocol.connection_lost(exc) - finally: - self._proc = None - self._protocol = None - self._loop = None - - -class WriteSubprocessPipeProto(protocols.BaseProtocol): - - def __init__(self, proc, fd): - self.proc = proc - self.fd = fd - self.pipe = None - self.disconnected = False - - def connection_made(self, transport): - self.pipe = transport - - def 
connection_lost(self, exc): - self.disconnected = True - self.proc._pipe_connection_lost(self.fd, exc) - - def pause_writing(self): - self.proc._protocol.pause_writing() - - def resume_writing(self): - self.proc._protocol.resume_writing() - - -class ReadSubprocessPipeProto(WriteSubprocessPipeProto, - protocols.Protocol): - - def data_received(self, data): - self.proc._pipe_data_received(self.fd, data) diff --git a/Darwin/lib/python3.4/config-3.4m/libpython3.4m.a b/Darwin/lib/python3.4/config-3.4m/libpython3.4m.a deleted file mode 100644 index de4151e..0000000 Binary files a/Darwin/lib/python3.4/config-3.4m/libpython3.4m.a and /dev/null differ diff --git a/Darwin/lib/python3.4/config-3.4m/python.o b/Darwin/lib/python3.4/config-3.4m/python.o deleted file mode 100644 index cdc2bde..0000000 Binary files a/Darwin/lib/python3.4/config-3.4m/python.o and /dev/null differ diff --git a/Darwin/lib/python3.4/ctypes/test/__init__.py b/Darwin/lib/python3.4/ctypes/test/__init__.py deleted file mode 100644 index 7c72210..0000000 --- a/Darwin/lib/python3.4/ctypes/test/__init__.py +++ /dev/null @@ -1,208 +0,0 @@ -import os, sys, unittest, getopt, time - -use_resources = [] - -class ResourceDenied(Exception): - """Test skipped because it requested a disallowed resource. - - This is raised when a test calls requires() for a resource that - has not be enabled. Resources are defined by test modules. - """ - -def is_resource_enabled(resource): - """Test whether a resource is enabled. - - If the caller's module is __main__ then automatically return True.""" - if sys._getframe().f_back.f_globals.get("__name__") == "__main__": - return True - result = use_resources is not None and \ - (resource in use_resources or "*" in use_resources) - if not result: - _unavail[resource] = None - return result - -_unavail = {} -def requires(resource, msg=None): - """Raise ResourceDenied if the specified resource is not available. 
- - If the caller's module is __main__ then automatically return True.""" - # see if the caller's module is __main__ - if so, treat as if - # the resource was set - if sys._getframe().f_back.f_globals.get("__name__") == "__main__": - return - if not is_resource_enabled(resource): - if msg is None: - msg = "Use of the `%s' resource not enabled" % resource - raise ResourceDenied(msg) - -def find_package_modules(package, mask): - import fnmatch - if (package.__loader__ is not None and - hasattr(package.__loader__, '_files')): - path = package.__name__.replace(".", os.path.sep) - mask = os.path.join(path, mask) - for fnm in package.__loader__._files.keys(): - if fnmatch.fnmatchcase(fnm, mask): - yield os.path.splitext(fnm)[0].replace(os.path.sep, ".") - else: - path = package.__path__[0] - for fnm in os.listdir(path): - if fnmatch.fnmatchcase(fnm, mask): - yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0]) - -def get_tests(package, mask, verbosity, exclude=()): - """Return a list of skipped test modules, and a list of test cases.""" - tests = [] - skipped = [] - for modname in find_package_modules(package, mask): - if modname.split(".")[-1] in exclude: - skipped.append(modname) - if verbosity > 1: - print("Skipped %s: excluded" % modname, file=sys.stderr) - continue - try: - mod = __import__(modname, globals(), locals(), ['*']) - except (ResourceDenied, unittest.SkipTest) as detail: - skipped.append(modname) - if verbosity > 1: - print("Skipped %s: %s" % (modname, detail), file=sys.stderr) - continue - for name in dir(mod): - if name.startswith("_"): - continue - o = getattr(mod, name) - if type(o) is type(unittest.TestCase) and issubclass(o, unittest.TestCase): - tests.append(o) - return skipped, tests - -def usage(): - print(__doc__) - return 1 - -def test_with_refcounts(runner, verbosity, testcase): - """Run testcase several times, tracking reference counts.""" - import gc - import ctypes - ptc = ctypes._pointer_type_cache.copy() - cfc = ctypes._c_functype_cache.copy() - wfc = ctypes._win_functype_cache.copy() - - # when searching for refcount leaks, we have to manually reset any - # caches that ctypes has. - def cleanup(): - ctypes._pointer_type_cache = ptc.copy() - ctypes._c_functype_cache = cfc.copy() - ctypes._win_functype_cache = wfc.copy() - gc.collect() - - test = unittest.makeSuite(testcase) - for i in range(5): - rc = sys.gettotalrefcount() - runner.run(test) - cleanup() - COUNT = 5 - refcounts = [None] * COUNT - for i in range(COUNT): - rc = sys.gettotalrefcount() - runner.run(test) - cleanup() - refcounts[i] = sys.gettotalrefcount() - rc - if filter(None, refcounts): - print("%s leaks:\n\t" % testcase, refcounts) - elif verbosity: - print("%s: ok." % testcase) - -class TestRunner(unittest.TextTestRunner): - def run(self, test, skipped): - "Run the given test case or test suite." - # Same as unittest.TextTestRunner.run, except that it reports - # skipped tests. 
- result = self._makeResult() - startTime = time.time() - test(result) - stopTime = time.time() - timeTaken = stopTime - startTime - result.printErrors() - self.stream.writeln(result.separator2) - run = result.testsRun - if _unavail: #skipped: - requested = list(_unavail.keys()) - requested.sort() - self.stream.writeln("Ran %d test%s in %.3fs (%s module%s skipped)" % - (run, run != 1 and "s" or "", timeTaken, - len(skipped), - len(skipped) != 1 and "s" or "")) - self.stream.writeln("Unavailable resources: %s" % ", ".join(requested)) - else: - self.stream.writeln("Ran %d test%s in %.3fs" % - (run, run != 1 and "s" or "", timeTaken)) - self.stream.writeln() - if not result.wasSuccessful(): - self.stream.write("FAILED (") - failed, errored = map(len, (result.failures, result.errors)) - if failed: - self.stream.write("failures=%d" % failed) - if errored: - if failed: self.stream.write(", ") - self.stream.write("errors=%d" % errored) - self.stream.writeln(")") - else: - self.stream.writeln("OK") - return result - - -def main(*packages): - try: - opts, args = getopt.getopt(sys.argv[1:], "rqvu:x:") - except getopt.error: - return usage() - - verbosity = 1 - search_leaks = False - exclude = [] - for flag, value in opts: - if flag == "-q": - verbosity -= 1 - elif flag == "-v": - verbosity += 1 - elif flag == "-r": - try: - sys.gettotalrefcount - except AttributeError: - print("-r flag requires Python debug build", file=sys.stderr) - return -1 - search_leaks = True - elif flag == "-u": - use_resources.extend(value.split(",")) - elif flag == "-x": - exclude.extend(value.split(",")) - - mask = "test_*.py" - if args: - mask = args[0] - - for package in packages: - run_tests(package, mask, verbosity, search_leaks, exclude) - - -def run_tests(package, mask, verbosity, search_leaks, exclude): - skipped, testcases = get_tests(package, mask, verbosity, exclude) - runner = TestRunner(verbosity=verbosity) - - suites = [unittest.makeSuite(o) for o in testcases] - suite = unittest.TestSuite(suites) - result = runner.run(suite, skipped) - - if search_leaks: - # hunt for refcount leaks - runner = BasicTestRunner() - for t in testcases: - test_with_refcounts(runner, verbosity, t) - - return bool(result.errors) - -class BasicTestRunner: - def run(self, test): - result = unittest.TestResult() - test(result) - return result diff --git a/Darwin/lib/python3.4/ctypes/test/runtests.py b/Darwin/lib/python3.4/ctypes/test/runtests.py deleted file mode 100644 index b7a2b26..0000000 --- a/Darwin/lib/python3.4/ctypes/test/runtests.py +++ /dev/null @@ -1,19 +0,0 @@ -"""Usage: runtests.py [-q] [-r] [-v] [-u resources] [mask] - -Run all tests found in this directory, and print a summary of the results. -Command line flags: - -q quiet mode: don't print anything while the tests are running - -r run tests repeatedly, look for refcount leaks - -u - Add resources to the lits of allowed resources. '*' allows all - resources. - -v verbose mode: print the test currently executed - -x - Exclude specified tests. 
- mask mask to select filenames containing testcases, wildcards allowed -""" -import sys -import ctypes.test - -if __name__ == "__main__": - sys.exit(ctypes.test.main(ctypes.test)) diff --git a/Darwin/lib/python3.4/ctypes/test/test_buffers.py b/Darwin/lib/python3.4/ctypes/test/test_buffers.py deleted file mode 100644 index 0d12f47..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_buffers.py +++ /dev/null @@ -1,62 +0,0 @@ -from ctypes import * -import unittest - -class StringBufferTestCase(unittest.TestCase): - - def test_buffer(self): - b = create_string_buffer(32) - self.assertEqual(len(b), 32) - self.assertEqual(sizeof(b), 32 * sizeof(c_char)) - self.assertIs(type(b[0]), bytes) - - b = create_string_buffer(b"abc") - self.assertEqual(len(b), 4) # trailing nul char - self.assertEqual(sizeof(b), 4 * sizeof(c_char)) - self.assertIs(type(b[0]), bytes) - self.assertEqual(b[0], b"a") - self.assertEqual(b[:], b"abc\0") - self.assertEqual(b[::], b"abc\0") - self.assertEqual(b[::-1], b"\0cba") - self.assertEqual(b[::2], b"ac") - self.assertEqual(b[::5], b"a") - - def test_buffer_interface(self): - self.assertEqual(len(bytearray(create_string_buffer(0))), 0) - self.assertEqual(len(bytearray(create_string_buffer(1))), 1) - - try: - c_wchar - except NameError: - pass - else: - def test_unicode_buffer(self): - b = create_unicode_buffer(32) - self.assertEqual(len(b), 32) - self.assertEqual(sizeof(b), 32 * sizeof(c_wchar)) - self.assertIs(type(b[0]), str) - - b = create_unicode_buffer("abc") - self.assertEqual(len(b), 4) # trailing nul char - self.assertEqual(sizeof(b), 4 * sizeof(c_wchar)) - self.assertIs(type(b[0]), str) - self.assertEqual(b[0], "a") - self.assertEqual(b[:], "abc\0") - self.assertEqual(b[::], "abc\0") - self.assertEqual(b[::-1], "\0cba") - self.assertEqual(b[::2], "ac") - self.assertEqual(b[::5], "a") - - def test_unicode_conversion(self): - b = create_unicode_buffer("abc") - self.assertEqual(len(b), 4) # trailing nul char - self.assertEqual(sizeof(b), 4 * sizeof(c_wchar)) - self.assertIs(type(b[0]), str) - self.assertEqual(b[0], "a") - self.assertEqual(b[:], "abc\0") - self.assertEqual(b[::], "abc\0") - self.assertEqual(b[::-1], "\0cba") - self.assertEqual(b[::2], "ac") - self.assertEqual(b[::5], "a") - -if __name__ == "__main__": - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_errcheck.py b/Darwin/lib/python3.4/ctypes/test/test_errcheck.py deleted file mode 100644 index a4913f9..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_errcheck.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys -from ctypes import * - -##class HMODULE(Structure): -## _fields_ = [("value", c_void_p)] - -## def __repr__(self): -## return "" % self.value - -##windll.kernel32.GetModuleHandleA.restype = HMODULE - -##print windll.kernel32.GetModuleHandleA("python23.dll") -##print hex(sys.dllhandle) - -##def nonzero(handle): -## return (GetLastError(), handle) - -##windll.kernel32.GetModuleHandleA.errcheck = nonzero -##print windll.kernel32.GetModuleHandleA("spam") diff --git a/Darwin/lib/python3.4/ctypes/test/test_find.py b/Darwin/lib/python3.4/ctypes/test/test_find.py deleted file mode 100644 index c54b69b..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_find.py +++ /dev/null @@ -1,82 +0,0 @@ -import unittest -import sys -from ctypes import * -from ctypes.util import find_library -from ctypes.test import is_resource_enabled - -if sys.platform == "win32": - lib_gl = find_library("OpenGL32") - lib_glu = find_library("Glu32") - lib_gle = None -elif sys.platform == "darwin": - lib_gl = 
lib_glu = find_library("OpenGL") - lib_gle = None -else: - lib_gl = find_library("GL") - lib_glu = find_library("GLU") - lib_gle = find_library("gle") - -## print, for debugging -if is_resource_enabled("printing"): - if lib_gl or lib_glu or lib_gle: - print("OpenGL libraries:") - for item in (("GL", lib_gl), - ("GLU", lib_glu), - ("gle", lib_gle)): - print("\t", item) - - -# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode. -class Test_OpenGL_libs(unittest.TestCase): - def setUp(self): - self.gl = self.glu = self.gle = None - if lib_gl: - self.gl = CDLL(lib_gl, mode=RTLD_GLOBAL) - if lib_glu: - self.glu = CDLL(lib_glu, RTLD_GLOBAL) - if lib_gle: - try: - self.gle = CDLL(lib_gle) - except OSError: - pass - - if lib_gl: - def test_gl(self): - if self.gl: - self.gl.glClearIndex - - if lib_glu: - def test_glu(self): - if self.glu: - self.glu.gluBeginCurve - - if lib_gle: - def test_gle(self): - if self.gle: - self.gle.gleGetJoinStyle - -##if os.name == "posix" and sys.platform != "darwin": - -## # On platforms where the default shared library suffix is '.so', -## # at least some libraries can be loaded as attributes of the cdll -## # object, since ctypes now tries loading the lib again -## # with '.so' appended of the first try fails. -## # -## # Won't work for libc, unfortunately. OTOH, it isn't -## # needed for libc since this is already mapped into the current -## # process (?) -## # -## # On MAC OSX, it won't work either, because dlopen() needs a full path, -## # and the default suffix is either none or '.dylib'. - -## class LoadLibs(unittest.TestCase): -## def test_libm(self): -## import math -## libm = cdll.libm -## sqrt = libm.sqrt -## sqrt.argtypes = (c_double,) -## sqrt.restype = c_double -## self.assertEqual(sqrt(2), math.sqrt(2)) - -if __name__ == "__main__": - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_integers.py b/Darwin/lib/python3.4/ctypes/test/test_integers.py deleted file mode 100644 index 62e4b08..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_integers.py +++ /dev/null @@ -1,5 +0,0 @@ -# superseded by test_numbers.py -import unittest - -if __name__ == '__main__': - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_loading.py b/Darwin/lib/python3.4/ctypes/test/test_loading.py deleted file mode 100644 index 414363d..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_loading.py +++ /dev/null @@ -1,106 +0,0 @@ -from ctypes import * -import sys, unittest -import os -from ctypes.util import find_library -from ctypes.test import is_resource_enabled - -libc_name = None -if os.name == "nt": - libc_name = find_library("c") -elif os.name == "ce": - libc_name = "coredll" -elif sys.platform == "cygwin": - libc_name = "cygwin1.dll" -else: - libc_name = find_library("c") - -if is_resource_enabled("printing"): - print("libc_name is", libc_name) - -class LoaderTest(unittest.TestCase): - - unknowndll = "xxrandomnamexx" - - if libc_name is not None: - def test_load(self): - CDLL(libc_name) - CDLL(os.path.basename(libc_name)) - self.assertRaises(OSError, CDLL, self.unknowndll) - - if libc_name is not None and os.path.basename(libc_name) == "libc.so.6": - def test_load_version(self): - cdll.LoadLibrary("libc.so.6") - # linux uses version, libc 9 should not exist - self.assertRaises(OSError, cdll.LoadLibrary, "libc.so.9") - self.assertRaises(OSError, cdll.LoadLibrary, self.unknowndll) - - def test_find(self): - for name in ("c", "m"): - lib = find_library(name) - if lib: - cdll.LoadLibrary(lib) - CDLL(lib) - - if os.name in 
("nt", "ce"): - def test_load_library(self): - self.assertIsNotNone(libc_name) - if is_resource_enabled("printing"): - print(find_library("kernel32")) - print(find_library("user32")) - - if os.name == "nt": - windll.kernel32.GetModuleHandleW - windll["kernel32"].GetModuleHandleW - windll.LoadLibrary("kernel32").GetModuleHandleW - WinDLL("kernel32").GetModuleHandleW - elif os.name == "ce": - windll.coredll.GetModuleHandleW - windll["coredll"].GetModuleHandleW - windll.LoadLibrary("coredll").GetModuleHandleW - WinDLL("coredll").GetModuleHandleW - - def test_load_ordinal_functions(self): - import _ctypes_test - dll = WinDLL(_ctypes_test.__file__) - # We load the same function both via ordinal and name - func_ord = dll[2] - func_name = dll.GetString - # addressof gets the address where the function pointer is stored - a_ord = addressof(func_ord) - a_name = addressof(func_name) - f_ord_addr = c_void_p.from_address(a_ord).value - f_name_addr = c_void_p.from_address(a_name).value - self.assertEqual(hex(f_ord_addr), hex(f_name_addr)) - - self.assertRaises(AttributeError, dll.__getitem__, 1234) - - if os.name == "nt": - def test_1703286_A(self): - from _ctypes import LoadLibrary, FreeLibrary - # On winXP 64-bit, advapi32 loads at an address that does - # NOT fit into a 32-bit integer. FreeLibrary must be able - # to accept this address. - - # These are tests for http://www.python.org/sf/1703286 - handle = LoadLibrary("advapi32") - FreeLibrary(handle) - - def test_1703286_B(self): - # Since on winXP 64-bit advapi32 loads like described - # above, the (arbitrarily selected) CloseEventLog function - # also has a high address. 'call_function' should accept - # addresses so large. - from _ctypes import call_function - advapi32 = windll.advapi32 - # Calling CloseEventLog with a NULL argument should fail, - # but the call should not segfault or so. 
- self.assertEqual(0, advapi32.CloseEventLog(None)) - windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p - windll.kernel32.GetProcAddress.restype = c_void_p - proc = windll.kernel32.GetProcAddress(advapi32._handle, b"CloseEventLog") - self.assertTrue(proc) - # This is the real test: call the function via 'call_function' - self.assertEqual(0, call_function(proc, (None,))) - -if __name__ == "__main__": - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_unicode.py b/Darwin/lib/python3.4/ctypes/test/test_unicode.py deleted file mode 100644 index c3b2d48..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_unicode.py +++ /dev/null @@ -1,59 +0,0 @@ -import unittest -import ctypes - -try: - ctypes.c_wchar -except AttributeError: - pass -else: - import _ctypes_test - - class UnicodeTestCase(unittest.TestCase): - def test_wcslen(self): - dll = ctypes.CDLL(_ctypes_test.__file__) - wcslen = dll.my_wcslen - wcslen.argtypes = [ctypes.c_wchar_p] - - self.assertEqual(wcslen("abc"), 3) - self.assertEqual(wcslen("ab\u2070"), 3) - self.assertRaises(ctypes.ArgumentError, wcslen, b"ab\xe4") - - def test_buffers(self): - buf = ctypes.create_unicode_buffer("abc") - self.assertEqual(len(buf), 3+1) - - buf = ctypes.create_unicode_buffer("ab\xe4\xf6\xfc") - self.assertEqual(buf[:], "ab\xe4\xf6\xfc\0") - self.assertEqual(buf[::], "ab\xe4\xf6\xfc\0") - self.assertEqual(buf[::-1], '\x00\xfc\xf6\xe4ba') - self.assertEqual(buf[::2], 'a\xe4\xfc') - self.assertEqual(buf[6:5:-1], "") - - func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p - - class StringTestCase(UnicodeTestCase): - def setUp(self): - func.argtypes = [ctypes.c_char_p] - func.restype = ctypes.c_char_p - - def tearDown(self): - func.argtypes = None - func.restype = ctypes.c_int - - def test_func(self): - self.assertEqual(func(b"abc\xe4"), b"abc\xe4") - - def test_buffers(self): - buf = ctypes.create_string_buffer(b"abc") - self.assertEqual(len(buf), 3+1) - - buf = ctypes.create_string_buffer(b"ab\xe4\xf6\xfc") - self.assertEqual(buf[:], b"ab\xe4\xf6\xfc\0") - self.assertEqual(buf[::], b"ab\xe4\xf6\xfc\0") - self.assertEqual(buf[::-1], b'\x00\xfc\xf6\xe4ba') - self.assertEqual(buf[::2], b'a\xe4\xfc') - self.assertEqual(buf[6:5:-1], b"") - - -if __name__ == '__main__': - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_values.py b/Darwin/lib/python3.4/ctypes/test/test_values.py deleted file mode 100644 index e464102..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_values.py +++ /dev/null @@ -1,88 +0,0 @@ -""" -A testcase which accesses *values* in a dll. -""" - -import unittest -from ctypes import * - -import _ctypes_test - -class ValuesTestCase(unittest.TestCase): - - def test_an_integer(self): - # This test checks and changes an integer stored inside the - # _ctypes_test dll/shared lib. 
- ctdll = CDLL(_ctypes_test.__file__) - an_integer = c_int.in_dll(ctdll, "an_integer") - x = an_integer.value - self.assertEqual(x, ctdll.get_an_integer()) - an_integer.value *= 2 - self.assertEqual(x*2, ctdll.get_an_integer()) - # To avoid test failures when this test is repeated several - # times the original value must be restored - an_integer.value = x - self.assertEqual(x, ctdll.get_an_integer()) - - def test_undefined(self): - ctdll = CDLL(_ctypes_test.__file__) - self.assertRaises(ValueError, c_int.in_dll, ctdll, "Undefined_Symbol") - - class Win_ValuesTestCase(unittest.TestCase): - """This test only works when python itself is a dll/shared library""" - - def test_optimizeflag(self): - # This test accesses the Py_OptimizeFlag intger, which is - # exported by the Python dll. - - # It's value is set depending on the -O and -OO flags: - # if not given, it is 0 and __debug__ is 1. - # If -O is given, the flag is 1, for -OO it is 2. - # docstrings are also removed in the latter case. - opt = c_int.in_dll(pydll, "Py_OptimizeFlag").value - if __debug__: - self.assertEqual(opt, 0) - elif ValuesTestCase.__doc__ is not None: - self.assertEqual(opt, 1) - else: - self.assertEqual(opt, 2) - - def test_frozentable(self): - # Python exports a PyImport_FrozenModules symbol. This is a - # pointer to an array of struct _frozen entries. The end of the - # array is marked by an entry containing a NULL name and zero - # size. - - # In standard Python, this table contains a __hello__ - # module, and a __phello__ package containing a spam - # module. - class struct_frozen(Structure): - _fields_ = [("name", c_char_p), - ("code", POINTER(c_ubyte)), - ("size", c_int)] - FrozenTable = POINTER(struct_frozen) - - ft = FrozenTable.in_dll(pydll, "PyImport_FrozenModules") - # ft is a pointer to the struct_frozen entries: - items = [] - for entry in ft: - # This is dangerous. We *can* iterate over a pointer, but - # the loop will not terminate (maybe with an access - # violation;-) because the pointer instance has no size. - if entry.name is None: - break - items.append((entry.name, entry.size)) - import sys - if sys.version_info[:2] >= (2, 3): - expected = [("__hello__", 104), ("__phello__", -104), ("__phello__.spam", 104)] - else: - expected = [("__hello__", 100), ("__phello__", -100), ("__phello__.spam", 100)] - self.assertEqual(items, expected) - - from ctypes import _pointer_type_cache - del _pointer_type_cache[struct_frozen] - - def test_undefined(self): - self.assertRaises(ValueError, c_int.in_dll, pydll, "Undefined_Symbol") - -if __name__ == '__main__': - unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_win32.py b/Darwin/lib/python3.4/ctypes/test/test_win32.py deleted file mode 100644 index 91ad314..0000000 --- a/Darwin/lib/python3.4/ctypes/test/test_win32.py +++ /dev/null @@ -1,115 +0,0 @@ -# Windows specific tests - -from ctypes import * -from ctypes.test import is_resource_enabled -import unittest, sys -from test import support - -import _ctypes_test - -if sys.platform == "win32" and sizeof(c_void_p) == sizeof(c_int): - # Only windows 32-bit has different calling conventions. - - class WindowsTestCase(unittest.TestCase): - def test_callconv_1(self): - # Testing stdcall function - - IsWindow = windll.user32.IsWindow - # ValueError: Procedure probably called with not enough arguments (4 bytes missing) - self.assertRaises(ValueError, IsWindow) - - # This one should succeed... 
- self.assertEqual(0, IsWindow(0)) - - # ValueError: Procedure probably called with too many arguments (8 bytes in excess) - self.assertRaises(ValueError, IsWindow, 0, 0, 0) - - def test_callconv_2(self): - # Calling stdcall function as cdecl - - IsWindow = cdll.user32.IsWindow - - # ValueError: Procedure called with not enough arguments (4 bytes missing) - # or wrong calling convention - self.assertRaises(ValueError, IsWindow, None) - -if sys.platform == "win32": - class FunctionCallTestCase(unittest.TestCase): - - if is_resource_enabled("SEH"): - def test_SEH(self): - # Call functions with invalid arguments, and make sure - # that access violations are trapped and raise an - # exception. - self.assertRaises(OSError, windll.kernel32.GetModuleHandleA, 32) - - def test_noargs(self): - # This is a special case on win32 x64 - windll.user32.GetDesktopWindow() - - class TestWintypes(unittest.TestCase): - def test_HWND(self): - from ctypes import wintypes - self.assertEqual(sizeof(wintypes.HWND), sizeof(c_void_p)) - - def test_PARAM(self): - from ctypes import wintypes - self.assertEqual(sizeof(wintypes.WPARAM), - sizeof(c_void_p)) - self.assertEqual(sizeof(wintypes.LPARAM), - sizeof(c_void_p)) - - def test_COMError(self): - from _ctypes import COMError - if support.HAVE_DOCSTRINGS: - self.assertEqual(COMError.__doc__, - "Raised when a COM method call failed.") - - ex = COMError(-1, "text", ("details",)) - self.assertEqual(ex.hresult, -1) - self.assertEqual(ex.text, "text") - self.assertEqual(ex.details, ("details",)) - - class TestWinError(unittest.TestCase): - def test_winerror(self): - # see Issue 16169 - import errno - ERROR_INVALID_PARAMETER = 87 - msg = FormatError(ERROR_INVALID_PARAMETER).strip() - args = (errno.EINVAL, msg, None, ERROR_INVALID_PARAMETER) - - e = WinError(ERROR_INVALID_PARAMETER) - self.assertEqual(e.args, args) - self.assertEqual(e.errno, errno.EINVAL) - self.assertEqual(e.winerror, ERROR_INVALID_PARAMETER) - - windll.kernel32.SetLastError(ERROR_INVALID_PARAMETER) - try: - raise WinError() - except OSError as exc: - e = exc - self.assertEqual(e.args, args) - self.assertEqual(e.errno, errno.EINVAL) - self.assertEqual(e.winerror, ERROR_INVALID_PARAMETER) - -class Structures(unittest.TestCase): - - def test_struct_by_value(self): - class POINT(Structure): - _fields_ = [("x", c_long), - ("y", c_long)] - - class RECT(Structure): - _fields_ = [("left", c_long), - ("top", c_long), - ("right", c_long), - ("bottom", c_long)] - - dll = CDLL(_ctypes_test.__file__) - - pt = POINT(10, 10) - rect = RECT(0, 0, 20, 20) - self.assertEqual(1, dll.PointInRect(byref(rect), pt)) - -if __name__ == '__main__': - unittest.main() diff --git a/Darwin/lib/python3.4/distutils/tests/test_file_util.py b/Darwin/lib/python3.4/distutils/tests/test_file_util.py deleted file mode 100644 index 3c3e3dc..0000000 --- a/Darwin/lib/python3.4/distutils/tests/test_file_util.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Tests for distutils.file_util.""" -import unittest -import os -import shutil - -from distutils.file_util import move_file -from distutils import log -from distutils.tests import support -from test.support import run_unittest - -class FileUtilTestCase(support.TempdirManager, unittest.TestCase): - - def _log(self, msg, *args): - if len(args) > 0: - self._logs.append(msg % args) - else: - self._logs.append(msg) - - def setUp(self): - super(FileUtilTestCase, self).setUp() - self._logs = [] - self.old_log = log.info - log.info = self._log - tmp_dir = self.mkdtemp() - self.source = os.path.join(tmp_dir, 'f1') - 
self.target = os.path.join(tmp_dir, 'f2') - self.target_dir = os.path.join(tmp_dir, 'd1') - - def tearDown(self): - log.info = self.old_log - super(FileUtilTestCase, self).tearDown() - - def test_move_file_verbosity(self): - f = open(self.source, 'w') - try: - f.write('some content') - finally: - f.close() - - move_file(self.source, self.target, verbose=0) - wanted = [] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - move_file(self.source, self.target, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target)] - self.assertEqual(self._logs, wanted) - - # back to original state - move_file(self.target, self.source, verbose=0) - - self._logs = [] - # now the target is a dir - os.mkdir(self.target_dir) - move_file(self.source, self.target_dir, verbose=1) - wanted = ['moving %s -> %s' % (self.source, self.target_dir)] - self.assertEqual(self._logs, wanted) - - -def test_suite(): - return unittest.makeSuite(FileUtilTestCase) - -if __name__ == "__main__": - run_unittest(test_suite()) diff --git a/Darwin/lib/python3.4/ensurepip/_bundled/pip-1.5.6-py2.py3-none-any.whl b/Darwin/lib/python3.4/ensurepip/_bundled/pip-1.5.6-py2.py3-none-any.whl deleted file mode 100644 index 097ab43..0000000 Binary files a/Darwin/lib/python3.4/ensurepip/_bundled/pip-1.5.6-py2.py3-none-any.whl and /dev/null differ diff --git a/Darwin/lib/python3.4/idlelib/HyperParser.py b/Darwin/lib/python3.4/idlelib/HyperParser.py deleted file mode 100644 index 4af4b08..0000000 --- a/Darwin/lib/python3.4/idlelib/HyperParser.py +++ /dev/null @@ -1,246 +0,0 @@ -""" -HyperParser -=========== -This module defines the HyperParser class, which provides advanced parsing -abilities for the ParenMatch and other extensions. -The HyperParser uses PyParser. PyParser is intended mostly to give information -on the proper indentation of code. HyperParser gives some information on the -structure of code, used by extensions to help the user. -""" - -import string -import keyword -from idlelib import PyParse - -class HyperParser: - - def __init__(self, editwin, index): - """Initialize the HyperParser to analyze the surroundings of the given - index. - """ - - self.editwin = editwin - self.text = text = editwin.text - - parser = PyParse.Parser(editwin.indentwidth, editwin.tabwidth) - - def index2line(index): - return int(float(index)) - lno = index2line(text.index(index)) - - if not editwin.context_use_ps1: - for context in editwin.num_context_lines: - startat = max(lno - context, 1) - startatindex = repr(startat) + ".0" - stopatindex = "%d.end" % lno - # We add the newline because PyParse requires a newline at end. - # We add a space so that index won't be at end of line, so that - # its status will be the same as the char before it, if should. - parser.set_str(text.get(startatindex, stopatindex)+' \n') - bod = parser.find_good_parse_start( - editwin._build_char_in_string_func(startatindex)) - if bod is not None or startat == 1: - break - parser.set_lo(bod or 0) - else: - r = text.tag_prevrange("console", index) - if r: - startatindex = r[1] - else: - startatindex = "1.0" - stopatindex = "%d.end" % lno - # We add the newline because PyParse requires a newline at end. - # We add a space so that index won't be at end of line, so that - # its status will be the same as the char before it, if should. - parser.set_str(text.get(startatindex, stopatindex)+' \n') - parser.set_lo(0) - - # We want what the parser has, except for the last newline and space. 
- self.rawtext = parser.str[:-2] - # As far as I can see, parser.str preserves the statement we are in, - # so that stopatindex can be used to synchronize the string with the - # text box indices. - self.stopatindex = stopatindex - self.bracketing = parser.get_last_stmt_bracketing() - # find which pairs of bracketing are openers. These always correspond - # to a character of rawtext. - self.isopener = [i>0 and self.bracketing[i][1] > self.bracketing[i-1][1] - for i in range(len(self.bracketing))] - - self.set_index(index) - - def set_index(self, index): - """Set the index to which the functions relate. Note that it must be - in the same statement. - """ - indexinrawtext = \ - len(self.rawtext) - len(self.text.get(index, self.stopatindex)) - if indexinrawtext < 0: - raise ValueError("The index given is before the analyzed statement") - self.indexinrawtext = indexinrawtext - # find the rightmost bracket to which index belongs - self.indexbracket = 0 - while self.indexbracket < len(self.bracketing)-1 and \ - self.bracketing[self.indexbracket+1][0] < self.indexinrawtext: - self.indexbracket += 1 - if self.indexbracket < len(self.bracketing)-1 and \ - self.bracketing[self.indexbracket+1][0] == self.indexinrawtext and \ - not self.isopener[self.indexbracket+1]: - self.indexbracket += 1 - - def is_in_string(self): - """Is the index given to the HyperParser is in a string?""" - # The bracket to which we belong should be an opener. - # If it's an opener, it has to have a character. - return self.isopener[self.indexbracket] and \ - self.rawtext[self.bracketing[self.indexbracket][0]] in ('"', "'") - - def is_in_code(self): - """Is the index given to the HyperParser is in a normal code?""" - return not self.isopener[self.indexbracket] or \ - self.rawtext[self.bracketing[self.indexbracket][0]] not in \ - ('#', '"', "'") - - def get_surrounding_brackets(self, openers='([{', mustclose=False): - """If the index given to the HyperParser is surrounded by a bracket - defined in openers (or at least has one before it), return the - indices of the opening bracket and the closing bracket (or the - end of line, whichever comes first). - If it is not surrounded by brackets, or the end of line comes before - the closing bracket and mustclose is True, returns None. - """ - bracketinglevel = self.bracketing[self.indexbracket][1] - before = self.indexbracket - while not self.isopener[before] or \ - self.rawtext[self.bracketing[before][0]] not in openers or \ - self.bracketing[before][1] > bracketinglevel: - before -= 1 - if before < 0: - return None - bracketinglevel = min(bracketinglevel, self.bracketing[before][1]) - after = self.indexbracket + 1 - while after < len(self.bracketing) and \ - self.bracketing[after][1] >= bracketinglevel: - after += 1 - - beforeindex = self.text.index("%s-%dc" % - (self.stopatindex, len(self.rawtext)-self.bracketing[before][0])) - if after >= len(self.bracketing) or \ - self.bracketing[after][0] > len(self.rawtext): - if mustclose: - return None - afterindex = self.stopatindex - else: - # We are after a real char, so it is a ')' and we give the index - # before it. 
- afterindex = self.text.index("%s-%dc" % - (self.stopatindex, - len(self.rawtext)-(self.bracketing[after][0]-1))) - - return beforeindex, afterindex - - # This string includes all chars that may be in a white space - _whitespace_chars = " \t\n\\" - # This string includes all chars that may be in an identifier - _id_chars = string.ascii_letters + string.digits + "_" - # This string includes all chars that may be the first char of an identifier - _id_first_chars = string.ascii_letters + "_" - - # Given a string and pos, return the number of chars in the identifier - # which ends at pos, or 0 if there is no such one. Saved words are not - # identifiers. - def _eat_identifier(self, str, limit, pos): - i = pos - while i > limit and str[i-1] in self._id_chars: - i -= 1 - if i < pos and (str[i] not in self._id_first_chars or \ - keyword.iskeyword(str[i:pos])): - i = pos - return pos - i - - def get_expression(self): - """Return a string with the Python expression which ends at the given - index, which is empty if there is no real one. - """ - if not self.is_in_code(): - raise ValueError("get_expression should only be called if index "\ - "is inside a code.") - - rawtext = self.rawtext - bracketing = self.bracketing - - brck_index = self.indexbracket - brck_limit = bracketing[brck_index][0] - pos = self.indexinrawtext - - last_identifier_pos = pos - postdot_phase = True - - while 1: - # Eat whitespaces, comments, and if postdot_phase is False - one dot - while 1: - if pos>brck_limit and rawtext[pos-1] in self._whitespace_chars: - # Eat a whitespace - pos -= 1 - elif not postdot_phase and \ - pos > brck_limit and rawtext[pos-1] == '.': - # Eat a dot - pos -= 1 - postdot_phase = True - # The next line will fail if we are *inside* a comment, but we - # shouldn't be. - elif pos == brck_limit and brck_index > 0 and \ - rawtext[bracketing[brck_index-1][0]] == '#': - # Eat a comment - brck_index -= 2 - brck_limit = bracketing[brck_index][0] - pos = bracketing[brck_index+1][0] - else: - # If we didn't eat anything, quit. - break - - if not postdot_phase: - # We didn't find a dot, so the expression end at the last - # identifier pos. - break - - ret = self._eat_identifier(rawtext, brck_limit, pos) - if ret: - # There is an identifier to eat - pos = pos - ret - last_identifier_pos = pos - # Now, in order to continue the search, we must find a dot. - postdot_phase = False - # (the loop continues now) - - elif pos == brck_limit: - # We are at a bracketing limit. If it is a closing bracket, - # eat the bracket, otherwise, stop the search. - level = bracketing[brck_index][1] - while brck_index > 0 and bracketing[brck_index-1][1] > level: - brck_index -= 1 - if bracketing[brck_index][0] == brck_limit: - # We were not at the end of a closing bracket - break - pos = bracketing[brck_index][0] - brck_index -= 1 - brck_limit = bracketing[brck_index][0] - last_identifier_pos = pos - if rawtext[pos] in "([": - # [] and () may be used after an identifier, so we - # continue. postdot_phase is True, so we don't allow a dot. - pass - else: - # We can't continue after other types of brackets - if rawtext[pos] in "'\"": - # Scan a string prefix - while pos > 0 and rawtext[pos - 1] in "rRbBuU": - pos -= 1 - last_identifier_pos = pos - break - - else: - # We've found an operator or something. 
- break - - return rawtext[last_identifier_pos:self.indexinrawtext] diff --git a/Darwin/lib/python3.4/idlelib/MultiStatusBar.py b/Darwin/lib/python3.4/idlelib/MultiStatusBar.py deleted file mode 100644 index 4fc8dcf..0000000 --- a/Darwin/lib/python3.4/idlelib/MultiStatusBar.py +++ /dev/null @@ -1,32 +0,0 @@ -from tkinter import * - -class MultiStatusBar(Frame): - - def __init__(self, master=None, **kw): - if master is None: - master = Tk() - Frame.__init__(self, master, **kw) - self.labels = {} - - def set_label(self, name, text='', side=LEFT): - if name not in self.labels: - label = Label(self, bd=1, relief=SUNKEN, anchor=W) - label.pack(side=side) - self.labels[name] = label - else: - label = self.labels[name] - label.config(text=text) - -def _test(): - b = Frame() - c = Text(b) - c.pack(side=TOP) - a = MultiStatusBar(b) - a.set_label("one", "hello") - a.set_label("two", "world") - a.pack(side=BOTTOM, fill=X) - b.pack() - b.mainloop() - -if __name__ == '__main__': - _test() diff --git a/Darwin/lib/python3.4/idlelib/SearchDialogBase.py b/Darwin/lib/python3.4/idlelib/SearchDialogBase.py deleted file mode 100644 index b8b49b2..0000000 --- a/Darwin/lib/python3.4/idlelib/SearchDialogBase.py +++ /dev/null @@ -1,157 +0,0 @@ -'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' -from tkinter import * - -class SearchDialogBase: - '''Create most of a modal search dialog (make_frame, create_widgets). - - The wide left column contains: - 1 or 2 text entry lines (create_entries, make_entry); - a row of standard radiobuttons (create_option_buttons); - a row of dialog specific radiobuttons (create_other_buttons). - - The narrow right column contains command buttons - (create_command_buttons, make_button). - These are bound to functions that execute the command. - - Except for command buttons, this base class is not limited to - items common to all three subclasses. Rather, it is the Find dialog - minus the "Find Next" command and its execution function. - The other dialogs override methods to replace and add widgets. 
- ''' - - title = "Search Dialog" - icon = "Search" - needwrapbutton = 1 - - def __init__(self, root, engine): - self.root = root - self.engine = engine - self.top = None - - def open(self, text, searchphrase=None): - self.text = text - if not self.top: - self.create_widgets() - else: - self.top.deiconify() - self.top.tkraise() - if searchphrase: - self.ent.delete(0,"end") - self.ent.insert("end",searchphrase) - self.ent.focus_set() - self.ent.selection_range(0, "end") - self.ent.icursor(0) - self.top.grab_set() - - def close(self, event=None): - if self.top: - self.top.grab_release() - self.top.withdraw() - - def create_widgets(self): - top = Toplevel(self.root) - top.bind("", self.default_command) - top.bind("", self.close) - top.protocol("WM_DELETE_WINDOW", self.close) - top.wm_title(self.title) - top.wm_iconname(self.icon) - self.top = top - - self.row = 0 - self.top.grid_columnconfigure(0, pad=2, weight=0) - self.top.grid_columnconfigure(1, pad=2, minsize=100, weight=100) - - self.create_entries() - self.create_option_buttons() - self.create_other_buttons() - return self.create_command_buttons() - - def make_entry(self, label, var): - l = Label(self.top, text=label) - l.grid(row=self.row, column=0, sticky="nw") - e = Entry(self.top, textvariable=var, exportselection=0) - e.grid(row=self.row, column=1, sticky="nwe") - self.row = self.row + 1 - return e - - def make_frame(self,labeltext=None): - if labeltext: - l = Label(self.top, text=labeltext) - l.grid(row=self.row, column=0, sticky="nw") - f = Frame(self.top) - f.grid(row=self.row, column=1, columnspan=1, sticky="nwe") - self.row = self.row + 1 - return f - - def make_button(self, label, command, isdef=0): - b = Button(self.buttonframe, - text=label, command=command, - default=isdef and "active" or "normal") - cols,rows=self.buttonframe.grid_size() - b.grid(pady=1,row=rows,column=0,sticky="ew") - self.buttonframe.grid(rowspan=rows+1) - return b - - def create_entries(self): - self.ent = self.make_entry("Find:", self.engine.patvar) - - def create_option_buttons(self): - f = self.make_frame("Options") - - btn = Checkbutton(f, anchor="w", - variable=self.engine.revar, - text="Regular expression") - btn.pack(side="left", fill="both") - if self.engine.isre(): - btn.select() - - btn = Checkbutton(f, anchor="w", - variable=self.engine.casevar, - text="Match case") - btn.pack(side="left", fill="both") - if self.engine.iscase(): - btn.select() - - btn = Checkbutton(f, anchor="w", - variable=self.engine.wordvar, - text="Whole word") - btn.pack(side="left", fill="both") - if self.engine.isword(): - btn.select() - - if self.needwrapbutton: - btn = Checkbutton(f, anchor="w", - variable=self.engine.wrapvar, - text="Wrap around") - btn.pack(side="left", fill="both") - if self.engine.iswrap(): - btn.select() - - def create_other_buttons(self): - f = self.make_frame("Direction") - - #lbl = Label(f, text="Direction: ") - #lbl.pack(side="left") - - btn = Radiobutton(f, anchor="w", - variable=self.engine.backvar, value=1, - text="Up") - btn.pack(side="left", fill="both") - if self.engine.isback(): - btn.select() - - btn = Radiobutton(f, anchor="w", - variable=self.engine.backvar, value=0, - text="Down") - btn.pack(side="left", fill="both") - if not self.engine.isback(): - btn.select() - - def create_command_buttons(self): - # - # place button frame on the right - f = self.buttonframe = Frame(self.top) - f.grid(row=0,column=2,padx=2,pady=2,ipadx=2,ipady=2) - - b = self.make_button("close", self.close) - b.lower() diff --git 
a/Darwin/lib/python3.4/idlelib/WidgetRedirector.py b/Darwin/lib/python3.4/idlelib/WidgetRedirector.py deleted file mode 100644 index ba5251f..0000000 --- a/Darwin/lib/python3.4/idlelib/WidgetRedirector.py +++ /dev/null @@ -1,126 +0,0 @@ -from tkinter import * - -class WidgetRedirector: - - """Support for redirecting arbitrary widget subcommands. - - Some Tk operations don't normally pass through Tkinter. For example, if a - character is inserted into a Text widget by pressing a key, a default Tk - binding to the widget's 'insert' operation is activated, and the Tk library - processes the insert without calling back into Tkinter. - - Although a binding to could be made via Tkinter, what we really want - to do is to hook the Tk 'insert' operation itself. - - When a widget is instantiated, a Tcl command is created whose name is the - same as the pathname widget._w. This command is used to invoke the various - widget operations, e.g. insert (for a Text widget). We are going to hook - this command and provide a facility ('register') to intercept the widget - operation. - - In IDLE, the function being registered provides access to the top of a - Percolator chain. At the bottom of the chain is a call to the original - Tk widget operation. - - """ - def __init__(self, widget): - self._operations = {} - self.widget = widget # widget instance - self.tk = tk = widget.tk # widget's root - w = widget._w # widget's (full) Tk pathname - self.orig = w + "_orig" - # Rename the Tcl command within Tcl: - tk.call("rename", w, self.orig) - # Create a new Tcl command whose name is the widget's pathname, and - # whose action is to dispatch on the operation passed to the widget: - tk.createcommand(w, self.dispatch) - - def __repr__(self): - return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__, - self.widget._w) - - def close(self): - for operation in list(self._operations): - self.unregister(operation) - widget = self.widget; del self.widget - orig = self.orig; del self.orig - tk = widget.tk - w = widget._w - tk.deletecommand(w) - # restore the original widget Tcl command: - tk.call("rename", orig, w) - - def register(self, operation, function): - self._operations[operation] = function - setattr(self.widget, operation, function) - return OriginalCommand(self, operation) - - def unregister(self, operation): - if operation in self._operations: - function = self._operations[operation] - del self._operations[operation] - if hasattr(self.widget, operation): - delattr(self.widget, operation) - return function - else: - return None - - def dispatch(self, operation, *args): - '''Callback from Tcl which runs when the widget is referenced. - - If an operation has been registered in self._operations, apply the - associated function to the args passed into Tcl. Otherwise, pass the - operation through to Tk via the original Tcl function. - - Note that if a registered function is called, the operation is not - passed through to Tk. Apply the function returned by self.register() - to *args to accomplish that. For an example, see ColorDelegator.py. 
- - ''' - m = self._operations.get(operation) - try: - if m: - return m(*args) - else: - return self.tk.call((self.orig, operation) + args) - except TclError: - return "" - - -class OriginalCommand: - - def __init__(self, redir, operation): - self.redir = redir - self.operation = operation - self.tk = redir.tk - self.orig = redir.orig - self.tk_call = self.tk.call - self.orig_and_operation = (self.orig, self.operation) - - def __repr__(self): - return "OriginalCommand(%r, %r)" % (self.redir, self.operation) - - def __call__(self, *args): - return self.tk_call(self.orig_and_operation + args) - - -def main(): - root = Tk() - root.wm_protocol("WM_DELETE_WINDOW", root.quit) - text = Text() - text.pack() - text.focus_set() - redir = WidgetRedirector(text) - global previous_tcl_fcn - def my_insert(*args): - print("insert", args) - previous_tcl_fcn(*args) - previous_tcl_fcn = redir.register("insert", my_insert) - root.mainloop() - redir.unregister("insert") # runs after first 'close window' - redir.close() - root.mainloop() - root.destroy() - -if __name__ == "__main__": - main() diff --git a/Darwin/lib/python3.4/idlelib/configDialog.py b/Darwin/lib/python3.4/idlelib/configDialog.py deleted file mode 100644 index fefe42b..0000000 --- a/Darwin/lib/python3.4/idlelib/configDialog.py +++ /dev/null @@ -1,1148 +0,0 @@ -"""IDLE Configuration Dialog: support user customization of IDLE by GUI - -Customize font faces, sizes, and colorization attributes. Set indentation -defaults. Customize keybindings. Colorization and keybindings can be -saved as user defined sets. Select startup options including shell/editor -and default window size. Define additional help sources. - -Note that tab width in IDLE is currently fixed at eight due to Tk issues. -Refer to comments in EditorWindow autoindent code for details. - -""" -from tkinter import * -import tkinter.messagebox as tkMessageBox -import tkinter.colorchooser as tkColorChooser -import tkinter.font as tkFont -import copy - -from idlelib.configHandler import idleConf -from idlelib.dynOptionMenuWidget import DynOptionMenu -from idlelib.tabbedpages import TabbedPageSet -from idlelib.keybindingDialog import GetKeysDialog -from idlelib.configSectionNameDialog import GetCfgSectionNameDialog -from idlelib.configHelpSourceEdit import GetHelpSourceDialog -from idlelib import macosxSupport - -class ConfigDialog(Toplevel): - - def __init__(self,parent,title): - Toplevel.__init__(self, parent) - self.wm_withdraw() - - self.configure(borderwidth=5) - self.title('IDLE Preferences') - self.geometry("+%d+%d" % (parent.winfo_rootx()+20, - parent.winfo_rooty()+30)) - #Theme Elements. Each theme element key is its display name. - #The first value of the tuple is the sample area tag name. - #The second value is the display name list sort index. 
- self.themeElements={'Normal Text':('normal','00'), - 'Python Keywords':('keyword','01'), - 'Python Definitions':('definition','02'), - 'Python Builtins':('builtin', '03'), - 'Python Comments':('comment','04'), - 'Python Strings':('string','05'), - 'Selected Text':('hilite','06'), - 'Found Text':('hit','07'), - 'Cursor':('cursor','08'), - 'Error Text':('error','09'), - 'Shell Normal Text':('console','10'), - 'Shell Stdout Text':('stdout','11'), - 'Shell Stderr Text':('stderr','12'), - } - self.ResetChangedItems() #load initial values in changed items dict - self.CreateWidgets() - self.resizable(height=FALSE,width=FALSE) - self.transient(parent) - self.grab_set() - self.protocol("WM_DELETE_WINDOW", self.Cancel) - self.parent = parent - self.tabPages.focus_set() - #key bindings for this dialog - #self.bind('',self.Cancel) #dismiss dialog, no save - #self.bind('',self.Apply) #apply changes, save - #self.bind('',self.Help) #context help - self.LoadConfigs() - self.AttachVarCallbacks() #avoid callbacks during LoadConfigs - - self.wm_deiconify() - self.wait_window() - - def CreateWidgets(self): - self.tabPages = TabbedPageSet(self, - page_names=['Fonts/Tabs','Highlighting','Keys','General']) - frameActionButtons = Frame(self,pady=2) - #action buttons - - if macosxSupport.isAquaTk(): - # Surpress the padx and pady arguments when - # running as IDLE.app, otherwise the text - # on these buttons will not be readable. - extraKwds={} - else: - extraKwds=dict(padx=6, pady=3) - -# Comment out button creation and packing until implement self.Help -## self.buttonHelp = Button(frameActionButtons,text='Help', -## command=self.Help,takefocus=FALSE, -## **extraKwds) - self.buttonOk = Button(frameActionButtons,text='Ok', - command=self.Ok,takefocus=FALSE, - **extraKwds) - self.buttonApply = Button(frameActionButtons,text='Apply', - command=self.Apply,takefocus=FALSE, - **extraKwds) - self.buttonCancel = Button(frameActionButtons,text='Cancel', - command=self.Cancel,takefocus=FALSE, - **extraKwds) - self.CreatePageFontTab() - self.CreatePageHighlight() - self.CreatePageKeys() - self.CreatePageGeneral() -## self.buttonHelp.pack(side=RIGHT,padx=5) - self.buttonOk.pack(side=LEFT,padx=5) - self.buttonApply.pack(side=LEFT,padx=5) - self.buttonCancel.pack(side=LEFT,padx=5) - frameActionButtons.pack(side=BOTTOM) - Frame(self, height=2, borderwidth=0).pack(side=BOTTOM) - self.tabPages.pack(side=TOP,expand=TRUE,fill=BOTH) - - def CreatePageFontTab(self): - #tkVars - self.fontSize=StringVar(self) - self.fontBold=BooleanVar(self) - self.fontName=StringVar(self) - self.spaceNum=IntVar(self) - self.editFont=tkFont.Font(self,('courier',10,'normal')) - ##widget creation - #body frame - frame=self.tabPages.pages['Fonts/Tabs'].frame - #body section frames - frameFont=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Base Editor Font ') - frameIndent=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Indentation Width ') - #frameFont - frameFontName=Frame(frameFont) - frameFontParam=Frame(frameFont) - labelFontNameTitle=Label(frameFontName,justify=LEFT, - text='Font Face :') - self.listFontName=Listbox(frameFontName,height=5,takefocus=FALSE, - exportselection=FALSE) - self.listFontName.bind('',self.OnListFontButtonRelease) - scrollFont=Scrollbar(frameFontName) - scrollFont.config(command=self.listFontName.yview) - self.listFontName.config(yscrollcommand=scrollFont.set) - labelFontSizeTitle=Label(frameFontParam,text='Size :') - self.optMenuFontSize=DynOptionMenu(frameFontParam,self.fontSize,None, - 
command=self.SetFontSample) - checkFontBold=Checkbutton(frameFontParam,variable=self.fontBold, - onvalue=1,offvalue=0,text='Bold',command=self.SetFontSample) - frameFontSample=Frame(frameFont,relief=SOLID,borderwidth=1) - self.labelFontSample=Label(frameFontSample, - text='AaBbCcDdEe\nFfGgHhIiJjK\n1234567890\n#:+=(){}[]', - justify=LEFT,font=self.editFont) - #frameIndent - frameIndentSize=Frame(frameIndent) - labelSpaceNumTitle=Label(frameIndentSize, justify=LEFT, - text='Python Standard: 4 Spaces!') - self.scaleSpaceNum=Scale(frameIndentSize, variable=self.spaceNum, - orient='horizontal', - tickinterval=2, from_=2, to=16) - #widget packing - #body - frameFont.pack(side=LEFT,padx=5,pady=5,expand=TRUE,fill=BOTH) - frameIndent.pack(side=LEFT,padx=5,pady=5,fill=Y) - #frameFont - frameFontName.pack(side=TOP,padx=5,pady=5,fill=X) - frameFontParam.pack(side=TOP,padx=5,pady=5,fill=X) - labelFontNameTitle.pack(side=TOP,anchor=W) - self.listFontName.pack(side=LEFT,expand=TRUE,fill=X) - scrollFont.pack(side=LEFT,fill=Y) - labelFontSizeTitle.pack(side=LEFT,anchor=W) - self.optMenuFontSize.pack(side=LEFT,anchor=W) - checkFontBold.pack(side=LEFT,anchor=W,padx=20) - frameFontSample.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH) - self.labelFontSample.pack(expand=TRUE,fill=BOTH) - #frameIndent - frameIndentSize.pack(side=TOP,fill=X) - labelSpaceNumTitle.pack(side=TOP,anchor=W,padx=5) - self.scaleSpaceNum.pack(side=TOP,padx=5,fill=X) - return frame - - def CreatePageHighlight(self): - self.builtinTheme=StringVar(self) - self.customTheme=StringVar(self) - self.fgHilite=BooleanVar(self) - self.colour=StringVar(self) - self.fontName=StringVar(self) - self.themeIsBuiltin=BooleanVar(self) - self.highlightTarget=StringVar(self) - ##widget creation - #body frame - frame=self.tabPages.pages['Highlighting'].frame - #body section frames - frameCustom=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Custom Highlighting ') - frameTheme=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Highlighting Theme ') - #frameCustom - self.textHighlightSample=Text(frameCustom,relief=SOLID,borderwidth=1, - font=('courier',12,''),cursor='hand2',width=21,height=11, - takefocus=FALSE,highlightthickness=0,wrap=NONE) - text=self.textHighlightSample - text.bind('',lambda e: 'break') - text.bind('',lambda e: 'break') - textAndTags=(('#you can click here','comment'),('\n','normal'), - ('#to choose items','comment'),('\n','normal'),('def','keyword'), - (' ','normal'),('func','definition'),('(param):','normal'), - ('\n ','normal'),('"""string"""','string'),('\n var0 = ','normal'), - ("'string'",'string'),('\n var1 = ','normal'),("'selected'",'hilite'), - ('\n var2 = ','normal'),("'found'",'hit'), - ('\n var3 = ','normal'),('list', 'builtin'), ('(','normal'), - ('None', 'keyword'),(')\n\n','normal'), - (' error ','error'),(' ','normal'),('cursor |','cursor'), - ('\n ','normal'),('shell','console'),(' ','normal'),('stdout','stdout'), - (' ','normal'),('stderr','stderr'),('\n','normal')) - for txTa in textAndTags: - text.insert(END,txTa[0],txTa[1]) - for element in self.themeElements: - text.tag_bind(self.themeElements[element][0],'', - lambda event,elem=element: event.widget.winfo_toplevel() - .highlightTarget.set(elem)) - text.config(state=DISABLED) - self.frameColourSet=Frame(frameCustom,relief=SOLID,borderwidth=1) - frameFgBg=Frame(frameCustom) - buttonSetColour=Button(self.frameColourSet,text='Choose Colour for :', - command=self.GetColour,highlightthickness=0) - 
self.optMenuHighlightTarget=DynOptionMenu(self.frameColourSet, - self.highlightTarget,None,highlightthickness=0)#,command=self.SetHighlightTargetBinding - self.radioFg=Radiobutton(frameFgBg,variable=self.fgHilite, - value=1,text='Foreground',command=self.SetColourSampleBinding) - self.radioBg=Radiobutton(frameFgBg,variable=self.fgHilite, - value=0,text='Background',command=self.SetColourSampleBinding) - self.fgHilite.set(1) - buttonSaveCustomTheme=Button(frameCustom, - text='Save as New Custom Theme',command=self.SaveAsNewTheme) - #frameTheme - labelTypeTitle=Label(frameTheme,text='Select : ') - self.radioThemeBuiltin=Radiobutton(frameTheme,variable=self.themeIsBuiltin, - value=1,command=self.SetThemeType,text='a Built-in Theme') - self.radioThemeCustom=Radiobutton(frameTheme,variable=self.themeIsBuiltin, - value=0,command=self.SetThemeType,text='a Custom Theme') - self.optMenuThemeBuiltin=DynOptionMenu(frameTheme, - self.builtinTheme,None,command=None) - self.optMenuThemeCustom=DynOptionMenu(frameTheme, - self.customTheme,None,command=None) - self.buttonDeleteCustomTheme=Button(frameTheme,text='Delete Custom Theme', - command=self.DeleteCustomTheme) - ##widget packing - #body - frameCustom.pack(side=LEFT,padx=5,pady=5,expand=TRUE,fill=BOTH) - frameTheme.pack(side=LEFT,padx=5,pady=5,fill=Y) - #frameCustom - self.frameColourSet.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=X) - frameFgBg.pack(side=TOP,padx=5,pady=0) - self.textHighlightSample.pack(side=TOP,padx=5,pady=5,expand=TRUE, - fill=BOTH) - buttonSetColour.pack(side=TOP,expand=TRUE,fill=X,padx=8,pady=4) - self.optMenuHighlightTarget.pack(side=TOP,expand=TRUE,fill=X,padx=8,pady=3) - self.radioFg.pack(side=LEFT,anchor=E) - self.radioBg.pack(side=RIGHT,anchor=W) - buttonSaveCustomTheme.pack(side=BOTTOM,fill=X,padx=5,pady=5) - #frameTheme - labelTypeTitle.pack(side=TOP,anchor=W,padx=5,pady=5) - self.radioThemeBuiltin.pack(side=TOP,anchor=W,padx=5) - self.radioThemeCustom.pack(side=TOP,anchor=W,padx=5,pady=2) - self.optMenuThemeBuiltin.pack(side=TOP,fill=X,padx=5,pady=5) - self.optMenuThemeCustom.pack(side=TOP,fill=X,anchor=W,padx=5,pady=5) - self.buttonDeleteCustomTheme.pack(side=TOP,fill=X,padx=5,pady=5) - return frame - - def CreatePageKeys(self): - #tkVars - self.bindingTarget=StringVar(self) - self.builtinKeys=StringVar(self) - self.customKeys=StringVar(self) - self.keysAreBuiltin=BooleanVar(self) - self.keyBinding=StringVar(self) - ##widget creation - #body frame - frame=self.tabPages.pages['Keys'].frame - #body section frames - frameCustom=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Custom Key Bindings ') - frameKeySets=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Key Set ') - #frameCustom - frameTarget=Frame(frameCustom) - labelTargetTitle=Label(frameTarget,text='Action - Key(s)') - scrollTargetY=Scrollbar(frameTarget) - scrollTargetX=Scrollbar(frameTarget,orient=HORIZONTAL) - self.listBindings=Listbox(frameTarget,takefocus=FALSE, - exportselection=FALSE) - self.listBindings.bind('',self.KeyBindingSelected) - scrollTargetY.config(command=self.listBindings.yview) - scrollTargetX.config(command=self.listBindings.xview) - self.listBindings.config(yscrollcommand=scrollTargetY.set) - self.listBindings.config(xscrollcommand=scrollTargetX.set) - self.buttonNewKeys=Button(frameCustom,text='Get New Keys for Selection', - command=self.GetNewKeys,state=DISABLED) - #frameKeySets - frames = [Frame(frameKeySets, padx=2, pady=2, borderwidth=0) - for i in range(2)] - 
self.radioKeysBuiltin=Radiobutton(frames[0],variable=self.keysAreBuiltin, - value=1,command=self.SetKeysType,text='Use a Built-in Key Set') - self.radioKeysCustom=Radiobutton(frames[0],variable=self.keysAreBuiltin, - value=0,command=self.SetKeysType,text='Use a Custom Key Set') - self.optMenuKeysBuiltin=DynOptionMenu(frames[0], - self.builtinKeys,None,command=None) - self.optMenuKeysCustom=DynOptionMenu(frames[0], - self.customKeys,None,command=None) - self.buttonDeleteCustomKeys=Button(frames[1],text='Delete Custom Key Set', - command=self.DeleteCustomKeys) - buttonSaveCustomKeys=Button(frames[1], - text='Save as New Custom Key Set',command=self.SaveAsNewKeySet) - ##widget packing - #body - frameCustom.pack(side=BOTTOM,padx=5,pady=5,expand=TRUE,fill=BOTH) - frameKeySets.pack(side=BOTTOM,padx=5,pady=5,fill=BOTH) - #frameCustom - self.buttonNewKeys.pack(side=BOTTOM,fill=X,padx=5,pady=5) - frameTarget.pack(side=LEFT,padx=5,pady=5,expand=TRUE,fill=BOTH) - #frame target - frameTarget.columnconfigure(0,weight=1) - frameTarget.rowconfigure(1,weight=1) - labelTargetTitle.grid(row=0,column=0,columnspan=2,sticky=W) - self.listBindings.grid(row=1,column=0,sticky=NSEW) - scrollTargetY.grid(row=1,column=1,sticky=NS) - scrollTargetX.grid(row=2,column=0,sticky=EW) - #frameKeySets - self.radioKeysBuiltin.grid(row=0, column=0, sticky=W+NS) - self.radioKeysCustom.grid(row=1, column=0, sticky=W+NS) - self.optMenuKeysBuiltin.grid(row=0, column=1, sticky=NSEW) - self.optMenuKeysCustom.grid(row=1, column=1, sticky=NSEW) - self.buttonDeleteCustomKeys.pack(side=LEFT,fill=X,expand=True,padx=2) - buttonSaveCustomKeys.pack(side=LEFT,fill=X,expand=True,padx=2) - frames[0].pack(side=TOP, fill=BOTH, expand=True) - frames[1].pack(side=TOP, fill=X, expand=True, pady=2) - return frame - - def CreatePageGeneral(self): - #tkVars - self.winWidth=StringVar(self) - self.winHeight=StringVar(self) - self.paraWidth=StringVar(self) - self.startupEdit=IntVar(self) - self.autoSave=IntVar(self) - self.encoding=StringVar(self) - self.userHelpBrowser=BooleanVar(self) - self.helpBrowser=StringVar(self) - #widget creation - #body - frame=self.tabPages.pages['General'].frame - #body section frames - frameRun=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Startup Preferences ') - frameSave=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Autosave Preferences ') - frameWinSize=Frame(frame,borderwidth=2,relief=GROOVE) - frameParaSize=Frame(frame,borderwidth=2,relief=GROOVE) - frameHelp=LabelFrame(frame,borderwidth=2,relief=GROOVE, - text=' Additional Help Sources ') - #frameRun - labelRunChoiceTitle=Label(frameRun,text='At Startup') - radioStartupEdit=Radiobutton(frameRun,variable=self.startupEdit, - value=1,command=self.SetKeysType,text="Open Edit Window") - radioStartupShell=Radiobutton(frameRun,variable=self.startupEdit, - value=0,command=self.SetKeysType,text='Open Shell Window') - #frameSave - labelRunSaveTitle=Label(frameSave,text='At Start of Run (F5) ') - radioSaveAsk=Radiobutton(frameSave,variable=self.autoSave, - value=0,command=self.SetKeysType,text="Prompt to Save") - radioSaveAuto=Radiobutton(frameSave,variable=self.autoSave, - value=1,command=self.SetKeysType,text='No Prompt') - #frameWinSize - labelWinSizeTitle=Label(frameWinSize,text='Initial Window Size'+ - ' (in characters)') - labelWinWidthTitle=Label(frameWinSize,text='Width') - entryWinWidth=Entry(frameWinSize,textvariable=self.winWidth, - width=3) - labelWinHeightTitle=Label(frameWinSize,text='Height') - 
entryWinHeight=Entry(frameWinSize,textvariable=self.winHeight, - width=3) - #paragraphFormatWidth - labelParaWidthTitle=Label(frameParaSize,text='Paragraph reformat'+ - ' width (in characters)') - entryParaWidth=Entry(frameParaSize,textvariable=self.paraWidth, - width=3) - #frameHelp - frameHelpList=Frame(frameHelp) - frameHelpListButtons=Frame(frameHelpList) - scrollHelpList=Scrollbar(frameHelpList) - self.listHelp=Listbox(frameHelpList,height=5,takefocus=FALSE, - exportselection=FALSE) - scrollHelpList.config(command=self.listHelp.yview) - self.listHelp.config(yscrollcommand=scrollHelpList.set) - self.listHelp.bind('',self.HelpSourceSelected) - self.buttonHelpListEdit=Button(frameHelpListButtons,text='Edit', - state=DISABLED,width=8,command=self.HelpListItemEdit) - self.buttonHelpListAdd=Button(frameHelpListButtons,text='Add', - width=8,command=self.HelpListItemAdd) - self.buttonHelpListRemove=Button(frameHelpListButtons,text='Remove', - state=DISABLED,width=8,command=self.HelpListItemRemove) - #widget packing - #body - frameRun.pack(side=TOP,padx=5,pady=5,fill=X) - frameSave.pack(side=TOP,padx=5,pady=5,fill=X) - frameWinSize.pack(side=TOP,padx=5,pady=5,fill=X) - frameParaSize.pack(side=TOP,padx=5,pady=5,fill=X) - frameHelp.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH) - #frameRun - labelRunChoiceTitle.pack(side=LEFT,anchor=W,padx=5,pady=5) - radioStartupShell.pack(side=RIGHT,anchor=W,padx=5,pady=5) - radioStartupEdit.pack(side=RIGHT,anchor=W,padx=5,pady=5) - #frameSave - labelRunSaveTitle.pack(side=LEFT,anchor=W,padx=5,pady=5) - radioSaveAuto.pack(side=RIGHT,anchor=W,padx=5,pady=5) - radioSaveAsk.pack(side=RIGHT,anchor=W,padx=5,pady=5) - #frameWinSize - labelWinSizeTitle.pack(side=LEFT,anchor=W,padx=5,pady=5) - entryWinHeight.pack(side=RIGHT,anchor=E,padx=10,pady=5) - labelWinHeightTitle.pack(side=RIGHT,anchor=E,pady=5) - entryWinWidth.pack(side=RIGHT,anchor=E,padx=10,pady=5) - labelWinWidthTitle.pack(side=RIGHT,anchor=E,pady=5) - #paragraphFormatWidth - labelParaWidthTitle.pack(side=LEFT,anchor=W,padx=5,pady=5) - entryParaWidth.pack(side=RIGHT,anchor=E,padx=10,pady=5) - #frameHelp - frameHelpListButtons.pack(side=RIGHT,padx=5,pady=5,fill=Y) - frameHelpList.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH) - scrollHelpList.pack(side=RIGHT,anchor=W,fill=Y) - self.listHelp.pack(side=LEFT,anchor=E,expand=TRUE,fill=BOTH) - self.buttonHelpListEdit.pack(side=TOP,anchor=W,pady=5) - self.buttonHelpListAdd.pack(side=TOP,anchor=W) - self.buttonHelpListRemove.pack(side=TOP,anchor=W,pady=5) - return frame - - def AttachVarCallbacks(self): - self.fontSize.trace_variable('w',self.VarChanged_fontSize) - self.fontName.trace_variable('w',self.VarChanged_fontName) - self.fontBold.trace_variable('w',self.VarChanged_fontBold) - self.spaceNum.trace_variable('w',self.VarChanged_spaceNum) - self.colour.trace_variable('w',self.VarChanged_colour) - self.builtinTheme.trace_variable('w',self.VarChanged_builtinTheme) - self.customTheme.trace_variable('w',self.VarChanged_customTheme) - self.themeIsBuiltin.trace_variable('w',self.VarChanged_themeIsBuiltin) - self.highlightTarget.trace_variable('w',self.VarChanged_highlightTarget) - self.keyBinding.trace_variable('w',self.VarChanged_keyBinding) - self.builtinKeys.trace_variable('w',self.VarChanged_builtinKeys) - self.customKeys.trace_variable('w',self.VarChanged_customKeys) - self.keysAreBuiltin.trace_variable('w',self.VarChanged_keysAreBuiltin) - self.winWidth.trace_variable('w',self.VarChanged_winWidth) - 
self.winHeight.trace_variable('w',self.VarChanged_winHeight) - self.paraWidth.trace_variable('w',self.VarChanged_paraWidth) - self.startupEdit.trace_variable('w',self.VarChanged_startupEdit) - self.autoSave.trace_variable('w',self.VarChanged_autoSave) - self.encoding.trace_variable('w',self.VarChanged_encoding) - - def VarChanged_fontSize(self,*params): - value=self.fontSize.get() - self.AddChangedItem('main','EditorWindow','font-size',value) - - def VarChanged_fontName(self,*params): - value=self.fontName.get() - self.AddChangedItem('main','EditorWindow','font',value) - - def VarChanged_fontBold(self,*params): - value=self.fontBold.get() - self.AddChangedItem('main','EditorWindow','font-bold',value) - - def VarChanged_spaceNum(self,*params): - value=self.spaceNum.get() - self.AddChangedItem('main','Indent','num-spaces',value) - - def VarChanged_colour(self,*params): - self.OnNewColourSet() - - def VarChanged_builtinTheme(self,*params): - value=self.builtinTheme.get() - self.AddChangedItem('main','Theme','name',value) - self.PaintThemeSample() - - def VarChanged_customTheme(self,*params): - value=self.customTheme.get() - if value != '- no custom themes -': - self.AddChangedItem('main','Theme','name',value) - self.PaintThemeSample() - - def VarChanged_themeIsBuiltin(self,*params): - value=self.themeIsBuiltin.get() - self.AddChangedItem('main','Theme','default',value) - if value: - self.VarChanged_builtinTheme() - else: - self.VarChanged_customTheme() - - def VarChanged_highlightTarget(self,*params): - self.SetHighlightTarget() - - def VarChanged_keyBinding(self,*params): - value=self.keyBinding.get() - keySet=self.customKeys.get() - event=self.listBindings.get(ANCHOR).split()[0] - if idleConf.IsCoreBinding(event): - #this is a core keybinding - self.AddChangedItem('keys',keySet,event,value) - else: #this is an extension key binding - extName=idleConf.GetExtnNameForEvent(event) - extKeybindSection=extName+'_cfgBindings' - self.AddChangedItem('extensions',extKeybindSection,event,value) - - def VarChanged_builtinKeys(self,*params): - value=self.builtinKeys.get() - self.AddChangedItem('main','Keys','name',value) - self.LoadKeysList(value) - - def VarChanged_customKeys(self,*params): - value=self.customKeys.get() - if value != '- no custom keys -': - self.AddChangedItem('main','Keys','name',value) - self.LoadKeysList(value) - - def VarChanged_keysAreBuiltin(self,*params): - value=self.keysAreBuiltin.get() - self.AddChangedItem('main','Keys','default',value) - if value: - self.VarChanged_builtinKeys() - else: - self.VarChanged_customKeys() - - def VarChanged_winWidth(self,*params): - value=self.winWidth.get() - self.AddChangedItem('main','EditorWindow','width',value) - - def VarChanged_winHeight(self,*params): - value=self.winHeight.get() - self.AddChangedItem('main','EditorWindow','height',value) - - def VarChanged_paraWidth(self,*params): - value=self.paraWidth.get() - self.AddChangedItem('main','FormatParagraph','paragraph',value) - - def VarChanged_startupEdit(self,*params): - value=self.startupEdit.get() - self.AddChangedItem('main','General','editor-on-startup',value) - - def VarChanged_autoSave(self,*params): - value=self.autoSave.get() - self.AddChangedItem('main','General','autosave',value) - - def VarChanged_encoding(self,*params): - value=self.encoding.get() - self.AddChangedItem('main','EditorWindow','encoding',value) - - def ResetChangedItems(self): - #When any config item is changed in this dialog, an entry - #should be made in the relevant section (config type) of this - 
#dictionary. The key should be the config file section name and the - #value a dictionary, whose key:value pairs are item=value pairs for - #that config file section. - self.changedItems={'main':{},'highlight':{},'keys':{},'extensions':{}} - - def AddChangedItem(self,type,section,item,value): - value=str(value) #make sure we use a string - if section not in self.changedItems[type]: - self.changedItems[type][section]={} - self.changedItems[type][section][item]=value - - def GetDefaultItems(self): - dItems={'main':{},'highlight':{},'keys':{},'extensions':{}} - for configType in dItems: - sections=idleConf.GetSectionList('default',configType) - for section in sections: - dItems[configType][section]={} - options=idleConf.defaultCfg[configType].GetOptionList(section) - for option in options: - dItems[configType][section][option]=( - idleConf.defaultCfg[configType].Get(section,option)) - return dItems - - def SetThemeType(self): - if self.themeIsBuiltin.get(): - self.optMenuThemeBuiltin.config(state=NORMAL) - self.optMenuThemeCustom.config(state=DISABLED) - self.buttonDeleteCustomTheme.config(state=DISABLED) - else: - self.optMenuThemeBuiltin.config(state=DISABLED) - self.radioThemeCustom.config(state=NORMAL) - self.optMenuThemeCustom.config(state=NORMAL) - self.buttonDeleteCustomTheme.config(state=NORMAL) - - def SetKeysType(self): - if self.keysAreBuiltin.get(): - self.optMenuKeysBuiltin.config(state=NORMAL) - self.optMenuKeysCustom.config(state=DISABLED) - self.buttonDeleteCustomKeys.config(state=DISABLED) - else: - self.optMenuKeysBuiltin.config(state=DISABLED) - self.radioKeysCustom.config(state=NORMAL) - self.optMenuKeysCustom.config(state=NORMAL) - self.buttonDeleteCustomKeys.config(state=NORMAL) - - def GetNewKeys(self): - listIndex=self.listBindings.index(ANCHOR) - binding=self.listBindings.get(listIndex) - bindName=binding.split()[0] #first part, up to first space - if self.keysAreBuiltin.get(): - currentKeySetName=self.builtinKeys.get() - else: - currentKeySetName=self.customKeys.get() - currentBindings=idleConf.GetCurrentKeySet() - if currentKeySetName in self.changedItems['keys']: #unsaved changes - keySetChanges=self.changedItems['keys'][currentKeySetName] - for event in keySetChanges: - currentBindings[event]=keySetChanges[event].split() - currentKeySequences = list(currentBindings.values()) - newKeys=GetKeysDialog(self,'Get New Keys',bindName, - currentKeySequences).result - if newKeys: #new keys were specified - if self.keysAreBuiltin.get(): #current key set is a built-in - message=('Your changes will be saved as a new Custom Key Set. 
'+ - 'Enter a name for your new Custom Key Set below.') - newKeySet=self.GetNewKeysName(message) - if not newKeySet: #user cancelled custom key set creation - self.listBindings.select_set(listIndex) - self.listBindings.select_anchor(listIndex) - return - else: #create new custom key set based on previously active key set - self.CreateNewKeySet(newKeySet) - self.listBindings.delete(listIndex) - self.listBindings.insert(listIndex,bindName+' - '+newKeys) - self.listBindings.select_set(listIndex) - self.listBindings.select_anchor(listIndex) - self.keyBinding.set(newKeys) - else: - self.listBindings.select_set(listIndex) - self.listBindings.select_anchor(listIndex) - - def GetNewKeysName(self,message): - usedNames=(idleConf.GetSectionList('user','keys')+ - idleConf.GetSectionList('default','keys')) - newKeySet=GetCfgSectionNameDialog(self,'New Custom Key Set', - message,usedNames).result - return newKeySet - - def SaveAsNewKeySet(self): - newKeysName=self.GetNewKeysName('New Key Set Name:') - if newKeysName: - self.CreateNewKeySet(newKeysName) - - def KeyBindingSelected(self,event): - self.buttonNewKeys.config(state=NORMAL) - - def CreateNewKeySet(self,newKeySetName): - #creates new custom key set based on the previously active key set, - #and makes the new key set active - if self.keysAreBuiltin.get(): - prevKeySetName=self.builtinKeys.get() - else: - prevKeySetName=self.customKeys.get() - prevKeys=idleConf.GetCoreKeys(prevKeySetName) - newKeys={} - for event in prevKeys: #add key set to changed items - eventName=event[2:-2] #trim off the angle brackets - binding=' '.join(prevKeys[event]) - newKeys[eventName]=binding - #handle any unsaved changes to prev key set - if prevKeySetName in self.changedItems['keys']: - keySetChanges=self.changedItems['keys'][prevKeySetName] - for event in keySetChanges: - newKeys[event]=keySetChanges[event] - #save the new theme - self.SaveNewKeySet(newKeySetName,newKeys) - #change gui over to the new key set - customKeyList=idleConf.GetSectionList('user','keys') - customKeyList.sort() - self.optMenuKeysCustom.SetMenu(customKeyList,newKeySetName) - self.keysAreBuiltin.set(0) - self.SetKeysType() - - def LoadKeysList(self,keySetName): - reselect=0 - newKeySet=0 - if self.listBindings.curselection(): - reselect=1 - listIndex=self.listBindings.index(ANCHOR) - keySet=idleConf.GetKeySet(keySetName) - bindNames = list(keySet.keys()) - bindNames.sort() - self.listBindings.delete(0,END) - for bindName in bindNames: - key=' '.join(keySet[bindName]) #make key(s) into a string - bindName=bindName[2:-2] #trim off the angle brackets - if keySetName in self.changedItems['keys']: - #handle any unsaved changes to this key set - if bindName in self.changedItems['keys'][keySetName]: - key=self.changedItems['keys'][keySetName][bindName] - self.listBindings.insert(END, bindName+' - '+key) - if reselect: - self.listBindings.see(listIndex) - self.listBindings.select_set(listIndex) - self.listBindings.select_anchor(listIndex) - - def DeleteCustomKeys(self): - keySetName=self.customKeys.get() - if not tkMessageBox.askyesno('Delete Key Set','Are you sure you wish '+ - 'to delete the key set %r ?' 
% (keySetName), - parent=self): - return - #remove key set from config - idleConf.userCfg['keys'].remove_section(keySetName) - if keySetName in self.changedItems['keys']: - del(self.changedItems['keys'][keySetName]) - #write changes - idleConf.userCfg['keys'].Save() - #reload user key set list - itemList=idleConf.GetSectionList('user','keys') - itemList.sort() - if not itemList: - self.radioKeysCustom.config(state=DISABLED) - self.optMenuKeysCustom.SetMenu(itemList,'- no custom keys -') - else: - self.optMenuKeysCustom.SetMenu(itemList,itemList[0]) - #revert to default key set - self.keysAreBuiltin.set(idleConf.defaultCfg['main'].Get('Keys','default')) - self.builtinKeys.set(idleConf.defaultCfg['main'].Get('Keys','name')) - #user can't back out of these changes, they must be applied now - self.Apply() - self.SetKeysType() - - def DeleteCustomTheme(self): - themeName=self.customTheme.get() - if not tkMessageBox.askyesno('Delete Theme','Are you sure you wish '+ - 'to delete the theme %r ?' % (themeName,), - parent=self): - return - #remove theme from config - idleConf.userCfg['highlight'].remove_section(themeName) - if themeName in self.changedItems['highlight']: - del(self.changedItems['highlight'][themeName]) - #write changes - idleConf.userCfg['highlight'].Save() - #reload user theme list - itemList=idleConf.GetSectionList('user','highlight') - itemList.sort() - if not itemList: - self.radioThemeCustom.config(state=DISABLED) - self.optMenuThemeCustom.SetMenu(itemList,'- no custom themes -') - else: - self.optMenuThemeCustom.SetMenu(itemList,itemList[0]) - #revert to default theme - self.themeIsBuiltin.set(idleConf.defaultCfg['main'].Get('Theme','default')) - self.builtinTheme.set(idleConf.defaultCfg['main'].Get('Theme','name')) - #user can't back out of these changes, they must be applied now - self.Apply() - self.SetThemeType() - - def GetColour(self): - target=self.highlightTarget.get() - prevColour=self.frameColourSet.cget('bg') - rgbTuplet, colourString = tkColorChooser.askcolor(parent=self, - title='Pick new colour for : '+target,initialcolor=prevColour) - if colourString and (colourString!=prevColour): - #user didn't cancel, and they chose a new colour - if self.themeIsBuiltin.get(): #current theme is a built-in - message=('Your changes will be saved as a new Custom Theme. 
'+ - 'Enter a name for your new Custom Theme below.') - newTheme=self.GetNewThemeName(message) - if not newTheme: #user cancelled custom theme creation - return - else: #create new custom theme based on previously active theme - self.CreateNewTheme(newTheme) - self.colour.set(colourString) - else: #current theme is user defined - self.colour.set(colourString) - - def OnNewColourSet(self): - newColour=self.colour.get() - self.frameColourSet.config(bg=newColour)#set sample - if self.fgHilite.get(): plane='foreground' - else: plane='background' - sampleElement=self.themeElements[self.highlightTarget.get()][0] - self.textHighlightSample.tag_config(sampleElement, **{plane:newColour}) - theme=self.customTheme.get() - themeElement=sampleElement+'-'+plane - self.AddChangedItem('highlight',theme,themeElement,newColour) - - def GetNewThemeName(self,message): - usedNames=(idleConf.GetSectionList('user','highlight')+ - idleConf.GetSectionList('default','highlight')) - newTheme=GetCfgSectionNameDialog(self,'New Custom Theme', - message,usedNames).result - return newTheme - - def SaveAsNewTheme(self): - newThemeName=self.GetNewThemeName('New Theme Name:') - if newThemeName: - self.CreateNewTheme(newThemeName) - - def CreateNewTheme(self,newThemeName): - #creates new custom theme based on the previously active theme, - #and makes the new theme active - if self.themeIsBuiltin.get(): - themeType='default' - themeName=self.builtinTheme.get() - else: - themeType='user' - themeName=self.customTheme.get() - newTheme=idleConf.GetThemeDict(themeType,themeName) - #apply any of the old theme's unsaved changes to the new theme - if themeName in self.changedItems['highlight']: - themeChanges=self.changedItems['highlight'][themeName] - for element in themeChanges: - newTheme[element]=themeChanges[element] - #save the new theme - self.SaveNewTheme(newThemeName,newTheme) - #change gui over to the new theme - customThemeList=idleConf.GetSectionList('user','highlight') - customThemeList.sort() - self.optMenuThemeCustom.SetMenu(customThemeList,newThemeName) - self.themeIsBuiltin.set(0) - self.SetThemeType() - - def OnListFontButtonRelease(self,event): - font = self.listFontName.get(ANCHOR) - self.fontName.set(font.lower()) - self.SetFontSample() - - def SetFontSample(self,event=None): - fontName=self.fontName.get() - if self.fontBold.get(): - fontWeight=tkFont.BOLD - else: - fontWeight=tkFont.NORMAL - newFont = (fontName, self.fontSize.get(), fontWeight) - self.labelFontSample.config(font=newFont) - self.textHighlightSample.configure(font=newFont) - - def SetHighlightTarget(self): - if self.highlightTarget.get()=='Cursor': #bg not possible - self.radioFg.config(state=DISABLED) - self.radioBg.config(state=DISABLED) - self.fgHilite.set(1) - else: #both fg and bg can be set - self.radioFg.config(state=NORMAL) - self.radioBg.config(state=NORMAL) - self.fgHilite.set(1) - self.SetColourSample() - - def SetColourSampleBinding(self,*args): - self.SetColourSample() - - def SetColourSample(self): - #set the colour smaple area - tag=self.themeElements[self.highlightTarget.get()][0] - if self.fgHilite.get(): plane='foreground' - else: plane='background' - colour=self.textHighlightSample.tag_cget(tag,plane) - self.frameColourSet.config(bg=colour) - - def PaintThemeSample(self): - if self.themeIsBuiltin.get(): #a default theme - theme=self.builtinTheme.get() - else: #a user theme - theme=self.customTheme.get() - for elementTitle in self.themeElements: - element=self.themeElements[elementTitle][0] - 
colours=idleConf.GetHighlight(theme,element) - if element=='cursor': #cursor sample needs special painting - colours['background']=idleConf.GetHighlight(theme, - 'normal', fgBg='bg') - #handle any unsaved changes to this theme - if theme in self.changedItems['highlight']: - themeDict=self.changedItems['highlight'][theme] - if element+'-foreground' in themeDict: - colours['foreground']=themeDict[element+'-foreground'] - if element+'-background' in themeDict: - colours['background']=themeDict[element+'-background'] - self.textHighlightSample.tag_config(element, **colours) - self.SetColourSample() - - def HelpSourceSelected(self,event): - self.SetHelpListButtonStates() - - def SetHelpListButtonStates(self): - if self.listHelp.size()<1: #no entries in list - self.buttonHelpListEdit.config(state=DISABLED) - self.buttonHelpListRemove.config(state=DISABLED) - else: #there are some entries - if self.listHelp.curselection(): #there currently is a selection - self.buttonHelpListEdit.config(state=NORMAL) - self.buttonHelpListRemove.config(state=NORMAL) - else: #there currently is not a selection - self.buttonHelpListEdit.config(state=DISABLED) - self.buttonHelpListRemove.config(state=DISABLED) - - def HelpListItemAdd(self): - helpSource=GetHelpSourceDialog(self,'New Help Source').result - if helpSource: - self.userHelpList.append( (helpSource[0],helpSource[1]) ) - self.listHelp.insert(END,helpSource[0]) - self.UpdateUserHelpChangedItems() - self.SetHelpListButtonStates() - - def HelpListItemEdit(self): - itemIndex=self.listHelp.index(ANCHOR) - helpSource=self.userHelpList[itemIndex] - newHelpSource=GetHelpSourceDialog(self,'Edit Help Source', - menuItem=helpSource[0],filePath=helpSource[1]).result - if (not newHelpSource) or (newHelpSource==helpSource): - return #no changes - self.userHelpList[itemIndex]=newHelpSource - self.listHelp.delete(itemIndex) - self.listHelp.insert(itemIndex,newHelpSource[0]) - self.UpdateUserHelpChangedItems() - self.SetHelpListButtonStates() - - def HelpListItemRemove(self): - itemIndex=self.listHelp.index(ANCHOR) - del(self.userHelpList[itemIndex]) - self.listHelp.delete(itemIndex) - self.UpdateUserHelpChangedItems() - self.SetHelpListButtonStates() - - def UpdateUserHelpChangedItems(self): - "Clear and rebuild the HelpFiles section in self.changedItems" - self.changedItems['main']['HelpFiles'] = {} - for num in range(1,len(self.userHelpList)+1): - self.AddChangedItem('main','HelpFiles',str(num), - ';'.join(self.userHelpList[num-1][:2])) - - def LoadFontCfg(self): - ##base editor font selection list - fonts=list(tkFont.families(self)) - fonts.sort() - for font in fonts: - self.listFontName.insert(END,font) - configuredFont=idleConf.GetOption('main','EditorWindow','font', - default='courier') - lc_configuredFont = configuredFont.lower() - self.fontName.set(lc_configuredFont) - lc_fonts = [s.lower() for s in fonts] - if lc_configuredFont in lc_fonts: - currentFontIndex = lc_fonts.index(lc_configuredFont) - self.listFontName.see(currentFontIndex) - self.listFontName.select_set(currentFontIndex) - self.listFontName.select_anchor(currentFontIndex) - ##font size dropdown - fontSize=idleConf.GetOption('main', 'EditorWindow', 'font-size', - type='int', default='10') - self.optMenuFontSize.SetMenu(('7','8','9','10','11','12','13','14', - '16','18','20','22'), fontSize ) - ##fontWeight - self.fontBold.set(idleConf.GetOption('main','EditorWindow', - 'font-bold',default=0,type='bool')) - ##font sample - self.SetFontSample() - - def LoadTabCfg(self): - ##indent sizes - 
spaceNum=idleConf.GetOption('main','Indent','num-spaces', - default=4,type='int') - self.spaceNum.set(spaceNum) - - def LoadThemeCfg(self): - ##current theme type radiobutton - self.themeIsBuiltin.set(idleConf.GetOption('main','Theme','default', - type='bool',default=1)) - ##currently set theme - currentOption=idleConf.CurrentTheme() - ##load available theme option menus - if self.themeIsBuiltin.get(): #default theme selected - itemList=idleConf.GetSectionList('default','highlight') - itemList.sort() - self.optMenuThemeBuiltin.SetMenu(itemList,currentOption) - itemList=idleConf.GetSectionList('user','highlight') - itemList.sort() - if not itemList: - self.radioThemeCustom.config(state=DISABLED) - self.customTheme.set('- no custom themes -') - else: - self.optMenuThemeCustom.SetMenu(itemList,itemList[0]) - else: #user theme selected - itemList=idleConf.GetSectionList('user','highlight') - itemList.sort() - self.optMenuThemeCustom.SetMenu(itemList,currentOption) - itemList=idleConf.GetSectionList('default','highlight') - itemList.sort() - self.optMenuThemeBuiltin.SetMenu(itemList,itemList[0]) - self.SetThemeType() - ##load theme element option menu - themeNames = list(self.themeElements.keys()) - themeNames.sort(key=lambda x: self.themeElements[x][1]) - self.optMenuHighlightTarget.SetMenu(themeNames,themeNames[0]) - self.PaintThemeSample() - self.SetHighlightTarget() - - def LoadKeyCfg(self): - ##current keys type radiobutton - self.keysAreBuiltin.set(idleConf.GetOption('main','Keys','default', - type='bool',default=1)) - ##currently set keys - currentOption=idleConf.CurrentKeys() - ##load available keyset option menus - if self.keysAreBuiltin.get(): #default theme selected - itemList=idleConf.GetSectionList('default','keys') - itemList.sort() - self.optMenuKeysBuiltin.SetMenu(itemList,currentOption) - itemList=idleConf.GetSectionList('user','keys') - itemList.sort() - if not itemList: - self.radioKeysCustom.config(state=DISABLED) - self.customKeys.set('- no custom keys -') - else: - self.optMenuKeysCustom.SetMenu(itemList,itemList[0]) - else: #user key set selected - itemList=idleConf.GetSectionList('user','keys') - itemList.sort() - self.optMenuKeysCustom.SetMenu(itemList,currentOption) - itemList=idleConf.GetSectionList('default','keys') - itemList.sort() - self.optMenuKeysBuiltin.SetMenu(itemList,itemList[0]) - self.SetKeysType() - ##load keyset element list - keySetName=idleConf.CurrentKeys() - self.LoadKeysList(keySetName) - - def LoadGeneralCfg(self): - #startup state - self.startupEdit.set(idleConf.GetOption('main','General', - 'editor-on-startup',default=1,type='bool')) - #autosave state - self.autoSave.set(idleConf.GetOption('main', 'General', 'autosave', - default=0, type='bool')) - #initial window size - self.winWidth.set(idleConf.GetOption('main','EditorWindow','width', - type='int')) - self.winHeight.set(idleConf.GetOption('main','EditorWindow','height', - type='int')) - #initial paragraph reformat size - self.paraWidth.set(idleConf.GetOption('main','FormatParagraph','paragraph', - type='int')) - # default source encoding - self.encoding.set(idleConf.GetOption('main', 'EditorWindow', - 'encoding', default='none')) - # additional help sources - self.userHelpList = idleConf.GetAllExtraHelpSourcesList() - for helpItem in self.userHelpList: - self.listHelp.insert(END,helpItem[0]) - self.SetHelpListButtonStates() - - def LoadConfigs(self): - """ - load configuration from default and user config files and populate - the widgets on the config dialog pages. 
- """ - ### fonts / tabs page - self.LoadFontCfg() - self.LoadTabCfg() - ### highlighting page - self.LoadThemeCfg() - ### keys page - self.LoadKeyCfg() - ### general page - self.LoadGeneralCfg() - - def SaveNewKeySet(self,keySetName,keySet): - """ - save a newly created core key set. - keySetName - string, the name of the new key set - keySet - dictionary containing the new key set - """ - if not idleConf.userCfg['keys'].has_section(keySetName): - idleConf.userCfg['keys'].add_section(keySetName) - for event in keySet: - value=keySet[event] - idleConf.userCfg['keys'].SetOption(keySetName,event,value) - - def SaveNewTheme(self,themeName,theme): - """ - save a newly created theme. - themeName - string, the name of the new theme - theme - dictionary containing the new theme - """ - if not idleConf.userCfg['highlight'].has_section(themeName): - idleConf.userCfg['highlight'].add_section(themeName) - for element in theme: - value=theme[element] - idleConf.userCfg['highlight'].SetOption(themeName,element,value) - - def SetUserValue(self,configType,section,item,value): - if idleConf.defaultCfg[configType].has_option(section,item): - if idleConf.defaultCfg[configType].Get(section,item)==value: - #the setting equals a default setting, remove it from user cfg - return idleConf.userCfg[configType].RemoveOption(section,item) - #if we got here set the option - return idleConf.userCfg[configType].SetOption(section,item,value) - - def SaveAllChangedConfigs(self): - "Save configuration changes to the user config file." - idleConf.userCfg['main'].Save() - for configType in self.changedItems: - cfgTypeHasChanges = False - for section in self.changedItems[configType]: - if section == 'HelpFiles': - #this section gets completely replaced - idleConf.userCfg['main'].remove_section('HelpFiles') - cfgTypeHasChanges = True - for item in self.changedItems[configType][section]: - value = self.changedItems[configType][section][item] - if self.SetUserValue(configType,section,item,value): - cfgTypeHasChanges = True - if cfgTypeHasChanges: - idleConf.userCfg[configType].Save() - for configType in ['keys', 'highlight']: - # save these even if unchanged! 
- idleConf.userCfg[configType].Save() - self.ResetChangedItems() #clear the changed items dict - - def DeactivateCurrentConfig(self): - #Before a config is saved, some cleanup of current - #config must be done - remove the previous keybindings - winInstances = self.parent.instance_dict.keys() - for instance in winInstances: - instance.RemoveKeybindings() - - def ActivateConfigChanges(self): - "Dynamically apply configuration changes" - winInstances = self.parent.instance_dict.keys() - for instance in winInstances: - instance.ResetColorizer() - instance.ResetFont() - instance.set_notabs_indentwidth() - instance.ApplyKeybindings() - instance.reset_help_menu_entries() - - def Cancel(self): - self.destroy() - - def Ok(self): - self.Apply() - self.destroy() - - def Apply(self): - self.DeactivateCurrentConfig() - self.SaveAllChangedConfigs() - self.ActivateConfigChanges() - - def Help(self): - pass - -if __name__ == '__main__': - #test the dialog - root=Tk() - Button(root,text='Dialog', - command=lambda:ConfigDialog(root,'Settings')).pack() - root.instance_dict={} - root.mainloop() diff --git a/Darwin/lib/python3.4/idlelib/idle.pyw b/Darwin/lib/python3.4/idlelib/idle.pyw deleted file mode 100644 index 0db5fd4..0000000 --- a/Darwin/lib/python3.4/idlelib/idle.pyw +++ /dev/null @@ -1,21 +0,0 @@ -try: - import idlelib.PyShell -except ImportError: - # IDLE is not installed, but maybe PyShell is on sys.path: - try: - from . import PyShell - except ImportError: - raise - else: - import os - idledir = os.path.dirname(os.path.abspath(PyShell.__file__)) - if idledir != os.getcwd(): - # We're not in the IDLE directory, help the subprocess find run.py - pypath = os.environ.get('PYTHONPATH', '') - if pypath: - os.environ['PYTHONPATH'] = pypath + ':' + idledir - else: - os.environ['PYTHONPATH'] = idledir - PyShell.main() -else: - idlelib.PyShell.main() diff --git a/Darwin/lib/python3.4/idlelib/idle_test/README.txt b/Darwin/lib/python3.4/idlelib/idle_test/README.txt deleted file mode 100644 index 6b92483..0000000 --- a/Darwin/lib/python3.4/idlelib/idle_test/README.txt +++ /dev/null @@ -1,110 +0,0 @@ -README FOR IDLE TESTS IN IDLELIB.IDLE_TEST - - -1. Test Files - -The idle directory, idlelib, has over 60 xyz.py files. The idle_test -subdirectory should contain a test_xyy.py for each. (For test modules, make -'xyz' lower case, and possibly shorten it.) Each file should start with the -something like the following template, with the blanks after after '.' and 'as', -and before and after '_' filled in. ---- -import unittest -from test.support import requires -import idlelib. as - -class _Test(unittest.TestCase): - - def test_(self): - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=2) ---- -Idle tests are run with unittest; do not use regrtest's test_main. - -Once test_xyy is written, the following should go at the end of xyy.py, -with xyz (lowercased) added after 'test_'. ---- -if __name__ == "__main__": - from test import support; support.use_resources = ['gui'] - import unittest - unittest.main('idlelib.idle_test.test_', verbosity=2, exit=False) ---- - - -2. Gui Tests - -Gui tests need 'requires' and 'use_resources' from test.support -(test.test_support in 2.7). A test is a gui test if it creates a Tk root or -master object either directly or indirectly by instantiating a tkinter or -idle class. For the benefit of buildbot machines that do not have a graphics -screen, gui tests must be 'guarded' by "requires('gui')" in a setUp -function or method. This will typically be setUpClass. 
- -To avoid interfering with other gui tests, all gui objects must be destroyed -and deleted by the end of the test. If a widget, such as a Tk root, is created -in a setUpX function, destroy it in the corresponding tearDownX. For module -and class attributes, also delete the widget. ---- - @classmethod - def setUpClass(cls): - requires('gui') - cls.root = tk.Tk() - - @classmethod - def tearDownClass(cls): - cls.root.destroy() - del cls.root ---- - -Support.requires('gui') returns true if it is either called in a main module -(which never happens on buildbots) or if use_resources contains 'gui'. -Use_resources is set by test.regrtest but not by unittest. So when running -tests in another module with unittest, we set it ourselves, as in the xyz.py -template above. - -Since non-gui tests always run, but gui tests only sometimes, tests of non-gui -operations should best avoid needing a gui. Methods that make incidental use of -tkinter (tk) variables and messageboxes can do this by using the mock classes in -idle_test/mock_tk.py. There is also a mock text that will handle some uses of the -tk Text widget. - - -3. Running Tests - -Assume that xyz.py and test_xyz.py end with the "if __name__" statements given -above. In Idle, pressing F5 in an editor window with either loaded will run all -tests in the test_xyz file with the version of Python running Idle. The test -report and any tracebacks will appear in the Shell window. The options in these -"if __name__" statements are appropriate for developers running (as opposed to -importing) either of the files during development: verbosity=2 lists all test -methods in the file; exit=False avoids a spurious sys.exit traceback that would -otherwise occur when running in Idle. The following command lines also run -all test methods, including gui tests, in test_xyz.py. (The exceptions are that -idlelib and idlelib.idle start Idle and idlelib.PyShell should (issue 18330).) - -python -m idlelib.xyz # With the capitalization of the xyz module -python -m idlelib.idle_test.test_xyz - -To run all idle_test/test_*.py tests, either interactively -('>>>', with unittest imported) or from a command line, use one of the -following. (Notes: unittest does not run gui tests; in 2.7, 'test ' (with the -space) is 'test.regrtest '; where present, -v and -ugui can be omitted.) - ->>> unittest.main('idlelib.idle_test', verbosity=2, exit=False) -python -m unittest -v idlelib.idle_test -python -m test -v -ugui test_idle -python -m test.test_idle - -The idle tests are 'discovered' by idlelib.idle_test.__init__.load_tests, -which is also imported into test.test_idle. Normally, neither file should be -changed when working on individual test modules. The third command runs runs -unittest indirectly through regrtest. The same happens when the entire test -suite is run with 'python -m test'. So that command must work for buildbots -to stay green. Idle tests must not disturb the environment in a way that -makes other tests fail (issue 18081). - -To run an individual Testcase or test method, extend the dotted name given to -unittest on the command line. (But gui tests will not this way.) - -python -m unittest -v idlelib.idle_test.test_xyz.Test_case.test_meth diff --git a/Darwin/lib/python3.4/idlelib/idle_test/htest.py b/Darwin/lib/python3.4/idlelib/idle_test/htest.py deleted file mode 100644 index 001f7ee..0000000 --- a/Darwin/lib/python3.4/idlelib/idle_test/htest.py +++ /dev/null @@ -1,93 +0,0 @@ -'''Run human tests of Idle's window, dialog, and popup widgets. 
- -run(test): run *test*, a callable that causes a widget to be displayed. -runall(): run all tests defined in this file. - -Let X be a global name bound to a widget callable. End the module with - -if __name__ == '__main__': - - from idlelib.idle_test.htest import run - run(X) - -The X object must have a .__name__ attribute and a 'parent' parameter. -X will often be a widget class, but a callable instance with .__name__ -or a wrapper function also work. The name of wrapper functions, like -'_Editor_Window', should start with '_'. - -This file must contain a matching instance of the folling template, -with X.__name__ prepended, as in '_Editor_window_spec ...'. - -_spec = { - 'file': '', - 'kwds': {'title': ''}, - 'msg': "" - } - -file (no .py): used in runall() to import the file and get X. -kwds: passed to X (**kwds), after 'parent' is added, to initialize X. -title: an example; used for some widgets, delete if not. -msg: displayed in a master window. Hints as to how the user might - test the widget. Close the window to skip or end the test. -''' -from importlib import import_module -import tkinter as tk - - -_Editor_window_spec = { - 'file': 'EditorWindow', - 'kwds': {}, - 'msg': "Test editor functions of interest" - } - -_Help_dialog_spec = { - 'file': 'EditorWindow', - 'kwds': {}, - 'msg': "If the help text displays, this works" - } - -AboutDialog_spec = { - 'file': 'aboutDialog', - 'kwds': {'title': 'About test'}, - 'msg': "Try each button" - } - - -GetCfgSectionNameDialog_spec = { - 'file': 'configSectionNameDialog', - 'kwds': {'title':'Get Name', - 'message':'Enter something', - 'used_names': {'abc'}, - '_htest': True}, - 'msg': "After the text entered with [Ok] is stripped, , " - "'abc', or more that 30 chars are errors.\n" - "Close 'Get Name' with a valid entry (printed to Shell), [Cancel], or [X]", - } - -def run(test): - "Display a widget with callable *test* using a _spec dict" - root = tk.Tk() - test_spec = globals()[test.__name__ + '_spec'] - test_kwds = test_spec['kwds'] - test_kwds['parent'] = root - - def run_test(): - widget = test(**test_kwds) - try: - print(widget.result) - except AttributeError: - pass - tk.Label(root, text=test_spec['msg'], justify='left').pack() - tk.Button(root, text='Test ' + test.__name__, command=run_test).pack() - root.mainloop() - -def runall(): - "Run all tests. Quick and dirty version." - for k, d in globals().items(): - if k.endswith('_spec'): - mod = import_module('idlelib.' + d['file']) - test = getattr(mod, k[:-5]) - run(test) - -if __name__ == '__main__': - runall() diff --git a/Darwin/lib/python3.4/idlelib/idle_test/mock_idle.py b/Darwin/lib/python3.4/idlelib/idle_test/mock_idle.py deleted file mode 100644 index c364a24..0000000 --- a/Darwin/lib/python3.4/idlelib/idle_test/mock_idle.py +++ /dev/null @@ -1,27 +0,0 @@ -'''Mock classes that imitate idlelib modules or classes. - -Attributes and methods will be added as needed for tests. -''' - -from idlelib.idle_test.mock_tk import Text - -class Editor: - '''Minimally imitate EditorWindow.EditorWindow class. - ''' - def __init__(self, flist=None, filename=None, key=None, root=None): - self.text = Text() - self.undo = UndoDelegator() - - def get_selection_indices(self): - first = self.text.index('1.0') - last = self.text.index('end') - return first, last - -class UndoDelegator: - '''Minimally imitate UndoDelegator,UndoDelegator class. - ''' - # A real undo block is only needed for user interaction. 
- def undo_block_start(*args): - pass - def undo_block_stop(*args): - pass diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_pathbrowser.py b/Darwin/lib/python3.4/idlelib/idle_test/test_pathbrowser.py deleted file mode 100644 index 7ad7c97..0000000 --- a/Darwin/lib/python3.4/idlelib/idle_test/test_pathbrowser.py +++ /dev/null @@ -1,12 +0,0 @@ -import unittest -import idlelib.PathBrowser as PathBrowser - -class PathBrowserTest(unittest.TestCase): - - def test_DirBrowserTreeItem(self): - # Issue16226 - make sure that getting a sublist works - d = PathBrowser.DirBrowserTreeItem('') - d.GetSubList() - -if __name__ == '__main__': - unittest.main(verbosity=2, exit=False) diff --git a/Darwin/lib/python3.4/idlelib/idlever.py b/Darwin/lib/python3.4/idlelib/idlever.py deleted file mode 100644 index 22acb41..0000000 --- a/Darwin/lib/python3.4/idlelib/idlever.py +++ /dev/null @@ -1 +0,0 @@ -IDLE_VERSION = "3.4.1" diff --git a/Darwin/lib/python3.4/idlelib/testcode.py b/Darwin/lib/python3.4/idlelib/testcode.py deleted file mode 100644 index 05eaa56..0000000 --- a/Darwin/lib/python3.4/idlelib/testcode.py +++ /dev/null @@ -1,31 +0,0 @@ -import string - -def f(): - a = 0 - b = 1 - c = 2 - d = 3 - e = 4 - g() - -def g(): - h() - -def h(): - i() - -def i(): - j() - -def j(): - k() - -def k(): - l() - -l = lambda: test() - -def test(): - string.capwords(1) - -f() diff --git a/Darwin/lib/python3.4/importlib/machinery.py b/Darwin/lib/python3.4/importlib/machinery.py deleted file mode 100644 index 2e1b2d7..0000000 --- a/Darwin/lib/python3.4/importlib/machinery.py +++ /dev/null @@ -1,21 +0,0 @@ -"""The machinery of importlib: finders, loaders, hooks, etc.""" - -import _imp - -from ._bootstrap import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES, - OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES, - EXTENSION_SUFFIXES) -from ._bootstrap import ModuleSpec -from ._bootstrap import BuiltinImporter -from ._bootstrap import FrozenImporter -from ._bootstrap import WindowsRegistryFinder -from ._bootstrap import PathFinder -from ._bootstrap import FileFinder -from ._bootstrap import SourceFileLoader -from ._bootstrap import SourcelessFileLoader -from ._bootstrap import ExtensionFileLoader - - -def all_suffixes(): - """Returns a list of all recognized module suffixes for this process""" - return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES diff --git a/Darwin/lib/python3.4/json/tool.py b/Darwin/lib/python3.4/json/tool.py deleted file mode 100644 index 7db4528..0000000 --- a/Darwin/lib/python3.4/json/tool.py +++ /dev/null @@ -1,39 +0,0 @@ -r"""Command-line tool to validate and pretty-print JSON - -Usage:: - - $ echo '{"json":"obj"}' | python -m json.tool - { - "json": "obj" - } - $ echo '{ 1.2:3.4}' | python -m json.tool - Expecting property name enclosed in double quotes: line 1 column 3 (char 2) - -""" -import sys -import json - -def main(): - if len(sys.argv) == 1: - infile = sys.stdin - outfile = sys.stdout - elif len(sys.argv) == 2: - infile = open(sys.argv[1], 'r') - outfile = sys.stdout - elif len(sys.argv) == 3: - infile = open(sys.argv[1], 'r') - outfile = open(sys.argv[2], 'w') - else: - raise SystemExit(sys.argv[0] + " [infile [outfile]]") - with infile: - try: - obj = json.load(infile) - except ValueError as e: - raise SystemExit(e) - with outfile: - json.dump(obj, outfile, sort_keys=True, indent=4) - outfile.write('\n') - - -if __name__ == '__main__': - main() diff --git a/Darwin/lib/python3.4/lib-dynload/_bisect.so b/Darwin/lib/python3.4/lib-dynload/_bisect.so deleted file mode 100755 index 
5c89562..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_bisect.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_bz2.so b/Darwin/lib/python3.4/lib-dynload/_bz2.so deleted file mode 100755 index 445fedd..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_bz2.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_cn.so b/Darwin/lib/python3.4/lib-dynload/_codecs_cn.so deleted file mode 100755 index 4875843..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_cn.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_hk.so b/Darwin/lib/python3.4/lib-dynload/_codecs_hk.so deleted file mode 100755 index 72847d5..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_hk.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_iso2022.so b/Darwin/lib/python3.4/lib-dynload/_codecs_iso2022.so deleted file mode 100755 index d416d01..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_iso2022.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_jp.so b/Darwin/lib/python3.4/lib-dynload/_codecs_jp.so deleted file mode 100755 index 1b1e84b..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_jp.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_kr.so b/Darwin/lib/python3.4/lib-dynload/_codecs_kr.so deleted file mode 100755 index a0f09d1..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_kr.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_codecs_tw.so b/Darwin/lib/python3.4/lib-dynload/_codecs_tw.so deleted file mode 100755 index 75ea7f9..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_codecs_tw.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_csv.so b/Darwin/lib/python3.4/lib-dynload/_csv.so deleted file mode 100755 index 8208855..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_csv.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_ctypes.so b/Darwin/lib/python3.4/lib-dynload/_ctypes.so deleted file mode 100755 index abcfcc3..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_ctypes.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_ctypes_test.so b/Darwin/lib/python3.4/lib-dynload/_ctypes_test.so deleted file mode 100755 index 6b4c34d..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_ctypes_test.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_curses.so b/Darwin/lib/python3.4/lib-dynload/_curses.so deleted file mode 100755 index c6df6b8..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_curses.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_curses_panel.so b/Darwin/lib/python3.4/lib-dynload/_curses_panel.so deleted file mode 100755 index 3828a1f..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_curses_panel.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_datetime.so b/Darwin/lib/python3.4/lib-dynload/_datetime.so deleted file mode 100755 index d3bfa6b..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_datetime.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_dbm.so b/Darwin/lib/python3.4/lib-dynload/_dbm.so deleted file mode 100755 index a92abda..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_dbm.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_decimal.so b/Darwin/lib/python3.4/lib-dynload/_decimal.so deleted file mode 100755 index 
6886f34..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_decimal.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_elementtree.so b/Darwin/lib/python3.4/lib-dynload/_elementtree.so deleted file mode 100755 index 00c4193..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_elementtree.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_hashlib.so b/Darwin/lib/python3.4/lib-dynload/_hashlib.so deleted file mode 100755 index 08a23d5..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_hashlib.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_json.so b/Darwin/lib/python3.4/lib-dynload/_json.so deleted file mode 100755 index 96fa397..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_json.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_lsprof.so b/Darwin/lib/python3.4/lib-dynload/_lsprof.so deleted file mode 100755 index 8124ebf..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_lsprof.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_lzma.so b/Darwin/lib/python3.4/lib-dynload/_lzma.so deleted file mode 100755 index a5f4ce1..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_lzma.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_md5.so b/Darwin/lib/python3.4/lib-dynload/_md5.so deleted file mode 100755 index ece0555..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_md5.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_multibytecodec.so b/Darwin/lib/python3.4/lib-dynload/_multibytecodec.so deleted file mode 100755 index ddc4a09..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_multibytecodec.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_multiprocessing.so b/Darwin/lib/python3.4/lib-dynload/_multiprocessing.so deleted file mode 100755 index f28c791..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_multiprocessing.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_pickle.so b/Darwin/lib/python3.4/lib-dynload/_pickle.so deleted file mode 100755 index f656fa9..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_pickle.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_posixsubprocess.so b/Darwin/lib/python3.4/lib-dynload/_posixsubprocess.so deleted file mode 100755 index ca83205..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_posixsubprocess.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_random.so b/Darwin/lib/python3.4/lib-dynload/_random.so deleted file mode 100755 index 29ff606..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_random.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_scproxy.so b/Darwin/lib/python3.4/lib-dynload/_scproxy.so deleted file mode 100755 index c0ec853..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_scproxy.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_sha1.so b/Darwin/lib/python3.4/lib-dynload/_sha1.so deleted file mode 100755 index 803ca6b..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_sha1.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_sha256.so b/Darwin/lib/python3.4/lib-dynload/_sha256.so deleted file mode 100755 index ac0353c..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_sha256.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_sha512.so b/Darwin/lib/python3.4/lib-dynload/_sha512.so deleted file mode 100755 index 032cf80..0000000 
Binary files a/Darwin/lib/python3.4/lib-dynload/_sha512.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_socket.so b/Darwin/lib/python3.4/lib-dynload/_socket.so deleted file mode 100755 index 1f2dea8..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_socket.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_sqlite3.so b/Darwin/lib/python3.4/lib-dynload/_sqlite3.so deleted file mode 100755 index fd731f2..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_sqlite3.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_ssl.so b/Darwin/lib/python3.4/lib-dynload/_ssl.so deleted file mode 100755 index a7a9127..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_ssl.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_struct.so b/Darwin/lib/python3.4/lib-dynload/_struct.so deleted file mode 100755 index dc15fec..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_struct.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_testbuffer.so b/Darwin/lib/python3.4/lib-dynload/_testbuffer.so deleted file mode 100755 index f2c9695..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_testbuffer.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_testcapi.so b/Darwin/lib/python3.4/lib-dynload/_testcapi.so deleted file mode 100755 index bc1ae17..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_testcapi.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/_tkinter.so b/Darwin/lib/python3.4/lib-dynload/_tkinter.so deleted file mode 100755 index 0bcd793..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/_tkinter.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/array.so b/Darwin/lib/python3.4/lib-dynload/array.so deleted file mode 100755 index e30391d..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/array.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/audioop.so b/Darwin/lib/python3.4/lib-dynload/audioop.so deleted file mode 100755 index c2810c3..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/audioop.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/binascii.so b/Darwin/lib/python3.4/lib-dynload/binascii.so deleted file mode 100755 index 9fb8243..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/binascii.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/cmath.so b/Darwin/lib/python3.4/lib-dynload/cmath.so deleted file mode 100755 index cc98a33..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/cmath.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/fcntl.so b/Darwin/lib/python3.4/lib-dynload/fcntl.so deleted file mode 100755 index d448e98..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/fcntl.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/grp.so b/Darwin/lib/python3.4/lib-dynload/grp.so deleted file mode 100755 index 6b9ed36..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/grp.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/math.so b/Darwin/lib/python3.4/lib-dynload/math.so deleted file mode 100755 index 033508e..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/math.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/mmap.so b/Darwin/lib/python3.4/lib-dynload/mmap.so deleted file mode 100755 index e401162..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/mmap.so and /dev/null differ diff --git 
a/Darwin/lib/python3.4/lib-dynload/nis.so b/Darwin/lib/python3.4/lib-dynload/nis.so deleted file mode 100755 index e6041fe..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/nis.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/parser.so b/Darwin/lib/python3.4/lib-dynload/parser.so deleted file mode 100755 index 6dee505..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/parser.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/pyexpat.so b/Darwin/lib/python3.4/lib-dynload/pyexpat.so deleted file mode 100755 index 161c4d8..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/pyexpat.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/readline.so b/Darwin/lib/python3.4/lib-dynload/readline.so deleted file mode 100755 index 2b7fbd6..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/readline.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/resource.so b/Darwin/lib/python3.4/lib-dynload/resource.so deleted file mode 100755 index 352f293..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/resource.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/select.so b/Darwin/lib/python3.4/lib-dynload/select.so deleted file mode 100755 index 488e20c..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/select.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/syslog.so b/Darwin/lib/python3.4/lib-dynload/syslog.so deleted file mode 100755 index 21b1eb2..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/syslog.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/termios.so b/Darwin/lib/python3.4/lib-dynload/termios.so deleted file mode 100755 index 11fcecb..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/termios.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/time.so b/Darwin/lib/python3.4/lib-dynload/time.so deleted file mode 100755 index 690e77c..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/time.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/unicodedata.so b/Darwin/lib/python3.4/lib-dynload/unicodedata.so deleted file mode 100755 index 9906182..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/unicodedata.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/xxlimited.so b/Darwin/lib/python3.4/lib-dynload/xxlimited.so deleted file mode 100755 index 589049f..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/xxlimited.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib-dynload/zlib.so b/Darwin/lib/python3.4/lib-dynload/zlib.so deleted file mode 100755 index 54efc58..0000000 Binary files a/Darwin/lib/python3.4/lib-dynload/zlib.so and /dev/null differ diff --git a/Darwin/lib/python3.4/lib2to3/Grammar3.4.1.final.0.pickle b/Darwin/lib/python3.4/lib2to3/Grammar3.4.1.final.0.pickle deleted file mode 100644 index 6ee5a60..0000000 Binary files a/Darwin/lib/python3.4/lib2to3/Grammar3.4.1.final.0.pickle and /dev/null differ diff --git a/Darwin/lib/python3.4/lib2to3/PatternGrammar3.4.1.final.0.pickle b/Darwin/lib/python3.4/lib2to3/PatternGrammar3.4.1.final.0.pickle deleted file mode 100644 index 1f0a238..0000000 Binary files a/Darwin/lib/python3.4/lib2to3/PatternGrammar3.4.1.final.0.pickle and /dev/null differ diff --git a/Darwin/lib/python3.4/lib2to3/tests/__init__.py b/Darwin/lib/python3.4/lib2to3/tests/__init__.py deleted file mode 100644 index cfaea0d..0000000 --- a/Darwin/lib/python3.4/lib2to3/tests/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ 
-"""Make tests/ into a package. This allows us to "import tests" and -have tests.all_tests be a TestSuite representing all test cases -from all test_*.py files in tests/.""" -# Author: Collin Winter - -import os -import os.path -import unittest -import types - -from . import support - -all_tests = unittest.TestSuite() - -tests_dir = os.path.join(os.path.dirname(__file__), '..', 'tests') -tests = [t[0:-3] for t in os.listdir(tests_dir) - if t.startswith('test_') and t.endswith('.py')] - -loader = unittest.TestLoader() - -for t in tests: - __import__("",globals(),locals(),[t],level=1) - mod = globals()[t] - all_tests.addTests(loader.loadTestsFromModule(mod)) diff --git a/Darwin/lib/python3.4/pprint.py b/Darwin/lib/python3.4/pprint.py deleted file mode 100644 index 3be9c36..0000000 --- a/Darwin/lib/python3.4/pprint.py +++ /dev/null @@ -1,416 +0,0 @@ -# Author: Fred L. Drake, Jr. -# fdrake@acm.org -# -# This is a simple little module I wrote to make life easier. I didn't -# see anything quite like it in the library, though I may have overlooked -# something. I wrote this when I was trying to read some heavily nested -# tuples with fairly non-descriptive content. This is modeled very much -# after Lisp/Scheme - style pretty-printing of lists. If you find it -# useful, thank small children who sleep at night. - -"""Support to pretty-print lists, tuples, & dictionaries recursively. - -Very simple, but useful, especially in debugging data structures. - -Classes -------- - -PrettyPrinter() - Handle pretty-printing operations onto a stream using a configured - set of formatting parameters. - -Functions ---------- - -pformat() - Format a Python object into a pretty-printed representation. - -pprint() - Pretty-print a Python object to a stream [default is sys.stdout]. - -saferepr() - Generate a 'standard' repr()-like value, but protect against recursive - data structures. - -""" - -import re -import sys as _sys -from collections import OrderedDict as _OrderedDict -from io import StringIO as _StringIO - -__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", - "PrettyPrinter"] - - -def pprint(object, stream=None, indent=1, width=80, depth=None, *, - compact=False): - """Pretty-print a Python object to a stream [default is sys.stdout].""" - printer = PrettyPrinter( - stream=stream, indent=indent, width=width, depth=depth, - compact=compact) - printer.pprint(object) - -def pformat(object, indent=1, width=80, depth=None, *, compact=False): - """Format a Python object into a pretty-printed representation.""" - return PrettyPrinter(indent=indent, width=width, depth=depth, - compact=compact).pformat(object) - -def saferepr(object): - """Version of repr() which can handle recursive data structures.""" - return _safe_repr(object, {}, None, 0)[0] - -def isreadable(object): - """Determine if saferepr(object) is readable by eval().""" - return _safe_repr(object, {}, None, 0)[1] - -def isrecursive(object): - """Determine if object requires a recursive representation.""" - return _safe_repr(object, {}, None, 0)[2] - -class _safe_key: - """Helper function for key functions when sorting unorderable objects. - - The wrapped-object will fallback to an Py2.x style comparison for - unorderable types (sorting first comparing the type name and then by - the obj ids). Does not work recursively, so dict.items() must have - _safe_key applied to both the key and the value. 
- - """ - - __slots__ = ['obj'] - - def __init__(self, obj): - self.obj = obj - - def __lt__(self, other): - try: - rv = self.obj.__lt__(other.obj) - except TypeError: - rv = NotImplemented - - if rv is NotImplemented: - rv = (str(type(self.obj)), id(self.obj)) < \ - (str(type(other.obj)), id(other.obj)) - return rv - -def _safe_tuple(t): - "Helper function for comparing 2-tuples" - return _safe_key(t[0]), _safe_key(t[1]) - -class PrettyPrinter: - def __init__(self, indent=1, width=80, depth=None, stream=None, *, - compact=False): - """Handle pretty printing operations onto a stream using a set of - configured parameters. - - indent - Number of spaces to indent for each level of nesting. - - width - Attempted maximum number of columns in the output. - - depth - The maximum depth to print out nested structures. - - stream - The desired output stream. If omitted (or false), the standard - output stream available at construction will be used. - - compact - If true, several items will be combined in one line. - - """ - indent = int(indent) - width = int(width) - assert indent >= 0, "indent must be >= 0" - assert depth is None or depth > 0, "depth must be > 0" - assert width, "width must be != 0" - self._depth = depth - self._indent_per_level = indent - self._width = width - if stream is not None: - self._stream = stream - else: - self._stream = _sys.stdout - self._compact = bool(compact) - - def pprint(self, object): - self._format(object, self._stream, 0, 0, {}, 0) - self._stream.write("\n") - - def pformat(self, object): - sio = _StringIO() - self._format(object, sio, 0, 0, {}, 0) - return sio.getvalue() - - def isrecursive(self, object): - return self.format(object, {}, 0, 0)[2] - - def isreadable(self, object): - s, readable, recursive = self.format(object, {}, 0, 0) - return readable and not recursive - - def _format(self, object, stream, indent, allowance, context, level): - level = level + 1 - objid = id(object) - if objid in context: - stream.write(_recursion(object)) - self._recursive = True - self._readable = False - return - rep = self._repr(object, context, level - 1) - typ = type(object) - max_width = self._width - 1 - indent - allowance - sepLines = len(rep) > max_width - write = stream.write - - if sepLines: - r = getattr(typ, "__repr__", None) - if issubclass(typ, dict): - write('{') - if self._indent_per_level > 1: - write((self._indent_per_level - 1) * ' ') - length = len(object) - if length: - context[objid] = 1 - indent = indent + self._indent_per_level - if issubclass(typ, _OrderedDict): - items = list(object.items()) - else: - items = sorted(object.items(), key=_safe_tuple) - key, ent = items[0] - rep = self._repr(key, context, level) - write(rep) - write(': ') - self._format(ent, stream, indent + len(rep) + 2, - allowance + 1, context, level) - if length > 1: - for key, ent in items[1:]: - rep = self._repr(key, context, level) - write(',\n%s%s: ' % (' '*indent, rep)) - self._format(ent, stream, indent + len(rep) + 2, - allowance + 1, context, level) - indent = indent - self._indent_per_level - del context[objid] - write('}') - return - - if ((issubclass(typ, list) and r is list.__repr__) or - (issubclass(typ, tuple) and r is tuple.__repr__) or - (issubclass(typ, set) and r is set.__repr__) or - (issubclass(typ, frozenset) and r is frozenset.__repr__) - ): - length = len(object) - if issubclass(typ, list): - write('[') - endchar = ']' - elif issubclass(typ, tuple): - write('(') - endchar = ')' - else: - if not length: - write(rep) - return - if typ is set: - write('{') - 
endchar = '}' - else: - write(typ.__name__) - write('({') - endchar = '})' - indent += len(typ.__name__) + 1 - object = sorted(object, key=_safe_key) - if self._indent_per_level > 1: - write((self._indent_per_level - 1) * ' ') - if length: - context[objid] = 1 - self._format_items(object, stream, - indent + self._indent_per_level, - allowance + 1, context, level) - del context[objid] - if issubclass(typ, tuple) and length == 1: - write(',') - write(endchar) - return - - if issubclass(typ, str) and len(object) > 0 and r is str.__repr__: - def _str_parts(s): - """ - Return a list of string literals comprising the repr() - of the given string using literal concatenation. - """ - lines = s.splitlines(True) - for i, line in enumerate(lines): - rep = repr(line) - if len(rep) <= max_width: - yield rep - else: - # A list of alternating (non-space, space) strings - parts = re.split(r'(\s+)', line) + [''] - current = '' - for i in range(0, len(parts), 2): - part = parts[i] + parts[i+1] - candidate = current + part - if len(repr(candidate)) > max_width: - if current: - yield repr(current) - current = part - else: - current = candidate - if current: - yield repr(current) - for i, rep in enumerate(_str_parts(object)): - if i > 0: - write('\n' + ' '*indent) - write(rep) - return - write(rep) - - def _format_items(self, items, stream, indent, allowance, context, level): - write = stream.write - delimnl = ',\n' + ' ' * indent - delim = '' - width = max_width = self._width - indent - allowance + 2 - for ent in items: - if self._compact: - rep = self._repr(ent, context, level) - w = len(rep) + 2 - if width < w: - width = max_width - if delim: - delim = delimnl - if width >= w: - width -= w - write(delim) - delim = ', ' - write(rep) - continue - write(delim) - delim = delimnl - self._format(ent, stream, indent, allowance, context, level) - - def _repr(self, object, context, level): - repr, readable, recursive = self.format(object, context.copy(), - self._depth, level) - if not readable: - self._readable = False - if recursive: - self._recursive = True - return repr - - def format(self, object, context, maxlevels, level): - """Format object for a specific context, returning a string - and flags indicating whether the representation is 'readable' - and whether the object represents a recursive construct. - """ - return _safe_repr(object, context, maxlevels, level) - - -# Return triple (repr_string, isreadable, isrecursive). 
- -def _safe_repr(object, context, maxlevels, level): - typ = type(object) - if typ is str: - if 'locale' not in _sys.modules: - return repr(object), True, False - if "'" in object and '"' not in object: - closure = '"' - quotes = {'"': '\\"'} - else: - closure = "'" - quotes = {"'": "\\'"} - qget = quotes.get - sio = _StringIO() - write = sio.write - for char in object: - if char.isalpha(): - write(char) - else: - write(qget(char, repr(char)[1:-1])) - return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False - - r = getattr(typ, "__repr__", None) - if issubclass(typ, dict) and r is dict.__repr__: - if not object: - return "{}", True, False - objid = id(object) - if maxlevels and level >= maxlevels: - return "{...}", False, objid in context - if objid in context: - return _recursion(object), False, True - context[objid] = 1 - readable = True - recursive = False - components = [] - append = components.append - level += 1 - saferepr = _safe_repr - items = sorted(object.items(), key=_safe_tuple) - for k, v in items: - krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) - vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) - append("%s: %s" % (krepr, vrepr)) - readable = readable and kreadable and vreadable - if krecur or vrecur: - recursive = True - del context[objid] - return "{%s}" % ", ".join(components), readable, recursive - - if (issubclass(typ, list) and r is list.__repr__) or \ - (issubclass(typ, tuple) and r is tuple.__repr__): - if issubclass(typ, list): - if not object: - return "[]", True, False - format = "[%s]" - elif len(object) == 1: - format = "(%s,)" - else: - if not object: - return "()", True, False - format = "(%s)" - objid = id(object) - if maxlevels and level >= maxlevels: - return format % "...", False, objid in context - if objid in context: - return _recursion(object), False, True - context[objid] = 1 - readable = True - recursive = False - components = [] - append = components.append - level += 1 - for o in object: - orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) - append(orepr) - if not oreadable: - readable = False - if orecur: - recursive = True - del context[objid] - return format % ", ".join(components), readable, recursive - - rep = repr(object) - return rep, (rep and not rep.startswith('<')), False - - -def _recursion(object): - return ("" - % (type(object).__name__, id(object))) - - -def _perfcheck(object=None): - import time - if object is None: - object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000 - p = PrettyPrinter() - t1 = time.time() - _safe_repr(object, {}, None, 0) - t2 = time.time() - p.pformat(object) - t3 = time.time() - print("_safe_repr:", t2 - t1) - print("pformat:", t3 - t2) - -if __name__ == "__main__": - _perfcheck() diff --git a/Darwin/lib/python3.4/pydoc_data/topics.py b/Darwin/lib/python3.4/pydoc_data/topics.py deleted file mode 100644 index 0d2d83c..0000000 --- a/Darwin/lib/python3.4/pydoc_data/topics.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Sat May 17 21:42:09 2014 -topics = {'assert': '\nThe "assert" statement\n**********************\n\nAssert statements are a convenient way to insert debugging assertions\ninto a program:\n\n assert_stmt ::= "assert" expression ["," expression]\n\nThe simple form, "assert expression", is equivalent to\n\n if __debug__:\n if not expression: raise AssertionError\n\nThe extended form, "assert expression1, expression2", is equivalent to\n\n if __debug__:\n if not expression1: raise 
AssertionError(expression2)\n\nThese equivalences assume that "__debug__" and "AssertionError" refer\nto the built-in variables with those names. In the current\nimplementation, the built-in variable "__debug__" is "True" under\nnormal circumstances, "False" when optimization is requested (command\nline option -O). The current code generator emits no code for an\nassert statement when optimization is requested at compile time. Note\nthat it is unnecessary to include the source code for the expression\nthat failed in the error message; it will be displayed as part of the\nstack trace.\n\nAssignments to "__debug__" are illegal. The value for the built-in\nvariable is determined when the interpreter starts.\n', - 'assignment': '\nAssignment statements\n*********************\n\nAssignment statements are used to (re)bind names to values and to\nmodify attributes or items of mutable objects:\n\n assignment_stmt ::= (target_list "=")+ (expression_list | yield_expression)\n target_list ::= target ("," target)* [","]\n target ::= identifier\n | "(" target_list ")"\n | "[" target_list "]"\n | attributeref\n | subscription\n | slicing\n | "*" target\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn assignment statement evaluates the expression list (remember that\nthis can be a single expression or a comma-separated list, the latter\nyielding a tuple) and assigns the single resulting object to each of\nthe target lists, from left to right.\n\nAssignment is defined recursively depending on the form of the target\n(list). When a target is part of a mutable object (an attribute\nreference, subscription or slicing), the mutable object must\nultimately perform the assignment and decide about its validity, and\nmay raise an exception if the assignment is unacceptable. The rules\nobserved by various types and the exceptions raised are given with the\ndefinition of the object types (see section *The standard type\nhierarchy*).\n\nAssignment of an object to a target list, optionally enclosed in\nparentheses or square brackets, is recursively defined as follows.\n\n* If the target list is a single target: The object is assigned to\n that target.\n\n* If the target list is a comma-separated list of targets: The object\n must be an iterable with the same number of items as there are\n targets in the target list, and the items are assigned, from left to\n right, to the corresponding targets.\n\n * If the target list contains one target prefixed with an asterisk,\n called a "starred" target: The object must be a sequence with at\n least as many items as there are targets in the target list, minus\n one. The first items of the sequence are assigned, from left to\n right, to the targets before the starred target. The final items\n of the sequence are assigned to the targets after the starred\n target. 
A list of the remaining items in the sequence is then\n assigned to the starred target (the list can be empty).\n\n * Else: The object must be a sequence with the same number of items\n as there are targets in the target list, and the items are\n assigned, from left to right, to the corresponding targets.\n\nAssignment of an object to a single target is recursively defined as\nfollows.\n\n* If the target is an identifier (name):\n\n * If the name does not occur in a "global" or "nonlocal" statement\n in the current code block: the name is bound to the object in the\n current local namespace.\n\n * Otherwise: the name is bound to the object in the global namespace\n or the outer namespace determined by "nonlocal", respectively.\n\n The name is rebound if it was already bound. This may cause the\n reference count for the object previously bound to the name to reach\n zero, causing the object to be deallocated and its destructor (if it\n has one) to be called.\n\n* If the target is a target list enclosed in parentheses or in square\n brackets: The object must be an iterable with the same number of\n items as there are targets in the target list, and its items are\n assigned, from left to right, to the corresponding targets.\n\n* If the target is an attribute reference: The primary expression in\n the reference is evaluated. It should yield an object with\n assignable attributes; if this is not the case, "TypeError" is\n raised. That object is then asked to assign the assigned object to\n the given attribute; if it cannot perform the assignment, it raises\n an exception (usually but not necessarily "AttributeError").\n\n Note: If the object is a class instance and the attribute reference\n occurs on both sides of the assignment operator, the RHS expression,\n "a.x" can access either an instance attribute or (if no instance\n attribute exists) a class attribute. The LHS target "a.x" is always\n set as an instance attribute, creating it if necessary. Thus, the\n two occurrences of "a.x" do not necessarily refer to the same\n attribute: if the RHS expression refers to a class attribute, the\n LHS creates a new instance attribute as the target of the\n assignment:\n\n class Cls:\n x = 3 # class variable\n inst = Cls()\n inst.x = inst.x + 1 # writes inst.x as 4 leaving Cls.x as 3\n\n This description does not necessarily apply to descriptor\n attributes, such as properties created with "property()".\n\n* If the target is a subscription: The primary expression in the\n reference is evaluated. It should yield either a mutable sequence\n object (such as a list) or a mapping object (such as a dictionary).\n Next, the subscript expression is evaluated.\n\n If the primary is a mutable sequence object (such as a list), the\n subscript must yield an integer. If it is negative, the sequence\'s\n length is added to it. The resulting value must be a nonnegative\n integer less than the sequence\'s length, and the sequence is asked\n to assign the assigned object to its item with that index. If the\n index is out of range, "IndexError" is raised (assignment to a\n subscripted sequence cannot add new items to a list).\n\n If the primary is a mapping object (such as a dictionary), the\n subscript must have a type compatible with the mapping\'s key type,\n and the mapping is then asked to create a key/datum pair which maps\n the subscript to the assigned object. 
This can either replace an\n existing key/value pair with the same key value, or insert a new\n key/value pair (if no key with the same value existed).\n\n For user-defined objects, the "__setitem__()" method is called with\n appropriate arguments.\n\n* If the target is a slicing: The primary expression in the reference\n is evaluated. It should yield a mutable sequence object (such as a\n list). The assigned object should be a sequence object of the same\n type. Next, the lower and upper bound expressions are evaluated,\n insofar they are present; defaults are zero and the sequence\'s\n length. The bounds should evaluate to integers. If either bound is\n negative, the sequence\'s length is added to it. The resulting\n bounds are clipped to lie between zero and the sequence\'s length,\n inclusive. Finally, the sequence object is asked to replace the\n slice with the items of the assigned sequence. The length of the\n slice may be different from the length of the assigned sequence,\n thus changing the length of the target sequence, if the object\n allows it.\n\n**CPython implementation detail:** In the current implementation, the\nsyntax for targets is taken to be the same as for expressions, and\ninvalid syntax is rejected during the code generation phase, causing\nless detailed error messages.\n\nWARNING: Although the definition of assignment implies that overlaps\nbetween the left-hand side and the right-hand side are \'safe\' (for\nexample "a, b = b, a" swaps two variables), overlaps *within* the\ncollection of assigned-to variables are not safe! For instance, the\nfollowing program prints "[0, 2]":\n\n x = [0, 1]\n i = 0\n i, x[i] = 1, 2\n print(x)\n\nSee also:\n\n **PEP 3132** - Extended Iterable Unpacking\n The specification for the "*target" feature.\n\n\nAugmented assignment statements\n===============================\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. 
Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', - 'atom-identifiers': '\nIdentifiers (Names)\n*******************\n\nAn identifier occurring as an atom is a name. See section\n*Identifiers and keywords* for lexical definition and section *Naming\nand binding* for documentation of naming and binding.\n\nWhen the name is bound to an object, evaluation of the atom yields\nthat object. When a name is not bound, an attempt to evaluate it\nraises a "NameError" exception.\n\n**Private name mangling:** When an identifier that textually occurs in\na class definition begins with two or more underscore characters and\ndoes not end in two or more underscores, it is considered a *private\nname* of that class. Private names are transformed to a longer form\nbefore code is generated for them. The transformation inserts the\nclass name, with leading underscores removed and a single underscore\ninserted, in front of the name. For example, the identifier "__spam"\noccurring in a class named "Ham" will be transformed to "_Ham__spam".\nThis transformation is independent of the syntactical context in which\nthe identifier is used. If the transformed name is extremely long\n(longer than 255 characters), implementation defined truncation may\nhappen. If the class name consists only of underscores, no\ntransformation is done.\n', - 'atom-literals': "\nLiterals\n********\n\nPython supports string and bytes literals and various numeric\nliterals:\n\n literal ::= stringliteral | bytesliteral\n | integer | floatnumber | imagnumber\n\nEvaluation of a literal yields an object of the given type (string,\nbytes, integer, floating point number, complex number) with the given\nvalue. The value may be approximated in the case of floating point\nand imaginary (complex) literals. See section *Literals* for details.\n\nAll literals correspond to immutable data types, and hence the\nobject's identity is less important than its value. Multiple\nevaluations of literals with the same value (either the same\noccurrence in the program text or a different occurrence) may obtain\nthe same object or a different object with the same value.\n", - 'attribute-access': '\nCustomizing attribute access\n****************************\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of "x.name") for\nclass instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for "self"). "name" is the attribute name. This\n method should return the (computed) attribute value or raise an\n "AttributeError" exception.\n\n Note that if the attribute is found through the normal mechanism,\n "__getattr__()" is not called. (This is an intentional asymmetry\n between "__getattr__()" and "__setattr__()".) This is done both for\n efficiency reasons and because otherwise "__getattr__()" would have\n no way to access other attributes of the instance. Note that at\n least for instance variables, you can fake total control by not\n inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). 
See the\n "__getattribute__()" method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines "__getattr__()",\n the latter will not be called unless "__getattribute__()" either\n calls it explicitly or raises an "AttributeError". This method\n should return the (computed) attribute value or raise an\n "AttributeError" exception. In order to avoid infinite recursion in\n this method, its implementation should always call the base class\n method with the same name to access any attributes it needs, for\n example, "object.__getattribute__(self, name)".\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If "__setattr__()" wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n "object.__setattr__(self, name, value)".\n\nobject.__delattr__(self, name)\n\n Like "__setattr__()" but for attribute deletion instead of\n assignment. This should only be implemented if "del obj.name" is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when "dir()" is called on the object. A sequence must be\n returned. "dir()" converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n========================\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' "__dict__".\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or "None" when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an "AttributeError"\n exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\nThe attribute "__objclass__" is interpreted by the "inspect" module as\nspecifying the class where this object was defined (setting this\nappropriately can assist in runtime introspection of dynamic class\nattributes). 
For callables, it may indicate that an instance of the\ngiven type (or a subclass) is expected or required as the first\npositional argument (for example, CPython sets this attribute for\nunbound methods that are implemented in C).\n\n\nInvoking Descriptors\n====================\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: "__get__()", "__set__()", and\n"__delete__()". If any of those methods are defined for an object, it\nis said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, "a.x" has a\nlookup chain starting with "a.__dict__[\'x\']", then\n"type(a).__dict__[\'x\']", and continuing through the base classes of\n"type(a)" excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, "a.x". How\nthe arguments are assembled depends on "a":\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: "x.__get__(a)".\n\nInstance Binding\n If binding to an object instance, "a.x" is transformed into the\n call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n\nClass Binding\n If binding to a class, "A.x" is transformed into the call:\n "A.__dict__[\'x\'].__get__(None, A)".\n\nSuper Binding\n If "a" is an instance of "super", then the binding "super(B,\n obj).m()" searches "obj.__class__.__mro__" for the base class "A"\n immediately preceding "B" and then invokes the descriptor with the\n call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of "__get__()", "__set__()" and "__delete__()". If it\ndoes not define "__get__()", then accessing the attribute will return\nthe descriptor object itself unless there is a value in the object\'s\ninstance dictionary. If the descriptor defines "__set__()" and/or\n"__delete__()", it is a data descriptor; if it defines neither, it is\na non-data descriptor. Normally, data descriptors define both\n"__get__()" and "__set__()", while non-data descriptors have just the\n"__get__()" method. Data descriptors with "__set__()" and "__get__()"\ndefined always override a redefinition in an instance dictionary. In\ncontrast, non-data descriptors can be overridden by instances.\n\nPython methods (including "staticmethod()" and "classmethod()") are\nimplemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe "property()" function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n=========\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. 
The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n--------------------------\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises "AttributeError". If\n dynamic assignment of new variables is desired, then add\n "\'__dict__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. If weak\n reference support is needed, then add "\'__weakref__\'" to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as "int", "bytes" and "tuple".\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n', - 'attribute-references': '\nAttribute references\n********************\n\nAn attribute reference is a primary followed by a period and a name:\n\n attributeref ::= primary "." identifier\n\nThe primary must evaluate to an object of a type that supports\nattribute references, which most objects do. This object is then\nasked to produce the attribute whose name is the identifier (which can\nbe customized by overriding the "__getattr__()" method). If this\nattribute is not available, the exception "AttributeError" is raised.\nOtherwise, the type and value of the object produced is determined by\nthe object. 
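As a sketch of customizing attribute access with "__getattr__()" (the class "Proxy" is invented for illustration):

   class Proxy:
       def __init__(self, wrapped):
           self._wrapped = wrapped

       def __getattr__(self, name):
           # Called only when normal attribute lookup fails; delegate
           # to the wrapped object instead of raising AttributeError
           # directly.
           return getattr(self._wrapped, name)

   p = Proxy([1, 2, 3])
   p.append(4)                # resolved through Proxy.__getattr__()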
Multiple evaluations of the same attribute reference may\nyield different objects.\n', - 'augassign': '\nAugmented assignment statements\n*******************************\n\nAugmented assignment is the combination, in a single statement, of a\nbinary operation and an assignment statement:\n\n augmented_assignment_stmt ::= augtarget augop (expression_list | yield_expression)\n augtarget ::= identifier | attributeref | subscription | slicing\n augop ::= "+=" | "-=" | "*=" | "/=" | "//=" | "%=" | "**="\n | ">>=" | "<<=" | "&=" | "^=" | "|="\n\n(See section *Primaries* for the syntax definitions for the last three\nsymbols.)\n\nAn augmented assignment evaluates the target (which, unlike normal\nassignment statements, cannot be an unpacking) and the expression\nlist, performs the binary operation specific to the type of assignment\non the two operands, and assigns the result to the original target.\nThe target is only evaluated once.\n\nAn augmented assignment expression like "x += 1" can be rewritten as\n"x = x + 1" to achieve a similar, but not exactly equal effect. In the\naugmented version, "x" is only evaluated once. Also, when possible,\nthe actual operation is performed *in-place*, meaning that rather than\ncreating a new object and assigning that to the target, the old object\nis modified instead.\n\nWith the exception of assigning to tuples and multiple targets in a\nsingle statement, the assignment done by augmented assignment\nstatements is handled the same way as normal assignments. Similarly,\nwith the exception of the possible *in-place* behavior, the binary\noperation performed by augmented assignment is the same as the normal\nbinary operations.\n\nFor targets which are attribute references, the same *caveat about\nclass and instance attributes* applies as for regular assignments.\n', - 'binary': '\nBinary arithmetic operations\n****************************\n\nThe binary arithmetic operations have the conventional priority\nlevels. Note that some of these operations also apply to certain non-\nnumeric types. Apart from the power operator, there are only two\nlevels, one for multiplicative operators and one for additive\noperators:\n\n m_expr ::= u_expr | m_expr "*" u_expr | m_expr "//" u_expr | m_expr "/" u_expr\n | m_expr "%" u_expr\n a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n\nThe "*" (multiplication) operator yields the product of its arguments.\nThe arguments must either both be numbers, or one argument must be an\ninteger and the other must be a sequence. In the former case, the\nnumbers are converted to a common type and then multiplied together.\nIn the latter case, sequence repetition is performed; a negative\nrepetition factor yields an empty sequence.\n\nThe "/" (division) and "//" (floor division) operators yield the\nquotient of their arguments. The numeric arguments are first\nconverted to a common type. Division of integers yields a float, while\nfloor division of integers results in an integer; the result is that\nof mathematical division with the \'floor\' function applied to the\nresult. Division by zero raises the "ZeroDivisionError" exception.\n\nThe "%" (modulo) operator yields the remainder from the division of\nthe first argument by the second. The numeric arguments are first\nconverted to a common type. A zero right argument raises the\n"ZeroDivisionError" exception. The arguments may be floating point\nnumbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals "4*0.7 +\n0.34".) 
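For integer operands, the behaviour of "//" and "%" can be seen in an interactive session (illustrative):

   >>> 7 // 3, 7 % 3
   (2, 1)
   >>> -7 // 3, -7 % 3
   (-3, 2)
   >>> divmod(-7, 3)
   (-3, 2)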
The modulo operator always yields a result with the same sign\nas its second operand (or zero); the absolute value of the result is\nstrictly smaller than the absolute value of the second operand [1].\n\nThe floor division and modulo operators are connected by the following\nidentity: "x == (x//y)*y + (x%y)". Floor division and modulo are also\nconnected with the built-in function "divmod()": "divmod(x, y) ==\n(x//y, x%y)". [2].\n\nIn addition to performing the modulo operation on numbers, the "%"\noperator is also overloaded by string objects to perform old-style\nstring formatting (also known as interpolation). The syntax for\nstring formatting is described in the Python Library Reference,\nsection *printf-style String Formatting*.\n\nThe floor division operator, the modulo operator, and the "divmod()"\nfunction are not defined for complex numbers. Instead, convert to a\nfloating point number using the "abs()" function if appropriate.\n\nThe "+" (addition) operator yields the sum of its arguments. The\narguments must either both be numbers or both sequences of the same\ntype. In the former case, the numbers are converted to a common type\nand then added together. In the latter case, the sequences are\nconcatenated.\n\nThe "-" (subtraction) operator yields the difference of its arguments.\nThe numeric arguments are first converted to a common type.\n', - 'bitwise': '\nBinary bitwise operations\n*************************\n\nEach of the three bitwise operations has a different priority level:\n\n and_expr ::= shift_expr | and_expr "&" shift_expr\n xor_expr ::= and_expr | xor_expr "^" and_expr\n or_expr ::= xor_expr | or_expr "|" xor_expr\n\nThe "&" operator yields the bitwise AND of its arguments, which must\nbe integers.\n\nThe "^" operator yields the bitwise XOR (exclusive OR) of its\narguments, which must be integers.\n\nThe "|" operator yields the bitwise (inclusive) OR of its arguments,\nwhich must be integers.\n', - 'bltin-code-objects': '\nCode Objects\n************\n\nCode objects are used by the implementation to represent "pseudo-\ncompiled" executable Python code such as a function body. They differ\nfrom function objects because they don\'t contain a reference to their\nglobal execution environment. Code objects are returned by the built-\nin "compile()" function and can be extracted from function objects\nthrough their "__code__" attribute. See also the "code" module.\n\nA code object can be executed or evaluated by passing it (instead of a\nsource string) to the "exec()" or "eval()" built-in functions.\n\nSee *The standard type hierarchy* for more information.\n', - 'bltin-ellipsis-object': '\nThe Ellipsis Object\n*******************\n\nThis object is commonly used by slicing (see *Slicings*). It supports\nno special operations. There is exactly one ellipsis object, named\n"Ellipsis" (a built-in name). "type(Ellipsis)()" produces the\n"Ellipsis" singleton.\n\nIt is written as "Ellipsis" or "...".\n', - 'bltin-null-object': '\nThe Null Object\n***************\n\nThis object is returned by functions that don\'t explicitly return a\nvalue. It supports no special operations. There is exactly one null\nobject, named "None" (a built-in name). "type(None)()" produces the\nsame singleton.\n\nIt is written as "None".\n', - 'bltin-type-objects': '\nType Objects\n************\n\nType objects represent the various object types. An object\'s type is\naccessed by the built-in function "type()". There are no special\noperations on types. 
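For example (illustrative session):

   >>> type(42)
   <class 'int'>
   >>> type(42) is int
   True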
The standard module "types" defines names for\nall standard built-in types.\n\nTypes are written like this: "".\n', - 'booleans': '\nBoolean operations\n******************\n\n or_test ::= and_test | or_test "or" and_test\n and_test ::= not_test | and_test "and" not_test\n not_test ::= comparison | "not" not_test\n\nIn the context of Boolean operations, and also when expressions are\nused by control flow statements, the following values are interpreted\nas false: "False", "None", numeric zero of all types, and empty\nstrings and containers (including strings, tuples, lists,\ndictionaries, sets and frozensets). All other values are interpreted\nas true. User-defined objects can customize their truth value by\nproviding a "__bool__()" method.\n\nThe operator "not" yields "True" if its argument is false, "False"\notherwise.\n\nThe expression "x and y" first evaluates *x*; if *x* is false, its\nvalue is returned; otherwise, *y* is evaluated and the resulting value\nis returned.\n\nThe expression "x or y" first evaluates *x*; if *x* is true, its value\nis returned; otherwise, *y* is evaluated and the resulting value is\nreturned.\n\n(Note that neither "and" nor "or" restrict the value and type they\nreturn to "False" and "True", but rather return the last evaluated\nargument. This is sometimes useful, e.g., if "s" is a string that\nshould be replaced by a default value if it is empty, the expression\n"s or \'foo\'" yields the desired value. Because "not" has to invent a\nvalue anyway, it does not bother to return a value of the same type as\nits argument, so e.g., "not \'foo\'" yields "False", not "\'\'".)\n', - 'break': '\nThe "break" statement\n*********************\n\n break_stmt ::= "break"\n\n"break" may only occur syntactically nested in a "for" or "while"\nloop, but not nested in a function or class definition within that\nloop.\n\nIt terminates the nearest enclosing loop, skipping the optional "else"\nclause if the loop has one.\n\nIf a "for" loop is terminated by "break", the loop control target\nkeeps its current value.\n\nWhen "break" passes control out of a "try" statement with a "finally"\nclause, that "finally" clause is executed before really leaving the\nloop.\n', - 'callable-types': '\nEmulating callable objects\n**************************\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, "x(arg1, arg2, ...)" is a shorthand for\n "x.__call__(arg1, arg2, ...)".\n', - 'calls': '\nCalls\n*****\n\nA call calls a callable object (e.g., a *function*) with a possibly\nempty series of *arguments*:\n\n call ::= primary "(" [argument_list [","] | comprehension] ")"\n argument_list ::= positional_arguments ["," keyword_arguments]\n ["," "*" expression] ["," keyword_arguments]\n ["," "**" expression]\n | keyword_arguments ["," "*" expression]\n ["," keyword_arguments] ["," "**" expression]\n | "*" expression ["," keyword_arguments] ["," "**" expression]\n | "**" expression\n positional_arguments ::= expression ("," expression)*\n keyword_arguments ::= keyword_item ("," keyword_item)*\n keyword_item ::= identifier "=" expression\n\nA trailing comma may be present after the positional and keyword\narguments but does not affect the semantics.\n\nThe primary must evaluate to a callable object (user-defined\nfunctions, built-in functions, methods of built-in objects, class\nobjects, methods of class instances, and all objects having a\n"__call__()" method are callable). 
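A minimal sketch of a callable instance (the class "Adder" is invented for illustration):

   class Adder:
       def __init__(self, n):
           self.n = n

       def __call__(self, x):
           # Invoked by the call syntax: add3(4) == add3.__call__(4)
           return self.n + x

   add3 = Adder(3)
   add3(4)                    # returns 7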
All argument expressions are\nevaluated before the call is attempted. Please refer to section\n*Function definitions* for the syntax of formal *parameter* lists.\n\nIf keyword arguments are present, they are first converted to\npositional arguments, as follows. First, a list of unfilled slots is\ncreated for the formal parameters. If there are N positional\narguments, they are placed in the first N slots. Next, for each\nkeyword argument, the identifier is used to determine the\ncorresponding slot (if the identifier is the same as the first formal\nparameter name, the first slot is used, and so on). If the slot is\nalready filled, a "TypeError" exception is raised. Otherwise, the\nvalue of the argument is placed in the slot, filling it (even if the\nexpression is "None", it fills the slot). When all arguments have\nbeen processed, the slots that are still unfilled are filled with the\ncorresponding default value from the function definition. (Default\nvalues are calculated, once, when the function is defined; thus, a\nmutable object such as a list or dictionary used as default value will\nbe shared by all calls that don\'t specify an argument value for the\ncorresponding slot; this should usually be avoided.) If there are any\nunfilled slots for which no default value is specified, a "TypeError"\nexception is raised. Otherwise, the list of filled slots is used as\nthe argument list for the call.\n\n**CPython implementation detail:** An implementation may provide\nbuilt-in functions whose positional parameters do not have names, even\nif they are \'named\' for the purpose of documentation, and which\ntherefore cannot be supplied by keyword. In CPython, this is the case\nfor functions implemented in C that use "PyArg_ParseTuple()" to parse\ntheir arguments.\n\nIf there are more positional arguments than there are formal parameter\nslots, a "TypeError" exception is raised, unless a formal parameter\nusing the syntax "*identifier" is present; in this case, that formal\nparameter receives a tuple containing the excess positional arguments\n(or an empty tuple if there were no excess positional arguments).\n\nIf any keyword argument does not correspond to a formal parameter\nname, a "TypeError" exception is raised, unless a formal parameter\nusing the syntax "**identifier" is present; in this case, that formal\nparameter receives a dictionary containing the excess keyword\narguments (using the keywords as keys and the argument values as\ncorresponding values), or a (new) empty dictionary if there were no\nexcess keyword arguments.\n\nIf the syntax "*expression" appears in the function call, "expression"\nmust evaluate to an iterable. Elements from this iterable are treated\nas if they were additional positional arguments; if there are\npositional arguments *x1*, ..., *xN*, and "expression" evaluates to a\nsequence *y1*, ..., *yM*, this is equivalent to a call with M+N\npositional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\n\nA consequence of this is that although the "*expression" syntax may\nappear *after* some keyword arguments, it is processed *before* the\nkeyword arguments (and the "**expression" argument, if any -- see\nbelow). So:\n\n >>> def f(a, b):\n ... 
print(a, b)\n ...\n >>> f(b=1, *(2,))\n 2 1\n >>> f(a=1, *(2,))\n Traceback (most recent call last):\n File "", line 1, in ?\n TypeError: f() got multiple values for keyword argument \'a\'\n >>> f(1, *(2,))\n 1 2\n\nIt is unusual for both keyword arguments and the "*expression" syntax\nto be used in the same call, so in practice this confusion does not\narise.\n\nIf the syntax "**expression" appears in the function call,\n"expression" must evaluate to a mapping, the contents of which are\ntreated as additional keyword arguments. In the case of a keyword\nappearing in both "expression" and as an explicit keyword argument, a\n"TypeError" exception is raised.\n\nFormal parameters using the syntax "*identifier" or "**identifier"\ncannot be used as positional argument slots or as keyword argument\nnames.\n\nA call always returns some value, possibly "None", unless it raises an\nexception. How this value is computed depends on the type of the\ncallable object.\n\nIf it is---\n\na user-defined function:\n The code block for the function is executed, passing it the\n argument list. The first thing the code block will do is bind the\n formal parameters to the arguments; this is described in section\n *Function definitions*. When the code block executes a "return"\n statement, this specifies the return value of the function call.\n\na built-in function or method:\n The result is up to the interpreter; see *Built-in Functions* for\n the descriptions of built-in functions and methods.\n\na class object:\n A new instance of that class is returned.\n\na class instance method:\n The corresponding user-defined function is called, with an argument\n list that is one longer than the argument list of the call: the\n instance becomes the first argument.\n\na class instance:\n The class must define a "__call__()" method; the effect is then the\n same as if that method was called.\n', - 'class': '\nClass definitions\n*****************\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n classdef ::= [decorators] "class" classname [inheritance] ":" suite\n inheritance ::= "(" [parameter_list] ")"\n classname ::= identifier\n\nA class definition is an executable statement. The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing. Classes without an inheritance\nlist inherit, by default, from the base class "object"; hence,\n\n class Foo:\n pass\n\nis equivalent to\n\n class Foo(object):\n pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.) When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n @f1(arg)\n @f2\n class Foo: pass\n\nis equivalent to\n\n class Foo: pass\n Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators. 
The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances. Instance attributes\ncan be set in a method with "self.name = value". Both class and\ninstance attributes are accessible through the notation ""self.name"",\nand an instance attribute hides a class attribute with the same name\nwhen accessed in this way. Class attributes can be used as defaults\nfor instance attributes, but using mutable values there can lead to\nunexpected results. *Descriptors* can be used to create instance\nvariables with different implementation details.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\n Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless there\n is a "finally" clause which happens to raise another exception.\n That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of an\n exception or the execution of a "return", "continue", or "break"\n statement.\n\n[3] A string literal appearing as the first statement in the function\n body is transformed into the function\'s "__doc__" attribute and\n therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n body is transformed into the namespace\'s "__doc__" item and\n therefore the class\'s *docstring*.\n', - 'comparisons': '\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation. Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n comparison ::= or_expr ( comp_operator or_expr )*\n comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects. The objects need not have the same type. If both are\nnumbers, they are converted to a common type. Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types. You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. The are\n identical to themselves, "x is x" but are not equal to themselves,\n "x != x". 
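For example (illustrative session):

   >>> x = float('NaN')
   >>> x is x
   True
   >>> x == x
   False
   >>> x != x
   True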
Additionally, comparing any value to a not-a-number value\n will return "False". For example, both "3 < float(\'NaN\')" and\n "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n values of their elements.\n\n* Strings are compared lexicographically using the numeric equivalents\n (the result of the built-in function "ord()") of their characters.\n [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison of\n corresponding elements. This means that to compare equal, each\n element must compare equal and the two sequences must be of the same\n type and have the same length.\n\n If not equal, the sequences are ordered the same as their first\n differing elements. For example, "[1,2,x] <= [1,2,y]" has the same\n value as "x <= y". If the corresponding element does not exist, the\n shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', \'>=\',\n \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n superset tests. Those relations do not define total orderings (the\n two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n another, nor supersets of one another). Accordingly, sets are not\n appropriate arguments for functions which depend on total ordering.\n For example, "min()", "max()", and "sorted()" produce undefined\n results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they are\n the same object; the choice whether one object is considered smaller\n or larger than another one is made arbitrarily but consistently\n within one execution of a program.\n\nComparison of objects of the differing types depends on whether either\nof the types provide explicit support for the comparison. Most\nnumeric types can be compared with one another. When cross-type\ncomparison is not supported, the comparison method returns\n"NotImplemented".\n\nThe operators "in" and "not in" test for membership. "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise. "x\nnot in s" returns the negation of "x in s". All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether a the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*. An equivalent test is "y.find(x) != -1". Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y". If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception. 
(If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object. "x is not y"\nyields the inverse truth value. [4]\n', - 'compound': '\nCompound statements\n*******************\n\nCompound statements contain (groups of) other statements; they affect\nor control the execution of those other statements in some way. In\ngeneral, compound statements span multiple lines, although in simple\nincarnations a whole compound statement may be contained in one line.\n\nThe "if", "while" and "for" statements implement traditional control\nflow constructs. "try" specifies exception handlers and/or cleanup\ncode for a group of statements, while the "with" statement allows the\nexecution of initialization and finalization code around a block of\ncode. Function and class definitions are also syntactically compound\nstatements.\n\nCompound statements consist of one or more \'clauses.\' A clause\nconsists of a header and a \'suite.\' The clause headers of a\nparticular compound statement are all at the same indentation level.\nEach clause header begins with a uniquely identifying keyword and ends\nwith a colon. A suite is a group of statements controlled by a\nclause. A suite can be one or more semicolon-separated simple\nstatements on the same line as the header, following the header\'s\ncolon, or it can be one or more indented statements on subsequent\nlines. Only the latter form of suite can contain nested compound\nstatements; the following is illegal, mostly because it wouldn\'t be\nclear to which "if" clause a following "else" clause would belong:\n\n if test1: if test2: print(x)\n\nAlso note that the semicolon binds tighter than the colon in this\ncontext, so that in the following example, either all or none of the\n"print()" calls are executed:\n\n if x < y < z: print(x); print(y); print(z)\n\nSummarizing:\n\n compound_stmt ::= if_stmt\n | while_stmt\n | for_stmt\n | try_stmt\n | with_stmt\n | funcdef\n | classdef\n suite ::= stmt_list NEWLINE | NEWLINE INDENT statement+ DEDENT\n statement ::= stmt_list NEWLINE | compound_stmt\n stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n\nNote that statements always end in a "NEWLINE" possibly followed by a\n"DEDENT". 
Also note that optional continuation clauses always begin\nwith a keyword that cannot start a statement, thus there are no\nambiguities (the \'dangling "else"\' problem is solved in Python by\nrequiring nested "if" statements to be indented).\n\nThe formatting of the grammar rules in the following sections places\neach clause on a separate line for clarity.\n\n\nThe "if" statement\n==================\n\nThe "if" statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the "if" statement is executed or evaluated).\nIf all expressions are false, the suite of the "else" clause, if\npresent, is executed.\n\n\nThe "while" statement\n=====================\n\nThe "while" statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the "else" clause, if present, is executed\nand the loop terminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite. A "continue" statement\nexecuted in the first suite skips the rest of the suite and goes back\nto testing the expression.\n\n\nThe "for" statement\n===================\n\nThe "for" statement is used to iterate over the elements of a sequence\n(such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n"expression_list". The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted. When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a "StopIteration" exception),\nthe suite in the "else" clause, if present, is executed, and the loop\nterminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite. A "continue" statement\nexecuted in the first suite skips the rest of the suite and continues\nwith the next item, or with the "else" clause if there was no next\nitem.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, it will not have been assigned to at all\nby the loop. Hint: the built-in function "range()" returns an\niterator of integers suitable to emulate the effect of Pascal\'s "for i\n:= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An\n internal counter is used to keep track of which item is used next,\n and this is incremented on each iteration. 
When this counter has\n reached the length of the sequence the loop terminates. This means\n that if the suite deletes the current (or a previous) item from the\n sequence, the next item will be skipped (since it gets the index of\n the current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n\n\nThe "try" statement\n===================\n\nThe "try" statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression ["as" target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nThe "except" clause(s) specify one or more exception handlers. When no\nexception occurs in the "try" clause, no exception handler is\nexecuted. When an exception occurs in the "try" suite, a search for an\nexception handler is started. This search inspects the except clauses\nin turn until one is found that matches the exception. An expression-\nless except clause, if present, must be last; it matches any\nexception. For an except clause with an expression, that expression\nis evaluated, and the clause matches the exception if the resulting\nobject is "compatible" with the exception. An object is compatible\nwith an exception if it is the class or a base class of the exception\nobject or a tuple containing an item compatible with the exception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire "try" statement raised\nthe exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the "as" keyword in that except clause, if\npresent, and the except clause\'s suite is executed. All except\nclauses must have an executable block. When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using "as target", it is cleared\nat the end of the except clause. This is as if\n\n except E as N:\n foo\n\nwas translated to\n\n except E as N:\n try:\n foo\n finally:\n del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause. Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the "sys" module and can be access via\n"sys.exc_info()". 
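A sketch of inspecting that information from inside a handler (illustrative):

   import sys

   try:
       1 / 0
   except ZeroDivisionError:
       exc_type, exc_value, tb = sys.exc_info()
       # exc_type is ZeroDivisionError, exc_value is the raised
       # exception instance, and tb is the traceback object.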
"sys.exc_info()" returns a 3-tuple consisting of the\nexception class, the exception instance and a traceback object (see\nsection *The standard type hierarchy*) identifying the point in the\nprogram where the exception occurred. "sys.exc_info()" values are\nrestored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional "else" clause is executed if and when control flows off\nthe end of the "try" clause. [2] Exceptions in the "else" clause are\nnot handled by the preceding "except" clauses.\n\nIf "finally" is present, it specifies a \'cleanup\' handler. The "try"\nclause is executed, including any "except" and "else" clauses. If an\nexception occurs in any of the clauses and is not handled, the\nexception is temporarily saved. The "finally" clause is executed. If\nthere is a saved exception it is re-raised at the end of the "finally"\nclause. If the "finally" clause raises another exception, the saved\nexception is set as the context of the new exception. If the "finally"\nclause executes a "return" or "break" statement, the saved exception\nis discarded:\n\n >>> def f():\n ... try:\n ... 1/0\n ... finally:\n ... return 42\n ...\n >>> f()\n 42\n\nThe exception information is not available to the program during\nexecution of the "finally" clause.\n\nWhen a "return", "break" or "continue" statement is executed in the\n"try" suite of a "try"..."finally" statement, the "finally" clause is\nalso executed \'on the way out.\' A "continue" statement is illegal in\nthe "finally" clause. (The reason is a problem with the current\nimplementation --- this restriction may be lifted in the future).\n\nThe return value of a function is determined by the last "return"\nstatement executed. Since the "finally" clause always executes, a\n"return" statement executed in the "finally" clause will always be the\nlast one executed:\n\n >>> def foo():\n ... try:\n ... return \'try\'\n ... finally:\n ... return \'finally\'\n ...\n >>> foo()\n \'finally\'\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the "raise" statement to\ngenerate exceptions may be found in section *The raise statement*.\n\n\nThe "with" statement\n====================\n\nThe "with" statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common "try"..."except"..."finally"\nusage patterns to be encapsulated for convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the "with" statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the "with_item") is\n evaluated to obtain a context manager.\n\n2. The context manager\'s "__exit__()" is loaded for later use.\n\n3. The context manager\'s "__enter__()" method is invoked.\n\n4. If a target was included in the "with" statement, the return value\n from "__enter__()" is assigned to it.\n\n Note: The "with" statement guarantees that if the "__enter__()" method\n returns without an error, then "__exit__()" will always be\n called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s "__exit__()" method is invoked. 
If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to "__exit__()". Otherwise, three\n "None" arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the "__exit__()" method was false, the exception is reraised.\n If the return value was true, the exception is suppressed, and\n execution continues with the statement following the "with"\n statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from "__exit__()" is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple "with" statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n "with" statement.\n\n\nFunction definitions\n====================\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* "="\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the ""*"" must also have a default value --- this\nis a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated from left to right when the\nfunction definition is executed.** This means that the expression is\nevaluated once, when the function is defined, and that the same "pre-\ncomputed" value is used for each call. 
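For example (illustrative session):

   >>> def append_to(x, items=[]):    # the list is created only once
   ...     items.append(x)
   ...     return items
   ...
   >>> append_to(1)
   [1]
   >>> append_to(2)
   [1, 2]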
This is especially important\nto understand when a default parameter is a mutable object, such as a\nlist or a dictionary: if the function modifies the object (e.g. by\nappending an item to a list), the default value is in effect modified.\nThis is generally not what was intended. A way around this is to use\n"None" as the default, and explicitly test for it in the body of the\nfunction, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n""*identifier"" is present, it is initialized to a tuple receiving any\nexcess positional parameters, defaulting to the empty tuple. If the\nform ""**identifier"" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after ""*"" or ""*identifier"" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "": expression"" following\nthe parameter name. Any parameter may have an annotation even those\nof the form "*identifier" or "**identifier". Functions may have\n"return" annotation of the form ""-> expression"" after the parameter\nlist. These annotations can be any valid Python expression and are\nevaluated when the function definition is executed. Annotations may\nbe evaluated in a different order than they appear in the source code.\nThe presence of annotations does not change the semantics of a\nfunction. The annotation values are available as values of a\ndictionary keyed by the parameters\' names in the "__annotations__"\nattribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda\nexpressions, described in section *Lambdas*. Note that the lambda\nexpression is merely a shorthand for a simplified function definition;\na function defined in a ""def"" statement can be passed around or\nassigned to another name just like a function defined by a lambda\nexpression. The ""def"" form is actually more powerful since it\nallows the execution of multiple statements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A ""def""\nstatement executed inside a function definition defines a local\nfunction that can be returned or passed around. Free variables used\nin the nested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n\n\nClass definitions\n=================\n\nA class definition defines a class object (see section *The standard\ntype hierarchy*):\n\n classdef ::= [decorators] "class" classname [inheritance] ":" suite\n inheritance ::= "(" [parameter_list] ")"\n classname ::= identifier\n\nA class definition is an executable statement. The inheritance list\nusually gives a list of base classes (see *Customizing class creation*\nfor more advanced uses), so each item in the list should evaluate to a\nclass object which allows subclassing. 
Classes without an inheritance\nlist inherit, by default, from the base class "object"; hence,\n\n class Foo:\n pass\n\nis equivalent to\n\n class Foo(object):\n pass\n\nThe class\'s suite is then executed in a new execution frame (see\n*Naming and binding*), using a newly created local namespace and the\noriginal global namespace. (Usually, the suite contains mostly\nfunction definitions.) When the class\'s suite finishes execution, its\nexecution frame is discarded but its local namespace is saved. [4] A\nclass object is then created using the inheritance list for the base\nclasses and the saved local namespace for the attribute dictionary.\nThe class name is bound to this class object in the original local\nnamespace.\n\nClass creation can be customized heavily using *metaclasses*.\n\nClasses can also be decorated: just like when decorating functions,\n\n @f1(arg)\n @f2\n class Foo: pass\n\nis equivalent to\n\n class Foo: pass\n Foo = f1(arg)(f2(Foo))\n\nThe evaluation rules for the decorator expressions are the same as for\nfunction decorators. The result must be a class object, which is then\nbound to the class name.\n\n**Programmer\'s note:** Variables defined in the class definition are\nclass attributes; they are shared by instances. Instance attributes\ncan be set in a method with "self.name = value". Both class and\ninstance attributes are accessible through the notation ""self.name"",\nand an instance attribute hides a class attribute with the same name\nwhen accessed in this way. Class attributes can be used as defaults\nfor instance attributes, but using mutable values there can lead to\nunexpected results. *Descriptors* can be used to create instance\nvariables with different implementation details.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3 **PEP 3129** - Class\n Decorators\n\n-[ Footnotes ]-\n\n[1] The exception is propagated to the invocation stack unless there\n is a "finally" clause which happens to raise another exception.\n That new exception causes the old one to be lost.\n\n[2] Currently, control "flows off the end" except in the case of an\n exception or the execution of a "return", "continue", or "break"\n statement.\n\n[3] A string literal appearing as the first statement in the function\n body is transformed into the function\'s "__doc__" attribute and\n therefore the function\'s *docstring*.\n\n[4] A string literal appearing as the first statement in the class\n body is transformed into the namespace\'s "__doc__" item and\n therefore the class\'s *docstring*.\n', - 'context-managers': '\nWith Statement Context Managers\n*******************************\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code. Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. 
The "with"\n statement will bind this method\'s return value to the target(s)\n specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be "None".\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that "__exit__()" methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n "with" statement.\n', - 'continue': '\nThe "continue" statement\n************************\n\n continue_stmt ::= "continue"\n\n"continue" may only occur syntactically nested in a "for" or "while"\nloop, but not nested in a function or class definition or "finally"\nclause within that loop. It continues with the next cycle of the\nnearest enclosing loop.\n\nWhen "continue" passes control out of a "try" statement with a\n"finally" clause, that "finally" clause is executed before really\nstarting the next loop cycle.\n', - 'conversions': '\nArithmetic conversions\n**********************\n\nWhen a description of an arithmetic operator below uses the phrase\n"the numeric arguments are converted to a common type," this means\nthat the operator implementation for built-in types works that way:\n\n* If either argument is a complex number, the other is converted to\n complex;\n\n* otherwise, if either argument is a floating point number, the other\n is converted to floating point;\n\n* otherwise, both must be integers and no conversion is necessary.\n\nSome additional rules apply for certain operators (e.g., a string left\nargument to the \'%\' operator). Extensions must define their own\nconversion behavior.\n', - 'customization': '\nBasic customization\n*******************\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. 
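A minimal sketch of such a subclass (the class "UpperStr" is invented for illustration):

   class UpperStr(str):
       def __new__(cls, value):
           # The string contents must be chosen here: str is immutable,
           # so __init__() could not change them afterwards.
           return super().__new__(cls, str(value).upper())

   s = UpperStr('hello')      # the value 'HELLO', an instance of UpperStr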
It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which "__del__()"\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to "sys.stderr" instead.\n Also, when "__del__()" is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the "__del__()" method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, "__del__()"\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. 
If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. The return\n value must be a string object. If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\n Changed in version 3.4: The __format__ method of "object" itself\n raises a "TypeError" if passed any non-empty string.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
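The following sketch ties together "__repr__()", "__str__()" and "__format__()" as described above; "Ratio" is an invented class, and delegating non-empty format specs to float formatting is just one reasonable choice:

    class Ratio:
        def __init__(self, num, den):
            self.num, self.den = num, den

        def __repr__(self):
            # "Official" representation: looks like a constructor call.
            return 'Ratio({0.num!r}, {0.den!r})'.format(self)

        def __str__(self):
            # "Informal" representation used by print() and str().
            return '{0.num}/{0.den}'.format(self)

        def __format__(self, format_spec):
            # Delegate non-empty format specs to float formatting.
            if format_spec:
                return format(self.num / self.den, format_spec)
            return str(self)

    x = Ratio(1, 3)
    print(repr(x))            # Ratio(1, 3)
    print(str(x))             # 1/3
    print(format(x, '.2f'))   # 0.33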
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s custom\n "__hash__()" method to the size of a "Py_ssize_t". This is\n typically 8 bytes on 64-bit builds and 4 bytes on 32-bit builds.\n If an object\'s "__hash__()" must interoperate on builds of\n different bit sizes, be sure to check the width on all supported\n builds. An easy way to do this is with "python -c "import sys;\n print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
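A hedged example of keeping "__eq__()" and "__hash__()" consistent, as required above ("Account" and its attributes are invented for the example): objects that compare equal must hash equal, so both methods are based on the same key:

    class Account:
        def __init__(self, number, owner):
            self.number = number     # part of the object's identity
            self.owner = owner       # not used for comparison or hashing

        def __eq__(self, other):
            if not isinstance(other, Account):
                return NotImplemented
            return self.number == other.number

        def __hash__(self):
            return hash(self.number)

    a, b = Account(42, 'alice'), Account(42, 'bob')
    print(a == b)        # True
    print(len({a, b}))   # 1 -- equal keys collapse in a set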
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n', - 'debugger': '\n"pdb" --- The Python Debugger\n*****************************\n\nThe module "pdb" defines an interactive source code debugger for\nPython programs. It supports setting (conditional) breakpoints and\nsingle stepping at the source line level, inspection of stack frames,\nsource code listing, and evaluation of arbitrary Python code in the\ncontext of any stack frame. It also supports post-mortem debugging\nand can be called under program control.\n\nThe debugger is extensible -- it is actually defined as the class\n"Pdb". This is currently undocumented but easily understood by reading\nthe source. The extension interface uses the modules "bdb" and "cmd".\n\nThe debugger\'s prompt is "(Pdb)". Typical usage to run a program under\ncontrol of the debugger is:\n\n >>> import pdb\n >>> import mymodule\n >>> pdb.run(\'mymodule.test()\')\n > (0)?()\n (Pdb) continue\n > (1)?()\n (Pdb) continue\n NameError: \'spam\'\n > (1)?()\n (Pdb)\n\nChanged in version 3.3: Tab-completion via the "readline" module is\navailable for commands and command arguments, e.g. the current global\nand local names are offered as arguments of the "p" command.\n\n"pdb.py" can also be invoked as a script to debug other scripts. For\nexample:\n\n python3 -m pdb myscript.py\n\nWhen invoked as a script, pdb will automatically enter post-mortem\ndebugging if the program being debugged exits abnormally. After post-\nmortem debugging (or after normal exit of the program), pdb will\nrestart the program. Automatic restarting preserves pdb\'s state (such\nas breakpoints) and in most cases is more useful than quitting the\ndebugger upon program\'s exit.\n\nNew in version 3.2: "pdb.py" now accepts a "-c" option that executes\ncommands as if given in a ".pdbrc" file, see *Debugger Commands*.\n\nThe typical usage to break into the debugger from a running program is\nto insert\n\n import pdb; pdb.set_trace()\n\nat the location you want to break into the debugger. 
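Returning to the "__bool__()"/"__len__()" fallback described at the end of the 'customization' entry above, this small sketch (the "Stack" class is invented) shows an object whose truth value tracks its emptiness:

    class Stack:
        def __init__(self):
            self._items = []

        def push(self, item):
            self._items.append(item)

        def __len__(self):
            return len(self._items)

        def __bool__(self):
            # Explicit version of what the __len__() fallback would do.
            return bool(self._items)

    s = Stack()
    print(bool(s))   # False -- empty
    s.push(1)
    print(bool(s))   # True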
You can then\nstep through the code following this statement, and continue running\nwithout the debugger using the "continue" command.\n\nThe typical usage to inspect a crashed program is:\n\n >>> import pdb\n >>> import mymodule\n >>> mymodule.test()\n Traceback (most recent call last):\n File "", line 1, in ?\n File "./mymodule.py", line 4, in test\n test2()\n File "./mymodule.py", line 3, in test2\n print(spam)\n NameError: spam\n >>> pdb.pm()\n > ./mymodule.py(3)test2()\n -> print(spam)\n (Pdb)\n\nThe module defines the following functions; each enters the debugger\nin a slightly different way:\n\npdb.run(statement, globals=None, locals=None)\n\n Execute the *statement* (given as a string or a code object) under\n debugger control. The debugger prompt appears before any code is\n executed; you can set breakpoints and type "continue", or you can\n step through the statement using "step" or "next" (all these\n commands are explained below). The optional *globals* and *locals*\n arguments specify the environment in which the code is executed; by\n default the dictionary of the module "__main__" is used. (See the\n explanation of the built-in "exec()" or "eval()" functions.)\n\npdb.runeval(expression, globals=None, locals=None)\n\n Evaluate the *expression* (given as a string or a code object)\n under debugger control. When "runeval()" returns, it returns the\n value of the expression. Otherwise this function is similar to\n "run()".\n\npdb.runcall(function, *args, **kwds)\n\n Call the *function* (a function or method object, not a string)\n with the given arguments. When "runcall()" returns, it returns\n whatever the function call returned. The debugger prompt appears\n as soon as the function is entered.\n\npdb.set_trace()\n\n Enter the debugger at the calling stack frame. This is useful to\n hard-code a breakpoint at a given point in a program, even if the\n code is not otherwise being debugged (e.g. when an assertion\n fails).\n\npdb.post_mortem(traceback=None)\n\n Enter post-mortem debugging of the given *traceback* object. If no\n *traceback* is given, it uses the one of the exception that is\n currently being handled (an exception must be being handled if the\n default is to be used).\n\npdb.pm()\n\n Enter post-mortem debugging of the traceback found in\n "sys.last_traceback".\n\nThe "run*" functions and "set_trace()" are aliases for instantiating\nthe "Pdb" class and calling the method of the same name. If you want\nto access further features, you have to do this yourself:\n\nclass class pdb.Pdb(completekey=\'tab\', stdin=None, stdout=None, skip=None, nosigint=False)\n\n "Pdb" is the debugger class.\n\n The *completekey*, *stdin* and *stdout* arguments are passed to the\n underlying "cmd.Cmd" class; see the description there.\n\n The *skip* argument, if given, must be an iterable of glob-style\n module name patterns. The debugger will not step into frames that\n originate in a module that matches one of these patterns. [1]\n\n By default, Pdb sets a handler for the SIGINT signal (which is sent\n when the user presses Ctrl-C on the console) when you give a\n "continue" command. This allows you to break into the debugger\n again by pressing Ctrl-C. If you want Pdb not to touch the SIGINT\n handler, set *nosigint* tot true.\n\n Example call to enable tracing with *skip*:\n\n import pdb; pdb.Pdb(skip=[\'django.*\']).set_trace()\n\n New in version 3.1: The *skip* argument.\n\n New in version 3.2: The *nosigint* argument. 
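As a hedged companion to the module-level functions above (reusing the hypothetical "mymodule" from the topic's own examples), the snippet below runs a single call under debugger control and notes how post-mortem debugging would be entered afterwards:

    import pdb
    import mymodule          # the example module used throughout this topic

    # The (Pdb) prompt appears as soon as the function is entered.
    pdb.runcall(mymodule.test)

    # After an uncaught exception in an interactive session, post-mortem
    # debugging of sys.last_traceback can be entered with:
    #     pdb.pm()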
Previously, a SIGINT\n handler was never set by Pdb.\n\n run(statement, globals=None, locals=None)\n runeval(expression, globals=None, locals=None)\n runcall(function, *args, **kwds)\n set_trace()\n\n See the documentation for the functions explained above.\n\n\nDebugger Commands\n=================\n\nThe commands recognized by the debugger are listed below. Most\ncommands can be abbreviated to one or two letters as indicated; e.g.\n"h(elp)" means that either "h" or "help" can be used to enter the help\ncommand (but not "he" or "hel", nor "H" or "Help" or "HELP").\nArguments to commands must be separated by whitespace (spaces or\ntabs). Optional arguments are enclosed in square brackets ("[]") in\nthe command syntax; the square brackets must not be typed.\nAlternatives in the command syntax are separated by a vertical bar\n("|").\n\nEntering a blank line repeats the last command entered. Exception: if\nthe last command was a "list" command, the next 11 lines are listed.\n\nCommands that the debugger doesn\'t recognize are assumed to be Python\nstatements and are executed in the context of the program being\ndebugged. Python statements can also be prefixed with an exclamation\npoint ("!"). This is a powerful way to inspect the program being\ndebugged; it is even possible to change a variable or call a function.\nWhen an exception occurs in such a statement, the exception name is\nprinted but the debugger\'s state is not changed.\n\nThe debugger supports *aliases*. Aliases can have parameters which\nallows one a certain level of adaptability to the context under\nexamination.\n\nMultiple commands may be entered on a single line, separated by ";;".\n(A single ";" is not used as it is the separator for multiple commands\nin a line that is passed to the Python parser.) No intelligence is\napplied to separating the commands; the input is split at the first\n";;" pair, even if it is in the middle of a quoted string.\n\nIf a file ".pdbrc" exists in the user\'s home directory or in the\ncurrent directory, it is read in and executed as if it had been typed\nat the debugger prompt. This is particularly useful for aliases. If\nboth files exist, the one in the home directory is read first and\naliases defined there can be overridden by the local file.\n\nChanged in version 3.2: ".pdbrc" can now contain commands that\ncontinue debugging, such as "continue" or "next". Previously, these\ncommands had no effect.\n\nh(elp) [command]\n\n Without argument, print the list of available commands. With a\n *command* as argument, print help about that command. "help pdb"\n displays the full documentation (the docstring of the "pdb"\n module). Since the *command* argument must be an identifier, "help\n exec" must be entered to get help on the "!" command.\n\nw(here)\n\n Print a stack trace, with the most recent frame at the bottom. An\n arrow indicates the current frame, which determines the context of\n most commands.\n\nd(own) [count]\n\n Move the current frame *count* (default one) levels down in the\n stack trace (to a newer frame).\n\nu(p) [count]\n\n Move the current frame *count* (default one) levels up in the stack\n trace (to an older frame).\n\nb(reak) [([filename:]lineno | function) [, condition]]\n\n With a *lineno* argument, set a break there in the current file.\n With a *function* argument, set a break at the first executable\n statement within that function. 
The line number may be prefixed\n with a filename and a colon, to specify a breakpoint in another\n file (probably one that hasn\'t been loaded yet). The file is\n searched on "sys.path". Note that each breakpoint is assigned a\n number to which all the other breakpoint commands refer.\n\n If a second argument is present, it is an expression which must\n evaluate to true before the breakpoint is honored.\n\n Without argument, list all breaks, including for each breakpoint,\n the number of times that breakpoint has been hit, the current\n ignore count, and the associated condition if any.\n\ntbreak [([filename:]lineno | function) [, condition]]\n\n Temporary breakpoint, which is removed automatically when it is\n first hit. The arguments are the same as for "break".\n\ncl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n\n With a *filename:lineno* argument, clear all the breakpoints at\n this line. With a space separated list of breakpoint numbers, clear\n those breakpoints. Without argument, clear all breaks (but first\n ask confirmation).\n\ndisable [bpnumber [bpnumber ...]]\n\n Disable the breakpoints given as a space separated list of\n breakpoint numbers. Disabling a breakpoint means it cannot cause\n the program to stop execution, but unlike clearing a breakpoint, it\n remains in the list of breakpoints and can be (re-)enabled.\n\nenable [bpnumber [bpnumber ...]]\n\n Enable the breakpoints specified.\n\nignore bpnumber [count]\n\n Set the ignore count for the given breakpoint number. If count is\n omitted, the ignore count is set to 0. A breakpoint becomes active\n when the ignore count is zero. When non-zero, the count is\n decremented each time the breakpoint is reached and the breakpoint\n is not disabled and any associated condition evaluates to true.\n\ncondition bpnumber [condition]\n\n Set a new *condition* for the breakpoint, an expression which must\n evaluate to true before the breakpoint is honored. If *condition*\n is absent, any existing condition is removed; i.e., the breakpoint\n is made unconditional.\n\ncommands [bpnumber]\n\n Specify a list of commands for breakpoint number *bpnumber*. The\n commands themselves appear on the following lines. Type a line\n containing just "end" to terminate the commands. An example:\n\n (Pdb) commands 1\n (com) p some_variable\n (com) end\n (Pdb)\n\n To remove all commands from a breakpoint, type commands and follow\n it immediately with "end"; that is, give no commands.\n\n With no *bpnumber* argument, commands refers to the last breakpoint\n set.\n\n You can use breakpoint commands to start your program up again.\n Simply use the continue command, or step, or any other command that\n resumes execution.\n\n Specifying any command resuming execution (currently continue,\n step, next, return, jump, quit and their abbreviations) terminates\n the command list (as if that command was immediately followed by\n end). This is because any time you resume execution (even with a\n simple next or step), you may encounter another breakpoint--which\n could have its own command list, leading to ambiguities about which\n list to execute.\n\n If you use the \'silent\' command in the command list, the usual\n message about stopping at a breakpoint is not printed. This may be\n desirable for breakpoints that are to print a specific message and\n then continue. 
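An illustrative session combining several of the breakpoint commands above; the file name, line number and the "count" variable are invented, and pdb's confirmation output is omitted. It sets a conditional breakpoint, ignores its first five hits, attaches a command list, and continues:

    (Pdb) b mymodule.py:4, count > 10
    (Pdb) ignore 1 5
    (Pdb) commands 1
    (com) p count
    (com) end
    (Pdb) c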
If none of the other commands print anything, you\n see no sign that the breakpoint was reached.\n\ns(tep)\n\n Execute the current line, stop at the first possible occasion\n (either in a function that is called or on the next line in the\n current function).\n\nn(ext)\n\n Continue execution until the next line in the current function is\n reached or it returns. (The difference between "next" and "step"\n is that "step" stops inside a called function, while "next"\n executes called functions at (nearly) full speed, only stopping at\n the next line in the current function.)\n\nunt(il) [lineno]\n\n Without argument, continue execution until the line with a number\n greater than the current one is reached.\n\n With a line number, continue execution until a line with a number\n greater or equal to that is reached. In both cases, also stop when\n the current frame returns.\n\n Changed in version 3.2: Allow giving an explicit line number.\n\nr(eturn)\n\n Continue execution until the current function returns.\n\nc(ont(inue))\n\n Continue execution, only stop when a breakpoint is encountered.\n\nj(ump) lineno\n\n Set the next line that will be executed. Only available in the\n bottom-most frame. This lets you jump back and execute code again,\n or jump forward to skip code that you don\'t want to run.\n\n It should be noted that not all jumps are allowed -- for instance\n it is not possible to jump into the middle of a "for" loop or out\n of a "finally" clause.\n\nl(ist) [first[, last]]\n\n List source code for the current file. Without arguments, list 11\n lines around the current line or continue the previous listing.\n With "." as argument, list 11 lines around the current line. With\n one argument, list 11 lines around at that line. With two\n arguments, list the given range; if the second argument is less\n than the first, it is interpreted as a count.\n\n The current line in the current frame is indicated by "->". 
If an\n exception is being debugged, the line where the exception was\n originally raised or propagated is indicated by ">>", if it differs\n from the current line.\n\n New in version 3.2: The ">>" marker.\n\nll | longlist\n\n List all source code for the current function or frame.\n Interesting lines are marked as for "list".\n\n New in version 3.2.\n\na(rgs)\n\n Print the argument list of the current function.\n\np expression\n\n Evaluate the *expression* in the current context and print its\n value.\n\n Note: "print()" can also be used, but is not a debugger command ---\n this executes the Python "print()" function.\n\npp expression\n\n Like the "p" command, except the value of the expression is pretty-\n printed using the "pprint" module.\n\nwhatis expression\n\n Print the type of the *expression*.\n\nsource expression\n\n Try to get source code for the given object and display it.\n\n New in version 3.2.\n\ndisplay [expression]\n\n Display the value of the expression if it changed, each time\n execution stops in the current frame.\n\n Without expression, list all display expressions for the current\n frame.\n\n New in version 3.2.\n\nundisplay [expression]\n\n Do not display the expression any more in the current frame.\n Without expression, clear all display expressions for the current\n frame.\n\n New in version 3.2.\n\ninteract\n\n Start an interative interpreter (using the "code" module) whose\n global namespace contains all the (global and local) names found in\n the current scope.\n\n New in version 3.2.\n\nalias [name [command]]\n\n Create an alias called *name* that executes *command*. The command\n must *not* be enclosed in quotes. Replaceable parameters can be\n indicated by "%1", "%2", and so on, while "%*" is replaced by all\n the parameters. If no command is given, the current alias for\n *name* is shown. If no arguments are given, all aliases are listed.\n\n Aliases may be nested and can contain anything that can be legally\n typed at the pdb prompt. Note that internal pdb commands *can* be\n overridden by aliases. Such a command is then hidden until the\n alias is removed. Aliasing is recursively applied to the first\n word of the command line; all other words in the line are left\n alone.\n\n As an example, here are two useful aliases (especially when placed\n in the ".pdbrc" file):\n\n # Print instance variables (usage "pi classInst")\n alias pi for k in %1.__dict__.keys(): print("%1.",k,"=",%1.__dict__[k])\n # Print instance variables in self\n alias ps pi self\n\nunalias name\n\n Delete the specified alias.\n\n! statement\n\n Execute the (one-line) *statement* in the context of the current\n stack frame. The exclamation point can be omitted unless the first\n word of the statement resembles a debugger command. To set a\n global variable, you can prefix the assignment command with a\n "global" statement on the same line, e.g.:\n\n (Pdb) global list_options; list_options = [\'-l\']\n (Pdb)\n\nrun [args ...]\nrestart [args ...]\n\n Restart the debugged Python program. If an argument is supplied,\n it is split with "shlex" and the result is used as the new\n "sys.argv". History, breakpoints, actions and debugger options are\n preserved. "restart" is an alias for "run".\n\nq(uit)\n\n Quit from the debugger. 
The program being executed is aborted.\n\n-[ Footnotes ]-\n\n[1] Whether a frame is considered to originate in a certain module is\n determined by the "__name__" in the frame globals.\n', - 'del': '\nThe "del" statement\n*******************\n\n del_stmt ::= "del" target_list\n\nDeletion is recursively defined very similar to the way assignment is\ndefined. Rather than spelling it out in full details, here are some\nhints.\n\nDeletion of a target list recursively deletes each target, from left\nto right.\n\nDeletion of a name removes the binding of that name from the local or\nglobal namespace, depending on whether the name occurs in a "global"\nstatement in the same code block. If the name is unbound, a\n"NameError" exception will be raised.\n\nDeletion of attribute references, subscriptions and slicings is passed\nto the primary object involved; deletion of a slicing is in general\nequivalent to assignment of an empty slice of the right type (but even\nthis is determined by the sliced object).\n\nChanged in version 3.2: Previously it was illegal to delete a name\nfrom the local namespace if it occurs as a free variable in a nested\nblock.\n', - 'dict': '\nDictionary displays\n*******************\n\nA dictionary display is a possibly empty series of key/datum pairs\nenclosed in curly braces:\n\n dict_display ::= "{" [key_datum_list | dict_comprehension] "}"\n key_datum_list ::= key_datum ("," key_datum)* [","]\n key_datum ::= expression ":" expression\n dict_comprehension ::= expression ":" expression comp_for\n\nA dictionary display yields a new dictionary object.\n\nIf a comma-separated sequence of key/datum pairs is given, they are\nevaluated from left to right to define the entries of the dictionary:\neach key object is used as a key into the dictionary to store the\ncorresponding datum. This means that you can specify the same key\nmultiple times in the key/datum list, and the final dictionary\'s value\nfor that key will be the last one given.\n\nA dict comprehension, in contrast to list and set comprehensions,\nneeds two expressions separated with a colon followed by the usual\n"for" and "if" clauses. When the comprehension is run, the resulting\nkey and value elements are inserted in the new dictionary in the order\nthey are produced.\n\nRestrictions on the types of the key values are listed earlier in\nsection *The standard type hierarchy*. (To summarize, the key type\nshould be *hashable*, which excludes all mutable objects.) Clashes\nbetween duplicate keys are not detected; the last datum (textually\nrightmost in the display) stored for a given key value prevails.\n', - 'dynamic-features': '\nInteraction with dynamic features\n*********************************\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- "import *" --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a "SyntaxError".\n\nThe "eval()" and "exec()" functions do not have access to the full\nenvironment for resolving names. Names may be resolved in the local\nand global namespaces of the caller. Free variables are not resolved\nin the nearest enclosing namespace, but in the global namespace. 
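A short example of the dictionary display rules above: duplicate keys are not an error (the textually rightmost datum wins), and a dict comprehension takes a "key: value" pair before the "for" clause:

    d = {'a': 1, 'b': 2, 'a': 3}
    print(d['a'])                        # 3 -- the last datum prevails

    squares = {n: n * n for n in range(5)}
    print(squares[4])                    # 16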
[1]\nThe "exec()" and "eval()" functions have optional arguments to\noverride the global and local namespace. If only one namespace is\nspecified, it is used for both.\n', - 'else': '\nThe "if" statement\n******************\n\nThe "if" statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the "if" statement is executed or evaluated).\nIf all expressions are false, the suite of the "else" clause, if\npresent, is executed.\n', - 'exceptions': '\nExceptions\n**********\n\nExceptions are a means of breaking out of the normal flow of control\nof a code block in order to handle errors or other exceptional\nconditions. An exception is *raised* at the point where the error is\ndetected; it may be *handled* by the surrounding code block or by any\ncode block that directly or indirectly invoked the code block where\nthe error occurred.\n\nThe Python interpreter raises an exception when it detects a run-time\nerror (such as division by zero). A Python program can also\nexplicitly raise an exception with the "raise" statement. Exception\nhandlers are specified with the "try" ... "except" statement. The\n"finally" clause of such a statement can be used to specify cleanup\ncode which does not handle the exception, but is executed whether an\nexception occurred or not in the preceding code.\n\nPython uses the "termination" model of error handling: an exception\nhandler can find out what happened and continue execution at an outer\nlevel, but it cannot repair the cause of the error and retry the\nfailing operation (except by re-entering the offending piece of code\nfrom the top).\n\nWhen an exception is not handled at all, the interpreter terminates\nexecution of the program, or returns to its interactive main loop. In\neither case, it prints a stack backtrace, except when the exception is\n"SystemExit".\n\nExceptions are identified by class instances. The "except" clause is\nselected depending on the class of the instance: it must reference the\nclass of the instance or a base class thereof. The instance can be\nreceived by the handler and can carry additional information about the\nexceptional condition.\n\nNote: Exception messages are not part of the Python API. Their contents\n may change from one version of Python to the next without warning\n and should not be relied on by code which will run under multiple\n versions of the interpreter.\n\nSee also the description of the "try" statement in section *The try\nstatement* and "raise" statement in section *The raise statement*.\n\n-[ Footnotes ]-\n\n[1] This limitation occurs because the code that is executed by these\n operations is not available at the time the module is compiled.\n', - 'execmodel': '\nExecution model\n***************\n\n\nNaming and binding\n==================\n\n*Names* refer to objects. Names are introduced by name binding\noperations. Each occurrence of a name in the program text refers to\nthe *binding* of that name established in the innermost function block\ncontaining the use.\n\nA *block* is a piece of Python program text that is executed as a\nunit. The following are blocks: a module, a function body, and a class\ndefinition. Each command typed interactively is a block. 
A script\nfile (a file given as standard input to the interpreter or specified\non the interpreter command line the first argument) is a code block.\nA script command (a command specified on the interpreter command line\nwith the \'**-c**\' option) is a code block. The string argument passed\nto the built-in functions "eval()" and "exec()" is a code block.\n\nA code block is executed in an *execution frame*. A frame contains\nsome administrative information (used for debugging) and determines\nwhere and how execution continues after the code block\'s execution has\ncompleted.\n\nA *scope* defines the visibility of a name within a block. If a local\nvariable is defined in a block, its scope includes that block. If the\ndefinition occurs in a function block, the scope extends to any blocks\ncontained within the defining one, unless a contained block introduces\na different binding for the name. The scope of names defined in a\nclass block is limited to the class block; it does not extend to the\ncode blocks of methods -- this includes comprehensions and generator\nexpressions since they are implemented using a function scope. This\nmeans that the following will fail:\n\n class A:\n a = 42\n b = list(a + i for i in range(10))\n\nWhen a name is used in a code block, it is resolved using the nearest\nenclosing scope. The set of all such scopes visible to a code block\nis called the block\'s *environment*.\n\nIf a name is bound in a block, it is a local variable of that block,\nunless declared as "nonlocal". If a name is bound at the module\nlevel, it is a global variable. (The variables of the module code\nblock are local and global.) If a variable is used in a code block\nbut not defined there, it is a *free variable*.\n\nWhen a name is not found at all, a "NameError" exception is raised.\nIf the name refers to a local variable that has not been bound, a\n"UnboundLocalError" exception is raised. "UnboundLocalError" is a\nsubclass of "NameError".\n\nThe following constructs bind names: formal parameters to functions,\n"import" statements, class and function definitions (these bind the\nclass or function name in the defining block), and targets that are\nidentifiers if occurring in an assignment, "for" loop header, or after\n"as" in a "with" statement or "except" clause. The "import" statement\nof the form "from ... import *" binds all names defined in the\nimported module, except those beginning with an underscore. This form\nmay only be used at the module level.\n\nA target occurring in a "del" statement is also considered bound for\nthis purpose (though the actual semantics are to unbind the name).\n\nEach assignment or import statement occurs within a block defined by a\nclass or function definition or at the module level (the top-level\ncode block).\n\nIf a name binding operation occurs anywhere within a code block, all\nuses of the name within the block are treated as references to the\ncurrent block. This can lead to errors when a name is used within a\nblock before it is bound. This rule is subtle. Python lacks\ndeclarations and allows name binding operations to occur anywhere\nwithin a code block. The local variables of a code block can be\ndetermined by scanning the entire text of the block for name binding\noperations.\n\nIf the "global" statement occurs within a block, all uses of the name\nspecified in the statement refer to the binding of that name in the\ntop-level namespace. Names are resolved in the top-level namespace by\nsearching the global namespace, i.e. 
the namespace of the module\ncontaining the code block, and the builtins namespace, the namespace\nof the module "builtins". The global namespace is searched first. If\nthe name is not found there, the builtins namespace is searched. The\nglobal statement must precede all uses of the name.\n\nThe builtins namespace associated with the execution of a code block\nis actually found by looking up the name "__builtins__" in its global\nnamespace; this should be a dictionary or a module (in the latter case\nthe module\'s dictionary is used). By default, when in the "__main__"\nmodule, "__builtins__" is the built-in module "builtins"; when in any\nother module, "__builtins__" is an alias for the dictionary of the\n"builtins" module itself. "__builtins__" can be set to a user-created\ndictionary to create a weak form of restricted execution.\n\n**CPython implementation detail:** Users should not touch\n"__builtins__"; it is strictly an implementation detail. Users\nwanting to override values in the builtins namespace should "import"\nthe "builtins" module and modify its attributes appropriately.\n\nThe namespace for a module is automatically created the first time a\nmodule is imported. The main module for a script is always called\n"__main__".\n\nThe "global" statement has the same scope as a name binding operation\nin the same block. If the nearest enclosing scope for a free variable\ncontains a global statement, the free variable is treated as a global.\n\nA class definition is an executable statement that may use and define\nnames. These references follow the normal rules for name resolution.\nThe namespace of the class definition becomes the attribute dictionary\nof the class. Names defined at the class scope are not visible in\nmethods.\n\n\nInteraction with dynamic features\n---------------------------------\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- "import *" --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a "SyntaxError".\n\nThe "eval()" and "exec()" functions do not have access to the full\nenvironment for resolving names. Names may be resolved in the local\nand global namespaces of the caller. Free variables are not resolved\nin the nearest enclosing namespace, but in the global namespace. [1]\nThe "exec()" and "eval()" functions have optional arguments to\noverride the global and local namespace. If only one namespace is\nspecified, it is used for both.\n\n\nExceptions\n==========\n\nExceptions are a means of breaking out of the normal flow of control\nof a code block in order to handle errors or other exceptional\nconditions. An exception is *raised* at the point where the error is\ndetected; it may be *handled* by the surrounding code block or by any\ncode block that directly or indirectly invoked the code block where\nthe error occurred.\n\nThe Python interpreter raises an exception when it detects a run-time\nerror (such as division by zero). A Python program can also\nexplicitly raise an exception with the "raise" statement. Exception\nhandlers are specified with the "try" ... "except" statement. 
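To illustrate the "global" binding rule described above (the "counter"/"bump" names are invented): without the declaration, the assignment would make the name local to the function and raise "UnboundLocalError" on its first use:

    counter = 0

    def bump():
        global counter       # refer to the module-level binding
        counter += 1

    bump()
    bump()
    print(counter)           # 2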
The\n"finally" clause of such a statement can be used to specify cleanup\ncode which does not handle the exception, but is executed whether an\nexception occurred or not in the preceding code.\n\nPython uses the "termination" model of error handling: an exception\nhandler can find out what happened and continue execution at an outer\nlevel, but it cannot repair the cause of the error and retry the\nfailing operation (except by re-entering the offending piece of code\nfrom the top).\n\nWhen an exception is not handled at all, the interpreter terminates\nexecution of the program, or returns to its interactive main loop. In\neither case, it prints a stack backtrace, except when the exception is\n"SystemExit".\n\nExceptions are identified by class instances. The "except" clause is\nselected depending on the class of the instance: it must reference the\nclass of the instance or a base class thereof. The instance can be\nreceived by the handler and can carry additional information about the\nexceptional condition.\n\nNote: Exception messages are not part of the Python API. Their contents\n may change from one version of Python to the next without warning\n and should not be relied on by code which will run under multiple\n versions of the interpreter.\n\nSee also the description of the "try" statement in section *The try\nstatement* and "raise" statement in section *The raise statement*.\n\n-[ Footnotes ]-\n\n[1] This limitation occurs because the code that is executed by these\n operations is not available at the time the module is compiled.\n', - 'exprlists': '\nExpression lists\n****************\n\n expression_list ::= expression ( "," expression )* [","]\n\nAn expression list containing at least one comma yields a tuple. The\nlength of the tuple is the number of expressions in the list. The\nexpressions are evaluated from left to right.\n\nThe trailing comma is required only to create a single tuple (a.k.a. a\n*singleton*); it is optional in all other cases. A single expression\nwithout a trailing comma doesn\'t create a tuple, but rather yields the\nvalue of that expression. (To create an empty tuple, use an empty pair\nof parentheses: "()".)\n', - 'floating': '\nFloating point literals\n***********************\n\nFloating point literals are described by the following lexical\ndefinitions:\n\n floatnumber ::= pointfloat | exponentfloat\n pointfloat ::= [intpart] fraction | intpart "."\n exponentfloat ::= (intpart | pointfloat) exponent\n intpart ::= digit+\n fraction ::= "." digit+\n exponent ::= ("e" | "E") ["+" | "-"] digit+\n\nNote that the integer and exponent parts are always interpreted using\nradix 10. For example, "077e010" is legal, and denotes the same number\nas "77e10". The allowed range of floating point literals is\nimplementation-dependent. Some examples of floating point literals:\n\n 3.14 10. .001 1e100 3.14e-10 0e0\n\nNote that numeric literals do not include a sign; a phrase like "-1"\nis actually an expression composed of the unary operator "-" and the\nliteral "1".\n', - 'for': '\nThe "for" statement\n*******************\n\nThe "for" statement is used to iterate over the elements of a sequence\n(such as a string, tuple or list) or other iterable object:\n\n for_stmt ::= "for" target_list "in" expression_list ":" suite\n ["else" ":" suite]\n\nThe expression list is evaluated once; it should yield an iterable\nobject. An iterator is created for the result of the\n"expression_list". 
The suite is then executed once for each item\nprovided by the iterator, in the order of ascending indices. Each\nitem in turn is assigned to the target list using the standard rules\nfor assignments (see *Assignment statements*), and then the suite is\nexecuted. When the items are exhausted (which is immediately when the\nsequence is empty or an iterator raises a "StopIteration" exception),\nthe suite in the "else" clause, if present, is executed, and the loop\nterminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite. A "continue" statement\nexecuted in the first suite skips the rest of the suite and continues\nwith the next item, or with the "else" clause if there was no next\nitem.\n\nThe suite may assign to the variable(s) in the target list; this does\nnot affect the next item assigned to it.\n\nNames in the target list are not deleted when the loop is finished,\nbut if the sequence is empty, it will not have been assigned to at all\nby the loop. Hint: the built-in function "range()" returns an\niterator of integers suitable to emulate the effect of Pascal\'s "for i\n:= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n\nNote: There is a subtlety when the sequence is being modified by the loop\n (this can only occur for mutable sequences, i.e. lists). An\n internal counter is used to keep track of which item is used next,\n and this is incremented on each iteration. When this counter has\n reached the length of the sequence the loop terminates. This means\n that if the suite deletes the current (or a previous) item from the\n sequence, the next item will be skipped (since it gets the index of\n the current item which has already been treated). Likewise, if the\n suite inserts an item in the sequence before the current item, the\n current item will be treated again the next time through the loop.\n This can lead to nasty bugs that can be avoided by making a\n temporary copy using a slice of the whole sequence, e.g.,\n\n for x in a[:]:\n if x < 0: a.remove(x)\n', - 'formatstrings': '\nFormat String Syntax\n********************\n\nThe "str.format()" method and the "Formatter" class share the same\nsyntax for format strings (although in the case of "Formatter",\nsubclasses can define their own format string syntax).\n\nFormat strings contain "replacement fields" surrounded by curly braces\n"{}". Anything that is not contained in braces is considered literal\ntext, which is copied unchanged to the output. If you need to include\na brace character in the literal text, it can be escaped by doubling:\n"{{" and "}}".\n\nThe grammar for a replacement field is as follows:\n\n replacement_field ::= "{" [field_name] ["!" conversion] [":" format_spec] "}"\n field_name ::= arg_name ("." attribute_name | "[" element_index "]")*\n arg_name ::= [identifier | integer]\n attribute_name ::= identifier\n element_index ::= integer | index_string\n index_string ::= +\n conversion ::= "r" | "s" | "a"\n format_spec ::= \n\nIn less formal terms, the replacement field can start with a\n*field_name* that specifies the object whose value is to be formatted\nand inserted into the output instead of the replacement field. The\n*field_name* is optionally followed by a *conversion* field, which is\npreceded by an exclamation point "\'!\'", and a *format_spec*, which is\npreceded by a colon "\':\'". 
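A conventional illustration of the "for" ... "else" semantics described in the 'for' entry above: the "else" suite runs only when the loop is not left by "break" (the prime-printing loop is just a stock example):

    for n in range(2, 10):
        for divisor in range(2, n):
            if n % divisor == 0:
                break
        else:
            # No divisor was found, so the inner loop completed normally.
            print(n, 'is prime')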
These specify a non-default format for the\nreplacement value.\n\nSee also the *Format Specification Mini-Language* section.\n\nThe *field_name* itself begins with an *arg_name* that is either a\nnumber or a keyword. If it\'s a number, it refers to a positional\nargument, and if it\'s a keyword, it refers to a named keyword\nargument. If the numerical arg_names in a format string are 0, 1, 2,\n... in sequence, they can all be omitted (not just some) and the\nnumbers 0, 1, 2, ... will be automatically inserted in that order.\nBecause *arg_name* is not quote-delimited, it is not possible to\nspecify arbitrary dictionary keys (e.g., the strings "\'10\'" or\n"\':-]\'") within a format string. The *arg_name* can be followed by any\nnumber of index or attribute expressions. An expression of the form\n"\'.name\'" selects the named attribute using "getattr()", while an\nexpression of the form "\'[index]\'" does an index lookup using\n"__getitem__()".\n\nChanged in version 3.1: The positional argument specifiers can be\nomitted, so "\'{} {}\'" is equivalent to "\'{0} {1}\'".\n\nSome simple format string examples:\n\n "First, thou shalt count to {0}" # References first positional argument\n "Bring me a {}" # Implicitly references the first positional argument\n "From {} to {}" # Same as "From {0} to {1}"\n "My quest is {name}" # References keyword argument \'name\'\n "Weight in tons {0.weight}" # \'weight\' attribute of first positional arg\n "Units destroyed: {players[0]}" # First element of keyword argument \'players\'.\n\nThe *conversion* field causes a type coercion before formatting.\nNormally, the job of formatting a value is done by the "__format__()"\nmethod of the value itself. However, in some cases it is desirable to\nforce a type to be formatted as a string, overriding its own\ndefinition of formatting. By converting the value to a string before\ncalling "__format__()", the normal formatting logic is bypassed.\n\nThree conversion flags are currently supported: "\'!s\'" which calls\n"str()" on the value, "\'!r\'" which calls "repr()" and "\'!a\'" which\ncalls "ascii()".\n\nSome examples:\n\n "Harold\'s a clever {0!s}" # Calls str() on the argument first\n "Bring out the holy {name!r}" # Calls repr() on the argument first\n "More {!a}" # Calls ascii() on the argument first\n\nThe *format_spec* field contains a specification of how the value\nshould be presented, including such details as field width, alignment,\npadding, decimal precision and so on. Each value type can define its\nown "formatting mini-language" or interpretation of the *format_spec*.\n\nMost built-in types support a common formatting mini-language, which\nis described in the next section.\n\nA *format_spec* field can also include nested replacement fields\nwithin it. These nested replacement fields can contain only a field\nname; conversion flags and format specifications are not allowed. The\nreplacement fields within the format_spec are substituted before the\n*format_spec* string is interpreted. This allows the formatting of a\nvalue to be dynamically specified.\n\nSee the *Format examples* section for some examples.\n\n\nFormat Specification Mini-Language\n==================================\n\n"Format specifications" are used within replacement fields contained\nwithin a format string to define how individual values are presented\n(see *Format String Syntax*). They can also be passed directly to the\nbuilt-in "format()" function. 
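Since the entry above notes that "Formatter" subclasses may customize behaviour, here is a hedged sketch of one such subclass ("DefaultFormatter" and the placeholder text are invented) that tolerates missing keyword arguments instead of raising "KeyError":

    import string

    class DefaultFormatter(string.Formatter):
        def get_value(self, key, args, kwargs):
            # Substitute a placeholder for unknown keyword fields.
            if isinstance(key, str) and key not in kwargs:
                return '<missing>'
            return super().get_value(key, args, kwargs)

    fmt = DefaultFormatter()
    print(fmt.format('{name} scored {points}', name='alice'))
    # alice scored <missing>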
Each formattable type may define how\nthe format specification is to be interpreted.\n\nMost built-in types implement the following options for format\nspecifications, although some of the formatting options are only\nsupported by the numeric types.\n\nA general convention is that an empty format string ("""") produces\nthe same result as if you had called "str()" on the value. A non-empty\nformat string typically modifies the result.\n\nThe general form of a *standard format specifier* is:\n\n format_spec ::= [[fill]align][sign][#][0][width][,][.precision][type]\n fill ::= \n align ::= "<" | ">" | "=" | "^"\n sign ::= "+" | "-" | " "\n width ::= integer\n precision ::= integer\n type ::= "b" | "c" | "d" | "e" | "E" | "f" | "F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n\nIf a valid *align* value is specified, it can be preceded by a *fill*\ncharacter that can be any character and defaults to a space if\nomitted. Note that it is not possible to use "{" and "}" as *fill*\nchar while using the "str.format()" method; this limitation however\ndoesn\'t affect the "format()" function.\n\nThe meaning of the various alignment options is as follows:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | "\'<\'" | Forces the field to be left-aligned within the available |\n +-----------+------------------------------------------------------------+\n | "\'>\'" | Forces the field to be right-aligned within the available |\n +-----------+------------------------------------------------------------+\n | "\'=\'" | Forces the padding to be placed after the sign (if any) |\n +-----------+------------------------------------------------------------+\n | "\'^\'" | Forces the field to be centered within the available |\n +-----------+------------------------------------------------------------+\n\nNote that unless a minimum field width is defined, the field width\nwill always be the same size as the data to fill it, so that the\nalignment option has no meaning in this case.\n\nThe *sign* option is only valid for number types, and can be one of\nthe following:\n\n +-----------+------------------------------------------------------------+\n | Option | Meaning |\n +===========+============================================================+\n | "\'+\'" | indicates that a sign should be used for both positive as |\n +-----------+------------------------------------------------------------+\n | "\'-\'" | indicates that a sign should be used only for negative |\n +-----------+------------------------------------------------------------+\n | space | indicates that a leading space should be used on positive |\n +-----------+------------------------------------------------------------+\n\nThe "\'#\'" option causes the "alternate form" to be used for the\nconversion. The alternate form is defined differently for different\ntypes. This option is only valid for integer, float, complex and\nDecimal types. For integers, when binary, octal, or hexadecimal output\nis used, this option adds the prefix respective "\'0b\'", "\'0o\'", or\n"\'0x\'" to the output value. For floats, complex and Decimal the\nalternate form causes the result of the conversion to always contain a\ndecimal-point character, even if no digits follow it. Normally, a\ndecimal-point character appears in the result of these conversions\nonly if a digit follows it. 
In addition, for "\'g\'" and "\'G\'"\nconversions, trailing zeros are not removed from the result.\n\nThe "\',\'" option signals the use of a comma for a thousands separator.\nFor a locale aware separator, use the "\'n\'" integer presentation type\ninstead.\n\nChanged in version 3.1: Added the "\',\'" option (see also **PEP 378**).\n\n*width* is a decimal integer defining the minimum field width. If not\nspecified, then the field width will be determined by the content.\n\nPreceding the *width* field by a zero ("\'0\'") character enables sign-\naware zero-padding for numeric types. This is equivalent to a *fill*\ncharacter of "\'0\'" with an *alignment* type of "\'=\'".\n\nThe *precision* is a decimal number indicating how many digits should\nbe displayed after the decimal point for a floating point value\nformatted with "\'f\'" and "\'F\'", or before and after the decimal point\nfor a floating point value formatted with "\'g\'" or "\'G\'". For non-\nnumber types the field indicates the maximum field size - in other\nwords, how many characters will be used from the field content. The\n*precision* is not allowed for integer values.\n\nFinally, the *type* determines how the data should be presented.\n\nThe available string presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | "\'s\'" | String format. This is the default type for strings and |\n +-----------+------------------------------------------------------------+\n | None | The same as "\'s\'". |\n +-----------+------------------------------------------------------------+\n\nThe available integer presentation types are:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | "\'b\'" | Binary format. Outputs the number in base 2. |\n +-----------+------------------------------------------------------------+\n | "\'c\'" | Character. Converts the integer to the corresponding |\n +-----------+------------------------------------------------------------+\n | "\'d\'" | Decimal Integer. Outputs the number in base 10. |\n +-----------+------------------------------------------------------------+\n | "\'o\'" | Octal format. Outputs the number in base 8. |\n +-----------+------------------------------------------------------------+\n | "\'x\'" | Hex format. Outputs the number in base 16, using lower- |\n +-----------+------------------------------------------------------------+\n | "\'X\'" | Hex format. Outputs the number in base 16, using upper- |\n +-----------+------------------------------------------------------------+\n | "\'n\'" | Number. This is the same as "\'d\'", except that it uses the |\n +-----------+------------------------------------------------------------+\n | None | The same as "\'d\'". |\n +-----------+------------------------------------------------------------+\n\nIn addition to the above presentation types, integers can be formatted\nwith the floating point presentation types listed below (except "\'n\'"\nand None). 
When doing so, "float()" is used to convert the integer to\na floating point number before formatting.\n\nThe available presentation types for floating point and decimal values\nare:\n\n +-----------+------------------------------------------------------------+\n | Type | Meaning |\n +===========+============================================================+\n | "\'e\'" | Exponent notation. Prints the number in scientific |\n +-----------+------------------------------------------------------------+\n | "\'E\'" | Exponent notation. Same as "\'e\'" except it uses an upper |\n +-----------+------------------------------------------------------------+\n | "\'f\'" | Fixed point. Displays the number as a fixed-point number. |\n +-----------+------------------------------------------------------------+\n | "\'F\'" | Fixed point. Same as "\'f\'", but converts "nan" to "NAN" |\n +-----------+------------------------------------------------------------+\n | "\'g\'" | General format. For a given precision "p >= 1", this |\n +-----------+------------------------------------------------------------+\n | "\'G\'" | General format. Same as "\'g\'" except switches to "\'E\'" if |\n +-----------+------------------------------------------------------------+\n | "\'n\'" | Number. This is the same as "\'g\'", except that it uses the |\n +-----------+------------------------------------------------------------+\n | "\'%\'" | Percentage. Multiplies the number by 100 and displays in |\n +-----------+------------------------------------------------------------+\n | None | Similar to "\'g\'", except with at least one digit past the |\n +-----------+------------------------------------------------------------+\n\n\nFormat examples\n===============\n\nThis section contains examples of the new format syntax and comparison\nwith the old "%"-formatting.\n\nIn most of the cases the syntax is similar to the old "%"-formatting,\nwith the addition of the "{}" and with ":" used instead of "%". For\nexample, "\'%03.2f\'" can be translated to "\'{:03.2f}\'".\n\nThe new format syntax also supports new and different options, shown\nin the follow examples.\n\nAccessing arguments by position:\n\n >>> \'{0}, {1}, {2}\'.format(\'a\', \'b\', \'c\')\n \'a, b, c\'\n >>> \'{}, {}, {}\'.format(\'a\', \'b\', \'c\') # 3.1+ only\n \'a, b, c\'\n >>> \'{2}, {1}, {0}\'.format(\'a\', \'b\', \'c\')\n \'c, b, a\'\n >>> \'{2}, {1}, {0}\'.format(*\'abc\') # unpacking argument sequence\n \'c, b, a\'\n >>> \'{0}{1}{0}\'.format(\'abra\', \'cad\') # arguments\' indices can be repeated\n \'abracadabra\'\n\nAccessing arguments by name:\n\n >>> \'Coordinates: {latitude}, {longitude}\'.format(latitude=\'37.24N\', longitude=\'-115.81W\')\n \'Coordinates: 37.24N, -115.81W\'\n >>> coord = {\'latitude\': \'37.24N\', \'longitude\': \'-115.81W\'}\n >>> \'Coordinates: {latitude}, {longitude}\'.format(**coord)\n \'Coordinates: 37.24N, -115.81W\'\n\nAccessing arguments\' attributes:\n\n >>> c = 3-5j\n >>> (\'The complex number {0} is formed from the real part {0.real} \'\n ... \'and the imaginary part {0.imag}.\').format(c)\n \'The complex number (3-5j) is formed from the real part 3.0 and the imaginary part -5.0.\'\n >>> class Point:\n ... def __init__(self, x, y):\n ... self.x, self.y = x, y\n ... def __str__(self):\n ... 
return \'Point({self.x}, {self.y})\'.format(self=self)\n ...\n >>> str(Point(4, 2))\n \'Point(4, 2)\'\n\nAccessing arguments\' items:\n\n >>> coord = (3, 5)\n >>> \'X: {0[0]}; Y: {0[1]}\'.format(coord)\n \'X: 3; Y: 5\'\n\nReplacing "%s" and "%r":\n\n >>> "repr() shows quotes: {!r}; str() doesn\'t: {!s}".format(\'test1\', \'test2\')\n "repr() shows quotes: \'test1\'; str() doesn\'t: test2"\n\nAligning the text and specifying a width:\n\n >>> \'{:<30}\'.format(\'left aligned\')\n \'left aligned \'\n >>> \'{:>30}\'.format(\'right aligned\')\n \' right aligned\'\n >>> \'{:^30}\'.format(\'centered\')\n \' centered \'\n >>> \'{:*^30}\'.format(\'centered\') # use \'*\' as a fill char\n \'***********centered***********\'\n\nReplacing "%+f", "%-f", and "% f" and specifying a sign:\n\n >>> \'{:+f}; {:+f}\'.format(3.14, -3.14) # show it always\n \'+3.140000; -3.140000\'\n >>> \'{: f}; {: f}\'.format(3.14, -3.14) # show a space for positive numbers\n \' 3.140000; -3.140000\'\n >>> \'{:-f}; {:-f}\'.format(3.14, -3.14) # show only the minus -- same as \'{:f}; {:f}\'\n \'3.140000; -3.140000\'\n\nReplacing "%x" and "%o" and converting the value to different bases:\n\n >>> # format also supports binary numbers\n >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: {0:b}".format(42)\n \'int: 42; hex: 2a; oct: 52; bin: 101010\'\n >>> # with 0x, 0o, or 0b as prefix:\n >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: {0:#b}".format(42)\n \'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010\'\n\nUsing the comma as a thousands separator:\n\n >>> \'{:,}\'.format(1234567890)\n \'1,234,567,890\'\n\nExpressing a percentage:\n\n >>> points = 19\n >>> total = 22\n >>> \'Correct answers: {:.2%}\'.format(points/total)\n \'Correct answers: 86.36%\'\n\nUsing type-specific formatting:\n\n >>> import datetime\n >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n >>> \'{:%Y-%m-%d %H:%M:%S}\'.format(d)\n \'2010-07-04 12:15:58\'\n\nNesting arguments and more complex examples:\n\n >>> for align, text in zip(\'<^>\', [\'left\', \'center\', \'right\']):\n ... \'{0:{fill}{align}16}\'.format(text, fill=align, align=align)\n ...\n \'left<<<<<<<<<<<<\'\n \'^^^^^center^^^^^\'\n \'>>>>>>>>>>>right\'\n >>>\n >>> octets = [192, 168, 0, 1]\n >>> \'{:02X}{:02X}{:02X}{:02X}\'.format(*octets)\n \'C0A80001\'\n >>> int(_, 16)\n 3232235521\n >>>\n >>> width = 5\n >>> for num in range(5,12): #doctest: +NORMALIZE_WHITESPACE\n ... for base in \'dXob\':\n ... print(\'{0:{width}{base}}\'.format(num, base=base, width=width), end=\' \')\n ... print()\n ...\n 5 5 5 101\n 6 6 6 110\n 7 7 7 111\n 8 8 10 1000\n 9 9 11 1001\n 10 A 12 1010\n 11 B 13 1011\n', - 'function': '\nFunction definitions\n********************\n\nA function definition defines a user-defined function object (see\nsection *The standard type hierarchy*):\n\n funcdef ::= [decorators] "def" funcname "(" [parameter_list] ")" ["->" expression] ":" suite\n decorators ::= decorator+\n decorator ::= "@" dotted_name ["(" [parameter_list [","]] ")"] NEWLINE\n dotted_name ::= identifier ("." identifier)*\n parameter_list ::= (defparameter ",")*\n ( "*" [parameter] ("," defparameter)* ["," "**" parameter]\n | "**" parameter\n | defparameter [","] )\n parameter ::= identifier [":" expression]\n defparameter ::= parameter ["=" expression]\n funcname ::= identifier\n\nA function definition is an executable statement. Its execution binds\nthe function name in the current local namespace to a function object\n(a wrapper around the executable code for the function). 
This\nfunction object contains a reference to the current global namespace\nas the global namespace to be used when the function is called.\n\nThe function definition does not execute the function body; this gets\nexecuted only when the function is called. [3]\n\nA function definition may be wrapped by one or more *decorator*\nexpressions. Decorator expressions are evaluated when the function is\ndefined, in the scope that contains the function definition. The\nresult must be a callable, which is invoked with the function object\nas the only argument. The returned value is bound to the function name\ninstead of the function object. Multiple decorators are applied in\nnested fashion. For example, the following code\n\n @f1(arg)\n @f2\n def func(): pass\n\nis equivalent to\n\n def func(): pass\n func = f1(arg)(f2(func))\n\nWhen one or more *parameters* have the form *parameter* "="\n*expression*, the function is said to have "default parameter values."\nFor a parameter with a default value, the corresponding *argument* may\nbe omitted from a call, in which case the parameter\'s default value is\nsubstituted. If a parameter has a default value, all following\nparameters up until the ""*"" must also have a default value --- this\nis a syntactic restriction that is not expressed by the grammar.\n\n**Default parameter values are evaluated from left to right when the\nfunction definition is executed.** This means that the expression is\nevaluated once, when the function is defined, and that the same "pre-\ncomputed" value is used for each call. This is especially important\nto understand when a default parameter is a mutable object, such as a\nlist or a dictionary: if the function modifies the object (e.g. by\nappending an item to a list), the default value is in effect modified.\nThis is generally not what was intended. A way around this is to use\n"None" as the default, and explicitly test for it in the body of the\nfunction, e.g.:\n\n def whats_on_the_telly(penguin=None):\n if penguin is None:\n penguin = []\n penguin.append("property of the zoo")\n return penguin\n\nFunction call semantics are described in more detail in section\n*Calls*. A function call always assigns values to all parameters\nmentioned in the parameter list, either from position arguments, from\nkeyword arguments, or from default values. If the form\n""*identifier"" is present, it is initialized to a tuple receiving any\nexcess positional parameters, defaulting to the empty tuple. If the\nform ""**identifier"" is present, it is initialized to a new\ndictionary receiving any excess keyword arguments, defaulting to a new\nempty dictionary. Parameters after ""*"" or ""*identifier"" are\nkeyword-only parameters and may only be passed used keyword arguments.\n\nParameters may have annotations of the form "": expression"" following\nthe parameter name. Any parameter may have an annotation even those\nof the form "*identifier" or "**identifier". Functions may have\n"return" annotation of the form ""-> expression"" after the parameter\nlist. These annotations can be any valid Python expression and are\nevaluated when the function definition is executed. Annotations may\nbe evaluated in a different order than they appear in the source code.\nThe presence of annotations does not change the semantics of a\nfunction. 
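By way of illustration (the names below are invented), the parameter forms described above can be combined: excess positional arguments go to the ""*identifier"" tuple, excess keyword arguments to the ""**identifier"" dictionary, and parameters after ""*"" are keyword-only:

   def f(a, b=2, *args, mode='strict', **extra):
       return (a, b, args, mode, extra)

   f(1)                                   # (1, 2, (), 'strict', {})
   f(1, 3, 4, 5, mode='lax', debug=True)  # (1, 3, (4, 5), 'lax', {'debug': True})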
The annotation values are available as values of a\ndictionary keyed by the parameters\' names in the "__annotations__"\nattribute of the function object.\n\nIt is also possible to create anonymous functions (functions not bound\nto a name), for immediate use in expressions. This uses lambda\nexpressions, described in section *Lambdas*. Note that the lambda\nexpression is merely a shorthand for a simplified function definition;\na function defined in a ""def"" statement can be passed around or\nassigned to another name just like a function defined by a lambda\nexpression. The ""def"" form is actually more powerful since it\nallows the execution of multiple statements and annotations.\n\n**Programmer\'s note:** Functions are first-class objects. A ""def""\nstatement executed inside a function definition defines a local\nfunction that can be returned or passed around. Free variables used\nin the nested function can access the local variables of the function\ncontaining the def. See section *Naming and binding* for details.\n\nSee also:\n\n **PEP 3107** - Function Annotations\n The original specification for function annotations.\n', - 'global': '\nThe "global" statement\n**********************\n\n global_stmt ::= "global" identifier ("," identifier)*\n\nThe "global" statement is a declaration which holds for the entire\ncurrent code block. It means that the listed identifiers are to be\ninterpreted as globals. It would be impossible to assign to a global\nvariable without "global", although free variables may refer to\nglobals without being declared global.\n\nNames listed in a "global" statement must not be used in the same code\nblock textually preceding that "global" statement.\n\nNames listed in a "global" statement must not be defined as formal\nparameters or in a "for" loop control target, "class" definition,\nfunction definition, or "import" statement.\n\n**CPython implementation detail:** The current implementation does not\nenforce the latter two restrictions, but programs should not abuse\nthis freedom, as future implementations may enforce them or silently\nchange the meaning of the program.\n\n**Programmer\'s note:** the "global" is a directive to the parser. It\napplies only to code parsed at the same time as the "global"\nstatement. In particular, a "global" statement contained in a string\nor code object supplied to the built-in "exec()" function does not\naffect the code block *containing* the function call, and code\ncontained in such a string is unaffected by "global" statements in the\ncode containing the function call. The same applies to the "eval()"\nand "compile()" functions.\n', - 'id-classes': '\nReserved classes of identifiers\n*******************************\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n"_*"\n Not imported by "from module import *". The special identifier "_"\n is used in the interactive interpreter to store the result of the\n last evaluation; it is stored in the "builtins" module. When not\n in interactive mode, "_" has no special meaning and is not defined.\n See section *The import statement*.\n\n Note: The name "_" is often used in conjunction with\n internationalization; refer to the documentation for the\n "gettext" module for more information on this convention.\n\n"__*__"\n System-defined names. These names are defined by the interpreter\n and its implementation (including the standard library). 
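As a small illustration of the "global" statement described above (the function and variable names are arbitrary): assignment inside a function creates a local name unless the name has been declared global:

   counter = 0

   def bump():
       global counter      # without this, "counter += 1" would raise UnboundLocalError
       counter += 1

   bump()
   bump()
   # counter is now 2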
Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of "__*__" names, in any context, that does not\n follow explicitly documented use, is subject to breakage without\n warning.\n\n"__*"\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', - 'identifiers': '\nIdentifiers and keywords\n************************\n\nIdentifiers (also referred to as *names*) are described by the\nfollowing lexical definitions.\n\nThe syntax of identifiers in Python is based on the Unicode standard\nannex UAX-31, with elaboration and changes as defined below; see also\n**PEP 3131** for further details.\n\nWithin the ASCII range (U+0001..U+007F), the valid characters for\nidentifiers are the same as in Python 2.x: the uppercase and lowercase\nletters "A" through "Z", the underscore "_" and, except for the first\ncharacter, the digits "0" through "9".\n\nPython 3.0 introduces additional characters from outside the ASCII\nrange (see **PEP 3131**). For these characters, the classification\nuses the version of the Unicode Character Database as included in the\n"unicodedata" module.\n\nIdentifiers are unlimited in length. Case is significant.\n\n identifier ::= xid_start xid_continue*\n id_start ::= \n id_continue ::= \n xid_start ::= \n xid_continue ::= \n\nThe Unicode category codes mentioned above stand for:\n\n* *Lu* - uppercase letters\n\n* *Ll* - lowercase letters\n\n* *Lt* - titlecase letters\n\n* *Lm* - modifier letters\n\n* *Lo* - other letters\n\n* *Nl* - letter numbers\n\n* *Mn* - nonspacing marks\n\n* *Mc* - spacing combining marks\n\n* *Nd* - decimal numbers\n\n* *Pc* - connector punctuations\n\n* *Other_ID_Start* - explicit list of characters in PropList.txt to\n support backwards compatibility\n\n* *Other_ID_Continue* - likewise\n\nAll identifiers are converted into the normal form NFKC while parsing;\ncomparison of identifiers is based on NFKC.\n\nA non-normative HTML file listing all valid identifier characters for\nUnicode 4.1 can be found at http://www.dcl.hpi.uni-\npotsdam.de/home/loewis/table-3131.html.\n\n\nKeywords\n========\n\nThe following identifiers are used as reserved words, or *keywords* of\nthe language, and cannot be used as ordinary identifiers. They must\nbe spelled exactly as written here:\n\n False class finally is return\n None continue for lambda try\n True def from nonlocal while\n and del global not with\n as elif if or yield\n assert else import pass\n break except in raise\n\n\nReserved classes of identifiers\n===============================\n\nCertain classes of identifiers (besides keywords) have special\nmeanings. These classes are identified by the patterns of leading and\ntrailing underscore characters:\n\n"_*"\n Not imported by "from module import *". The special identifier "_"\n is used in the interactive interpreter to store the result of the\n last evaluation; it is stored in the "builtins" module. When not\n in interactive mode, "_" has no special meaning and is not defined.\n See section *The import statement*.\n\n Note: The name "_" is often used in conjunction with\n internationalization; refer to the documentation for the\n "gettext" module for more information on this convention.\n\n"__*__"\n System-defined names. 
These names are defined by the interpreter\n and its implementation (including the standard library). Current\n system names are discussed in the *Special method names* section\n and elsewhere. More will likely be defined in future versions of\n Python. *Any* use of "__*__" names, in any context, that does not\n follow explicitly documented use, is subject to breakage without\n warning.\n\n"__*"\n Class-private names. Names in this category, when used within the\n context of a class definition, are re-written to use a mangled form\n to help avoid name clashes between "private" attributes of base and\n derived classes. See section *Identifiers (Names)*.\n', - 'if': '\nThe "if" statement\n******************\n\nThe "if" statement is used for conditional execution:\n\n if_stmt ::= "if" expression ":" suite\n ( "elif" expression ":" suite )*\n ["else" ":" suite]\n\nIt selects exactly one of the suites by evaluating the expressions one\nby one until one is found to be true (see section *Boolean operations*\nfor the definition of true and false); then that suite is executed\n(and no other part of the "if" statement is executed or evaluated).\nIf all expressions are false, the suite of the "else" clause, if\npresent, is executed.\n', - 'imaginary': '\nImaginary literals\n******************\n\nImaginary literals are described by the following lexical definitions:\n\n imagnumber ::= (floatnumber | intpart) ("j" | "J")\n\nAn imaginary literal yields a complex number with a real part of 0.0.\nComplex numbers are represented as a pair of floating point numbers\nand have the same restrictions on their range. To create a complex\nnumber with a nonzero real part, add a floating point number to it,\ne.g., "(3+4j)". Some examples of imaginary literals:\n\n 3.14j 10.j 10j .001j 1e100j 3.14e-10j\n', - 'import': '\nThe "import" statement\n**********************\n\n import_stmt ::= "import" module ["as" name] ( "," module ["as" name] )*\n | "from" relative_module "import" identifier ["as" name]\n ( "," identifier ["as" name] )*\n | "from" relative_module "import" "(" identifier ["as" name]\n ( "," identifier ["as" name] )* [","] ")"\n | "from" module "import" "*"\n module ::= (identifier ".")* identifier\n relative_module ::= "."* module | "."+\n name ::= identifier\n\nThe basic import statement (no "from" clause) is executed in two\nsteps:\n\n1. find a module, loading and initializing it if necessary\n\n2. define a name or names in the local namespace for the scope where\n the "import" statement occurs.\n\nWhen the statement contains multiple clauses (separated by commas) the\ntwo steps are carried out separately for each clause, just as though\nthe clauses had been separated out into individiual import statements.\n\nThe details of the first step, finding and loading modules is\ndescribed in greater detail in the section on the *import system*,\nwhich also describes the various types of packages and modules that\ncan be imported, as well as all the hooks that can be used to\ncustomize the import system. 
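To illustrate the class-private name mangling described above (the class and attribute names are arbitrary): within a class body, an identifier of the form "__name" is rewritten to "_ClassName__name":

   class Base:
       def __init__(self):
           self.__token = 'base'      # stored as _Base__token

   b = Base()
   vars(b)            # {'_Base__token': 'base'}
   b._Base__token     # 'base' -- the mangled name remains reachable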
Note that failures in this step may\nindicate either that the module could not be located, *or* that an\nerror occurred while initializing the module, which includes execution\nof the module\'s code.\n\nIf the requested module is retrieved successfully, it will be made\navailable in the local namespace in one of three ways:\n\n* If the module name is followed by "as", then the name following "as"\n is bound directly to the imported module.\n\n* If no other name is specified, and the module being imported is a\n top level module, the module\'s name is bound in the local namespace\n as a reference to the imported module\n\n* If the module being imported is *not* a top level module, then the\n name of the top level package that contains the module is bound in\n the local namespace as a reference to the top level package. The\n imported module must be accessed using its full qualified name\n rather than directly\n\nThe "from" form uses a slightly more complex process:\n\n1. find the module specified in the "from" clause loading and\n initializing it if necessary;\n\n2. for each of the identifiers specified in the "import" clauses:\n\n 1. check if the imported module has an attribute by that name\n\n 2. if not, attempt to import a submodule with that name and then\n check the imported module again for that attribute\n\n 3. if the attribute is not found, "ImportError" is raised.\n\n 4. otherwise, a reference to that value is bound in the local\n namespace, using the name in the "as" clause if it is present,\n otherwise using the attribute name\n\nExamples:\n\n import foo # foo imported and bound locally\n import foo.bar.baz # foo.bar.baz imported, foo bound locally\n import foo.bar.baz as fbb # foo.bar.baz imported and bound as fbb\n from foo.bar import baz # foo.bar.baz imported and bound as baz\n from foo import attr # foo imported and foo.attr bound as attr\n\nIf the list of identifiers is replaced by a star ("\'*\'"), all public\nnames defined in the module are bound in the local namespace for the\nscope where the "import" statement occurs.\n\nThe *public names* defined by a module are determined by checking the\nmodule\'s namespace for a variable named "__all__"; if defined, it must\nbe a sequence of strings which are names defined or imported by that\nmodule. The names given in "__all__" are all considered public and\nare required to exist. If "__all__" is not defined, the set of public\nnames includes all names found in the module\'s namespace which do not\nbegin with an underscore character ("\'_\'"). "__all__" should contain\nthe entire public API. It is intended to avoid accidentally exporting\nitems that are not part of the API (such as library modules which were\nimported and used within the module).\n\nThe "from" form with "*" may only occur in a module scope. The wild\ncard form of import --- "import *" --- is only allowed at the module\nlevel. Attempting to use it in class or function definitions will\nraise a "SyntaxError".\n\nWhen specifying what module to import you do not have to specify the\nabsolute name of the module. When a module or package is contained\nwithin another package it is possible to make a relative import within\nthe same top package without having to mention the package name. By\nusing leading dots in the specified module or package after "from" you\ncan specify how high to traverse up the current package hierarchy\nwithout specifying exact names. One leading dot means the current\npackage where the module making the import exists. 
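As a sketch of the "__all__" behaviour described above, consider a hypothetical module "shapes.py": only the listed names are bound by "from shapes import *", while the imported helper module and the underscore-prefixed function are not exported:

   __all__ = ['Circle', 'area']

   import math                 # used internally, not re-exported by "import *"

   class Circle:
       def __init__(self, r):
           self.r = r

   def area(c):
       return math.pi * c.r ** 2

   def _debug_dump(c):         # underscore prefix: excluded even without __all__
       print(vars(c))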
Two dots means up\none package level. Three dots is up two levels, etc. So if you execute\n"from . import mod" from a module in the "pkg" package then you will\nend up importing "pkg.mod". If you execute "from ..subpkg2 import mod"\nfrom within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\nspecification for relative imports is contained within **PEP 328**.\n\n"importlib.import_module()" is provided to support applications that\ndetermine which modules need to be loaded dynamically.\n\n\nFuture statements\n=================\n\nA *future statement* is a directive to the compiler that a particular\nmodule should be compiled using syntax or semantics that will be\navailable in a specified future release of Python. The future\nstatement is intended to ease migration to future versions of Python\nthat introduce incompatible changes to the language. It allows use of\nthe new features on a per-module basis before the release in which the\nfeature becomes standard.\n\n future_statement ::= "from" "__future__" "import" feature ["as" name]\n ("," feature ["as" name])*\n | "from" "__future__" "import" "(" feature ["as" name]\n ("," feature ["as" name])* [","] ")"\n feature ::= identifier\n name ::= identifier\n\nA future statement must appear near the top of the module. The only\nlines that can appear before a future statement are:\n\n* the module docstring (if any),\n\n* comments,\n\n* blank lines, and\n\n* other future statements.\n\nThe features recognized by Python 3.0 are "absolute_import",\n"division", "generators", "unicode_literals", "print_function",\n"nested_scopes" and "with_statement". They are all redundant because\nthey are always enabled, and only kept for backwards compatibility.\n\nA future statement is recognized and treated specially at compile\ntime: Changes to the semantics of core constructs are often\nimplemented by generating different code. It may even be the case\nthat a new feature introduces new incompatible syntax (such as a new\nreserved word), in which case the compiler may need to parse the\nmodule differently. Such decisions cannot be pushed off until\nruntime.\n\nFor any given release, the compiler knows which feature names have\nbeen defined, and raises a compile-time error if a future statement\ncontains a feature not known to it.\n\nThe direct runtime semantics are the same as for any import statement:\nthere is a standard module "__future__", described later, and it will\nbe imported in the usual way at the time the future statement is\nexecuted.\n\nThe interesting runtime semantics depend on the specific feature\nenabled by the future statement.\n\nNote that there is nothing special about the statement:\n\n import __future__ [as name]\n\nThat is not a future statement; it\'s an ordinary import statement with\nno special semantics or syntax restrictions.\n\nCode compiled by calls to the built-in functions "exec()" and\n"compile()" that occur in a module "M" containing a future statement\nwill, by default, use the new syntax or semantics associated with the\nfuture statement. This can be controlled by optional arguments to\n"compile()" --- see the documentation of that function for details.\n\nA future statement typed at an interactive interpreter prompt will\ntake effect for the rest of the interpreter session. 
If an\ninterpreter is started with the *-i* option, is passed a script name\nto execute, and the script includes a future statement, it will be in\neffect in the interactive session started after the script is\nexecuted.\n\nSee also:\n\n **PEP 236** - Back to the __future__\n The original proposal for the __future__ mechanism.\n', - 'in': '\nComparisons\n***********\n\nUnlike C, all comparison operations in Python have the same priority,\nwhich is lower than that of any arithmetic, shifting or bitwise\noperation. Also unlike C, expressions like "a < b < c" have the\ninterpretation that is conventional in mathematics:\n\n comparison ::= or_expr ( comp_operator or_expr )*\n comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n | "is" ["not"] | ["not"] "in"\n\nComparisons yield boolean values: "True" or "False".\n\nComparisons can be chained arbitrarily, e.g., "x < y <= z" is\nequivalent to "x < y and y <= z", except that "y" is evaluated only\nonce (but in both cases "z" is not evaluated at all when "x < y" is\nfound to be false).\n\nFormally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and *op1*,\n*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... y\nopN z" is equivalent to "a op1 b and b op2 c and ... y opN z", except\nthat each expression is evaluated at most once.\n\nNote that "a op1 b op2 c" doesn\'t imply any kind of comparison between\n*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\nperhaps not pretty).\n\nThe operators "<", ">", "==", ">=", "<=", and "!=" compare the values\nof two objects. The objects need not have the same type. If both are\nnumbers, they are converted to a common type. Otherwise, the "==" and\n"!=" operators *always* consider objects of different types to be\nunequal, while the "<", ">", ">=" and "<=" operators raise a\n"TypeError" when comparing objects of different types that do not\nimplement these operators for the given pair of types. You can\ncontrol comparison behavior of objects of non-built-in types by\ndefining rich comparison methods like "__gt__()", described in section\n*Basic customization*.\n\nComparison of objects of the same type depends on the type:\n\n* Numbers are compared arithmetically.\n\n* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. The are\n identical to themselves, "x is x" but are not equal to themselves,\n "x != x". Additionally, comparing any value to a not-a-number value\n will return "False". For example, both "3 < float(\'NaN\')" and\n "float(\'NaN\') < 3" will return "False".\n\n* Bytes objects are compared lexicographically using the numeric\n values of their elements.\n\n* Strings are compared lexicographically using the numeric equivalents\n (the result of the built-in function "ord()") of their characters.\n [3] String and bytes object can\'t be compared!\n\n* Tuples and lists are compared lexicographically using comparison of\n corresponding elements. This means that to compare equal, each\n element must compare equal and the two sequences must be of the same\n type and have the same length.\n\n If not equal, the sequences are ordered the same as their first\n differing elements. For example, "[1,2,x] <= [1,2,y]" has the same\n value as "x <= y". If the corresponding element does not exist, the\n shorter sequence is ordered first (for example, "[1,2] < [1,2,3]").\n\n* Mappings (dictionaries) compare equal if and only if they have the\n same "(key, value)" pairs. 
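A couple of concrete cases of the comparison rules described above (values arbitrary): comparisons chain, and not-a-number values are identical to themselves without being equal to themselves:

   >>> x, y, z = 1, 2, 3
   >>> x < y <= z            # same as (x < y) and (y <= z)
   True
   >>> 1 < 3 > 2             # legal; implies nothing about 1 versus 2
   True
   >>> nan = float('NaN')
   >>> nan == nan
   False
   >>> nan is nan
   True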
Order comparisons "(\'<\', \'<=\', \'>=\',\n \'>\')" raise "TypeError".\n\n* Sets and frozensets define comparison operators to mean subset and\n superset tests. Those relations do not define total orderings (the\n two sets "{1,2}" and {2,3} are not equal, nor subsets of one\n another, nor supersets of one another). Accordingly, sets are not\n appropriate arguments for functions which depend on total ordering.\n For example, "min()", "max()", and "sorted()" produce undefined\n results given a list of sets as inputs.\n\n* Most other objects of built-in types compare unequal unless they are\n the same object; the choice whether one object is considered smaller\n or larger than another one is made arbitrarily but consistently\n within one execution of a program.\n\nComparison of objects of the differing types depends on whether either\nof the types provide explicit support for the comparison. Most\nnumeric types can be compared with one another. When cross-type\ncomparison is not supported, the comparison method returns\n"NotImplemented".\n\nThe operators "in" and "not in" test for membership. "x in s"\nevaluates to true if *x* is a member of *s*, and false otherwise. "x\nnot in s" returns the negation of "x in s". All built-in sequences\nand set types support this as well as dictionary, for which "in" tests\nwhether a the dictionary has a given key. For container types such as\nlist, tuple, set, frozenset, dict, or collections.deque, the\nexpression "x in y" is equivalent to "any(x is e or x == e for e in\ny)".\n\nFor the string and bytes types, "x in y" is true if and only if *x* is\na substring of *y*. An equivalent test is "y.find(x) != -1". Empty\nstrings are always considered to be a substring of any other string,\nso """ in "abc"" will return "True".\n\nFor user-defined classes which define the "__contains__()" method, "x\nin y" is true if and only if "y.__contains__(x)" is true.\n\nFor user-defined classes which do not define "__contains__()" but do\ndefine "__iter__()", "x in y" is true if some value "z" with "x == z"\nis produced while iterating over "y". If an exception is raised\nduring the iteration, it is as if "in" raised that exception.\n\nLastly, the old-style iteration protocol is tried: if a class defines\n"__getitem__()", "x in y" is true if and only if there is a non-\nnegative integer index *i* such that "x == y[i]", and all lower\ninteger indices do not raise "IndexError" exception. (If any other\nexception is raised, it is as if "in" raised that exception).\n\nThe operator "not in" is defined to have the inverse true value of\n"in".\n\nThe operators "is" and "is not" test for object identity: "x is y" is\ntrue if and only if *x* and *y* are the same object. "x is not y"\nyields the inverse truth value. 
[4]\n', - 'integers': '\nInteger literals\n****************\n\nInteger literals are described by the following lexical definitions:\n\n integer ::= decimalinteger | octinteger | hexinteger | bininteger\n decimalinteger ::= nonzerodigit digit* | "0"+\n nonzerodigit ::= "1"..."9"\n digit ::= "0"..."9"\n octinteger ::= "0" ("o" | "O") octdigit+\n hexinteger ::= "0" ("x" | "X") hexdigit+\n bininteger ::= "0" ("b" | "B") bindigit+\n octdigit ::= "0"..."7"\n hexdigit ::= digit | "a"..."f" | "A"..."F"\n bindigit ::= "0" | "1"\n\nThere is no limit for the length of integer literals apart from what\ncan be stored in available memory.\n\nNote that leading zeros in a non-zero decimal number are not allowed.\nThis is for disambiguation with C-style octal literals, which Python\nused before version 3.0.\n\nSome examples of integer literals:\n\n 7 2147483647 0o177 0b100110111\n 3 79228162514264337593543950336 0o377 0x100000000\n 79228162514264337593543950336 0xdeadbeef\n', - 'lambda': '\nLambdas\n*******\n\n lambda_expr ::= "lambda" [parameter_list]: expression\n lambda_expr_nocond ::= "lambda" [parameter_list]: expression_nocond\n\nLambda expressions (sometimes called lambda forms) have the same\nsyntactic position as expressions. They are a shorthand to create\nanonymous functions; the expression "lambda arguments: expression"\nyields a function object. The unnamed object behaves like a function\nobject defined with\n\n def (arguments):\n return expression\n\nSee section *Function definitions* for the syntax of parameter lists.\nNote that functions created with lambda expressions cannot contain\nstatements or annotations.\n', - 'lists': '\nList displays\n*************\n\nA list display is a possibly empty series of expressions enclosed in\nsquare brackets:\n\n list_display ::= "[" [expression_list | comprehension] "]"\n\nA list display yields a new list object, the contents being specified\nby either a list of expressions or a comprehension. When a comma-\nseparated list of expressions is supplied, its elements are evaluated\nfrom left to right and placed into the list object in that order.\nWhen a comprehension is supplied, the list is constructed from the\nelements resulting from the comprehension.\n', - 'naming': '\nNaming and binding\n******************\n\n*Names* refer to objects. Names are introduced by name binding\noperations. Each occurrence of a name in the program text refers to\nthe *binding* of that name established in the innermost function block\ncontaining the use.\n\nA *block* is a piece of Python program text that is executed as a\nunit. The following are blocks: a module, a function body, and a class\ndefinition. Each command typed interactively is a block. A script\nfile (a file given as standard input to the interpreter or specified\non the interpreter command line the first argument) is a code block.\nA script command (a command specified on the interpreter command line\nwith the \'**-c**\' option) is a code block. The string argument passed\nto the built-in functions "eval()" and "exec()" is a code block.\n\nA code block is executed in an *execution frame*. A frame contains\nsome administrative information (used for debugging) and determines\nwhere and how execution continues after the code block\'s execution has\ncompleted.\n\nA *scope* defines the visibility of a name within a block. If a local\nvariable is defined in a block, its scope includes that block. 
If the\ndefinition occurs in a function block, the scope extends to any blocks\ncontained within the defining one, unless a contained block introduces\na different binding for the name. The scope of names defined in a\nclass block is limited to the class block; it does not extend to the\ncode blocks of methods -- this includes comprehensions and generator\nexpressions since they are implemented using a function scope. This\nmeans that the following will fail:\n\n class A:\n a = 42\n b = list(a + i for i in range(10))\n\nWhen a name is used in a code block, it is resolved using the nearest\nenclosing scope. The set of all such scopes visible to a code block\nis called the block\'s *environment*.\n\nIf a name is bound in a block, it is a local variable of that block,\nunless declared as "nonlocal". If a name is bound at the module\nlevel, it is a global variable. (The variables of the module code\nblock are local and global.) If a variable is used in a code block\nbut not defined there, it is a *free variable*.\n\nWhen a name is not found at all, a "NameError" exception is raised.\nIf the name refers to a local variable that has not been bound, a\n"UnboundLocalError" exception is raised. "UnboundLocalError" is a\nsubclass of "NameError".\n\nThe following constructs bind names: formal parameters to functions,\n"import" statements, class and function definitions (these bind the\nclass or function name in the defining block), and targets that are\nidentifiers if occurring in an assignment, "for" loop header, or after\n"as" in a "with" statement or "except" clause. The "import" statement\nof the form "from ... import *" binds all names defined in the\nimported module, except those beginning with an underscore. This form\nmay only be used at the module level.\n\nA target occurring in a "del" statement is also considered bound for\nthis purpose (though the actual semantics are to unbind the name).\n\nEach assignment or import statement occurs within a block defined by a\nclass or function definition or at the module level (the top-level\ncode block).\n\nIf a name binding operation occurs anywhere within a code block, all\nuses of the name within the block are treated as references to the\ncurrent block. This can lead to errors when a name is used within a\nblock before it is bound. This rule is subtle. Python lacks\ndeclarations and allows name binding operations to occur anywhere\nwithin a code block. The local variables of a code block can be\ndetermined by scanning the entire text of the block for name binding\noperations.\n\nIf the "global" statement occurs within a block, all uses of the name\nspecified in the statement refer to the binding of that name in the\ntop-level namespace. Names are resolved in the top-level namespace by\nsearching the global namespace, i.e. the namespace of the module\ncontaining the code block, and the builtins namespace, the namespace\nof the module "builtins". The global namespace is searched first. If\nthe name is not found there, the builtins namespace is searched. The\nglobal statement must precede all uses of the name.\n\nThe builtins namespace associated with the execution of a code block\nis actually found by looking up the name "__builtins__" in its global\nnamespace; this should be a dictionary or a module (in the latter case\nthe module\'s dictionary is used). 
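To illustrate the local-variable rule described above (the function name is arbitrary): because a binding anywhere in a block makes the name local to the whole block, the read below fails even though a global "x" exists:

   x = 10

   def broken():
       print(x)    # raises UnboundLocalError: "x" is local because of the assignment below
       x = 20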
By default, when in the "__main__"\nmodule, "__builtins__" is the built-in module "builtins"; when in any\nother module, "__builtins__" is an alias for the dictionary of the\n"builtins" module itself. "__builtins__" can be set to a user-created\ndictionary to create a weak form of restricted execution.\n\n**CPython implementation detail:** Users should not touch\n"__builtins__"; it is strictly an implementation detail. Users\nwanting to override values in the builtins namespace should "import"\nthe "builtins" module and modify its attributes appropriately.\n\nThe namespace for a module is automatically created the first time a\nmodule is imported. The main module for a script is always called\n"__main__".\n\nThe "global" statement has the same scope as a name binding operation\nin the same block. If the nearest enclosing scope for a free variable\ncontains a global statement, the free variable is treated as a global.\n\nA class definition is an executable statement that may use and define\nnames. These references follow the normal rules for name resolution.\nThe namespace of the class definition becomes the attribute dictionary\nof the class. Names defined at the class scope are not visible in\nmethods.\n\n\nInteraction with dynamic features\n=================================\n\nThere are several cases where Python statements are illegal when used\nin conjunction with nested scopes that contain free variables.\n\nIf a variable is referenced in an enclosing scope, it is illegal to\ndelete the name. An error will be reported at compile time.\n\nIf the wild card form of import --- "import *" --- is used in a\nfunction and the function contains or is a nested block with free\nvariables, the compiler will raise a "SyntaxError".\n\nThe "eval()" and "exec()" functions do not have access to the full\nenvironment for resolving names. Names may be resolved in the local\nand global namespaces of the caller. Free variables are not resolved\nin the nearest enclosing namespace, but in the global namespace. [1]\nThe "exec()" and "eval()" functions have optional arguments to\noverride the global and local namespace. If only one namespace is\nspecified, it is used for both.\n', - 'nonlocal': '\nThe "nonlocal" statement\n************************\n\n nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n\nThe "nonlocal" statement causes the listed identifiers to refer to\npreviously bound variables in the nearest enclosing scope. This is\nimportant because the default behavior for binding is to search the\nlocal namespace first. The statement allows encapsulated code to\nrebind variables outside of the local scope besides the global\n(module) scope.\n\nNames listed in a "nonlocal" statement, unlike to those listed in a\n"global" statement, must refer to pre-existing bindings in an\nenclosing scope (the scope in which a new binding should be created\ncannot be determined unambiguously).\n\nNames listed in a "nonlocal" statement must not collide with pre-\nexisting bindings in the local scope.\n\nSee also:\n\n **PEP 3104** - Access to Names in Outer Scopes\n The specification for the "nonlocal" statement.\n', - 'numbers': '\nNumeric literals\n****************\n\nThere are three types of numeric literals: integers, floating point\nnumbers, and imaginary numbers. 
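As a sketch of the "nonlocal" statement described above (names arbitrary): the inner function rebinds a name in the nearest enclosing function scope instead of creating a new local:

   def make_counter():
       count = 0
       def bump():
           nonlocal count
           count += 1
           return count
       return bump

   c = make_counter()
   c(), c(), c()      # (1, 2, 3)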
There are no complex literals\n(complex numbers can be formed by adding a real number and an\nimaginary number).\n\nNote that numeric literals do not include a sign; a phrase like "-1"\nis actually an expression composed of the unary operator \'"-"\' and the\nliteral "1".\n', - 'numeric-types': '\nEmulating numeric types\n***********************\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. [2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. 
This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, if *x* is an instance of a class with an\n "__iadd__()" method, "x += y" is equivalent to "x = x.__iadd__(y)"\n . Otherwise, "x.__add__(y)" and "y.__radd__(x)" are considered, as\n with the evaluation of "x + y". In certain situations, augmented\n assignment can result in unexpected errors (see *Why does\n a_tuple[i] += [\'item\'] raise an exception when the addition\n works?*), but this behavior is in fact part of the data model.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n', - 'objects': '\nObjects, values and types\n*************************\n\n*Objects* are Python\'s abstraction for data. All data in a Python\nprogram is represented by objects or by relations between objects. (In\na sense, and in conformance to Von Neumann\'s model of a "stored\nprogram computer," code is also represented by objects.)\n\nEvery object has an identity, a type and a value. An object\'s\n*identity* never changes once it has been created; you may think of it\nas the object\'s address in memory. The \'"is"\' operator compares the\nidentity of two objects; the "id()" function returns an integer\nrepresenting its identity.\n\n**CPython implementation detail:** For CPython, "id(x)" is the memory\naddress where "x" is stored.\n\nAn object\'s type determines the operations that the object supports\n(e.g., "does it have a length?") and also defines the possible values\nfor objects of that type. The "type()" function returns an object\'s\ntype (which is an object itself). Like its identity, an object\'s\n*type* is also unchangeable. [1]\n\nThe *value* of some objects can change. Objects whose value can\nchange are said to be *mutable*; objects whose value is unchangeable\nonce they are created are called *immutable*. 
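A minimal sketch of the binary and reflected method protocol described in the numeric-types entry above (the "Metres" class is invented for illustration): return "NotImplemented" for unsupported operands so that Python can try the other operand's reflected method:

   class Metres:
       def __init__(self, value):
           self.value = value
       def __add__(self, other):
           if isinstance(other, Metres):
               return Metres(self.value + other.value)
           return NotImplemented
       def __radd__(self, other):      # lets sum() start from 0 + Metres(...)
           if other == 0:
               return Metres(self.value)
           return NotImplemented

   total = sum([Metres(2), Metres(3)])
   total.value                          # 5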
(The value of an\nimmutable container object that contains a reference to a mutable\nobject can change when the latter\'s value is changed; however the\ncontainer is still considered immutable, because the collection of\nobjects it contains cannot be changed. So, immutability is not\nstrictly the same as having an unchangeable value, it is more subtle.)\nAn object\'s mutability is determined by its type; for instance,\nnumbers, strings and tuples are immutable, while dictionaries and\nlists are mutable.\n\nObjects are never explicitly destroyed; however, when they become\nunreachable they may be garbage-collected. An implementation is\nallowed to postpone garbage collection or omit it altogether --- it is\na matter of implementation quality how garbage collection is\nimplemented, as long as no objects are collected that are still\nreachable.\n\n**CPython implementation detail:** CPython currently uses a reference-\ncounting scheme with (optional) delayed detection of cyclically linked\ngarbage, which collects most objects as soon as they become\nunreachable, but is not guaranteed to collect garbage containing\ncircular references. See the documentation of the "gc" module for\ninformation on controlling the collection of cyclic garbage. Other\nimplementations act differently and CPython may change. Do not depend\non immediate finalization of objects when they become unreachable (ex:\nalways close files).\n\nNote that the use of the implementation\'s tracing or debugging\nfacilities may keep objects alive that would normally be collectable.\nAlso note that catching an exception with a \'"try"..."except"\'\nstatement may keep objects alive.\n\nSome objects contain references to "external" resources such as open\nfiles or windows. It is understood that these resources are freed\nwhen the object is garbage-collected, but since garbage collection is\nnot guaranteed to happen, such objects also provide an explicit way to\nrelease the external resource, usually a "close()" method. Programs\nare strongly recommended to explicitly close such objects. The\n\'"try"..."finally"\' statement and the \'"with"\' statement provide\nconvenient ways to do this.\n\nSome objects contain references to other objects; these are called\n*containers*. Examples of containers are tuples, lists and\ndictionaries. The references are part of a container\'s value. In\nmost cases, when we talk about the value of a container, we imply the\nvalues, not the identities of the contained objects; however, when we\ntalk about the mutability of a container, only the identities of the\nimmediately contained objects are implied. So, if an immutable\ncontainer (like a tuple) contains a reference to a mutable object, its\nvalue changes if that mutable object is changed.\n\nTypes affect almost all aspects of object behavior. Even the\nimportance of object identity is affected in some sense: for immutable\ntypes, operations that compute new values may actually return a\nreference to any existing object with the same type and value, while\nfor mutable objects this is not allowed. E.g., after "a = 1; b = 1",\n"a" and "b" may or may not refer to the same object with the value\none, depending on the implementation, but after "c = []; d = []", "c"\nand "d" are guaranteed to refer to two different, unique, newly\ncreated empty lists. 
(Note that "c = d = []" assigns the same object\nto both "c" and "d".)\n', - 'operator-summary': '\nOperator precedence\n*******************\n\nThe following table summarizes the operator precedences in Python,\nfrom lowest precedence (least binding) to highest precedence (most\nbinding). Operators in the same box have the same precedence. Unless\nthe syntax is explicitly given, operators are binary. Operators in\nthe same box group left to right (except for comparisons, including\ntests, which all have the same precedence and chain from left to right\n--- see section *Comparisons* --- and exponentiation, which groups\nfrom right to left).\n\n+-------------------------------------------------+---------------------------------------+\n| Operator | Description |\n+=================================================+=======================================+\n| "lambda" | Lambda expression |\n+-------------------------------------------------+---------------------------------------+\n| "if" -- "else" | Conditional expression |\n+-------------------------------------------------+---------------------------------------+\n| "or" | Boolean OR |\n+-------------------------------------------------+---------------------------------------+\n| "and" | Boolean AND |\n+-------------------------------------------------+---------------------------------------+\n| "not" "x" | Boolean NOT |\n+-------------------------------------------------+---------------------------------------+\n| "in", "not in", "is", "is not", "<", "<=", ">", | Comparisons, including membership |\n| ">=", "!=", "==" | tests and identity tests |\n+-------------------------------------------------+---------------------------------------+\n| "|" | Bitwise OR |\n+-------------------------------------------------+---------------------------------------+\n| "^" | Bitwise XOR |\n+-------------------------------------------------+---------------------------------------+\n| "&" | Bitwise AND |\n+-------------------------------------------------+---------------------------------------+\n| "<<", ">>" | Shifts |\n+-------------------------------------------------+---------------------------------------+\n| "+", "-" | Addition and subtraction |\n+-------------------------------------------------+---------------------------------------+\n| "*", "/", "//", "%" | Multiplication, division, remainder |\n+-------------------------------------------------+---------------------------------------+\n| "+x", "-x", "~x" | Positive, negative, bitwise NOT |\n+-------------------------------------------------+---------------------------------------+\n| "**" | Exponentiation [6] |\n+-------------------------------------------------+---------------------------------------+\n| "x[index]", "x[index:index]", | Subscription, slicing, call, |\n| "x(arguments...)", "x.attribute" | attribute reference |\n+-------------------------------------------------+---------------------------------------+\n| "(expressions...)", "[expressions...]", "{key: | Binding or tuple display, list |\n| value...}", "{expressions...}" | display, dictionary display, set |\n+-------------------------------------------------+---------------------------------------+\n\n-[ Footnotes ]-\n\n[1] While "abs(x%y) < abs(y)" is true mathematically, for floats it\n may not be true numerically due to roundoff. 
For example, and\n assuming a platform on which a Python float is an IEEE 754 double-\n precision number, in order that "-1e-100 % 1e100" have the same\n sign as "1e100", the computed result is "-1e-100 + 1e100", which\n is numerically exactly equal to "1e100". The function\n "math.fmod()" returns a result whose sign matches the sign of the\n first argument instead, and so returns "-1e-100" in this case.\n Which approach is more appropriate depends on the application.\n\n[2] If x is very close to an exact integer multiple of y, it\'s\n possible for "x//y" to be one larger than "(x-x%y)//y" due to\n rounding. In such cases, Python returns the latter result, in\n order to preserve that "divmod(x,y)[0] * y + x % y" be very close\n to "x".\n\n[3] While comparisons between strings make sense at the byte level,\n they may be counter-intuitive to users. For example, the strings\n ""\\u00C7"" and ""\\u0327\\u0043"" compare differently, even though\n they both represent the same unicode character (LATIN CAPITAL\n LETTER C WITH CEDILLA). To compare strings in a human\n recognizable way, compare using "unicodedata.normalize()".\n\n[4] Due to automatic garbage-collection, free lists, and the dynamic\n nature of descriptors, you may notice seemingly unusual behaviour\n in certain uses of the "is" operator, like those involving\n comparisons between instance methods, or constants. Check their\n documentation for more info.\n\n[5] The "%" operator is also used for string formatting; the same\n precedence applies.\n\n[6] The power operator "**" binds less tightly than an arithmetic or\n bitwise unary operator on its right, that is, "2**-1" is "0.5".\n', - 'pass': '\nThe "pass" statement\n********************\n\n pass_stmt ::= "pass"\n\n"pass" is a null operation --- when it is executed, nothing happens.\nIt is useful as a placeholder when a statement is required\nsyntactically, but no code needs to be executed, for example:\n\n def f(arg): pass # a function that does nothing (yet)\n\n class C: pass # a class with no methods (yet)\n', - 'power': '\nThe power operator\n******************\n\nThe power operator binds more tightly than unary operators on its\nleft; it binds less tightly than unary operators on its right. The\nsyntax is:\n\n power ::= primary ["**" u_expr]\n\nThus, in an unparenthesized sequence of power and unary operators, the\noperators are evaluated from right to left (this does not constrain\nthe evaluation order for the operands): "-1**2" results in "-1".\n\nThe power operator has the same semantics as the built-in "pow()"\nfunction, when called with two arguments: it yields its left argument\nraised to the power of its right argument. The numeric arguments are\nfirst converted to a common type, and the result is of that type.\n\nFor int operands, the result has the same type as the operands unless\nthe second argument is negative; in that case, all arguments are\nconverted to float and a float result is delivered. For example,\n"10**2" returns "100", but "10**-2" returns "0.01".\n\nRaising "0.0" to a negative power results in a "ZeroDivisionError".\nRaising a negative number to a fractional power results in a "complex"\nnumber. (In earlier versions it raised a "ValueError".)\n', - 'raise': '\nThe "raise" statement\n*********************\n\n raise_stmt ::= "raise" [expression ["from" expression]]\n\nIf no expressions are present, "raise" re-raises the last exception\nthat was active in the current scope. 
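Bringing together the precedence footnotes and the power operator entry above (values arbitrary):

   >>> -7 % 3                  # "%" follows the sign of the right operand
   2
   >>> import math
   >>> math.fmod(-7, 3)        # math.fmod() follows the sign of the left operand
   -1.0
   >>> 2**-1                   # "**" binds less tightly than a unary operator on its right
   0.5
   >>> -1**2                   # but more tightly than one on its left: this is -(1**2)
   -1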
If no exception is active in\nthe current scope, a "RuntimeError" exception is raised indicating\nthat this is an error.\n\nOtherwise, "raise" evaluates the first expression as the exception\nobject. It must be either a subclass or an instance of\n"BaseException". If it is a class, the exception instance will be\nobtained when needed by instantiating the class with no arguments.\n\nThe *type* of the exception is the exception instance\'s class, the\n*value* is the instance itself.\n\nA traceback object is normally created automatically when an exception\nis raised and attached to it as the "__traceback__" attribute, which\nis writable. You can create an exception and set your own traceback in\none step using the "with_traceback()" exception method (which returns\nthe same exception instance, with its traceback set to its argument),\nlike so:\n\n raise Exception("foo occurred").with_traceback(tracebackobj)\n\nThe "from" clause is used for exception chaining: if given, the second\n*expression* must be another exception class or instance, which will\nthen be attached to the raised exception as the "__cause__" attribute\n(which is writable). If the raised exception is not handled, both\nexceptions will be printed:\n\n >>> try:\n ... print(1 / 0)\n ... except Exception as exc:\n ... raise RuntimeError("Something bad happened") from exc\n ...\n Traceback (most recent call last):\n File "", line 2, in \n ZeroDivisionError: int division or modulo by zero\n\n The above exception was the direct cause of the following exception:\n\n Traceback (most recent call last):\n File "", line 4, in \n RuntimeError: Something bad happened\n\nA similar mechanism works implicitly if an exception is raised inside\nan exception handler: the previous exception is then attached as the\nnew exception\'s "__context__" attribute:\n\n >>> try:\n ... print(1 / 0)\n ... except:\n ... raise RuntimeError("Something bad happened")\n ...\n Traceback (most recent call last):\n File "", line 2, in \n ZeroDivisionError: int division or modulo by zero\n\n During handling of the above exception, another exception occurred:\n\n Traceback (most recent call last):\n File "", line 4, in \n RuntimeError: Something bad happened\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information about handling exceptions is in section\n*The try statement*.\n', - 'return': '\nThe "return" statement\n**********************\n\n return_stmt ::= "return" [expression_list]\n\n"return" may only occur syntactically nested in a function definition,\nnot within a nested class definition.\n\nIf an expression list is present, it is evaluated, else "None" is\nsubstituted.\n\n"return" leaves the current function call with the expression list (or\n"None") as return value.\n\nWhen "return" passes control out of a "try" statement with a "finally"\nclause, that "finally" clause is executed before really leaving the\nfunction.\n\nIn a generator function, the "return" statement indicates that the\ngenerator is done and will cause "StopIteration" to be raised. The\nreturned value (if any) is used as an argument to construct\n"StopIteration" and becomes the "StopIteration.value" attribute.\n', - 'sequence-types': '\nEmulating container types\n*************************\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. 
The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which "0 <= k < N" where\n*N* is the length of the sequence, or slice objects, which define a\nrange of items. It is also recommended that mappings provide the\nmethods "keys()", "values()", "items()", "get()", "clear()",\n"setdefault()", "pop()", "popitem()", "copy()", and "update()"\nbehaving similar to those for Python\'s standard dictionary objects.\nThe "collections" module provides a "MutableMapping" abstract base\nclass to help create those methods from a base set of "__getitem__()",\n"__setitem__()", "__delitem__()", and "keys()". Mutable sequences\nshould provide methods "append()", "count()", "index()", "extend()",\n"insert()", "pop()", "remove()", "reverse()" and "sort()", like Python\nstandard list objects. Finally, sequence types should implement\naddition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods "__add__()", "__radd__()",\n"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" described\nbelow; they should not define other numerical operators. It is\nrecommended that both mappings and sequences implement the\n"__contains__()" method to allow efficient use of the "in" operator;\nfor mappings, "in" should search the mapping\'s keys; for sequences, it\nshould search through the values. It is further recommended that both\nmappings and sequences implement the "__iter__()" method to allow\nefficient iteration through the container; for mappings, "__iter__()"\nshould be the same as "keys()"; for sequences, it should iterate\nthrough the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function "len()". Should return\n the length of the object, an integer ">=" 0. Also, an object that\n doesn\'t define a "__bool__()" method and whose "__len__()" method\n returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement "operator.length_hint()". Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ">=" 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with "None".\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of "self[key]". For sequence types,\n the accepted keys should be integers and slice objects. Note that\n the special interpretation of negative indexes (if the class wishes\n to emulate a sequence type) is up to the "__getitem__()" method. If\n *key* is of an inappropriate type, "TypeError" may be raised; if of\n a value outside the set of indexes for the sequence (after any\n special interpretation of negative values), "IndexError" should be\n raised. For mapping types, if *key* is missing (not in the\n container), "KeyError" should be raised.\n\n Note: "for" loops expect that an "IndexError" will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to "self[key]". Same note as for\n "__getitem__()". 
This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the "__getitem__()" method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the "__getitem__()" method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method "keys()".\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the "reversed()" built-in to implement\n reverse iteration. It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the "__reversed__()" method is not provided, the "reversed()"\n built-in will fall back to using the sequence protocol ("__len__()"\n and "__getitem__()"). Objects that support the sequence protocol\n should only provide "__reversed__()" if they can provide an\n implementation that is more efficient than the one provided by\n "reversed()".\n\nThe membership test operators ("in" and "not in") are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define "__contains__()", the membership test\n first tries iteration via "__iter__()", then the old sequence\n iteration protocol via "__getitem__()", see *this section in the\n language reference*.\n', - 'shifting': '\nShifting operations\n*******************\n\nThe shifting operations have lower priority than the arithmetic\noperations:\n\n shift_expr ::= a_expr | shift_expr ( "<<" | ">>" ) a_expr\n\nThese operators accept integers as arguments. They shift the first\nargument to the left or right by the number of bits given by the\nsecond argument.\n\nA right shift by *n* bits is defined as floor division by "pow(2,n)".\nA left shift by *n* bits is defined as multiplication with "pow(2,n)".\n\nNote: In the current implementation, the right-hand operand is required to\n be at most "sys.maxsize". If the right-hand operand is larger than\n "sys.maxsize" an "OverflowError" exception is raised.\n', - 'slicings': '\nSlicings\n********\n\nA slicing selects a range of items in a sequence object (e.g., a\nstring, tuple or list). Slicings may be used as expressions or as\ntargets in assignment or "del" statements. 
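For illustration, the following minimal sketch ties together the sequence-emulation methods described above; it relies on the "collections.abc.Sequence" mixin to supply "__contains__()", "__iter__()" and "__reversed__()" from "__len__()" and "__getitem__()". The class name "Deck" and its contents are hypothetical, not part of the reference text.

   import collections.abc

   class Deck(collections.abc.Sequence):
       """A read-only sequence wrapping a list of card names."""

       def __init__(self, cards):
           self._cards = list(cards)

       def __len__(self):
           # Used by len() and, absent __bool__(), by truth testing.
           return len(self._cards)

       def __getitem__(self, index):
           # Accepts integer indexes and slice objects alike.
           return self._cards[index]

   deck = Deck(["ace", "king", "queen"])
   assert len(deck) == 3
   assert "king" in deck                      # __contains__ via the mixin
   assert list(reversed(deck)) == ["queen", "king", "ace"]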
The syntax for a slicing:\n\n slicing ::= primary "[" slice_list "]"\n slice_list ::= slice_item ("," slice_item)* [","]\n slice_item ::= expression | proper_slice\n proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" [stride] ]\n lower_bound ::= expression\n upper_bound ::= expression\n stride ::= expression\n\nThere is ambiguity in the formal syntax here: anything that looks like\nan expression list also looks like a slice list, so any subscription\ncan be interpreted as a slicing. Rather than further complicating the\nsyntax, this is disambiguated by defining that in this case the\ninterpretation as a subscription takes priority over the\ninterpretation as a slicing (this is the case if the slice list\ncontains no proper slice).\n\nThe semantics for a slicing are as follows. The primary must evaluate\nto a mapping object, and it is indexed (using the same "__getitem__()"\nmethod as normal subscription) with a key that is constructed from the\nslice list, as follows. If the slice list contains at least one\ncomma, the key is a tuple containing the conversion of the slice\nitems; otherwise, the conversion of the lone slice item is the key.\nThe conversion of a slice item that is an expression is that\nexpression. The conversion of a proper slice is a slice object (see\nsection *The standard type hierarchy*) whose "start", "stop" and\n"step" attributes are the values of the expressions given as lower\nbound, upper bound and stride, respectively, substituting "None" for\nmissing expressions.\n', - 'specialattrs': '\nSpecial Attributes\n******************\n\nThe implementation adds a few special read-only attributes to several\nobject types, where they are relevant. Some of these are not reported\nby the "dir()" built-in function.\n\nobject.__dict__\n\n A dictionary or other mapping object used to store an object\'s\n (writable) attributes.\n\ninstance.__class__\n\n The class to which a class instance belongs.\n\nclass.__bases__\n\n The tuple of base classes of a class object.\n\nclass.__name__\n\n The name of the class or type.\n\nclass.__qualname__\n\n The *qualified name* of the class or type.\n\n New in version 3.3.\n\nclass.__mro__\n\n This attribute is a tuple of classes that are considered when\n looking for base classes during method resolution.\n\nclass.mro()\n\n This method can be overridden by a metaclass to customize the\n method resolution order for its instances. It is called at class\n instantiation, and its result is stored in "__mro__".\n\nclass.__subclasses__()\n\n Each class keeps a list of weak references to its immediate\n subclasses. This method returns a list of all those references\n still alive. 
Example:\n\n >>> int.__subclasses__()\n []\n\n-[ Footnotes ]-\n\n[1] Additional information on these special methods may be found in\n the Python Reference Manual (*Basic customization*).\n\n[2] As a consequence, the list "[1, 2]" is considered equal to "[1.0,\n 2.0]", and similarly for tuples.\n\n[3] They must have since the parser can\'t tell the type of the\n operands.\n\n[4] Cased characters are those with general category property being\n one of "Lu" (Letter, uppercase), "Ll" (Letter, lowercase), or "Lt"\n (Letter, titlecase).\n\n[5] To format only a tuple you should therefore provide a singleton\n tuple whose only element is the tuple to be formatted.\n', - 'specialnames': '\nSpecial method names\n********************\n\nA class can implement certain operations that are invoked by special\nsyntax (such as arithmetic operations or subscripting and slicing) by\ndefining methods with special names. This is Python\'s approach to\n*operator overloading*, allowing classes to define their own behavior\nwith respect to language operators. For instance, if a class defines\na method named "__getitem__()", and "x" is an instance of this class,\nthen "x[i]" is roughly equivalent to "type(x).__getitem__(x, i)".\nExcept where mentioned, attempts to execute an operation raise an\nexception when no appropriate method is defined (typically\n"AttributeError" or "TypeError").\n\nWhen implementing a class that emulates any built-in type, it is\nimportant that the emulation only be implemented to the degree that it\nmakes sense for the object being modelled. For example, some\nsequences may work well with retrieval of individual elements, but\nextracting a slice may not make sense. (One example of this is the\n"NodeList" interface in the W3C\'s Document Object Model.)\n\n\nBasic customization\n===================\n\nobject.__new__(cls[, ...])\n\n Called to create a new instance of class *cls*. "__new__()" is a\n static method (special-cased so you need not declare it as such)\n that takes the class of which an instance was requested as its\n first argument. The remaining arguments are those passed to the\n object constructor expression (the call to the class). The return\n value of "__new__()" should be the new object instance (usually an\n instance of *cls*).\n\n Typical implementations create a new instance of the class by\n invoking the superclass\'s "__new__()" method using\n "super(currentclass, cls).__new__(cls[, ...])" with appropriate\n arguments and then modifying the newly-created instance as\n necessary before returning it.\n\n If "__new__()" returns an instance of *cls*, then the new\n instance\'s "__init__()" method will be invoked like\n "__init__(self[, ...])", where *self* is the new instance and the\n remaining arguments are the same as were passed to "__new__()".\n\n If "__new__()" does not return an instance of *cls*, then the new\n instance\'s "__init__()" method will not be invoked.\n\n "__new__()" is intended mainly to allow subclasses of immutable\n types (like int, str, or tuple) to customize instance creation. It\n is also commonly overridden in custom metaclasses in order to\n customize class creation.\n\nobject.__init__(self[, ...])\n\n Called when the instance is created. The arguments are those\n passed to the class constructor expression. 
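As an illustration of the "__new__()"/"__init__()" protocol just described, here is a hedged sketch that subclasses the immutable type "tuple"; the class name "Point" is hypothetical and not part of the reference text.

   class Point(tuple):
       """An immutable 2-D point built on tuple."""

       def __new__(cls, x, y):
           # __new__() creates and returns the instance; for an immutable
           # base the values must be supplied here, not in __init__().
           return super().__new__(cls, (x, y))

       def __init__(self, x, y):
           # Invoked only because __new__() returned an instance of cls;
           # it receives the same arguments as the constructor call.
           self.label = "({}, {})".format(x, y)

   p = Point(3, 4)
   assert p == (3, 4)
   assert p.label == "(3, 4)"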
If a base class has an\n "__init__()" method, the derived class\'s "__init__()" method, if\n any, must explicitly call it to ensure proper initialization of the\n base class part of the instance; for example:\n "BaseClass.__init__(self, [args...])". As a special constraint on\n constructors, no value may be returned; doing so will cause a\n "TypeError" to be raised at runtime.\n\nobject.__del__(self)\n\n Called when the instance is about to be destroyed. This is also\n called a destructor. If a base class has a "__del__()" method, the\n derived class\'s "__del__()" method, if any, must explicitly call it\n to ensure proper deletion of the base class part of the instance.\n Note that it is possible (though not recommended!) for the\n "__del__()" method to postpone destruction of the instance by\n creating a new reference to it. It may then be called at a later\n time when this new reference is deleted. It is not guaranteed that\n "__del__()" methods are called for objects that still exist when\n the interpreter exits.\n\n Note: "del x" doesn\'t directly call "x.__del__()" --- the former\n decrements the reference count for "x" by one, and the latter is\n only called when "x"\'s reference count reaches zero. Some common\n situations that may prevent the reference count of an object from\n going to zero include: circular references between objects (e.g.,\n a doubly-linked list or a tree data structure with parent and\n child pointers); a reference to the object on the stack frame of\n a function that caught an exception (the traceback stored in\n "sys.exc_info()[2]" keeps the stack frame alive); or a reference\n to the object on the stack frame that raised an unhandled\n exception in interactive mode (the traceback stored in\n "sys.last_traceback" keeps the stack frame alive). The first\n situation can only be remedied by explicitly breaking the cycles;\n the latter two situations can be resolved by storing "None" in\n "sys.last_traceback". Circular references which are garbage are\n detected and cleaned up when the cyclic garbage collector is\n enabled (it\'s on by default). Refer to the documentation for the\n "gc" module for more information about this topic.\n\n Warning: Due to the precarious circumstances under which "__del__()"\n methods are invoked, exceptions that occur during their execution\n are ignored, and a warning is printed to "sys.stderr" instead.\n Also, when "__del__()" is invoked in response to a module being\n deleted (e.g., when execution of the program is done), other\n globals referenced by the "__del__()" method may already have\n been deleted or in the process of being torn down (e.g. the\n import machinery shutting down). For this reason, "__del__()"\n methods should do the absolute minimum needed to maintain\n external invariants. Starting with version 1.5, Python\n guarantees that globals whose name begins with a single\n underscore are deleted from their module before other globals are\n deleted; if no other references to such globals exist, this may\n help in assuring that imported modules are still available at the\n time when the "__del__()" method is called.\n\nobject.__repr__(self)\n\n Called by the "repr()" built-in function to compute the "official"\n string representation of an object. If at all possible, this\n should look like a valid Python expression that could be used to\n recreate an object with the same value (given an appropriate\n environment). If this is not possible, a string of the form\n "<...some useful description...>" should be returned. 
The return\n value must be a string object. If a class defines "__repr__()" but\n not "__str__()", then "__repr__()" is also used when an "informal"\n string representation of instances of that class is required.\n\n This is typically used for debugging, so it is important that the\n representation is information-rich and unambiguous.\n\nobject.__str__(self)\n\n Called by "str(object)" and the built-in functions "format()" and\n "print()" to compute the "informal" or nicely printable string\n representation of an object. The return value must be a *string*\n object.\n\n This method differs from "object.__repr__()" in that there is no\n expectation that "__str__()" return a valid Python expression: a\n more convenient or concise representation can be used.\n\n The default implementation defined by the built-in type "object"\n calls "object.__repr__()".\n\nobject.__bytes__(self)\n\n Called by "bytes()" to compute a byte-string representation of an\n object. This should return a "bytes" object.\n\nobject.__format__(self, format_spec)\n\n Called by the "format()" built-in function (and by extension, the\n "str.format()" method of class "str") to produce a "formatted"\n string representation of an object. The "format_spec" argument is a\n string that contains a description of the formatting options\n desired. The interpretation of the "format_spec" argument is up to\n the type implementing "__format__()", however most classes will\n either delegate formatting to one of the built-in types, or use a\n similar formatting option syntax.\n\n See *Format Specification Mini-Language* for a description of the\n standard formatting syntax.\n\n The return value must be a string object.\n\n Changed in version 3.4: The __format__ method of "object" itself\n raises a "TypeError" if passed any non-empty string.\n\nobject.__lt__(self, other)\nobject.__le__(self, other)\nobject.__eq__(self, other)\nobject.__ne__(self, other)\nobject.__gt__(self, other)\nobject.__ge__(self, other)\n\n These are the so-called "rich comparison" methods. The\n correspondence between operator symbols and method names is as\n follows: "xy" calls\n "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n\n A rich comparison method may return the singleton "NotImplemented"\n if it does not implement the operation for a given pair of\n arguments. By convention, "False" and "True" are returned for a\n successful comparison. However, these methods can return any value,\n so if the comparison operator is used in a Boolean context (e.g.,\n in the condition of an "if" statement), Python will call "bool()"\n on the value to determine if the result is true or false.\n\n There are no implied relationships among the comparison operators.\n The truth of "x==y" does not imply that "x!=y" is false.\n Accordingly, when defining "__eq__()", one should also define\n "__ne__()" so that the operators will behave as expected. 
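For illustration of the rich comparison methods above, here is a minimal sketch that defines "__eq__()" and "__lt__()" and lets the "functools.total_ordering()" decorator (mentioned later in this section) derive the remaining operators; the class name "Version" is hypothetical.

   import functools

   @functools.total_ordering
   class Version:
       def __init__(self, major, minor):
           self.key = (major, minor)

       def __eq__(self, other):
           if not isinstance(other, Version):
               return NotImplemented      # let the other operand try
           return self.key == other.key

       def __lt__(self, other):
           if not isinstance(other, Version):
               return NotImplemented
           return self.key < other.key

   assert Version(1, 2) < Version(1, 10)
   assert Version(1, 2) >= Version(1, 2)   # supplied by total_ordering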
See the\n paragraph on "__hash__()" for some important notes on creating\n *hashable* objects which support custom comparison operations and\n are usable as dictionary keys.\n\n There are no swapped-argument versions of these methods (to be used\n when the left argument does not support the operation but the right\n argument does); rather, "__lt__()" and "__gt__()" are each other\'s\n reflection, "__le__()" and "__ge__()" are each other\'s reflection,\n and "__eq__()" and "__ne__()" are their own reflection.\n\n Arguments to rich comparison methods are never coerced.\n\n To automatically generate ordering operations from a single root\n operation, see "functools.total_ordering()".\n\nobject.__hash__(self)\n\n Called by built-in function "hash()" and for operations on members\n of hashed collections including "set", "frozenset", and "dict".\n "__hash__()" should return an integer. The only required property\n is that objects which compare equal have the same hash value; it is\n advised to somehow mix together (e.g. using exclusive or) the hash\n values for the components of the object that also play a part in\n comparison of objects.\n\n Note: "hash()" truncates the value returned from an object\'s custom\n "__hash__()" method to the size of a "Py_ssize_t". This is\n typically 8 bytes on 64-bit builds and 4 bytes on 32-bit builds.\n If an object\'s "__hash__()" must interoperate on builds of\n different bit sizes, be sure to check the width on all supported\n builds. An easy way to do this is with "python -c "import sys;\n print(sys.hash_info.width)""\n\n If a class does not define an "__eq__()" method it should not\n define a "__hash__()" operation either; if it defines "__eq__()"\n but not "__hash__()", its instances will not be usable as items in\n hashable collections. If a class defines mutable objects and\n implements an "__eq__()" method, it should not implement\n "__hash__()", since the implementation of hashable collections\n requires that a key\'s hash value is immutable (if the object\'s hash\n value changes, it will be in the wrong hash bucket).\n\n User-defined classes have "__eq__()" and "__hash__()" methods by\n default; with them, all objects compare unequal (except with\n themselves) and "x.__hash__()" returns an appropriate value such\n that "x == y" implies both that "x is y" and "hash(x) == hash(y)".\n\n A class that overrides "__eq__()" and does not define "__hash__()"\n will have its "__hash__()" implicitly set to "None". When the\n "__hash__()" method of a class is "None", instances of the class\n will raise an appropriate "TypeError" when a program attempts to\n retrieve their hash value, and will also be correctly identified as\n unhashable when checking "isinstance(obj, collections.Hashable").\n\n If a class that overrides "__eq__()" needs to retain the\n implementation of "__hash__()" from a parent class, the interpreter\n must be told this explicitly by setting "__hash__ =\n .__hash__".\n\n If a class that does not override "__eq__()" wishes to suppress\n hash support, it should include "__hash__ = None" in the class\n definition. 
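Following on from the hashing rules above, a short illustrative sketch of keeping "__eq__()" and "__hash__()" consistent; the class name "Color" is hypothetical.

   class Color:
       """Instances compare, and hash, by their RGB triple."""

       def __init__(self, r, g, b):
           self.rgb = (r, g, b)

       def __eq__(self, other):
           if not isinstance(other, Color):
               return NotImplemented
           return self.rgb == other.rgb

       def __hash__(self):
           # Hash the same components that __eq__() compares, so that
           # equal objects always have equal hash values.
           return hash(self.rgb)

   palette = {Color(255, 0, 0): "red"}
   assert palette[Color(255, 0, 0)] == "red"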
A class which defines its own "__hash__()" that\n explicitly raises a "TypeError" would be incorrectly identified as\n hashable by an "isinstance(obj, collections.Hashable)" call.\n\n Note: By default, the "__hash__()" values of str, bytes and datetime\n objects are "salted" with an unpredictable random value.\n Although they remain constant within an individual Python\n process, they are not predictable between repeated invocations of\n Python.This is intended to provide protection against a denial-\n of-service caused by carefully-chosen inputs that exploit the\n worst case performance of a dict insertion, O(n^2) complexity.\n See http://www.ocert.org/advisories/ocert-2011-003.html for\n details.Changing hash values affects the iteration order of\n dicts, sets and other mappings. Python has never made guarantees\n about this ordering (and it typically varies between 32-bit and\n 64-bit builds).See also "PYTHONHASHSEED".\n\n Changed in version 3.3: Hash randomization is enabled by default.\n\nobject.__bool__(self)\n\n Called to implement truth value testing and the built-in operation\n "bool()"; should return "False" or "True". When this method is not\n defined, "__len__()" is called, if it is defined, and the object is\n considered true if its result is nonzero. If a class defines\n neither "__len__()" nor "__bool__()", all its instances are\n considered true.\n\n\nCustomizing attribute access\n============================\n\nThe following methods can be defined to customize the meaning of\nattribute access (use of, assignment to, or deletion of "x.name") for\nclass instances.\n\nobject.__getattr__(self, name)\n\n Called when an attribute lookup has not found the attribute in the\n usual places (i.e. it is not an instance attribute nor is it found\n in the class tree for "self"). "name" is the attribute name. This\n method should return the (computed) attribute value or raise an\n "AttributeError" exception.\n\n Note that if the attribute is found through the normal mechanism,\n "__getattr__()" is not called. (This is an intentional asymmetry\n between "__getattr__()" and "__setattr__()".) This is done both for\n efficiency reasons and because otherwise "__getattr__()" would have\n no way to access other attributes of the instance. Note that at\n least for instance variables, you can fake total control by not\n inserting any values in the instance attribute dictionary (but\n instead inserting them in another object). See the\n "__getattribute__()" method below for a way to actually get total\n control over attribute access.\n\nobject.__getattribute__(self, name)\n\n Called unconditionally to implement attribute accesses for\n instances of the class. If the class also defines "__getattr__()",\n the latter will not be called unless "__getattribute__()" either\n calls it explicitly or raises an "AttributeError". This method\n should return the (computed) attribute value or raise an\n "AttributeError" exception. In order to avoid infinite recursion in\n this method, its implementation should always call the base class\n method with the same name to access any attributes it needs, for\n example, "object.__getattribute__(self, name)".\n\n Note: This method may still be bypassed when looking up special methods\n as the result of implicit invocation via language syntax or\n built-in functions. See *Special method lookup*.\n\nobject.__setattr__(self, name, value)\n\n Called when an attribute assignment is attempted. This is called\n instead of the normal mechanism (i.e. 
store the value in the\n instance dictionary). *name* is the attribute name, *value* is the\n value to be assigned to it.\n\n If "__setattr__()" wants to assign to an instance attribute, it\n should call the base class method with the same name, for example,\n "object.__setattr__(self, name, value)".\n\nobject.__delattr__(self, name)\n\n Like "__setattr__()" but for attribute deletion instead of\n assignment. This should only be implemented if "del obj.name" is\n meaningful for the object.\n\nobject.__dir__(self)\n\n Called when "dir()" is called on the object. A sequence must be\n returned. "dir()" converts the returned sequence to a list and\n sorts it.\n\n\nImplementing Descriptors\n------------------------\n\nThe following methods only apply when an instance of the class\ncontaining the method (a so-called *descriptor* class) appears in an\n*owner* class (the descriptor must be in either the owner\'s class\ndictionary or in the class dictionary for one of its parents). In the\nexamples below, "the attribute" refers to the attribute whose name is\nthe key of the property in the owner class\' "__dict__".\n\nobject.__get__(self, instance, owner)\n\n Called to get the attribute of the owner class (class attribute\n access) or of an instance of that class (instance attribute\n access). *owner* is always the owner class, while *instance* is the\n instance that the attribute was accessed through, or "None" when\n the attribute is accessed through the *owner*. This method should\n return the (computed) attribute value or raise an "AttributeError"\n exception.\n\nobject.__set__(self, instance, value)\n\n Called to set the attribute on an instance *instance* of the owner\n class to a new value, *value*.\n\nobject.__delete__(self, instance)\n\n Called to delete the attribute on an instance *instance* of the\n owner class.\n\nThe attribute "__objclass__" is interpreted by the "inspect" module as\nspecifying the class where this object was defined (setting this\nappropriately can assist in runtime introspection of dynamic class\nattributes). For callables, it may indicate that an instance of the\ngiven type (or a subclass) is expected or required as the first\npositional argument (for example, CPython sets this attribute for\nunbound methods that are implemented in C).\n\n\nInvoking Descriptors\n--------------------\n\nIn general, a descriptor is an object attribute with "binding\nbehavior", one whose attribute access has been overridden by methods\nin the descriptor protocol: "__get__()", "__set__()", and\n"__delete__()". If any of those methods are defined for an object, it\nis said to be a descriptor.\n\nThe default behavior for attribute access is to get, set, or delete\nthe attribute from an object\'s dictionary. For instance, "a.x" has a\nlookup chain starting with "a.__dict__[\'x\']", then\n"type(a).__dict__[\'x\']", and continuing through the base classes of\n"type(a)" excluding metaclasses.\n\nHowever, if the looked-up value is an object defining one of the\ndescriptor methods, then Python may override the default behavior and\ninvoke the descriptor method instead. Where this occurs in the\nprecedence chain depends on which descriptor methods were defined and\nhow they were called.\n\nThe starting point for descriptor invocation is a binding, "a.x". 
How\nthe arguments are assembled depends on "a":\n\nDirect Call\n The simplest and least common call is when user code directly\n invokes a descriptor method: "x.__get__(a)".\n\nInstance Binding\n If binding to an object instance, "a.x" is transformed into the\n call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n\nClass Binding\n If binding to a class, "A.x" is transformed into the call:\n "A.__dict__[\'x\'].__get__(None, A)".\n\nSuper Binding\n If "a" is an instance of "super", then the binding "super(B,\n obj).m()" searches "obj.__class__.__mro__" for the base class "A"\n immediately preceding "B" and then invokes the descriptor with the\n call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n\nFor instance bindings, the precedence of descriptor invocation depends\non the which descriptor methods are defined. A descriptor can define\nany combination of "__get__()", "__set__()" and "__delete__()". If it\ndoes not define "__get__()", then accessing the attribute will return\nthe descriptor object itself unless there is a value in the object\'s\ninstance dictionary. If the descriptor defines "__set__()" and/or\n"__delete__()", it is a data descriptor; if it defines neither, it is\na non-data descriptor. Normally, data descriptors define both\n"__get__()" and "__set__()", while non-data descriptors have just the\n"__get__()" method. Data descriptors with "__set__()" and "__get__()"\ndefined always override a redefinition in an instance dictionary. In\ncontrast, non-data descriptors can be overridden by instances.\n\nPython methods (including "staticmethod()" and "classmethod()") are\nimplemented as non-data descriptors. Accordingly, instances can\nredefine and override methods. This allows individual instances to\nacquire behaviors that differ from other instances of the same class.\n\nThe "property()" function is implemented as a data descriptor.\nAccordingly, instances cannot override the behavior of a property.\n\n\n__slots__\n---------\n\nBy default, instances of classes have a dictionary for attribute\nstorage. This wastes space for objects having very few instance\nvariables. The space consumption can become acute when creating large\nnumbers of instances.\n\nThe default can be overridden by defining *__slots__* in a class\ndefinition. The *__slots__* declaration takes a sequence of instance\nvariables and reserves just enough space in each instance to hold a\nvalue for each variable. Space is saved because *__dict__* is not\ncreated for each instance.\n\nobject.__slots__\n\n This class variable can be assigned a string, iterable, or sequence\n of strings with variable names used by instances. If defined in a\n class, *__slots__* reserves space for the declared variables and\n prevents the automatic creation of *__dict__* and *__weakref__* for\n each instance.\n\n\nNotes on using *__slots__*\n~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n* When inheriting from a class without *__slots__*, the *__dict__*\n attribute of that class will always be accessible, so a *__slots__*\n definition in the subclass is meaningless.\n\n* Without a *__dict__* variable, instances cannot be assigned new\n variables not listed in the *__slots__* definition. Attempts to\n assign to an unlisted variable name raises "AttributeError". If\n dynamic assignment of new variables is desired, then add\n "\'__dict__\'" to the sequence of strings in the *__slots__*\n declaration.\n\n* Without a *__weakref__* variable for each instance, classes defining\n *__slots__* do not support weak references to its instances. 
If weak\n reference support is needed, then add "\'__weakref__\'" to the\n sequence of strings in the *__slots__* declaration.\n\n* *__slots__* are implemented at the class level by creating\n descriptors (*Implementing Descriptors*) for each variable name. As\n a result, class attributes cannot be used to set default values for\n instance variables defined by *__slots__*; otherwise, the class\n attribute would overwrite the descriptor assignment.\n\n* The action of a *__slots__* declaration is limited to the class\n where it is defined. As a result, subclasses will have a *__dict__*\n unless they also define *__slots__* (which must only contain names\n of any *additional* slots).\n\n* If a class defines a slot also defined in a base class, the instance\n variable defined by the base class slot is inaccessible (except by\n retrieving its descriptor directly from the base class). This\n renders the meaning of the program undefined. In the future, a\n check may be added to prevent this.\n\n* Nonempty *__slots__* does not work for classes derived from\n "variable-length" built-in types such as "int", "bytes" and "tuple".\n\n* Any non-string iterable may be assigned to *__slots__*. Mappings may\n also be used; however, in the future, special meaning may be\n assigned to the values corresponding to each key.\n\n* *__class__* assignment works only if both classes have the same\n *__slots__*.\n\n\nCustomizing class creation\n==========================\n\nBy default, classes are constructed using "type()". The class body is\nexecuted in a new namespace and the class name is bound locally to the\nresult of "type(name, bases, namespace)".\n\nThe class creation process can be customised by passing the\n"metaclass" keyword argument in the class definition line, or by\ninheriting from an existing class that included such an argument. In\nthe following example, both "MyClass" and "MySubclass" are instances\nof "Meta":\n\n class Meta(type):\n pass\n\n class MyClass(metaclass=Meta):\n pass\n\n class MySubclass(MyClass):\n pass\n\nAny other keyword arguments that are specified in the class definition\nare passed through to all metaclass operations described below.\n\nWhen a class definition is executed, the following steps occur:\n\n* the appropriate metaclass is determined\n\n* the class namespace is prepared\n\n* the class body is executed\n\n* the class object is created\n\n\nDetermining the appropriate metaclass\n-------------------------------------\n\nThe appropriate metaclass for a class definition is determined as\nfollows:\n\n* if no bases and no explicit metaclass are given, then "type()" is\n used\n\n* if an explicit metaclass is given and it is *not* an instance of\n "type()", then it is used directly as the metaclass\n\n* if an instance of "type()" is given as the explicit metaclass, or\n bases are defined, then the most derived metaclass is used\n\nThe most derived metaclass is selected from the explicitly specified\nmetaclass (if any) and the metaclasses (i.e. "type(cls)") of all\nspecified base classes. The most derived metaclass is one which is a\nsubtype of *all* of these candidate metaclasses. If none of the\ncandidate metaclasses meets that criterion, then the class definition\nwill fail with "TypeError".\n\n\nPreparing the class namespace\n-----------------------------\n\nOnce the appropriate metaclass has been identified, then the class\nnamespace is prepared. 
If the metaclass has a "__prepare__" attribute,\nit is called as "namespace = metaclass.__prepare__(name, bases,\n**kwds)" (where the additional keyword arguments, if any, come from\nthe class definition).\n\nIf the metaclass has no "__prepare__" attribute, then the class\nnamespace is initialised as an empty "dict()" instance.\n\nSee also:\n\n **PEP 3115** - Metaclasses in Python 3000\n Introduced the "__prepare__" namespace hook\n\n\nExecuting the class body\n------------------------\n\nThe class body is executed (approximately) as "exec(body, globals(),\nnamespace)". The key difference from a normal call to "exec()" is that\nlexical scoping allows the class body (including any methods) to\nreference names from the current and outer scopes when the class\ndefinition occurs inside a function.\n\nHowever, even when the class definition occurs inside the function,\nmethods defined inside the class still cannot see names defined at the\nclass scope. Class variables must be accessed through the first\nparameter of instance or class methods, and cannot be accessed at all\nfrom static methods.\n\n\nCreating the class object\n-------------------------\n\nOnce the class namespace has been populated by executing the class\nbody, the class object is created by calling "metaclass(name, bases,\nnamespace, **kwds)" (the additional keywords passed here are the same\nas those passed to "__prepare__").\n\nThis class object is the one that will be referenced by the zero-\nargument form of "super()". "__class__" is an implicit closure\nreference created by the compiler if any methods in a class body refer\nto either "__class__" or "super". This allows the zero argument form\nof "super()" to correctly identify the class being defined based on\nlexical scoping, while the class or instance that was used to make the\ncurrent call is identified based on the first argument passed to the\nmethod.\n\nAfter the class object is created, it is passed to the class\ndecorators included in the class definition (if any) and the resulting\nobject is bound in the local namespace as the defined class.\n\nSee also:\n\n **PEP 3135** - New super\n Describes the implicit "__class__" closure reference\n\n\nMetaclass example\n-----------------\n\nThe potential uses for metaclasses are boundless. Some ideas that have\nbeen explored include logging, interface checking, automatic\ndelegation, automatic property creation, proxies, frameworks, and\nautomatic resource locking/synchronization.\n\nHere is an example of a metaclass that uses an\n"collections.OrderedDict" to remember the order that class members\nwere defined:\n\n class OrderedClass(type):\n\n @classmethod\n def __prepare__(metacls, name, bases, **kwds):\n return collections.OrderedDict()\n\n def __new__(cls, name, bases, namespace, **kwds):\n result = type.__new__(cls, name, bases, dict(namespace))\n result.members = tuple(namespace)\n return result\n\n class A(metaclass=OrderedClass):\n def one(self): pass\n def two(self): pass\n def three(self): pass\n def four(self): pass\n\n >>> A.members\n (\'__module__\', \'one\', \'two\', \'three\', \'four\')\n\nWhen the class definition for *A* gets executed, the process begins\nwith calling the metaclass\'s "__prepare__()" method which returns an\nempty "collections.OrderedDict". That mapping records the methods and\nattributes of *A* as they are defined within the body of the class\nstatement. Once those definitions are executed, the ordered dictionary\nis fully populated and the metaclass\'s "__new__()" method gets\ninvoked. 
That method builds the new type and it saves the ordered\ndictionary keys in an attribute called "members".\n\n\nCustomizing instance and subclass checks\n========================================\n\nThe following methods are used to override the default behavior of the\n"isinstance()" and "issubclass()" built-in functions.\n\nIn particular, the metaclass "abc.ABCMeta" implements these methods in\norder to allow the addition of Abstract Base Classes (ABCs) as\n"virtual base classes" to any class or type (including built-in\ntypes), including other ABCs.\n\nclass.__instancecheck__(self, instance)\n\n Return true if *instance* should be considered a (direct or\n indirect) instance of *class*. If defined, called to implement\n "isinstance(instance, class)".\n\nclass.__subclasscheck__(self, subclass)\n\n Return true if *subclass* should be considered a (direct or\n indirect) subclass of *class*. If defined, called to implement\n "issubclass(subclass, class)".\n\nNote that these methods are looked up on the type (metaclass) of a\nclass. They cannot be defined as class methods in the actual class.\nThis is consistent with the lookup of special methods that are called\non instances, only in this case the instance is itself a class.\n\nSee also:\n\n **PEP 3119** - Introducing Abstract Base Classes\n Includes the specification for customizing "isinstance()" and\n "issubclass()" behavior through "__instancecheck__()" and\n "__subclasscheck__()", with motivation for this functionality in\n the context of adding Abstract Base Classes (see the "abc"\n module) to the language.\n\n\nEmulating callable objects\n==========================\n\nobject.__call__(self[, args...])\n\n Called when the instance is "called" as a function; if this method\n is defined, "x(arg1, arg2, ...)" is a shorthand for\n "x.__call__(arg1, arg2, ...)".\n\n\nEmulating container types\n=========================\n\nThe following methods can be defined to implement container objects.\nContainers usually are sequences (such as lists or tuples) or mappings\n(like dictionaries), but can represent other containers as well. The\nfirst set of methods is used either to emulate a sequence or to\nemulate a mapping; the difference is that for a sequence, the\nallowable keys should be the integers *k* for which "0 <= k < N" where\n*N* is the length of the sequence, or slice objects, which define a\nrange of items. It is also recommended that mappings provide the\nmethods "keys()", "values()", "items()", "get()", "clear()",\n"setdefault()", "pop()", "popitem()", "copy()", and "update()"\nbehaving similar to those for Python\'s standard dictionary objects.\nThe "collections" module provides a "MutableMapping" abstract base\nclass to help create those methods from a base set of "__getitem__()",\n"__setitem__()", "__delitem__()", and "keys()". Mutable sequences\nshould provide methods "append()", "count()", "index()", "extend()",\n"insert()", "pop()", "remove()", "reverse()" and "sort()", like Python\nstandard list objects. Finally, sequence types should implement\naddition (meaning concatenation) and multiplication (meaning\nrepetition) by defining the methods "__add__()", "__radd__()",\n"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" described\nbelow; they should not define other numerical operators. 
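Illustrating "object.__call__()" described above, a minimal hedged sketch of a callable instance; the class name "Adder" is hypothetical.

   class Adder:
       """Instances behave like single-argument functions."""

       def __init__(self, amount):
           self.amount = amount

       def __call__(self, value):
           # "add_five(3)" is shorthand for "add_five.__call__(3)".
           return value + self.amount

   add_five = Adder(5)
   assert add_five(3) == 8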
It is\nrecommended that both mappings and sequences implement the\n"__contains__()" method to allow efficient use of the "in" operator;\nfor mappings, "in" should search the mapping\'s keys; for sequences, it\nshould search through the values. It is further recommended that both\nmappings and sequences implement the "__iter__()" method to allow\nefficient iteration through the container; for mappings, "__iter__()"\nshould be the same as "keys()"; for sequences, it should iterate\nthrough the values.\n\nobject.__len__(self)\n\n Called to implement the built-in function "len()". Should return\n the length of the object, an integer ">=" 0. Also, an object that\n doesn\'t define a "__bool__()" method and whose "__len__()" method\n returns zero is considered to be false in a Boolean context.\n\nobject.__length_hint__(self)\n\n Called to implement "operator.length_hint()". Should return an\n estimated length for the object (which may be greater or less than\n the actual length). The length must be an integer ">=" 0. This\n method is purely an optimization and is never required for\n correctness.\n\n New in version 3.4.\n\nNote: Slicing is done exclusively with the following three methods. A\n call like\n\n a[1:2] = b\n\n is translated to\n\n a[slice(1, 2, None)] = b\n\n and so forth. Missing slice items are always filled in with "None".\n\nobject.__getitem__(self, key)\n\n Called to implement evaluation of "self[key]". For sequence types,\n the accepted keys should be integers and slice objects. Note that\n the special interpretation of negative indexes (if the class wishes\n to emulate a sequence type) is up to the "__getitem__()" method. If\n *key* is of an inappropriate type, "TypeError" may be raised; if of\n a value outside the set of indexes for the sequence (after any\n special interpretation of negative values), "IndexError" should be\n raised. For mapping types, if *key* is missing (not in the\n container), "KeyError" should be raised.\n\n Note: "for" loops expect that an "IndexError" will be raised for\n illegal indexes to allow proper detection of the end of the\n sequence.\n\nobject.__setitem__(self, key, value)\n\n Called to implement assignment to "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support changes to the values for keys, or if new keys\n can be added, or for sequences if elements can be replaced. The\n same exceptions should be raised for improper *key* values as for\n the "__getitem__()" method.\n\nobject.__delitem__(self, key)\n\n Called to implement deletion of "self[key]". Same note as for\n "__getitem__()". This should only be implemented for mappings if\n the objects support removal of keys, or for sequences if elements\n can be removed from the sequence. The same exceptions should be\n raised for improper *key* values as for the "__getitem__()" method.\n\nobject.__iter__(self)\n\n This method is called when an iterator is required for a container.\n This method should return a new iterator object that can iterate\n over all the objects in the container. For mappings, it should\n iterate over the keys of the container, and should also be made\n available as the method "keys()".\n\n Iterator objects also need to implement this method; they are\n required to return themselves. For more information on iterator\n objects, see *Iterator Types*.\n\nobject.__reversed__(self)\n\n Called (if present) by the "reversed()" built-in to implement\n reverse iteration. 
It should return a new iterator object that\n iterates over all the objects in the container in reverse order.\n\n If the "__reversed__()" method is not provided, the "reversed()"\n built-in will fall back to using the sequence protocol ("__len__()"\n and "__getitem__()"). Objects that support the sequence protocol\n should only provide "__reversed__()" if they can provide an\n implementation that is more efficient than the one provided by\n "reversed()".\n\nThe membership test operators ("in" and "not in") are normally\nimplemented as an iteration through a sequence. However, container\nobjects can supply the following special method with a more efficient\nimplementation, which also does not require the object be a sequence.\n\nobject.__contains__(self, item)\n\n Called to implement membership test operators. Should return true\n if *item* is in *self*, false otherwise. For mapping objects, this\n should consider the keys of the mapping rather than the values or\n the key-item pairs.\n\n For objects that don\'t define "__contains__()", the membership test\n first tries iteration via "__iter__()", then the old sequence\n iteration protocol via "__getitem__()", see *this section in the\n language reference*.\n\n\nEmulating numeric types\n=======================\n\nThe following methods can be defined to emulate numeric objects.\nMethods corresponding to operations that are not supported by the\nparticular kind of number implemented (e.g., bitwise operations for\nnon-integral numbers) should be left undefined.\n\nobject.__add__(self, other)\nobject.__sub__(self, other)\nobject.__mul__(self, other)\nobject.__truediv__(self, other)\nobject.__floordiv__(self, other)\nobject.__mod__(self, other)\nobject.__divmod__(self, other)\nobject.__pow__(self, other[, modulo])\nobject.__lshift__(self, other)\nobject.__rshift__(self, other)\nobject.__and__(self, other)\nobject.__xor__(self, other)\nobject.__or__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|"). For instance, to evaluate the\n expression "x + y", where *x* is an instance of a class that has an\n "__add__()" method, "x.__add__(y)" is called. The "__divmod__()"\n method should be the equivalent to using "__floordiv__()" and\n "__mod__()"; it should not be related to "__truediv__()". Note\n that "__pow__()" should be defined to accept an optional third\n argument if the ternary version of the built-in "pow()" function is\n to be supported.\n\n If one of those methods does not support the operation with the\n supplied arguments, it should return "NotImplemented".\n\nobject.__radd__(self, other)\nobject.__rsub__(self, other)\nobject.__rmul__(self, other)\nobject.__rtruediv__(self, other)\nobject.__rfloordiv__(self, other)\nobject.__rmod__(self, other)\nobject.__rdivmod__(self, other)\nobject.__rpow__(self, other)\nobject.__rlshift__(self, other)\nobject.__rrshift__(self, other)\nobject.__rand__(self, other)\nobject.__rxor__(self, other)\nobject.__ror__(self, other)\n\n These methods are called to implement the binary arithmetic\n operations ("+", "-", "*", "/", "//", "%", "divmod()", "pow()",\n "**", "<<", ">>", "&", "^", "|") with reflected (swapped) operands.\n These functions are only called if the left operand does not\n support the corresponding operation and the operands are of\n different types. 
[2] For instance, to evaluate the expression "x -\n y", where *y* is an instance of a class that has an "__rsub__()"\n method, "y.__rsub__(x)" is called if "x.__sub__(y)" returns\n *NotImplemented*.\n\n Note that ternary "pow()" will not try calling "__rpow__()" (the\n coercion rules would become too complicated).\n\n Note: If the right operand\'s type is a subclass of the left operand\'s\n type and that subclass provides the reflected method for the\n operation, this method will be called before the left operand\'s\n non-reflected method. This behavior allows subclasses to\n override their ancestors\' operations.\n\nobject.__iadd__(self, other)\nobject.__isub__(self, other)\nobject.__imul__(self, other)\nobject.__itruediv__(self, other)\nobject.__ifloordiv__(self, other)\nobject.__imod__(self, other)\nobject.__ipow__(self, other[, modulo])\nobject.__ilshift__(self, other)\nobject.__irshift__(self, other)\nobject.__iand__(self, other)\nobject.__ixor__(self, other)\nobject.__ior__(self, other)\n\n These methods are called to implement the augmented arithmetic\n assignments ("+=", "-=", "*=", "/=", "//=", "%=", "**=", "<<=",\n ">>=", "&=", "^=", "|="). These methods should attempt to do the\n operation in-place (modifying *self*) and return the result (which\n could be, but does not have to be, *self*). If a specific method\n is not defined, the augmented assignment falls back to the normal\n methods. For instance, if *x* is an instance of a class with an\n "__iadd__()" method, "x += y" is equivalent to "x = x.__iadd__(y)"\n . Otherwise, "x.__add__(y)" and "y.__radd__(x)" are considered, as\n with the evaluation of "x + y". In certain situations, augmented\n assignment can result in unexpected errors (see *Why does\n a_tuple[i] += [\'item\'] raise an exception when the addition\n works?*), but this behavior is in fact part of the data model.\n\nobject.__neg__(self)\nobject.__pos__(self)\nobject.__abs__(self)\nobject.__invert__(self)\n\n Called to implement the unary arithmetic operations ("-", "+",\n "abs()" and "~").\n\nobject.__complex__(self)\nobject.__int__(self)\nobject.__float__(self)\nobject.__round__(self[, n])\n\n Called to implement the built-in functions "complex()", "int()",\n "float()" and "round()". Should return a value of the appropriate\n type.\n\nobject.__index__(self)\n\n Called to implement "operator.index()", and whenever Python needs\n to losslessly convert the numeric object to an integer object (such\n as in slicing, or in the built-in "bin()", "hex()" and "oct()"\n functions). Presence of this method indicates that the numeric\n object is an integer type. Must return an integer.\n\n Note: When "__index__()" is defined, "__int__()" should also be\n defined, and both shuld return the same value, in order to have a\n coherent integer type class.\n\n\nWith Statement Context Managers\n===============================\n\nA *context manager* is an object that defines the runtime context to\nbe established when executing a "with" statement. The context manager\nhandles the entry into, and the exit from, the desired runtime context\nfor the execution of the block of code. 
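Ahead of the formal "__enter__()"/"__exit__()" descriptions below, a minimal illustrative sketch of a context manager class; the name "Timer" is hypothetical.

   import time

   class Timer:
       """Measure wall-clock time spent inside a "with" block."""

       def __enter__(self):
           self.start = time.monotonic()
           return self              # bound to the "as" target, if any

       def __exit__(self, exc_type, exc_value, traceback):
           self.elapsed = time.monotonic() - self.start
           return False             # do not suppress exceptions

   with Timer() as t:
       sum(range(100000))
   print("took", t.elapsed, "seconds")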
Context managers are normally\ninvoked using the "with" statement (described in section *The with\nstatement*), but can also be used by directly invoking their methods.\n\nTypical uses of context managers include saving and restoring various\nkinds of global state, locking and unlocking resources, closing opened\nfiles, etc.\n\nFor more information on context managers, see *Context Manager Types*.\n\nobject.__enter__(self)\n\n Enter the runtime context related to this object. The "with"\n statement will bind this method\'s return value to the target(s)\n specified in the "as" clause of the statement, if any.\n\nobject.__exit__(self, exc_type, exc_value, traceback)\n\n Exit the runtime context related to this object. The parameters\n describe the exception that caused the context to be exited. If the\n context was exited without an exception, all three arguments will\n be "None".\n\n If an exception is supplied, and the method wishes to suppress the\n exception (i.e., prevent it from being propagated), it should\n return a true value. Otherwise, the exception will be processed\n normally upon exit from this method.\n\n Note that "__exit__()" methods should not reraise the passed-in\n exception; this is the caller\'s responsibility.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n "with" statement.\n\n\nSpecial method lookup\n=====================\n\nFor custom classes, implicit invocations of special methods are only\nguaranteed to work correctly if defined on an object\'s type, not in\nthe object\'s instance dictionary. That behaviour is the reason why\nthe following code raises an exception:\n\n >>> class C:\n ... pass\n ...\n >>> c = C()\n >>> c.__len__ = lambda: 5\n >>> len(c)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: object of type \'C\' has no len()\n\nThe rationale behind this behaviour lies with a number of special\nmethods such as "__hash__()" and "__repr__()" that are implemented by\nall objects, including type objects. If the implicit lookup of these\nmethods used the conventional lookup process, they would fail when\ninvoked on the type object itself:\n\n >>> 1 .__hash__() == hash(1)\n True\n >>> int.__hash__() == hash(int)\n Traceback (most recent call last):\n File "", line 1, in \n TypeError: descriptor \'__hash__\' of \'int\' object needs an argument\n\nIncorrectly attempting to invoke an unbound method of a class in this\nway is sometimes referred to as \'metaclass confusion\', and is avoided\nby bypassing the instance when looking up special methods:\n\n >>> type(1).__hash__(1) == hash(1)\n True\n >>> type(int).__hash__(int) == hash(int)\n True\n\nIn addition to bypassing any instance attributes in the interest of\ncorrectness, implicit special method lookup generally also bypasses\nthe "__getattribute__()" method even of the object\'s metaclass:\n\n >>> class Meta(type):\n ... def __getattribute__(*args):\n ... print("Metaclass getattribute invoked")\n ... return type.__getattribute__(*args)\n ...\n >>> class C(object, metaclass=Meta):\n ... def __len__(self):\n ... return 10\n ... def __getattribute__(*args):\n ... print("Class getattribute invoked")\n ... 
return object.__getattribute__(*args)\n ...\n >>> c = C()\n >>> c.__len__() # Explicit lookup via instance\n Class getattribute invoked\n 10\n >>> type(c).__len__(c) # Explicit lookup via type\n Metaclass getattribute invoked\n 10\n >>> len(c) # Implicit lookup\n 10\n\nBypassing the "__getattribute__()" machinery in this fashion provides\nsignificant scope for speed optimisations within the interpreter, at\nthe cost of some flexibility in the handling of special methods (the\nspecial method *must* be set on the class object itself in order to be\nconsistently invoked by the interpreter).\n\n-[ Footnotes ]-\n\n[1] It *is* possible in some cases to change an object\'s type, under\n certain controlled conditions. It generally isn\'t a good idea\n though, since it can lead to some very strange behaviour if it is\n handled incorrectly.\n\n[2] For operands of the same type, it is assumed that if the non-\n reflected method (such as "__add__()") fails the operation is not\n supported, which is why the reflected method is not called.\n', - 'string-methods': '\nString Methods\n**************\n\nStrings implement all of the *common* sequence operations, along with\nthe additional methods described below.\n\nStrings also support two styles of string formatting, one providing a\nlarge degree of flexibility and customization (see "str.format()",\n*Format String Syntax* and *String Formatting*) and the other based on\nC "printf" style formatting that handles a narrower range of types and\nis slightly harder to use correctly, but is often faster for the cases\nit can handle (*printf-style String Formatting*).\n\nThe *Text Processing Services* section of the standard library covers\na number of other modules that provide various text related utilities\n(including regular expression support in the "re" module).\n\nstr.capitalize()\n\n Return a copy of the string with its first character capitalized\n and the rest lowercased.\n\nstr.casefold()\n\n Return a casefolded copy of the string. Casefolded strings may be\n used for caseless matching.\n\n Casefolding is similar to lowercasing but more aggressive because\n it is intended to remove all case distinctions in a string. For\n example, the German lowercase letter "\'\xc3\x9f\'" is equivalent to ""ss"".\n Since it is already lowercase, "lower()" would do nothing to "\'\xc3\x9f\'";\n "casefold()" converts it to ""ss"".\n\n The casefolding algorithm is described in section 3.13 of the\n Unicode Standard.\n\n New in version 3.3.\n\nstr.center(width[, fillchar])\n\n Return centered in a string of length *width*. Padding is done\n using the specified *fillchar* (default is a space).\n\nstr.count(sub[, start[, end]])\n\n Return the number of non-overlapping occurrences of substring *sub*\n in the range [*start*, *end*]. Optional arguments *start* and\n *end* are interpreted as in slice notation.\n\nstr.encode(encoding="utf-8", errors="strict")\n\n Return an encoded version of the string as a bytes object. Default\n encoding is "\'utf-8\'". *errors* may be given to set a different\n error handling scheme. The default for *errors* is "\'strict\'",\n meaning that encoding errors raise a "UnicodeError". Other possible\n values are "\'ignore\'", "\'replace\'", "\'xmlcharrefreplace\'",\n "\'backslashreplace\'" and any other name registered via\n "codecs.register_error()", see section *Codec Base Classes*. 
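As a brief illustration of the "casefold()" and "encode()" behaviour described above (a sketch of what an interactive session prints):

   >>> "Straße".casefold()
   'strasse'
   >>> "Straße".lower()
   'straße'
   >>> "café".encode("ascii", errors="backslashreplace")
   b'caf\\xe9'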
For a\n list of possible encodings, see section *Standard Encodings*.\n\n Changed in version 3.1: Support for keyword arguments added.\n\nstr.endswith(suffix[, start[, end]])\n\n Return "True" if the string ends with the specified *suffix*,\n otherwise return "False". *suffix* can also be a tuple of suffixes\n to look for. With optional *start*, test beginning at that\n position. With optional *end*, stop comparing at that position.\n\nstr.expandtabs(tabsize=8)\n\n Return a copy of the string where all tab characters are replaced\n by one or more spaces, depending on the current column and the\n given tab size. Tab positions occur every *tabsize* characters\n (default is 8, giving tab positions at columns 0, 8, 16 and so on).\n To expand the string, the current column is set to zero and the\n string is examined character by character. If the character is a\n tab ("\\t"), one or more space characters are inserted in the result\n until the current column is equal to the next tab position. (The\n tab character itself is not copied.) If the character is a newline\n ("\\n") or return ("\\r"), it is copied and the current column is\n reset to zero. Any other character is copied unchanged and the\n current column is incremented by one regardless of how the\n character is represented when printed.\n\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs()\n \'01 012 0123 01234\'\n >>> \'01\\t012\\t0123\\t01234\'.expandtabs(4)\n \'01 012 0123 01234\'\n\nstr.find(sub[, start[, end]])\n\n Return the lowest index in the string where substring *sub* is\n found, such that *sub* is contained in the slice "s[start:end]".\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return "-1" if *sub* is not found.\n\n Note: The "find()" method should be used only if you need to know the\n position of *sub*. To check if *sub* is a substring or not, use\n the "in" operator:\n\n >>> \'Py\' in \'Python\'\n True\n\nstr.format(*args, **kwargs)\n\n Perform a string formatting operation. The string on which this\n method is called can contain literal text or replacement fields\n delimited by braces "{}". Each replacement field contains either\n the numeric index of a positional argument, or the name of a\n keyword argument. Returns a copy of the string where each\n replacement field is replaced with the string value of the\n corresponding argument.\n\n >>> "The sum of 1 + 2 is {0}".format(1+2)\n \'The sum of 1 + 2 is 3\'\n\n See *Format String Syntax* for a description of the various\n formatting options that can be specified in format strings.\n\nstr.format_map(mapping)\n\n Similar to "str.format(**mapping)", except that "mapping" is used\n directly and not copied to a "dict". This is useful if for example\n "mapping" is a dict subclass:\n\n >>> class Default(dict):\n ... def __missing__(self, key):\n ... return key\n ...\n >>> \'{name} was born in {country}\'.format_map(Default(name=\'Guido\'))\n \'Guido was born in country\'\n\n New in version 3.2.\n\nstr.index(sub[, start[, end]])\n\n Like "find()", but raise "ValueError" when the substring is not\n found.\n\nstr.isalnum()\n\n Return true if all characters in the string are alphanumeric and\n there is at least one character, false otherwise. A character "c"\n is alphanumeric if one of the following returns "True":\n "c.isalpha()", "c.isdecimal()", "c.isdigit()", or "c.isnumeric()".\n\nstr.isalpha()\n\n Return true if all characters in the string are alphabetic and\n there is at least one character, false otherwise. 
Alphabetic\n characters are those characters defined in the Unicode character\n database as "Letter", i.e., those with general category property\n being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note that this is\n different from the "Alphabetic" property defined in the Unicode\n Standard.\n\nstr.isdecimal()\n\n Return true if all characters in the string are decimal characters\n and there is at least one character, false otherwise. Decimal\n characters are those from general category "Nd". This category\n includes digit characters, and all characters that can be used to\n form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC DIGIT ZERO.\n\nstr.isdigit()\n\n Return true if all characters in the string are digits and there is\n at least one character, false otherwise. Digits include decimal\n characters and digits that need special handling, such as the\n compatibility superscript digits. Formally, a digit is a character\n that has the property value Numeric_Type=Digit or\n Numeric_Type=Decimal.\n\nstr.isidentifier()\n\n Return true if the string is a valid identifier according to the\n language definition, section *Identifiers and keywords*.\n\n Use "keyword.iskeyword()" to test for reserved identifiers such as\n "def" and "class".\n\nstr.islower()\n\n Return true if all cased characters [4] in the string are lowercase\n and there is at least one cased character, false otherwise.\n\nstr.isnumeric()\n\n Return true if all characters in the string are numeric characters,\n and there is at least one character, false otherwise. Numeric\n characters include digit characters, and all characters that have\n the Unicode numeric value property, e.g. U+2155, VULGAR FRACTION\n ONE FIFTH. Formally, numeric characters are those with the\n property value Numeric_Type=Digit, Numeric_Type=Decimal or\n Numeric_Type=Numeric.\n\nstr.isprintable()\n\n Return true if all characters in the string are printable or the\n string is empty, false otherwise. Nonprintable characters are\n those characters defined in the Unicode character database as\n "Other" or "Separator", excepting the ASCII space (0x20) which is\n considered printable. (Note that printable characters in this\n context are those which should not be escaped when "repr()" is\n invoked on a string. It has no bearing on the handling of strings\n written to "sys.stdout" or "sys.stderr".)\n\nstr.isspace()\n\n Return true if there are only whitespace characters in the string\n and there is at least one character, false otherwise. Whitespace\n characters are those characters defined in the Unicode character\n database as "Other" or "Separator" and those with bidirectional\n property being one of "WS", "B", or "S".\n\nstr.istitle()\n\n Return true if the string is a titlecased string and there is at\n least one character, for example uppercase characters may only\n follow uncased characters and lowercase characters only cased ones.\n Return false otherwise.\n\nstr.isupper()\n\n Return true if all cased characters [4] in the string are uppercase\n and there is at least one cased character, false otherwise.\n\nstr.join(iterable)\n\n Return a string which is the concatenation of the strings in the\n *iterable* *iterable*. 
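For instance, a couple of typical join() calls (a small sketch; note that the separator is the string the method is called on):

   >>> '-'.join(['ham', 'eggs', 'spam'])
   'ham-eggs-spam'
   >>> ', '.join(str(n) for n in range(4))
   '0, 1, 2, 3'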
A "TypeError" will be raised if there are\n any non-string values in *iterable*, including "bytes" objects.\n The separator between elements is the string providing this method.\n\nstr.ljust(width[, fillchar])\n\n Return the string left justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to "len(s)".\n\nstr.lower()\n\n Return a copy of the string with all the cased characters [4]\n converted to lowercase.\n\n The lowercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.lstrip([chars])\n\n Return a copy of the string with leading characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or "None", the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a prefix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.lstrip()\n \'spacious \'\n >>> \'www.example.com\'.lstrip(\'cmowz.\')\n \'example.com\'\n\nstatic str.maketrans(x[, y[, z]])\n\n This static method returns a translation table usable for\n "str.translate()".\n\n If there is only one argument, it must be a dictionary mapping\n Unicode ordinals (integers) or characters (strings of length 1) to\n Unicode ordinals, strings (of arbitrary lengths) or None.\n Character keys will then be converted to ordinals.\n\n If there are two arguments, they must be strings of equal length,\n and in the resulting dictionary, each character in x will be mapped\n to the character at the same position in y. If there is a third\n argument, it must be a string, whose characters will be mapped to\n None in the result.\n\nstr.partition(sep)\n\n Split the string at the first occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing the string itself, followed by\n two empty strings.\n\nstr.replace(old, new[, count])\n\n Return a copy of the string with all occurrences of substring *old*\n replaced by *new*. If the optional argument *count* is given, only\n the first *count* occurrences are replaced.\n\nstr.rfind(sub[, start[, end]])\n\n Return the highest index in the string where substring *sub* is\n found, such that *sub* is contained within "s[start:end]".\n Optional arguments *start* and *end* are interpreted as in slice\n notation. Return "-1" on failure.\n\nstr.rindex(sub[, start[, end]])\n\n Like "rfind()" but raises "ValueError" when the substring *sub* is\n not found.\n\nstr.rjust(width[, fillchar])\n\n Return the string right justified in a string of length *width*.\n Padding is done using the specified *fillchar* (default is a\n space). The original string is returned if *width* is less than or\n equal to "len(s)".\n\nstr.rpartition(sep)\n\n Split the string at the last occurrence of *sep*, and return a\n 3-tuple containing the part before the separator, the separator\n itself, and the part after the separator. If the separator is not\n found, return a 3-tuple containing two empty strings, followed by\n the string itself.\n\nstr.rsplit(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit* splits\n are done, the *rightmost* ones. If *sep* is not specified or\n "None", any whitespace string is a separator. 
Except for splitting\n from the right, "rsplit()" behaves like "split()" which is\n described in detail below.\n\nstr.rstrip([chars])\n\n Return a copy of the string with trailing characters removed. The\n *chars* argument is a string specifying the set of characters to be\n removed. If omitted or "None", the *chars* argument defaults to\n removing whitespace. The *chars* argument is not a suffix; rather,\n all combinations of its values are stripped:\n\n >>> \' spacious \'.rstrip()\n \' spacious\'\n >>> \'mississippi\'.rstrip(\'ipz\')\n \'mississ\'\n\nstr.split(sep=None, maxsplit=-1)\n\n Return a list of the words in the string, using *sep* as the\n delimiter string. If *maxsplit* is given, at most *maxsplit*\n splits are done (thus, the list will have at most "maxsplit+1"\n elements). If *maxsplit* is not specified or "-1", then there is\n no limit on the number of splits (all possible splits are made).\n\n If *sep* is given, consecutive delimiters are not grouped together\n and are deemed to delimit empty strings (for example,\n "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', \'2\']"). The *sep* argument\n may consist of multiple characters (for example,\n "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', \'3\']"). Splitting an\n empty string with a specified separator returns "[\'\']".\n\n If *sep* is not specified or is "None", a different splitting\n algorithm is applied: runs of consecutive whitespace are regarded\n as a single separator, and the result will contain no empty strings\n at the start or end if the string has leading or trailing\n whitespace. Consequently, splitting an empty string or a string\n consisting of just whitespace with a "None" separator returns "[]".\n\n For example, "\' 1 2 3 \'.split()" returns "[\'1\', \'2\', \'3\']", and\n "\' 1 2 3 \'.split(None, 1)" returns "[\'1\', \'2 3 \']".\n\nstr.splitlines([keepends])\n\n Return a list of the lines in the string, breaking at line\n boundaries. This method uses the *universal newlines* approach to\n splitting lines. Line breaks are not included in the resulting list\n unless *keepends* is given and true.\n\n For example, "\'ab c\\n\\nde fg\\rkl\\r\\n\'.splitlines()" returns "[\'ab\n c\', \'\', \'de fg\', \'kl\']", while the same call with\n "splitlines(True)" returns "[\'ab c\\n\', \'\\n\', \'de fg\\r\', \'kl\\r\\n\']".\n\n Unlike "split()" when a delimiter string *sep* is given, this\n method returns an empty list for the empty string, and a terminal\n line break does not result in an extra line.\n\nstr.startswith(prefix[, start[, end]])\n\n Return "True" if string starts with the *prefix*, otherwise return\n "False". *prefix* can also be a tuple of prefixes to look for.\n With optional *start*, test string beginning at that position.\n With optional *end*, stop comparing string at that position.\n\nstr.strip([chars])\n\n Return a copy of the string with the leading and trailing\n characters removed. The *chars* argument is a string specifying the\n set of characters to be removed. If omitted or "None", the *chars*\n argument defaults to removing whitespace. The *chars* argument is\n not a prefix or suffix; rather, all combinations of its values are\n stripped:\n\n >>> \' spacious \'.strip()\n \'spacious\'\n >>> \'www.example.com\'.strip(\'cmowz.\')\n \'example\'\n\nstr.swapcase()\n\n Return a copy of the string with uppercase characters converted to\n lowercase and vice versa. 
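For example (and, as the note that follows points out, the operation is not always its own inverse, e.g. for "\u00df"):

   >>> 'Hello World'.swapcase()
   'hELLO wORLD'
   >>> 'stra\u00dfe'.swapcase().swapcase()
   'strasse'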
Note that it is not necessarily true that\n "s.swapcase().swapcase() == s".\n\nstr.title()\n\n Return a titlecased version of the string where words start with an\n uppercase character and the remaining characters are lowercase.\n\n The algorithm uses a simple language-independent definition of a\n word as groups of consecutive letters. The definition works in\n many contexts but it means that apostrophes in contractions and\n possessives form word boundaries, which may not be the desired\n result:\n\n >>> "they\'re bill\'s friends from the UK".title()\n "They\'Re Bill\'S Friends From The Uk"\n\n A workaround for apostrophes can be constructed using regular\n expressions:\n\n >>> import re\n >>> def titlecase(s):\n ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n ... lambda mo: mo.group(0)[0].upper() +\n ... mo.group(0)[1:].lower(),\n ... s)\n ...\n >>> titlecase("they\'re bill\'s friends.")\n "They\'re Bill\'s Friends."\n\nstr.translate(map)\n\n Return a copy of the *s* where all characters have been mapped\n through the *map* which must be a dictionary of Unicode ordinals\n (integers) to Unicode ordinals, strings or "None". Unmapped\n characters are left untouched. Characters mapped to "None" are\n deleted.\n\n You can use "str.maketrans()" to create a translation map from\n character-to-character mappings in different formats.\n\n Note: An even more flexible approach is to create a custom character\n mapping codec using the "codecs" module (see "encodings.cp1251"\n for an example).\n\nstr.upper()\n\n Return a copy of the string with all the cased characters [4]\n converted to uppercase. Note that "str.upper().isupper()" might be\n "False" if "s" contains uncased characters or if the Unicode\n category of the resulting character(s) is not "Lu" (Letter,\n uppercase), but e.g. "Lt" (Letter, titlecase).\n\n The uppercasing algorithm used is described in section 3.13 of the\n Unicode Standard.\n\nstr.zfill(width)\n\n Return the numeric string left filled with zeros in a string of\n length *width*. A sign prefix is handled correctly. The original\n string is returned if *width* is less than or equal to "len(s)".\n', - 'strings': '\nString and Bytes literals\n*************************\n\nString literals are described by the following lexical definitions:\n\n stringliteral ::= [stringprefix](shortstring | longstring)\n stringprefix ::= "r" | "u" | "R" | "U"\n shortstring ::= "\'" shortstringitem* "\'" | \'"\' shortstringitem* \'"\'\n longstring ::= "\'\'\'" longstringitem* "\'\'\'" | \'"""\' longstringitem* \'"""\'\n shortstringitem ::= shortstringchar | stringescapeseq\n longstringitem ::= longstringchar | stringescapeseq\n shortstringchar ::= \n longstringchar ::= \n stringescapeseq ::= "\\" \n\n bytesliteral ::= bytesprefix(shortbytes | longbytes)\n bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | "rb" | "rB" | "Rb" | "RB"\n shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' shortbytesitem* \'"\'\n longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | \'"""\' longbytesitem* \'"""\'\n shortbytesitem ::= shortbyteschar | bytesescapeseq\n longbytesitem ::= longbyteschar | bytesescapeseq\n shortbyteschar ::= \n longbyteschar ::= \n bytesescapeseq ::= "\\" \n\nOne syntactic restriction not indicated by these productions is that\nwhitespace is not allowed between the "stringprefix" or "bytesprefix"\nand the rest of the literal. 
The source character set is defined by\nthe encoding declaration; it is UTF-8 if no encoding declaration is\ngiven in the source file; see section *Encoding declarations*.\n\nIn plain English: Both types of literals can be enclosed in matching\nsingle quotes ("\'") or double quotes ("""). They can also be enclosed\nin matching groups of three single or double quotes (these are\ngenerally referred to as *triple-quoted strings*). The backslash\n("\\") character is used to escape characters that otherwise have a\nspecial meaning, such as newline, backslash itself, or the quote\ncharacter.\n\nBytes literals are always prefixed with "\'b\'" or "\'B\'"; they produce\nan instance of the "bytes" type instead of the "str" type. They may\nonly contain ASCII characters; bytes with a numeric value of 128 or\ngreater must be expressed with escapes.\n\nAs of Python 3.3 it is possible again to prefix unicode strings with a\n"u" prefix to simplify maintenance of dual 2.x and 3.x codebases.\n\nBoth string and bytes literals may optionally be prefixed with a\nletter "\'r\'" or "\'R\'"; such strings are called *raw strings* and treat\nbackslashes as literal characters. As a result, in string literals,\n"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated specially.\nGiven that Python 2.x\'s raw unicode literals behave differently than\nPython 3.x\'s the "\'ur\'" syntax is not supported.\n\n New in version 3.3: The "\'rb\'" prefix of raw bytes literals has\n been added as a synonym of "\'br\'".\n\n New in version 3.3: Support for the unicode legacy literal\n ("u\'value\'") was reintroduced to simplify the maintenance of dual\n Python 2.x and 3.x codebases. See **PEP 414** for more information.\n\nIn triple-quoted strings, unescaped newlines and quotes are allowed\n(and are retained), except that three unescaped quotes in a row\nterminate the string. (A "quote" is the character used to open the\nstring, i.e. either "\'" or """.)\n\nUnless an "\'r\'" or "\'R\'" prefix is present, escape sequences in\nstrings are interpreted according to rules similar to those used by\nStandard C. 
The recognized escape sequences are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| "\\newline" | Backslash and newline ignored | |\n+-------------------+-----------------------------------+---------+\n| "\\\\" | Backslash ("\\") | |\n+-------------------+-----------------------------------+---------+\n| "\\\'" | Single quote ("\'") | |\n+-------------------+-----------------------------------+---------+\n| "\\"" | Double quote (""") | |\n+-------------------+-----------------------------------+---------+\n| "\\a" | ASCII Bell (BEL) | |\n+-------------------+-----------------------------------+---------+\n| "\\b" | ASCII Backspace (BS) | |\n+-------------------+-----------------------------------+---------+\n| "\\f" | ASCII Formfeed (FF) | |\n+-------------------+-----------------------------------+---------+\n| "\\n" | ASCII Linefeed (LF) | |\n+-------------------+-----------------------------------+---------+\n| "\\r" | ASCII Carriage Return (CR) | |\n+-------------------+-----------------------------------+---------+\n| "\\t" | ASCII Horizontal Tab (TAB) | |\n+-------------------+-----------------------------------+---------+\n| "\\v" | ASCII Vertical Tab (VT) | |\n+-------------------+-----------------------------------+---------+\n| "\\ooo" | Character with octal value *ooo* | (1,3) |\n+-------------------+-----------------------------------+---------+\n| "\\xhh" | Character with hex value *hh* | (2,3) |\n+-------------------+-----------------------------------+---------+\n\nEscape sequences only recognized in string literals are:\n\n+-------------------+-----------------------------------+---------+\n| Escape Sequence | Meaning | Notes |\n+===================+===================================+=========+\n| "\\N{name}" | Character named *name* in the Unicode database | (4) |\n+-------------------+-----------------------------------+---------+\n| "\\uxxxx" | Character with 16-bit hex value *xxxx* | (5) |\n+-------------------+-----------------------------------+---------+\n| "\\Uxxxxxxxx" | Character with 32-bit hex value *xxxxxxxx* | (6) |\n+-------------------+-----------------------------------+---------+\n\nNotes:\n\n1. As in Standard C, up to three octal digits are accepted.\n\n2. Unlike in Standard C, exactly two hex digits are required.\n\n3. In a bytes literal, hexadecimal and octal escapes denote the byte\n with the given value. In a string literal, these escapes denote a\n Unicode character with the given value.\n\n4. Changed in version 3.3: Support for name aliases [1] has been\n added.\n\n5. Individual code units which form parts of a surrogate pair can be\n encoded using this escape sequence. Exactly four hex digits are\n required.\n\n6. Any Unicode character can be encoded this way. Exactly eight hex\n digits are required.\n\nUnlike Standard C, all unrecognized escape sequences are left in the\nstring unchanged, i.e., *the backslash is left in the string*. (This\nbehavior is useful when debugging: if an escape sequence is mistyped,\nthe resulting output is more easily recognized as broken.) It is also\nimportant to note that the escape sequences only recognized in string\nliterals fall into the category of unrecognized escapes for bytes\nliterals.\n\nEven in a raw string, string quotes can be escaped with a backslash,\nbut the backslash remains in the string; for example, "r"\\""" is a\nvalid string literal consisting of two characters: a backslash and a\ndouble quote; "r"\\"" is not a valid string literal (even a raw string\ncannot end in an odd number of backslashes). Specifically, *a raw\nstring cannot end in a single backslash* (since the backslash would\nescape the following quote character). 
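A few interactive checks of the rules above: recognized escapes collapse to one character, raw strings keep the backslash, and escapes recognized only in string literals stay unrecognized in bytes literals (a small sketch):

   >>> len('\n'), len(r'\n')
   (1, 2)
   >>> '\d'                      # not a recognized escape: the backslash stays
   '\\d'
   >>> b'ascii only: \u0041'     # \u is only recognized in str literals
   b'ascii only: \\u0041'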
Note also that a single\nbackslash followed by a newline is interpreted as those two characters\nas part of the string, *not* as a line continuation.\n', - 'subscriptions': '\nSubscriptions\n*************\n\nA subscription selects an item of a sequence (string, tuple or list)\nor mapping (dictionary) object:\n\n subscription ::= primary "[" expression_list "]"\n\nThe primary must evaluate to an object that supports subscription,\ne.g. a list or dictionary. User-defined objects can support\nsubscription by defining a "__getitem__()" method.\n\nFor built-in objects, there are two types of objects that support\nsubscription:\n\nIf the primary is a mapping, the expression list must evaluate to an\nobject whose value is one of the keys of the mapping, and the\nsubscription selects the value in the mapping that corresponds to that\nkey. (The expression list is a tuple except if it has exactly one\nitem.)\n\nIf the primary is a sequence, the expression (list) must evaluate to\nan integer or a slice (as discussed in the following section).\n\nThe formal syntax makes no special provision for negative indices in\nsequences; however, built-in sequences all provide a "__getitem__()"\nmethod that interprets negative indices by adding the length of the\nsequence to the index (so that "x[-1]" selects the last item of "x").\nThe resulting value must be a nonnegative integer less than the number\nof items in the sequence, and the subscription selects the item whose\nindex is that value (counting from zero). Since the support for\nnegative indices and slicing occurs in the object\'s "__getitem__()"\nmethod, subclasses overriding this method will need to explicitly add\nthat support.\n\nA string\'s items are characters. A character is not a separate data\ntype but a string of exactly one character.\n', - 'truth': '\nTruth Value Testing\n*******************\n\nAny object can be tested for truth value, for use in an "if" or\n"while" condition or as operand of the Boolean operations below. The\nfollowing values are considered false:\n\n* "None"\n\n* "False"\n\n* zero of any numeric type, for example, "0", "0.0", "0j".\n\n* any empty sequence, for example, "\'\'", "()", "[]".\n\n* any empty mapping, for example, "{}".\n\n* instances of user-defined classes, if the class defines a\n "__bool__()" or "__len__()" method, when that method returns the\n integer zero or "bool" value "False". [1]\n\nAll other values are considered true --- so objects of many types are\nalways true.\n\nOperations and built-in functions that have a Boolean result always\nreturn "0" or "False" for false and "1" or "True" for true, unless\notherwise stated. (Important exception: the Boolean operations "or"\nand "and" always return one of their operands.)\n', - 'try': '\nThe "try" statement\n*******************\n\nThe "try" statement specifies exception handlers and/or cleanup code\nfor a group of statements:\n\n try_stmt ::= try1_stmt | try2_stmt\n try1_stmt ::= "try" ":" suite\n ("except" [expression ["as" target]] ":" suite)+\n ["else" ":" suite]\n ["finally" ":" suite]\n try2_stmt ::= "try" ":" suite\n "finally" ":" suite\n\nThe "except" clause(s) specify one or more exception handlers. When no\nexception occurs in the "try" clause, no exception handler is\nexecuted. When an exception occurs in the "try" suite, a search for an\nexception handler is started. This search inspects the except clauses\nin turn until one is found that matches the exception. 
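A small sketch of how an except clause matches an exception by its class, a base class, or a tuple of classes:

   >>> try:
   ...     {}['missing']
   ... except (KeyError, IndexError) as exc:
   ...     print('caught', type(exc).__name__)
   ...
   caught KeyError
   >>> try:
   ...     1 / 0
   ... except ArithmeticError:    # base class of ZeroDivisionError
   ...     print('caught via a base class')
   ...
   caught via a base class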
An expression-\nless except clause, if present, must be last; it matches any\nexception. For an except clause with an expression, that expression\nis evaluated, and the clause matches the exception if the resulting\nobject is "compatible" with the exception. An object is compatible\nwith an exception if it is the class or a base class of the exception\nobject or a tuple containing an item compatible with the exception.\n\nIf no except clause matches the exception, the search for an exception\nhandler continues in the surrounding code and on the invocation stack.\n[1]\n\nIf the evaluation of an expression in the header of an except clause\nraises an exception, the original search for a handler is canceled and\na search starts for the new exception in the surrounding code and on\nthe call stack (it is treated as if the entire "try" statement raised\nthe exception).\n\nWhen a matching except clause is found, the exception is assigned to\nthe target specified after the "as" keyword in that except clause, if\npresent, and the except clause\'s suite is executed. All except\nclauses must have an executable block. When the end of this block is\nreached, execution continues normally after the entire try statement.\n(This means that if two nested handlers exist for the same exception,\nand the exception occurs in the try clause of the inner handler, the\nouter handler will not handle the exception.)\n\nWhen an exception has been assigned using "as target", it is cleared\nat the end of the except clause. This is as if\n\n except E as N:\n foo\n\nwas translated to\n\n except E as N:\n try:\n foo\n finally:\n del N\n\nThis means the exception must be assigned to a different name to be\nable to refer to it after the except clause. Exceptions are cleared\nbecause with the traceback attached to them, they form a reference\ncycle with the stack frame, keeping all locals in that frame alive\nuntil the next garbage collection occurs.\n\nBefore an except clause\'s suite is executed, details about the\nexception are stored in the "sys" module and can be access via\n"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting of the\nexception class, the exception instance and a traceback object (see\nsection *The standard type hierarchy*) identifying the point in the\nprogram where the exception occurred. "sys.exc_info()" values are\nrestored to their previous values (before the call) when returning\nfrom a function that handled an exception.\n\nThe optional "else" clause is executed if and when control flows off\nthe end of the "try" clause. [2] Exceptions in the "else" clause are\nnot handled by the preceding "except" clauses.\n\nIf "finally" is present, it specifies a \'cleanup\' handler. The "try"\nclause is executed, including any "except" and "else" clauses. If an\nexception occurs in any of the clauses and is not handled, the\nexception is temporarily saved. The "finally" clause is executed. If\nthere is a saved exception it is re-raised at the end of the "finally"\nclause. If the "finally" clause raises another exception, the saved\nexception is set as the context of the new exception. If the "finally"\nclause executes a "return" or "break" statement, the saved exception\nis discarded:\n\n >>> def f():\n ... try:\n ... 1/0\n ... finally:\n ... 
return 42\n ...\n >>> f()\n 42\n\nThe exception information is not available to the program during\nexecution of the "finally" clause.\n\nWhen a "return", "break" or "continue" statement is executed in the\n"try" suite of a "try"..."finally" statement, the "finally" clause is\nalso executed \'on the way out.\' A "continue" statement is illegal in\nthe "finally" clause. (The reason is a problem with the current\nimplementation --- this restriction may be lifted in the future).\n\nThe return value of a function is determined by the last "return"\nstatement executed. Since the "finally" clause always executes, a\n"return" statement executed in the "finally" clause will always be the\nlast one executed:\n\n >>> def foo():\n ... try:\n ... return \'try\'\n ... finally:\n ... return \'finally\'\n ...\n >>> foo()\n \'finally\'\n\nAdditional information on exceptions can be found in section\n*Exceptions*, and information on using the "raise" statement to\ngenerate exceptions may be found in section *The raise statement*.\n', - 'types': '\nThe standard type hierarchy\n***************************\n\nBelow is a list of the types that are built into Python. Extension\nmodules (written in C, Java, or other languages, depending on the\nimplementation) can define additional types. Future versions of\nPython may add types to the type hierarchy (e.g., rational numbers,\nefficiently stored arrays of integers, etc.), although such additions\nwill often be provided via the standard library instead.\n\nSome of the type descriptions below contain a paragraph listing\n\'special attributes.\' These are attributes that provide access to the\nimplementation and are not intended for general use. Their definition\nmay change in the future.\n\nNone\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name "None". It\n is used to signify the absence of a value in many situations, e.g.,\n it is returned from functions that don\'t explicitly return\n anything. Its truth value is false.\n\nNotImplemented\n This type has a single value. There is a single object with this\n value. This object is accessed through the built-in name\n "NotImplemented". Numeric methods and rich comparison methods may\n return this value if they do not implement the operation for the\n operands provided. (The interpreter will then try the reflected\n operation, or some other fallback, depending on the operator.) Its\n truth value is true.\n\nEllipsis\n This type has a single value. There is a single object with this\n value. This object is accessed through the literal "..." or the\n built-in name "Ellipsis". Its truth value is true.\n\n"numbers.Number"\n These are created by numeric literals and returned as results by\n arithmetic operators and arithmetic built-in functions. Numeric\n objects are immutable; once created their value never changes.\n Python numbers are of course strongly related to mathematical\n numbers, but subject to the limitations of numerical representation\n in computers.\n\n Python distinguishes between integers, floating point numbers, and\n complex numbers:\n\n "numbers.Integral"\n These represent elements from the mathematical set of integers\n (positive and negative).\n\n There are two types of integers:\n\n Integers ("int")\n\n These represent numbers in an unlimited range, subject to\n available (virtual) memory only. 
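For example, integers never overflow; only available memory limits their size (a brief sketch):

   >>> 2 ** 100
   1267650600228229401496703205376
   >>> (2 ** 100).bit_length()
   101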
For the purpose of shift\n and mask operations, a binary representation is assumed, and\n negative numbers are represented in a variant of 2\'s\n complement which gives the illusion of an infinite string of\n sign bits extending to the left.\n\n Booleans ("bool")\n These represent the truth values False and True. The two\n objects representing the values "False" and "True" are the\n only Boolean objects. The Boolean type is a subtype of the\n integer type, and Boolean values behave like the values 0 and\n 1, respectively, in almost all contexts, the exception being\n that when converted to a string, the strings ""False"" or\n ""True"" are returned, respectively.\n\n The rules for integer representation are intended to give the\n most meaningful interpretation of shift and mask operations\n involving negative integers.\n\n "numbers.Real" ("float")\n These represent machine-level double precision floating point\n numbers. You are at the mercy of the underlying machine\n architecture (and C or Java implementation) for the accepted\n range and handling of overflow. Python does not support single-\n precision floating point numbers; the savings in processor and\n memory usage that are usually the reason for using these is\n dwarfed by the overhead of using objects in Python, so there is\n no reason to complicate the language with two kinds of floating\n point numbers.\n\n "numbers.Complex" ("complex")\n These represent complex numbers as a pair of machine-level\n double precision floating point numbers. The same caveats apply\n as for floating point numbers. The real and imaginary parts of a\n complex number "z" can be retrieved through the read-only\n attributes "z.real" and "z.imag".\n\nSequences\n These represent finite ordered sets indexed by non-negative\n numbers. The built-in function "len()" returns the number of items\n of a sequence. When the length of a sequence is *n*, the index set\n contains the numbers 0, 1, ..., *n*-1. Item *i* of sequence *a* is\n selected by "a[i]".\n\n Sequences also support slicing: "a[i:j]" selects all items with\n index *k* such that *i* "<=" *k* "<" *j*. When used as an\n expression, a slice is a sequence of the same type. This implies\n that the index set is renumbered so that it starts at 0.\n\n Some sequences also support "extended slicing" with a third "step"\n parameter: "a[i:j:k]" selects all items of *a* with index *x* where\n "x = i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.\n\n Sequences are distinguished according to their mutability:\n\n Immutable sequences\n An object of an immutable sequence type cannot change once it is\n created. (If the object contains references to other objects,\n these other objects may be mutable and may be changed; however,\n the collection of objects directly referenced by an immutable\n object cannot change.)\n\n The following types are immutable sequences:\n\n Strings\n A string is a sequence of values that represent Unicode\n codepoints. All the codepoints in range "U+0000 - U+10FFFF"\n can be represented in a string. Python doesn\'t have a "chr"\n type, and every character in the string is represented as a\n string object with length "1". The built-in function "ord()"\n converts a character to its codepoint (as an integer);\n "chr()" converts an integer in range "0 - 10FFFF" to the\n corresponding character. 
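A short illustration of ord() and chr(), including a code point outside the Basic Multilingual Plane, which is still a single-character string:

   >>> ord('a')
   97
   >>> chr(97)
   'a'
   >>> len('\U0001F40D'), hex(ord('\U0001F40D'))
   (1, '0x1f40d')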
"str.encode()" can be used to\n convert a "str" to "bytes" using the given encoding, and\n "bytes.decode()" can be used to achieve the opposite.\n\n Tuples\n The items of a tuple are arbitrary Python objects. Tuples of\n two or more items are formed by comma-separated lists of\n expressions. A tuple of one item (a \'singleton\') can be\n formed by affixing a comma to an expression (an expression by\n itself does not create a tuple, since parentheses must be\n usable for grouping of expressions). An empty tuple can be\n formed by an empty pair of parentheses.\n\n Bytes\n A bytes object is an immutable array. The items are 8-bit\n bytes, represented by integers in the range 0 <= x < 256.\n Bytes literals (like "b\'abc\'") and the built-in function\n "bytes()" can be used to construct bytes objects. Also,\n bytes objects can be decoded to strings via the "decode()"\n method.\n\n Mutable sequences\n Mutable sequences can be changed after they are created. The\n subscription and slicing notations can be used as the target of\n assignment and "del" (delete) statements.\n\n There are currently two intrinsic mutable sequence types:\n\n Lists\n The items of a list are arbitrary Python objects. Lists are\n formed by placing a comma-separated list of expressions in\n square brackets. (Note that there are no special cases needed\n to form lists of length 0 or 1.)\n\n Byte Arrays\n A bytearray object is a mutable array. They are created by\n the built-in "bytearray()" constructor. Aside from being\n mutable (and hence unhashable), byte arrays otherwise provide\n the same interface and functionality as immutable bytes\n objects.\n\n The extension module "array" provides an additional example of a\n mutable sequence type, as does the "collections" module.\n\nSet types\n These represent unordered, finite sets of unique, immutable\n objects. As such, they cannot be indexed by any subscript. However,\n they can be iterated over, and the built-in function "len()"\n returns the number of items in a set. Common uses for sets are fast\n membership testing, removing duplicates from a sequence, and\n computing mathematical operations such as intersection, union,\n difference, and symmetric difference.\n\n For set elements, the same immutability rules apply as for\n dictionary keys. Note that numeric types obey the normal rules for\n numeric comparison: if two numbers compare equal (e.g., "1" and\n "1.0"), only one of them can be contained in a set.\n\n There are currently two intrinsic set types:\n\n Sets\n These represent a mutable set. They are created by the built-in\n "set()" constructor and can be modified afterwards by several\n methods, such as "add()".\n\n Frozen sets\n These represent an immutable set. They are created by the\n built-in "frozenset()" constructor. As a frozenset is immutable\n and *hashable*, it can be used again as an element of another\n set, or as a dictionary key.\n\nMappings\n These represent finite sets of objects indexed by arbitrary index\n sets. The subscript notation "a[k]" selects the item indexed by "k"\n from the mapping "a"; this can be used in expressions and as the\n target of assignments or "del" statements. The built-in function\n "len()" returns the number of items in a mapping.\n\n There is currently a single intrinsic mapping type:\n\n Dictionaries\n These represent finite sets of objects indexed by nearly\n arbitrary values. 
The only types of values not acceptable as\n keys are values containing lists or dictionaries or other\n mutable types that are compared by value rather than by object\n identity, the reason being that the efficient implementation of\n dictionaries requires a key\'s hash value to remain constant.\n Numeric types used for keys obey the normal rules for numeric\n comparison: if two numbers compare equal (e.g., "1" and "1.0")\n then they can be used interchangeably to index the same\n dictionary entry.\n\n Dictionaries are mutable; they can be created by the "{...}"\n notation (see section *Dictionary displays*).\n\n The extension modules "dbm.ndbm" and "dbm.gnu" provide\n additional examples of mapping types, as does the "collections"\n module.\n\nCallable types\n These are the types to which the function call operation (see\n section *Calls*) can be applied:\n\n User-defined functions\n A user-defined function object is created by a function\n definition (see section *Function definitions*). It should be\n called with an argument list containing the same number of items\n as the function\'s formal parameter list.\n\n Special attributes:\n\n +---------------------------+---------------------------------+-------------+\n +===========================+=================================+=============+\n | "__doc__" | The function\'s documentation | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__name__" | The function\'s name | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__qualname__" | The function\'s *qualified name* | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__module__" | The name of the module the | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__defaults__" | A tuple containing default | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__code__" | The code object representing | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__globals__" | A reference to the dictionary | Read-only |\n +---------------------------+---------------------------------+-------------+\n | "__dict__" | The namespace supporting | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__closure__" | "None" or a tuple of cells that | Read-only |\n +---------------------------+---------------------------------+-------------+\n | "__annotations__" | A dict containing annotations | Writable |\n +---------------------------+---------------------------------+-------------+\n | "__kwdefaults__" | A dict containing defaults for | Writable |\n +---------------------------+---------------------------------+-------------+\n\n Most of the attributes labelled "Writable" check the type of the\n assigned value.\n\n Function objects also support getting and setting arbitrary\n attributes, which can be used, for example, to attach metadata\n to functions. Regular attribute dot-notation is used to get and\n set such attributes. *Note that the current implementation only\n supports function attributes on user-defined functions. 
Function\n attributes on built-in functions may be supported in the\n future.*\n\n Additional information about a function\'s definition can be\n retrieved from its code object; see the description of internal\n types below.\n\n Instance methods\n An instance method object combines a class, a class instance and\n any callable object (normally a user-defined function).\n\n Special read-only attributes: "__self__" is the class instance\n object, "__func__" is the function object; "__doc__" is the\n method\'s documentation (same as "__func__.__doc__"); "__name__"\n is the method name (same as "__func__.__name__"); "__module__"\n is the name of the module the method was defined in, or "None"\n if unavailable.\n\n Methods also support accessing (but not setting) the arbitrary\n function attributes on the underlying function object.\n\n User-defined method objects may be created when getting an\n attribute of a class (perhaps via an instance of that class), if\n that attribute is a user-defined function object or a class\n method object.\n\n When an instance method object is created by retrieving a user-\n defined function object from a class via one of its instances,\n its "__self__" attribute is the instance, and the method object\n is said to be bound. The new method\'s "__func__" attribute is\n the original function object.\n\n When a user-defined method object is created by retrieving\n another method object from a class or instance, the behaviour is\n the same as for a function object, except that the "__func__"\n attribute of the new instance is not the original method object\n but its "__func__" attribute.\n\n When an instance method object is created by retrieving a class\n method object from a class or instance, its "__self__" attribute\n is the class itself, and its "__func__" attribute is the\n function object underlying the class method.\n\n When an instance method object is called, the underlying\n function ("__func__") is called, inserting the class instance\n ("__self__") in front of the argument list. For instance, when\n "C" is a class which contains a definition for a function "f()",\n and "x" is an instance of "C", calling "x.f(1)" is equivalent to\n calling "C.f(x, 1)".\n\n When an instance method object is derived from a class method\n object, the "class instance" stored in "__self__" will actually\n be the class itself, so that calling either "x.f(1)" or "C.f(1)"\n is equivalent to calling "f(C,1)" where "f" is the underlying\n function.\n\n Note that the transformation from function object to instance\n method object happens each time the attribute is retrieved from\n the instance. In some cases, a fruitful optimization is to\n assign the attribute to a local variable and call that local\n variable. Also notice that this transformation only happens for\n user-defined functions; other callable objects (and all non-\n callable objects) are retrieved without transformation. 
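A minimal sketch of the binding behaviour described above, including caching the bound method in a local name (the class "C" is purely illustrative):

   >>> class C:
   ...     def f(self, x):
   ...         return x + 1
   ...
   >>> obj = C()
   >>> obj.f(1) == C.f(obj, 1)
   True
   >>> m = obj.f                 # retrieve (and cache) the bound method once
   >>> m.__self__ is obj and m.__func__ is C.f
   True
   >>> m(41)
   42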
It is\n also important to note that user-defined functions which are\n attributes of a class instance are not converted to bound\n methods; this *only* happens when the function is an attribute\n of the class.\n\n Generator functions\n A function or method which uses the "yield" statement (see\n section *The yield statement*) is called a *generator function*.\n Such a function, when called, always returns an iterator object\n which can be used to execute the body of the function: calling\n the iterator\'s "iterator.__next__()" method will cause the\n function to execute until it provides a value using the "yield"\n statement. When the function executes a "return" statement or\n falls off the end, a "StopIteration" exception is raised and the\n iterator will have reached the end of the set of values to be\n returned.\n\n Built-in functions\n A built-in function object is a wrapper around a C function.\n Examples of built-in functions are "len()" and "math.sin()"\n ("math" is a standard built-in module). The number and type of\n the arguments are determined by the C function. Special read-\n only attributes: "__doc__" is the function\'s documentation\n string, or "None" if unavailable; "__name__" is the function\'s\n name; "__self__" is set to "None" (but see the next item);\n "__module__" is the name of the module the function was defined\n in or "None" if unavailable.\n\n Built-in methods\n This is really a different disguise of a built-in function, this\n time containing an object passed to the C function as an\n implicit extra argument. An example of a built-in method is\n "alist.append()", assuming *alist* is a list object. In this\n case, the special read-only attribute "__self__" is set to the\n object denoted by *alist*.\n\n Classes\n Classes are callable. These objects normally act as factories\n for new instances of themselves, but variations are possible for\n class types that override "__new__()". The arguments of the\n call are passed to "__new__()" and, in the typical case, to\n "__init__()" to initialize the new instance.\n\n Class Instances\n Instances of arbitrary classes can be made callable by defining\n a "__call__()" method in their class.\n\nModules\n Modules are a basic organizational unit of Python code, and are\n created by the *import system* as invoked either by the "import"\n statement (see "import"), or by calling functions such as\n "importlib.import_module()" and built-in "__import__()". A module\n object has a namespace implemented by a dictionary object (this is\n the dictionary referenced by the "__globals__" attribute of\n functions defined in the module). Attribute references are\n translated to lookups in this dictionary, e.g., "m.x" is equivalent\n to "m.__dict__["x"]". A module object does not contain the code\n object used to initialize the module (since it isn\'t needed once\n the initialization is done).\n\n Attribute assignment updates the module\'s namespace dictionary,\n e.g., "m.x = 1" is equivalent to "m.__dict__["x"] = 1".\n\n Special read-only attribute: "__dict__" is the module\'s namespace\n as a dictionary object.\n\n **CPython implementation detail:** Because of the way CPython\n clears module dictionaries, the module dictionary will be cleared\n when the module falls out of scope even if the dictionary still has\n live references. 
To avoid this, copy the dictionary or keep the\n module around while using its dictionary directly.\n\n Predefined (writable) attributes: "__name__" is the module\'s name;\n "__doc__" is the module\'s documentation string, or "None" if\n unavailable; "__file__" is the pathname of the file from which the\n module was loaded, if it was loaded from a file. The "__file__"\n attribute may be missing for certain types of modules, such as C\n modules that are statically linked into the interpreter; for\n extension modules loaded dynamically from a shared library, it is\n the pathname of the shared library file.\n\nCustom classes\n Custom class types are typically created by class definitions (see\n section *Class definitions*). A class has a namespace implemented\n by a dictionary object. Class attribute references are translated\n to lookups in this dictionary, e.g., "C.x" is translated to\n "C.__dict__["x"]" (although there are a number of hooks which allow\n for other means of locating attributes). When the attribute name is\n not found there, the attribute search continues in the base\n classes. This search of the base classes uses the C3 method\n resolution order which behaves correctly even in the presence of\n \'diamond\' inheritance structures where there are multiple\n inheritance paths leading back to a common ancestor. Additional\n details on the C3 MRO used by Python can be found in the\n documentation accompanying the 2.3 release at\n http://www.python.org/download/releases/2.3/mro/.\n\n When a class attribute reference (for class "C", say) would yield a\n class method object, it is transformed into an instance method\n object whose "__self__" attributes is "C". When it would yield a\n static method object, it is transformed into the object wrapped by\n the static method object. See section *Implementing Descriptors*\n for another way in which attributes retrieved from a class may\n differ from those actually contained in its "__dict__".\n\n Class attribute assignments update the class\'s dictionary, never\n the dictionary of a base class.\n\n A class object can be called (see above) to yield a class instance\n (see below).\n\n Special attributes: "__name__" is the class name; "__module__" is\n the module name in which the class was defined; "__dict__" is the\n dictionary containing the class\'s namespace; "__bases__" is a tuple\n (possibly empty or a singleton) containing the base classes, in the\n order of their occurrence in the base class list; "__doc__" is the\n class\'s documentation string, or None if undefined.\n\nClass instances\n A class instance is created by calling a class object (see above).\n A class instance has a namespace implemented as a dictionary which\n is the first place in which attribute references are searched.\n When an attribute is not found there, and the instance\'s class has\n an attribute by that name, the search continues with the class\n attributes. If a class attribute is found that is a user-defined\n function object, it is transformed into an instance method object\n whose "__self__" attribute is the instance. Static method and\n class method objects are also transformed; see above under\n "Classes". 
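For instance, a small illustrative class shows both transformations: a class method comes back bound to the class itself, while a static method comes back as the plain wrapped function:

   >>> class C:
   ...     @classmethod
   ...     def cm(cls):
   ...         return cls.__name__
   ...     @staticmethod
   ...     def sm():
   ...         return 'just a function'
   ...
   >>> C.cm.__self__ is C        # class method: bound to the class itself
   True
   >>> C().cm()
   'C'
   >>> type(C.__dict__['sm']) is staticmethod, type(C.sm)
   (True, <class 'function'>)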
See section *Implementing Descriptors* for another way\n in which attributes of a class retrieved via its instances may\n differ from the objects actually stored in the class\'s "__dict__".\n If no class attribute is found, and the object\'s class has a\n "__getattr__()" method, that is called to satisfy the lookup.\n\n Attribute assignments and deletions update the instance\'s\n dictionary, never a class\'s dictionary. If the class has a\n "__setattr__()" or "__delattr__()" method, this is called instead\n of updating the instance dictionary directly.\n\n Class instances can pretend to be numbers, sequences, or mappings\n if they have methods with certain special names. See section\n *Special method names*.\n\n Special attributes: "__dict__" is the attribute dictionary;\n "__class__" is the instance\'s class.\n\nI/O objects (also known as file objects)\n A *file object* represents an open file. Various shortcuts are\n available to create file objects: the "open()" built-in function,\n and also "os.popen()", "os.fdopen()", and the "makefile()" method\n of socket objects (and perhaps by other functions or methods\n provided by extension modules).\n\n The objects "sys.stdin", "sys.stdout" and "sys.stderr" are\n initialized to file objects corresponding to the interpreter\'s\n standard input, output and error streams; they are all open in text\n mode and therefore follow the interface defined by the\n "io.TextIOBase" abstract class.\n\nInternal types\n A few types used internally by the interpreter are exposed to the\n user. Their definitions may change with future versions of the\n interpreter, but they are mentioned here for completeness.\n\n Code objects\n Code objects represent *byte-compiled* executable Python code,\n or *bytecode*. The difference between a code object and a\n function object is that the function object contains an explicit\n reference to the function\'s globals (the module in which it was\n defined), while a code object contains no context; also the\n default argument values are stored in the function object, not\n in the code object (because they represent values calculated at\n run-time). 
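A small sketch of the split between a function object and its code object:

   >>> def f(a, b=2):
   ...     return a + b
   ...
   >>> f.__defaults__            # default values live on the function object
   (2,)
   >>> f.__code__.co_varnames    # the code object only records the names
   ('a', 'b')
   >>> f.__code__.co_argcount
   2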
Unlike function objects, code objects are immutable\n and contain no references (directly or indirectly) to mutable\n objects.\n\n Special read-only attributes: "co_name" gives the function name;\n "co_argcount" is the number of positional arguments (including\n arguments with default values); "co_nlocals" is the number of\n local variables used by the function (including arguments);\n "co_varnames" is a tuple containing the names of the local\n variables (starting with the argument names); "co_cellvars" is a\n tuple containing the names of local variables that are\n referenced by nested functions; "co_freevars" is a tuple\n containing the names of free variables; "co_code" is a string\n representing the sequence of bytecode instructions; "co_consts"\n is a tuple containing the literals used by the bytecode;\n "co_names" is a tuple containing the names used by the bytecode;\n "co_filename" is the filename from which the code was compiled;\n "co_firstlineno" is the first line number of the function;\n "co_lnotab" is a string encoding the mapping from bytecode\n offsets to line numbers (for details see the source code of the\n interpreter); "co_stacksize" is the required stack size\n (including local variables); "co_flags" is an integer encoding a\n number of flags for the interpreter.\n\n The following flag bits are defined for "co_flags": bit "0x04"\n is set if the function uses the "*arguments" syntax to accept an\n arbitrary number of positional arguments; bit "0x08" is set if\n the function uses the "**keywords" syntax to accept arbitrary\n keyword arguments; bit "0x20" is set if the function is a\n generator.\n\n Future feature declarations ("from __future__ import division")\n also use bits in "co_flags" to indicate whether a code object\n was compiled with a particular feature enabled: bit "0x2000" is\n set if the function was compiled with future division enabled;\n bits "0x10" and "0x1000" were used in earlier versions of\n Python.\n\n Other bits in "co_flags" are reserved for internal use.\n\n If a code object represents a function, the first item in\n "co_consts" is the documentation string of the function, or\n "None" if undefined.\n\n Frame objects\n Frame objects represent execution frames. They may occur in\n traceback objects (see below).\n\n Special read-only attributes: "f_back" is to the previous stack\n frame (towards the caller), or "None" if this is the bottom\n stack frame; "f_code" is the code object being executed in this\n frame; "f_locals" is the dictionary used to look up local\n variables; "f_globals" is used for global variables;\n "f_builtins" is used for built-in (intrinsic) names; "f_lasti"\n gives the precise instruction (this is an index into the\n bytecode string of the code object).\n\n Special writable attributes: "f_trace", if not "None", is a\n function called at the start of each source code line (this is\n used by the debugger); "f_lineno" is the current line number of\n the frame --- writing to this from within a trace function jumps\n to the given line (only for the bottom-most frame). A debugger\n can implement a Jump command (aka Set Next Statement) by writing\n to f_lineno.\n\n Frame objects support one method:\n\n frame.clear()\n\n This method clears all references to local variables held by\n the frame. Also, if the frame belonged to a generator, the\n generator is finalized. 
This helps break reference cycles\n involving frame objects (for example when catching an\n exception and storing its traceback for later use).\n\n "RuntimeError" is raised if the frame is currently executing.\n\n New in version 3.4.\n\n Traceback objects\n Traceback objects represent a stack trace of an exception. A\n traceback object is created when an exception occurs. When the\n search for an exception handler unwinds the execution stack, at\n each unwound level a traceback object is inserted in front of\n the current traceback. When an exception handler is entered,\n the stack trace is made available to the program. (See section\n *The try statement*.) It is accessible as the third item of the\n tuple returned by "sys.exc_info()". When the program contains no\n suitable handler, the stack trace is written (nicely formatted)\n to the standard error stream; if the interpreter is interactive,\n it is also made available to the user as "sys.last_traceback".\n\n Special read-only attributes: "tb_next" is the next level in the\n stack trace (towards the frame where the exception occurred), or\n "None" if there is no next level; "tb_frame" points to the\n execution frame of the current level; "tb_lineno" gives the line\n number where the exception occurred; "tb_lasti" indicates the\n precise instruction. The line number and last instruction in\n the traceback may differ from the line number of its frame\n object if the exception occurred in a "try" statement with no\n matching except clause or with a finally clause.\n\n Slice objects\n Slice objects are used to represent slices for "__getitem__()"\n methods. They are also created by the built-in "slice()"\n function.\n\n Special read-only attributes: "start" is the lower bound; "stop"\n is the upper bound; "step" is the step value; each is "None" if\n omitted. These attributes can have any type.\n\n Slice objects support one method:\n\n slice.indices(self, length)\n\n This method takes a single integer argument *length* and\n computes information about the slice that the slice object\n would describe if applied to a sequence of *length* items.\n It returns a tuple of three integers; respectively these are\n the *start* and *stop* indices and the *step* or stride\n length of the slice. Missing or out-of-bounds indices are\n handled in a manner consistent with regular slices.\n\n Static method objects\n Static method objects provide a way of defeating the\n transformation of function objects to method objects described\n above. A static method object is a wrapper around any other\n object, usually a user-defined method object. When a static\n method object is retrieved from a class or a class instance, the\n object actually returned is the wrapped object, which is not\n subject to any further transformation. Static method objects are\n not themselves callable, although the objects they wrap usually\n are. Static method objects are created by the built-in\n "staticmethod()" constructor.\n\n Class method objects\n A class method object, like a static method object, is a wrapper\n around another object that alters the way in which that object\n is retrieved from classes and class instances. The behaviour of\n class method objects upon such retrieval is described above,\n under "User-defined methods". Class method objects are created\n by the built-in "classmethod()" constructor.\n', - 'typesfunctions': '\nFunctions\n*********\n\nFunction objects are created by function definitions. 
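To make the wrapper behaviour of static and class method objects concrete, a minimal sketch (the class name "C" is arbitrary): the object stored in the class "__dict__" is the wrapper, while attribute retrieval yields the wrapped callable.

   >>> class C:
   ...     @staticmethod
   ...     def s():
   ...         return 'static'
   ...     @classmethod
   ...     def c(cls):
   ...         return cls.__name__
   ...
   >>> type(C.__dict__['s'])        # the stored wrapper object
   <class 'staticmethod'>
   >>> C.s()                        # retrieved as a plain function and called
   'static'
   >>> C.c()                        # the class is passed as the first argument
   'C'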
The only\noperation on a function object is to call it: "func(argument-list)".\n\nThere are really two flavors of function objects: built-in functions\nand user-defined functions. Both support the same operation (to call\nthe function), but the implementation is different, hence the\ndifferent object types.\n\nSee *Function definitions* for more information.\n', - 'typesmapping': '\nMapping Types --- "dict"\n************************\n\nA *mapping* object maps *hashable* values to arbitrary objects.\nMappings are mutable objects. There is currently only one standard\nmapping type, the *dictionary*. (For other containers see the built-\nin "list", "set", and "tuple" classes, and the "collections" module.)\n\nA dictionary\'s keys are *almost* arbitrary values. Values that are\nnot *hashable*, that is, values containing lists, dictionaries or\nother mutable types (that are compared by value rather than by object\nidentity) may not be used as keys. Numeric types used for keys obey\nthe normal rules for numeric comparison: if two numbers compare equal\n(such as "1" and "1.0") then they can be used interchangeably to index\nthe same dictionary entry. (Note however, that since computers store\nfloating-point numbers as approximations it is usually unwise to use\nthem as dictionary keys.)\n\nDictionaries can be created by placing a comma-separated list of "key:\nvalue" pairs within braces, for example: "{\'jack\': 4098, \'sjoerd\':\n4127}" or "{4098: \'jack\', 4127: \'sjoerd\'}", or by the "dict"\nconstructor.\n\nclass class dict(**kwarg)\nclass class dict(mapping, **kwarg)\nclass class dict(iterable, **kwarg)\n\n Return a new dictionary initialized from an optional positional\n argument and a possibly empty set of keyword arguments.\n\n If no positional argument is given, an empty dictionary is created.\n If a positional argument is given and it is a mapping object, a\n dictionary is created with the same key-value pairs as the mapping\n object. Otherwise, the positional argument must be an *iterator*\n object. Each item in the iterable must itself be an iterator with\n exactly two objects. The first object of each item becomes a key\n in the new dictionary, and the second object the corresponding\n value. If a key occurs more than once, the last value for that key\n becomes the corresponding value in the new dictionary.\n\n If keyword arguments are given, the keyword arguments and their\n values are added to the dictionary created from the positional\n argument. If a key being added is already present, the value from\n the keyword argument replaces the value from the positional\n argument.\n\n To illustrate, the following examples all return a dictionary equal\n to "{"one": 1, "two": 2, "three": 3}":\n\n >>> a = dict(one=1, two=2, three=3)\n >>> b = {\'one\': 1, \'two\': 2, \'three\': 3}\n >>> c = dict(zip([\'one\', \'two\', \'three\'], [1, 2, 3]))\n >>> d = dict([(\'two\', 2), (\'one\', 1), (\'three\', 3)])\n >>> e = dict({\'three\': 3, \'one\': 1, \'two\': 2})\n >>> a == b == c == d == e\n True\n\n Providing keyword arguments as in the first example only works for\n keys that are valid Python identifiers. Otherwise, any valid keys\n can be used.\n\n These are the operations that dictionaries support (and therefore,\n custom mapping types should support too):\n\n len(d)\n\n Return the number of items in the dictionary *d*.\n\n d[key]\n\n Return the item of *d* with key *key*. 
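A short sketch of the key-equality rule and of the "KeyError" behaviour mentioned here (the dictionary contents are arbitrary):

   >>> d = {1: 'int key'}
   >>> d[1.0]                       # 1 and 1.0 compare equal, so they index the same entry
   'int key'
   >>> d[2]                         # a missing key raises KeyError
   Traceback (most recent call last):
     ...
   KeyError: 2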
Raises a "KeyError" if\n *key* is not in the map.\n\n If a subclass of dict defines a method "__missing__()", if the\n key *key* is not present, the "d[key]" operation calls that\n method with the key *key* as argument. The "d[key]" operation\n then returns or raises whatever is returned or raised by the\n "__missing__(key)" call if the key is not present. No other\n operations or methods invoke "__missing__()". If "__missing__()"\n is not defined, "KeyError" is raised. "__missing__()" must be a\n method; it cannot be an instance variable:\n\n >>> class Counter(dict):\n ... def __missing__(self, key):\n ... return 0\n >>> c = Counter()\n >>> c[\'red\']\n 0\n >>> c[\'red\'] += 1\n >>> c[\'red\']\n 1\n\n See "collections.Counter" for a complete implementation\n including other methods helpful for accumulating and managing\n tallies.\n\n d[key] = value\n\n Set "d[key]" to *value*.\n\n del d[key]\n\n Remove "d[key]" from *d*. Raises a "KeyError" if *key* is not\n in the map.\n\n key in d\n\n Return "True" if *d* has a key *key*, else "False".\n\n key not in d\n\n Equivalent to "not key in d".\n\n iter(d)\n\n Return an iterator over the keys of the dictionary. This is a\n shortcut for "iter(d.keys())".\n\n clear()\n\n Remove all items from the dictionary.\n\n copy()\n\n Return a shallow copy of the dictionary.\n\n classmethod fromkeys(seq[, value])\n\n Create a new dictionary with keys from *seq* and values set to\n *value*.\n\n "fromkeys()" is a class method that returns a new dictionary.\n *value* defaults to "None".\n\n get(key[, default])\n\n Return the value for *key* if *key* is in the dictionary, else\n *default*. If *default* is not given, it defaults to "None", so\n that this method never raises a "KeyError".\n\n items()\n\n Return a new view of the dictionary\'s items ("(key, value)"\n pairs). See the *documentation of view objects*.\n\n keys()\n\n Return a new view of the dictionary\'s keys. See the\n *documentation of view objects*.\n\n pop(key[, default])\n\n If *key* is in the dictionary, remove it and return its value,\n else return *default*. If *default* is not given and *key* is\n not in the dictionary, a "KeyError" is raised.\n\n popitem()\n\n Remove and return an arbitrary "(key, value)" pair from the\n dictionary.\n\n "popitem()" is useful to destructively iterate over a\n dictionary, as often used in set algorithms. If the dictionary\n is empty, calling "popitem()" raises a "KeyError".\n\n setdefault(key[, default])\n\n If *key* is in the dictionary, return its value. If not, insert\n *key* with a value of *default* and return *default*. *default*\n defaults to "None".\n\n update([other])\n\n Update the dictionary with the key/value pairs from *other*,\n overwriting existing keys. Return "None".\n\n "update()" accepts either another dictionary object or an\n iterable of key/value pairs (as tuples or other iterables of\n length two). If keyword arguments are specified, the dictionary\n is then updated with those key/value pairs: "d.update(red=1,\n blue=2)".\n\n values()\n\n Return a new view of the dictionary\'s values. See the\n *documentation of view objects*.\n\nSee also:\n\n "types.MappingProxyType" can be used to create a read-only view of\n a "dict".\n\n\nDictionary view objects\n=======================\n\nThe objects returned by "dict.keys()", "dict.values()" and\n"dict.items()" are *view objects*. 
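A compact, illustrative run through several of the methods listed above (the keys and values are arbitrary):

   >>> d = {'one': 1, 'two': 2}
   >>> d.get('three')               # missing key: returns None rather than raising
   >>> d.setdefault('three', 3)
   3
   >>> d.pop('one')
   1
   >>> d.update(four=4)
   >>> sorted(d)
   ['four', 'three', 'two']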
They provide a dynamic view on the\ndictionary\'s entries, which means that when the dictionary changes,\nthe view reflects these changes.\n\nDictionary views can be iterated over to yield their respective data,\nand support membership tests:\n\nlen(dictview)\n\n Return the number of entries in the dictionary.\n\niter(dictview)\n\n Return an iterator over the keys, values or items (represented as\n tuples of "(key, value)") in the dictionary.\n\n Keys and values are iterated over in an arbitrary order which is\n non-random, varies across Python implementations, and depends on\n the dictionary\'s history of insertions and deletions. If keys,\n values and items views are iterated over with no intervening\n modifications to the dictionary, the order of items will directly\n correspond. This allows the creation of "(value, key)" pairs using\n "zip()": "pairs = zip(d.values(), d.keys())". Another way to\n create the same list is "pairs = [(v, k) for (k, v) in d.items()]".\n\n Iterating views while adding or deleting entries in the dictionary\n may raise a "RuntimeError" or fail to iterate over all entries.\n\nx in dictview\n\n Return "True" if *x* is in the underlying dictionary\'s keys, values\n or items (in the latter case, *x* should be a "(key, value)"\n tuple).\n\nKeys views are set-like since their entries are unique and hashable.\nIf all values are hashable, so that "(key, value)" pairs are unique\nand hashable, then the items view is also set-like. (Values views are\nnot treated as set-like since the entries are generally not unique.)\nFor set-like views, all of the operations defined for the abstract\nbase class "collections.abc.Set" are available (for example, "==",\n"<", or "^").\n\nAn example of dictionary view usage:\n\n >>> dishes = {\'eggs\': 2, \'sausage\': 1, \'bacon\': 1, \'spam\': 500}\n >>> keys = dishes.keys()\n >>> values = dishes.values()\n\n >>> # iteration\n >>> n = 0\n >>> for val in values:\n ... n += val\n >>> print(n)\n 504\n\n >>> # keys and values are iterated over in the same order\n >>> list(keys)\n [\'eggs\', \'bacon\', \'sausage\', \'spam\']\n >>> list(values)\n [2, 1, 1, 500]\n\n >>> # view objects are dynamic and reflect dict changes\n >>> del dishes[\'eggs\']\n >>> del dishes[\'sausage\']\n >>> list(keys)\n [\'spam\', \'bacon\']\n\n >>> # set operations\n >>> keys & {\'eggs\', \'bacon\', \'salad\'}\n {\'bacon\'}\n >>> keys ^ {\'sausage\', \'juice\'}\n {\'juice\', \'sausage\', \'bacon\', \'spam\'}\n', - 'typesmethods': '\nMethods\n*******\n\nMethods are functions that are called using the attribute notation.\nThere are two flavors: built-in methods (such as "append()" on lists)\nand class instance methods. Built-in methods are described with the\ntypes that support them.\n\nIf you access a method (a function defined in a class namespace)\nthrough an instance, you get a special object: a *bound method* (also\ncalled *instance method*) object. When called, it will add the "self"\nargument to the argument list. Bound methods have two special read-\nonly attributes: "m.__self__" is the object on which the method\noperates, and "m.__func__" is the function implementing the method.\nCalling "m(arg-1, arg-2, ..., arg-n)" is completely equivalent to\ncalling "m.__func__(m.__self__, arg-1, arg-2, ..., arg-n)".\n\nLike function objects, bound method objects support getting arbitrary\nattributes. However, since method attributes are actually stored on\nthe underlying function object ("meth.__func__"), setting method\nattributes on bound methods is disallowed. 
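A minimal sketch of the bound-method attributes just described (the class name "Greeter" is an arbitrary example):

   >>> class Greeter:
   ...     def hello(self):
   ...         return 'hi'
   ...
   >>> g = Greeter()
   >>> m = g.hello                  # a bound method object
   >>> m.__self__ is g
   True
   >>> m() == m.__func__(m.__self__)
   True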
Attempting to set an\nattribute on a method results in an "AttributeError" being raised. In\norder to set a method attribute, you need to explicitly set it on the\nunderlying function object:\n\n >>> class C:\n ... def method(self):\n ... pass\n ...\n >>> c = C()\n >>> c.method.whoami = \'my name is method\' # can\'t set on the method\n Traceback (most recent call last):\n File "", line 1, in \n AttributeError: \'method\' object has no attribute \'whoami\'\n >>> c.method.__func__.whoami = \'my name is method\'\n >>> c.method.whoami\n \'my name is method\'\n\nSee *The standard type hierarchy* for more information.\n', - 'typesmodules': '\nModules\n*******\n\nThe only special operation on a module is attribute access: "m.name",\nwhere *m* is a module and *name* accesses a name defined in *m*\'s\nsymbol table. Module attributes can be assigned to. (Note that the\n"import" statement is not, strictly speaking, an operation on a module\nobject; "import foo" does not require a module object named *foo* to\nexist, rather it requires an (external) *definition* for a module\nnamed *foo* somewhere.)\n\nA special attribute of every module is "__dict__". This is the\ndictionary containing the module\'s symbol table. Modifying this\ndictionary will actually change the module\'s symbol table, but direct\nassignment to the "__dict__" attribute is not possible (you can write\n"m.__dict__[\'a\'] = 1", which defines "m.a" to be "1", but you can\'t\nwrite "m.__dict__ = {}"). Modifying "__dict__" directly is not\nrecommended.\n\nModules built into the interpreter are written like this: "". If loaded from a file, they are written as\n"".\n', - 'typesseq': '\nSequence Types --- "list", "tuple", "range"\n*******************************************\n\nThere are three basic sequence types: lists, tuples, and range\nobjects. Additional sequence types tailored for processing of *binary\ndata* and *text strings* are described in dedicated sections.\n\n\nCommon Sequence Operations\n==========================\n\nThe operations in the following table are supported by most sequence\ntypes, both mutable and immutable. The "collections.abc.Sequence" ABC\nis provided to make it easier to correctly implement these operations\non custom sequence types.\n\nThis table lists the sequence operations sorted in ascending priority\n(operations in the same box have the same priority). In the table,\n*s* and *t* are sequences of the same type, *n*, *i*, *j* and *k* are\nintegers and *x* is an arbitrary object that meets any type and value\nrestrictions imposed by *s*.\n\nThe "in" and "not in" operations have the same priorities as the\ncomparison operations. 
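Before the table itself, a brief sketch of the common operations applied to an ordinary tuple (the values are chosen arbitrarily):

   >>> s = (1, 2, 3)
   >>> 2 in s
   True
   >>> s + (4,)                     # concatenation
   (1, 2, 3, 4)
   >>> s * 2                        # repetition
   (1, 2, 3, 1, 2, 3)
   >>> s[1:]                        # slicing
   (2, 3)
   >>> s.index(3)
   2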
The "+" (concatenation) and "*" (repetition)\noperations have the same priority as the corresponding numeric\noperations.\n\n+----------------------------+----------------------------------+------------+\n| Operation | Result | Notes |\n+============================+==================================+============+\n| "x in s" | "True" if an item of *s* is | (1) |\n+----------------------------+----------------------------------+------------+\n| "x not in s" | "False" if an item of *s* is | (1) |\n+----------------------------+----------------------------------+------------+\n| "s + t" | the concatenation of *s* and *t* | (6)(7) |\n+----------------------------+----------------------------------+------------+\n| "s * n" or "n * s" | *n* shallow copies of *s* | (2)(7) |\n+----------------------------+----------------------------------+------------+\n| "s[i]" | *i*th item of *s*, origin 0 | (3) |\n+----------------------------+----------------------------------+------------+\n| "s[i:j]" | slice of *s* from *i* to *j* | (3)(4) |\n+----------------------------+----------------------------------+------------+\n| "s[i:j:k]" | slice of *s* from *i* to *j* | (3)(5) |\n+----------------------------+----------------------------------+------------+\n+----------------------------+----------------------------------+------------+\n+----------------------------+----------------------------------+------------+\n+----------------------------+----------------------------------+------------+\n| "s.index(x[, i[, j]])" | index of the first occurrence of | (8) |\n+----------------------------+----------------------------------+------------+\n+----------------------------+----------------------------------+------------+\n\nSequences of the same type also support comparisons. In particular,\ntuples and lists are compared lexicographically by comparing\ncorresponding elements. This means that to compare equal, every\nelement must compare equal and the two sequences must be of the same\ntype and have the same length. (For full details see *Comparisons* in\nthe language reference.)\n\nNotes:\n\n1. While the "in" and "not in" operations are used only for simple\n containment testing in the general case, some specialised sequences\n (such as "str", "bytes" and "bytearray") also use them for\n subsequence testing:\n\n >>> "gg" in "eggs"\n True\n\n2. Values of *n* less than "0" are treated as "0" (which yields an\n empty sequence of the same type as *s*). Note also that the copies\n are shallow; nested structures are not copied. This often haunts\n new Python programmers; consider:\n\n >>> lists = [[]] * 3\n >>> lists\n [[], [], []]\n >>> lists[0].append(3)\n >>> lists\n [[3], [3], [3]]\n\n What has happened is that "[[]]" is a one-element list containing\n an empty list, so all three elements of "[[]] * 3" are (pointers\n to) this single empty list. Modifying any of the elements of\n "lists" modifies this single list. You can create a list of\n different lists this way:\n\n >>> lists = [[] for i in range(3)]\n >>> lists[0].append(3)\n >>> lists[1].append(5)\n >>> lists[2].append(7)\n >>> lists\n [[3], [5], [7]]\n\n3. If *i* or *j* is negative, the index is relative to the end of the\n string: "len(s) + i" or "len(s) + j" is substituted. But note that\n "-0" is still "0".\n\n4. The slice of *s* from *i* to *j* is defined as the sequence of\n items with index *k* such that "i <= k < j". If *i* or *j* is\n greater than "len(s)", use "len(s)". If *i* is omitted or "None",\n use "0". 
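A small sketch of these slicing rules on a string (the value is arbitrary): negative indices count from the end, out-of-range indices are clamped, and an empty slice results when *i* >= *j*.

   >>> s = 'bookshelf'
   >>> s[-5:]                       # len(s) + (-5) == 4, so the slice starts at 's'
   'shelf'
   >>> s[4:100]                     # indices past len(s) are clamped to len(s)
   'shelf'
   >>> s[5:2]                       # i >= j gives an empty slice
   ''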
If *j* is omitted or "None", use "len(s)". If *i* is\n greater than or equal to *j*, the slice is empty.\n\n5. The slice of *s* from *i* to *j* with step *k* is defined as the\n sequence of items with index "x = i + n*k" such that "0 <= n <\n (j-i)/k". In other words, the indices are "i", "i+k", "i+2*k",\n "i+3*k" and so on, stopping when *j* is reached (but never\n including *j*). If *i* or *j* is greater than "len(s)", use\n "len(s)". If *i* or *j* are omitted or "None", they become "end"\n values (which end depends on the sign of *k*). Note, *k* cannot be\n zero. If *k* is "None", it is treated like "1".\n\n6. Concatenating immutable sequences always results in a new object.\n This means that building up a sequence by repeated concatenation\n will have a quadratic runtime cost in the total sequence length.\n To get a linear runtime cost, you must switch to one of the\n alternatives below:\n\n * if concatenating "str" objects, you can build a list and use\n "str.join()" at the end or else write to a "io.StringIO" instance\n and retrieve its value when complete\n\n * if concatenating "bytes" objects, you can similarly use\n "bytes.join()" or "io.BytesIO", or you can do in-place\n concatenation with a "bytearray" object. "bytearray" objects are\n mutable and have an efficient overallocation mechanism\n\n * if concatenating "tuple" objects, extend a "list" instead\n\n * for other types, investigate the relevant class documentation\n\n7. Some sequence types (such as "range") only support item sequences\n that follow specific patterns, and hence don\'t support sequence\n concatenation or repetition.\n\n8. "index" raises "ValueError" when *x* is not found in *s*. When\n supported, the additional arguments to the index method allow\n efficient searching of subsections of the sequence. Passing the\n extra arguments is roughly equivalent to using "s[i:j].index(x)",\n only without copying any data and with the returned index being\n relative to the start of the sequence rather than the start of the\n slice.\n\n\nImmutable Sequence Types\n========================\n\nThe only operation that immutable sequence types generally implement\nthat is not also implemented by mutable sequence types is support for\nthe "hash()" built-in.\n\nThis support allows immutable sequences, such as "tuple" instances, to\nbe used as "dict" keys and stored in "set" and "frozenset" instances.\n\nAttempting to hash an immutable sequence that contains unhashable\nvalues will result in "TypeError".\n\n\nMutable Sequence Types\n======================\n\nThe operations in the following table are defined on mutable sequence\ntypes. 
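To illustrate the hashing support described above for immutable sequences (the dictionary contents are arbitrary):

   >>> d = {(1, 2): 'a tuple key'}  # immutable sequences are hashable
   >>> d[(1, 2)]
   'a tuple key'
   >>> hash([1, 2])                 # mutable sequences are not
   Traceback (most recent call last):
     ...
   TypeError: unhashable type: 'list'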
The "collections.abc.MutableSequence" ABC is provided to make\nit easier to correctly implement these operations on custom sequence\ntypes.\n\nIn the table *s* is an instance of a mutable sequence type, *t* is any\niterable object and *x* is an arbitrary object that meets any type and\nvalue restrictions imposed by *s* (for example, "bytearray" only\naccepts integers that meet the value restriction "0 <= x <= 255").\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) |\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s.clear()" | removes all items from "s" (same | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.copy()" | creates a shallow copy of "s" | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s.pop([i])" | retrieves the item at *i* and | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.remove(x)" | remove the first item from *s* | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.reverse()" | reverses the items of *s* in | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The optional argument *i* defaults to "-1", so that by default the\n last item is removed and returned.\n\n3. "remove" raises "ValueError" when *x* is not found in *s*.\n\n4. The "reverse()" method modifies the sequence in place for economy\n of space when reversing a large sequence. To remind users that it\n operates by side effect, it does not return the reversed sequence.\n\n5. "clear()" and "copy()" are included for consistency with the\n interfaces of mutable containers that don\'t support slicing\n operations (such as "dict" and "set")\n\n New in version 3.3: "clear()" and "copy()" methods.\n\n\nLists\n=====\n\nLists are mutable sequences, typically used to store collections of\nhomogeneous items (where the precise degree of similarity will vary by\napplication).\n\nclass class list([iterable])\n\n Lists may be constructed in several ways:\n\n * Using a pair of square brackets to denote the empty list: "[]"\n\n * Using square brackets, separating items with commas: "[a]", "[a,\n b, c]"\n\n * Using a list comprehension: "[x for x in iterable]"\n\n * Using the type constructor: "list()" or "list(iterable)"\n\n The constructor builds a list whose items are the same and in the\n same order as *iterable*\'s items. 
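A short, illustrative pass over a few of the mutable-sequence operations in the table above (the list contents are arbitrary):

   >>> s = [1, 2, 3, 4, 5]
   >>> s[1:4:2] = ['a', 'b']        # an extended slice must be replaced by one of equal length
   >>> s
   [1, 'a', 3, 'b', 5]
   >>> s.pop()                      # the index defaults to -1: the last item is removed
   5
   >>> s.remove('a')
   >>> s.reverse()
   >>> s
   ['b', 3, 1]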
*iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a list, a copy is made and\n returned, similar to "iterable[:]". For example, "list(\'abc\')"\n returns "[\'a\', \'b\', \'c\']" and "list( (1, 2, 3) )" returns "[1, 2,\n 3]". If no argument is given, the constructor creates a new empty\n list, "[]".\n\n Many other operations also produce lists, including the "sorted()"\n built-in.\n\n Lists implement all of the *common* and *mutable* sequence\n operations. Lists also provide the following additional method:\n\n sort(*, key=None, reverse=None)\n\n This method sorts the list in place, using only "<" comparisons\n between items. Exceptions are not suppressed - if any comparison\n operations fail, the entire sort operation will fail (and the\n list will likely be left in a partially modified state).\n\n "sort()" accepts two arguments that can only be passed by\n keyword (*keyword-only arguments*):\n\n *key* specifies a function of one argument that is used to\n extract a comparison key from each list element (for example,\n "key=str.lower"). The key corresponding to each item in the list\n is calculated once and then used for the entire sorting process.\n The default value of "None" means that list items are sorted\n directly without calculating a separate key value.\n\n The "functools.cmp_to_key()" utility is available to convert a\n 2.x style *cmp* function to a *key* function.\n\n *reverse* is a boolean value. If set to "True", then the list\n elements are sorted as if each comparison were reversed.\n\n This method modifies the sequence in place for economy of space\n when sorting a large sequence. To remind users that it operates\n by side effect, it does not return the sorted sequence (use\n "sorted()" to explicitly request a new sorted list instance).\n\n The "sort()" method is guaranteed to be stable. A sort is\n stable if it guarantees not to change the relative order of\n elements that compare equal --- this is helpful for sorting in\n multiple passes (for example, sort by department, then by salary\n grade).\n\n **CPython implementation detail:** While a list is being sorted,\n the effect of attempting to mutate, or even inspect, the list is\n undefined. The C implementation of Python makes the list appear\n empty for the duration, and raises "ValueError" if it can detect\n that the list has been mutated during a sort.\n\n\nTuples\n======\n\nTuples are immutable sequences, typically used to store collections of\nheterogeneous data (such as the 2-tuples produced by the "enumerate()"\nbuilt-in). Tuples are also used for cases where an immutable sequence\nof homogeneous data is needed (such as allowing storage in a "set" or\n"dict" instance).\n\nclass class tuple([iterable])\n\n Tuples may be constructed in a number of ways:\n\n * Using a pair of parentheses to denote the empty tuple: "()"\n\n * Using a trailing comma for a singleton tuple: "a," or "(a,)"\n\n * Separating items with commas: "a, b, c" or "(a, b, c)"\n\n * Using the "tuple()" built-in: "tuple()" or "tuple(iterable)"\n\n The constructor builds a tuple whose items are the same and in the\n same order as *iterable*\'s items. *iterable* may be either a\n sequence, a container that supports iteration, or an iterator\n object. If *iterable* is already a tuple, it is returned\n unchanged. For example, "tuple(\'abc\')" returns "(\'a\', \'b\', \'c\')"\n and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". 
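A brief sketch of the *key* and *reverse* arguments described above (the word list is arbitrary):

   >>> words = ['pear', 'Fig', 'apple']
   >>> sorted(words, key=str.lower)       # case-insensitive comparison key
   ['apple', 'Fig', 'pear']
   >>> words.sort(key=len, reverse=True)  # in-place sort, longest first
   >>> words
   ['apple', 'pear', 'Fig']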
If no argument is\n given, the constructor creates a new empty tuple, "()".\n\n Note that it is actually the comma which makes a tuple, not the\n parentheses. The parentheses are optional, except in the empty\n tuple case, or when they are needed to avoid syntactic ambiguity.\n For example, "f(a, b, c)" is a function call with three arguments,\n while "f((a, b, c))" is a function call with a 3-tuple as the sole\n argument.\n\n Tuples implement all of the *common* sequence operations.\n\nFor heterogeneous collections of data where access by name is clearer\nthan access by index, "collections.namedtuple()" may be a more\nappropriate choice than a simple tuple object.\n\n\nRanges\n======\n\nThe "range" type represents an immutable sequence of numbers and is\ncommonly used for looping a specific number of times in "for" loops.\n\nclass class range(stop)\nclass class range(start, stop[, step])\n\n The arguments to the range constructor must be integers (either\n built-in "int" or any object that implements the "__index__"\n special method). If the *step* argument is omitted, it defaults to\n "1". If the *start* argument is omitted, it defaults to "0". If\n *step* is zero, "ValueError" is raised.\n\n For a positive *step*, the contents of a range "r" are determined\n by the formula "r[i] = start + step*i" where "i >= 0" and "r[i] <\n stop".\n\n For a negative *step*, the contents of the range are still\n determined by the formula "r[i] = start + step*i", but the\n constraints are "i >= 0" and "r[i] > stop".\n\n A range object will be empty if "r[0]" does not meet the value\n constraint. Ranges do support negative indices, but these are\n interpreted as indexing from the end of the sequence determined by\n the positive indices.\n\n Ranges containing absolute values larger than "sys.maxsize" are\n permitted but some features (such as "len()") may raise\n "OverflowError".\n\n Range examples:\n\n >>> list(range(10))\n [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n >>> list(range(1, 11))\n [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n >>> list(range(0, 30, 5))\n [0, 5, 10, 15, 20, 25]\n >>> list(range(0, 10, 3))\n [0, 3, 6, 9]\n >>> list(range(0, -10, -1))\n [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n >>> list(range(0))\n []\n >>> list(range(1, 0))\n []\n\n Ranges implement all of the *common* sequence operations except\n concatenation and repetition (due to the fact that range objects\n can only represent sequences that follow a strict pattern and\n repetition and concatenation will usually violate that pattern).\n\nThe advantage of the "range" type over a regular "list" or "tuple" is\nthat a "range" object will always take the same (small) amount of\nmemory, no matter the size of the range it represents (as it only\nstores the "start", "stop" and "step" values, calculating individual\nitems and subranges as needed).\n\nRange objects implement the "collections.abc.Sequence" ABC, and\nprovide features such as containment tests, element index lookup,\nslicing and support for negative indices (see *Sequence Types ---\nlist, tuple, range*):\n\n>>> r = range(0, 20, 2)\n>>> r\nrange(0, 20, 2)\n>>> 11 in r\nFalse\n>>> 10 in r\nTrue\n>>> r.index(10)\n5\n>>> r[5]\n10\n>>> r[:5]\nrange(0, 10, 2)\n>>> r[-1]\n18\n\nTesting range objects for equality with "==" and "!=" compares them as\nsequences. That is, two range objects are considered equal if they\nrepresent the same sequence of values. 
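As a small sketch of the "collections.namedtuple()" alternative mentioned above (the "Point" type is an arbitrary example):

   >>> from collections import namedtuple
   >>> Point = namedtuple('Point', ['x', 'y'])
   >>> p = Point(2, 3)
   >>> p.x, p[1]                    # fields are accessible by name or by index
   (2, 3)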
(Note that two range objects\nthat compare equal might have different "start", "stop" and "step"\nattributes, for example "range(0) == range(2, 1, 3)" or "range(0, 3,\n2) == range(0, 4, 2)".)\n\nChanged in version 3.2: Implement the Sequence ABC. Support slicing\nand negative indices. Test "int" objects for membership in constant\ntime instead of iterating through all items.\n\nChanged in version 3.3: Define \'==\' and \'!=\' to compare range objects\nbased on the sequence of values they define (instead of comparing\nbased on object identity).\n\nNew in version 3.3: The "start", "stop" and "step" attributes.\n', - 'typesseq-mutable': '\nMutable Sequence Types\n**********************\n\nThe operations in the following table are defined on mutable sequence\ntypes. The "collections.abc.MutableSequence" ABC is provided to make\nit easier to correctly implement these operations on custom sequence\ntypes.\n\nIn the table *s* is an instance of a mutable sequence type, *t* is any\niterable object and *x* is an arbitrary object that meets any type and\nvalue restrictions imposed by *s* (for example, "bytearray" only\naccepts integers that meet the value restriction "0 <= x <= 255").\n\n+--------------------------------+----------------------------------+-----------------------+\n| Operation | Result | Notes |\n+================================+==================================+=======================+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s[i:j:k] = t" | the elements of "s[i:j:k]" are | (1) |\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s.clear()" | removes all items from "s" (same | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.copy()" | creates a shallow copy of "s" | (5) |\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n+--------------------------------+----------------------------------+-----------------------+\n| "s.pop([i])" | retrieves the item at *i* and | (2) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.remove(x)" | remove the first item from *s* | (3) |\n+--------------------------------+----------------------------------+-----------------------+\n| "s.reverse()" | reverses the items of *s* in | (4) |\n+--------------------------------+----------------------------------+-----------------------+\n\nNotes:\n\n1. *t* must have the same length as the slice it is replacing.\n\n2. The optional argument *i* defaults to "-1", so that by default the\n last item is removed and returned.\n\n3. "remove" raises "ValueError" when *x* is not found in *s*.\n\n4. The "reverse()" method modifies the sequence in place for economy\n of space when reversing a large sequence. To remind users that it\n operates by side effect, it does not return the reversed sequence.\n\n5. 
"clear()" and "copy()" are included for consistency with the\n interfaces of mutable containers that don\'t support slicing\n operations (such as "dict" and "set")\n\n New in version 3.3: "clear()" and "copy()" methods.\n', - 'unary': '\nUnary arithmetic and bitwise operations\n***************************************\n\nAll unary arithmetic and bitwise operations have the same priority:\n\n u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n\nThe unary "-" (minus) operator yields the negation of its numeric\nargument.\n\nThe unary "+" (plus) operator yields its numeric argument unchanged.\n\nThe unary "~" (invert) operator yields the bitwise inversion of its\ninteger argument. The bitwise inversion of "x" is defined as\n"-(x+1)". It only applies to integral numbers.\n\nIn all three cases, if the argument does not have the proper type, a\n"TypeError" exception is raised.\n', - 'while': '\nThe "while" statement\n*********************\n\nThe "while" statement is used for repeated execution as long as an\nexpression is true:\n\n while_stmt ::= "while" expression ":" suite\n ["else" ":" suite]\n\nThis repeatedly tests the expression and, if it is true, executes the\nfirst suite; if the expression is false (which may be the first time\nit is tested) the suite of the "else" clause, if present, is executed\nand the loop terminates.\n\nA "break" statement executed in the first suite terminates the loop\nwithout executing the "else" clause\'s suite. A "continue" statement\nexecuted in the first suite skips the rest of the suite and goes back\nto testing the expression.\n', - 'with': '\nThe "with" statement\n********************\n\nThe "with" statement is used to wrap the execution of a block with\nmethods defined by a context manager (see section *With Statement\nContext Managers*). This allows common "try"..."except"..."finally"\nusage patterns to be encapsulated for convenient reuse.\n\n with_stmt ::= "with" with_item ("," with_item)* ":" suite\n with_item ::= expression ["as" target]\n\nThe execution of the "with" statement with one "item" proceeds as\nfollows:\n\n1. The context expression (the expression given in the "with_item") is\n evaluated to obtain a context manager.\n\n2. The context manager\'s "__exit__()" is loaded for later use.\n\n3. The context manager\'s "__enter__()" method is invoked.\n\n4. If a target was included in the "with" statement, the return value\n from "__enter__()" is assigned to it.\n\n Note: The "with" statement guarantees that if the "__enter__()" method\n returns without an error, then "__exit__()" will always be\n called. Thus, if an error occurs during the assignment to the\n target list, it will be treated the same as an error occurring\n within the suite would be. See step 6 below.\n\n5. The suite is executed.\n\n6. The context manager\'s "__exit__()" method is invoked. If an\n exception caused the suite to be exited, its type, value, and\n traceback are passed as arguments to "__exit__()". 
Otherwise, three\n "None" arguments are supplied.\n\n If the suite was exited due to an exception, and the return value\n from the "__exit__()" method was false, the exception is reraised.\n If the return value was true, the exception is suppressed, and\n execution continues with the statement following the "with"\n statement.\n\n If the suite was exited for any reason other than an exception, the\n return value from "__exit__()" is ignored, and execution proceeds\n at the normal location for the kind of exit that was taken.\n\nWith more than one item, the context managers are processed as if\nmultiple "with" statements were nested:\n\n with A() as a, B() as b:\n suite\n\nis equivalent to\n\n with A() as a:\n with B() as b:\n suite\n\nChanged in version 3.1: Support for multiple context expressions.\n\nSee also:\n\n **PEP 0343** - The "with" statement\n The specification, background, and examples for the Python\n "with" statement.\n', - 'yield': '\nThe "yield" statement\n*********************\n\n yield_stmt ::= yield_expression\n\nA "yield" statement is semantically equivalent to a *yield\nexpression*. The yield statement can be used to omit the parentheses\nthat would otherwise be required in the equivalent yield expression\nstatement. For example, the yield statements\n\n yield \n yield from \n\nare equivalent to the yield expression statements\n\n (yield )\n (yield from )\n\nYield expressions and statements are only used when defining a\n*generator* function, and are only used in the body of the generator\nfunction. Using yield in a function definition is sufficient to cause\nthat definition to create a generator function instead of a normal\nfunction.\n\nFor full details of "yield" semantics, refer to the *Yield\nexpressions* section.\n'} diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/_util.py b/Darwin/lib/python3.4/site-packages/OpenSSL/_util.py deleted file mode 100644 index baeecc6..0000000 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/_util.py +++ /dev/null @@ -1,53 +0,0 @@ -from six import PY3, binary_type, text_type - -from cryptography.hazmat.bindings.openssl.binding import Binding -binding = Binding() -ffi = binding.ffi -lib = binding.lib - -def exception_from_error_queue(exceptionType): - def text(charp): - return native(ffi.string(charp)) - - errors = [] - while True: - error = lib.ERR_get_error() - if error == 0: - break - errors.append(( - text(lib.ERR_lib_error_string(error)), - text(lib.ERR_func_error_string(error)), - text(lib.ERR_reason_error_string(error)))) - - raise exceptionType(errors) - - - -def native(s): - """ - Convert :py:class:`bytes` or :py:class:`unicode` to the native - :py:class:`str` type, using UTF-8 encoding if conversion is necessary. - - :raise UnicodeError: The input string is not UTF-8 decodeable. - - :raise TypeError: The input is neither :py:class:`bytes` nor - :py:class:`unicode`. 
- """ - if not isinstance(s, (binary_type, text_type)): - raise TypeError("%r is neither bytes nor unicode" % s) - if PY3: - if isinstance(s, binary_type): - return s.decode("utf-8") - else: - if isinstance(s, text_type): - return s.encode("utf-8") - return s - - - -if PY3: - def byte_string(s): - return s.encode("charmap") -else: - def byte_string(s): - return s diff --git a/Darwin/lib/python3.4/site-packages/PIL/BmpImagePlugin.py b/Darwin/lib/python3.4/site-packages/PIL/BmpImagePlugin.py deleted file mode 100644 index fae6bd3..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/BmpImagePlugin.py +++ /dev/null @@ -1,260 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# BMP file handler -# -# Windows (and OS/2) native bitmap storage format. -# -# history: -# 1995-09-01 fl Created -# 1996-04-30 fl Added save -# 1997-08-27 fl Fixed save of 1-bit images -# 1998-03-06 fl Load P images as L where possible -# 1998-07-03 fl Load P images as 1 where possible -# 1998-12-29 fl Handle small palettes -# 2002-12-30 fl Fixed load of 1-bit palette images -# 2003-04-21 fl Fixed load of 1-bit monochrome images -# 2003-04-23 fl Added limited support for BI_BITFIELDS compression -# -# Copyright (c) 1997-2003 by Secret Labs AB -# Copyright (c) 1995-2003 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - - -__version__ = "0.7" - - -from PIL import Image, ImageFile, ImagePalette, _binary -import math - -i8 = _binary.i8 -i16 = _binary.i16le -i32 = _binary.i32le -o8 = _binary.o8 -o16 = _binary.o16le -o32 = _binary.o32le - -# -# -------------------------------------------------------------------- -# Read BMP file - -BIT2MODE = { - # bits => mode, rawmode - 1: ("P", "P;1"), - 4: ("P", "P;4"), - 8: ("P", "P"), - 16: ("RGB", "BGR;15"), - 24: ("RGB", "BGR"), - 32: ("RGB", "BGRX") -} - -def _accept(prefix): - return prefix[:2] == b"BM" - -## -# Image plugin for the Windows BMP format. 
- -class BmpImageFile(ImageFile.ImageFile): - - format = "BMP" - format_description = "Windows Bitmap" - - def _bitmap(self, header = 0, offset = 0): - - if header: - self.fp.seek(header) - - read = self.fp.read - - # CORE/INFO - s = read(4) - s = s + ImageFile._safe_read(self.fp, i32(s)-4) - - if len(s) == 12: - - # OS/2 1.0 CORE - bits = i16(s[10:]) - self.size = i16(s[4:]), i16(s[6:]) - compression = 0 - lutsize = 3 - colors = 0 - direction = -1 - - elif len(s) in [40, 64, 108, 124]: - - # WIN 3.1 or OS/2 2.0 INFO - bits = i16(s[14:]) - self.size = i32(s[4:]), i32(s[8:]) - compression = i32(s[16:]) - pxperm = (i32(s[24:]), i32(s[28:])) # Pixels per meter - lutsize = 4 - colors = i32(s[32:]) - direction = -1 - if i8(s[11]) == 0xff: - # upside-down storage - self.size = self.size[0], 2**32 - self.size[1] - direction = 0 - - self.info["dpi"] = tuple(map(lambda x: math.ceil(x / 39.3701), pxperm)) - - else: - raise IOError("Unsupported BMP header type (%d)" % len(s)) - - if (self.size[0]*self.size[1]) > 2**31: - # Prevent DOS for > 2gb images - raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) - - if not colors: - colors = 1 << bits - - # MODE - try: - self.mode, rawmode = BIT2MODE[bits] - except KeyError: - raise IOError("Unsupported BMP pixel depth (%d)" % bits) - - if compression == 3: - # BI_BITFIELDS compression - mask = i32(read(4)), i32(read(4)), i32(read(4)) - if bits == 32 and mask == (0xff0000, 0x00ff00, 0x0000ff): - rawmode = "BGRX" - elif bits == 16 and mask == (0x00f800, 0x0007e0, 0x00001f): - rawmode = "BGR;16" - elif bits == 16 and mask == (0x007c00, 0x0003e0, 0x00001f): - rawmode = "BGR;15" - else: - # print bits, map(hex, mask) - raise IOError("Unsupported BMP bitfields layout") - elif compression != 0: - raise IOError("Unsupported BMP compression (%d)" % compression) - - # LUT - if self.mode == "P": - palette = [] - greyscale = 1 - if colors == 2: - indices = (0, 255) - elif colors > 2**16 or colors <=0: #We're reading a i32. 
- raise IOError("Unsupported BMP Palette size (%d)" % colors) - else: - indices = list(range(colors)) - for i in indices: - rgb = read(lutsize)[:3] - if rgb != o8(i)*3: - greyscale = 0 - palette.append(rgb) - if greyscale: - if colors == 2: - self.mode = rawmode = "1" - else: - self.mode = rawmode = "L" - else: - self.mode = "P" - self.palette = ImagePalette.raw( - "BGR", b"".join(palette) - ) - - if not offset: - offset = self.fp.tell() - - self.tile = [("raw", - (0, 0) + self.size, - offset, - (rawmode, ((self.size[0]*bits+31)>>3)&(~3), direction))] - - self.info["compression"] = compression - - def _open(self): - - # HEAD - s = self.fp.read(14) - if s[:2] != b"BM": - raise SyntaxError("Not a BMP file") - offset = i32(s[10:]) - - self._bitmap(offset=offset) - - -class DibImageFile(BmpImageFile): - - format = "DIB" - format_description = "Windows Bitmap" - - def _open(self): - self._bitmap() - -# -# -------------------------------------------------------------------- -# Write BMP file - -SAVE = { - "1": ("1", 1, 2), - "L": ("L", 8, 256), - "P": ("P", 8, 256), - "RGB": ("BGR", 24, 0), -} - -def _save(im, fp, filename, check=0): - - try: - rawmode, bits, colors = SAVE[im.mode] - except KeyError: - raise IOError("cannot write mode %s as BMP" % im.mode) - - if check: - return check - - info = im.encoderinfo - - dpi = info.get("dpi", (96, 96)) - - # 1 meter == 39.3701 inches - ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) - - stride = ((im.size[0]*bits+7)//8+3)&(~3) - header = 40 # or 64 for OS/2 version 2 - offset = 14 + header + colors * 4 - image = stride * im.size[1] - - # bitmap header - fp.write(b"BM" + # file type (magic) - o32(offset+image) + # file size - o32(0) + # reserved - o32(offset)) # image data offset - - # bitmap info header - fp.write(o32(header) + # info header size - o32(im.size[0]) + # width - o32(im.size[1]) + # height - o16(1) + # planes - o16(bits) + # depth - o32(0) + # compression (0=uncompressed) - o32(image) + # size of bitmap - o32(ppm[0]) + o32(ppm[1]) + # resolution - o32(colors) + # colors used - o32(colors)) # colors important - - fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) - - if im.mode == "1": - for i in (0, 255): - fp.write(o8(i) * 4) - elif im.mode == "L": - for i in range(256): - fp.write(o8(i) * 4) - elif im.mode == "P": - fp.write(im.im.getpalette("RGB", "BGRX")) - - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, stride, -1))]) - -# -# -------------------------------------------------------------------- -# Registry - -Image.register_open(BmpImageFile.format, BmpImageFile, _accept) -Image.register_save(BmpImageFile.format, _save) - -Image.register_extension(BmpImageFile.format, ".bmp") diff --git a/Darwin/lib/python3.4/site-packages/PIL/GifImagePlugin.py b/Darwin/lib/python3.4/site-packages/PIL/GifImagePlugin.py deleted file mode 100644 index ec83019..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/GifImagePlugin.py +++ /dev/null @@ -1,506 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# GIF file handling -# -# History: -# 1995-09-01 fl Created -# 1996-12-14 fl Added interlace support -# 1996-12-30 fl Added animation support -# 1997-01-05 fl Added write support, fixed local colour map bug -# 1997-02-23 fl Make sure to load raster data in getdata() -# 1997-07-05 fl Support external decoder (0.4) -# 1998-07-09 fl Handle all modes when saving (0.5) -# 1998-07-15 fl Renamed offset attribute to avoid name clash -# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) -# 2001-04-17 fl Added palette optimization (0.7) -# 2002-06-06 fl Added transparency support for save (0.8) -# 2004-02-24 fl Disable interlacing for small images -# -# Copyright (c) 1997-2004 by Secret Labs AB -# Copyright (c) 1995-2004 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - - -__version__ = "0.9" - - -from PIL import Image, ImageFile, ImagePalette, _binary - - -# -------------------------------------------------------------------- -# Helpers - -i8 = _binary.i8 -i16 = _binary.i16le -o8 = _binary.o8 -o16 = _binary.o16le - - -# -------------------------------------------------------------------- -# Identify/read GIF files - -def _accept(prefix): - return prefix[:6] in [b"GIF87a", b"GIF89a"] - -## -# Image plugin for GIF images. This plugin supports both GIF87 and -# GIF89 images. - -class GifImageFile(ImageFile.ImageFile): - - format = "GIF" - format_description = "Compuserve GIF" - global_palette = None - - def data(self): - s = self.fp.read(1) - if s and i8(s): - return self.fp.read(i8(s)) - return None - - def _open(self): - - # Screen - s = self.fp.read(13) - if s[:6] not in [b"GIF87a", b"GIF89a"]: - raise SyntaxError("not a GIF file") - - self.info["version"] = s[:6] - self.size = i16(s[6:]), i16(s[8:]) - self.tile = [] - flags = i8(s[10]) - bits = (flags & 7) + 1 - - if flags & 128: - # get global palette - self.info["background"] = i8(s[11]) - # check if palette contains colour indices - p = self.fp.read(3<= 3 and i8(block[0]) == 1: - self.info["loop"] = i16(block[1:3]) - while self.data(): - pass - - elif s == b",": - # - # local image - # - s = self.fp.read(9) - - # extent - x0, y0 = i16(s[0:]), i16(s[2:]) - x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) - flags = i8(s[8]) - - interlace = (flags & 64) != 0 - - if flags & 128: - bits = (flags & 7) + 1 - self.palette =\ - ImagePalette.raw("RGB", self.fp.read(3< %s" % (file, filename) - quant_cmd = ["ppmquant", "256", file] - togif_cmd = ["ppmtogif"] - stderr = tempfile.TemporaryFile() - quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr) - stderr = tempfile.TemporaryFile() - togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f, stderr=stderr) - - # Allow ppmquant to receive SIGPIPE if ppmtogif exits - quant_proc.stdout.close() - - retcode = quant_proc.wait() - if retcode: - raise CalledProcessError(retcode, quant_cmd) - - retcode = togif_proc.wait() - if retcode: - raise CalledProcessError(retcode, togif_cmd) - - try: - os.unlink(file) - except: - pass - - -# -------------------------------------------------------------------- -# GIF utilities - -def getheader(im, palette=None, info=None): - """Return a list of strings representing a GIF header""" - - optimize = info and info.get("optimize", 0) - - # Header Block - # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp - header = [ - b"GIF87a" + # signature + version - o16(im.size[0]) + # canvas width - o16(im.size[1]) # canvas height - ] - - if im.mode == "P": - if palette and isinstance(palette, bytes): - 
sourcePalette = palette[:768] - else: - sourcePalette = im.im.getpalette("RGB")[:768] - else: # L-mode - if palette and isinstance(palette, bytes): - sourcePalette = palette[:768] - else: - sourcePalette = bytearray([i//3 for i in range(768)]) - - usedPaletteColors = paletteBytes = None - - if optimize: - usedPaletteColors = [] - - # check which colors are used - i = 0 - for count in im.histogram(): - if count: - usedPaletteColors.append(i) - i += 1 - - # create the new palette if not every color is used - if len(usedPaletteColors) < 256: - paletteBytes = b"" - newPositions = {} - - i = 0 - # pick only the used colors from the palette - for oldPosition in usedPaletteColors: - paletteBytes += sourcePalette[oldPosition*3:oldPosition*3+3] - newPositions[oldPosition] = i - i += 1 - - # replace the palette color id of all pixel with the new id - imageBytes = bytearray(im.tobytes()) - for i in range(len(imageBytes)): - imageBytes[i] = newPositions[imageBytes[i]] - im.frombytes(bytes(imageBytes)) - newPaletteBytes = paletteBytes + (768 - len(paletteBytes)) * b'\x00' - im.putpalette(newPaletteBytes) - im.palette = ImagePalette.ImagePalette("RGB", palette = paletteBytes, size = len(paletteBytes)) - - if not paletteBytes: - paletteBytes = sourcePalette - - # Logical Screen Descriptor - # calculate the palette size for the header - import math - colorTableSize = int(math.ceil(math.log(len(paletteBytes)//3, 2)))-1 - if colorTableSize < 0: colorTableSize = 0 - # size of global color table + global color table flag - header.append(o8(colorTableSize + 128)) - # background + reserved/aspect - header.append(o8(0) + o8(0)) - # end of Logical Screen Descriptor - - # add the missing amount of bytes - # the palette has to be 2< 0: - paletteBytes += o8(0) * 3 * actualTargetSizeDiff - - # Header + Logical Screen Descriptor + Global Color Table - header.append(paletteBytes) - return header, usedPaletteColors - - -def getdata(im, offset = (0, 0), **params): - """Return a list of strings representing this image. - The first string is a local image header, the rest contains - encoded image data.""" - - class collector: - data = [] - def write(self, data): - self.data.append(data) - - im.load() # make sure raster data is available - - fp = collector() - - try: - im.encoderinfo = params - - # local image header - fp.write(b"," + - o16(offset[0]) + # offset - o16(offset[1]) + - o16(im.size[0]) + # size - o16(im.size[1]) + - o8(0) + # flags - o8(8)) # bits - - ImageFile._save(im, fp, [("gif", (0,0)+im.size, 0, RAWMODE[im.mode])]) - - fp.write(b"\0") # end of image data - - finally: - del im.encoderinfo - - return fp.data - - -# -------------------------------------------------------------------- -# Registry - -Image.register_open(GifImageFile.format, GifImageFile, _accept) -Image.register_save(GifImageFile.format, _save) -Image.register_extension(GifImageFile.format, ".gif") -Image.register_mime(GifImageFile.format, "image/gif") - -# -# Uncomment the following line if you wish to use NETPBM/PBMPLUS -# instead of the built-in "uncompressed" GIF encoder - -# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageFileIO.py b/Darwin/lib/python3.4/site-packages/PIL/ImageFileIO.py deleted file mode 100644 index e57d3f4..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageFileIO.py +++ /dev/null @@ -1,40 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# kludge to get basic ImageFileIO functionality -# -# History: -# 1998-08-06 fl Recreated -# -# Copyright (c) Secret Labs AB 1998-2002. -# -# See the README file for information on usage and redistribution. -# -""" -The **ImageFileIO** module can be used to read an image from a -socket, or any other stream device. - -Deprecated. New code should use the :class:`PIL.ImageFile.Parser` -class in the :mod:`PIL.ImageFile` module instead. - -.. seealso:: modules :class:`PIL.ImageFile.Parser` -""" - -from io import BytesIO - - -class ImageFileIO(BytesIO): - def __init__(self, fp): - """ - Adds buffering to a stream file object, in order to - provide **seek** and **tell** methods required - by the :func:`PIL.Image.Image.open` method. The stream object must - implement **read** and **close** methods. - - :param fp: Stream file handle. - - .. seealso:: modules :func:`PIL.Image.open` - """ - data = fp.read() - BytesIO.__init__(self, data) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageGrab.py b/Darwin/lib/python3.4/site-packages/PIL/ImageGrab.py deleted file mode 100644 index 9bb1909..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageGrab.py +++ /dev/null @@ -1,49 +0,0 @@ -# -# The Python Imaging Library -# $Id$ -# -# screen grabber (windows only) -# -# History: -# 2001-04-26 fl created -# 2001-09-17 fl use builtin driver, if present -# 2002-11-19 fl added grabclipboard support -# -# Copyright (c) 2001-2002 by Secret Labs AB -# Copyright (c) 2001-2002 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - -from PIL import Image - - -try: - # built-in driver (1.1.3 and later) - grabber = Image.core.grabscreen -except AttributeError: - # stand-alone driver (pil plus) - import _grabscreen - grabber = _grabscreen.grab - - -def grab(bbox=None): - size, data = grabber() - im = Image.frombytes( - "RGB", size, data, - # RGB, 32-bit line padding, origo in lower left corner - "raw", "BGR", (size[0]*3 + 3) & -4, -1 - ) - if bbox: - im = im.crop(bbox) - return im - - -def grabclipboard(): - debug = 0 # temporary interface - data = Image.core.grabclipboard(debug) - if isinstance(data, bytes): - from PIL import BmpImagePlugin - import io - return BmpImagePlugin.DibImageFile(io.BytesIO(data)) - return data diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageQt.py b/Darwin/lib/python3.4/site-packages/PIL/ImageQt.py deleted file mode 100644 index ca8b14b..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageQt.py +++ /dev/null @@ -1,89 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# a simple Qt image interface. -# -# history: -# 2006-06-03 fl: created -# 2006-06-04 fl: inherit from QImage instead of wrapping it -# 2006-06-05 fl: removed toimage helper; move string support to ImageQt -# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) -# -# Copyright (c) 2006 by Secret Labs AB -# Copyright (c) 2006 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - -from PIL import Image -from PIL._util import isPath - -try: - from PyQt5.QtGui import QImage, qRgba -except: - from PyQt4.QtGui import QImage, qRgba - -## -# (Internal) Turns an RGB color into a Qt compatible color integer. - -def rgb(r, g, b, a=255): - # use qRgb to pack the colors, and then turn the resulting long - # into a negative integer with the same bitpattern. - return (qRgba(r, g, b, a) & 0xffffffff) - -## -# An PIL image wrapper for Qt. This is a subclass of PyQt4's QImage -# class. 
-# -# @param im A PIL Image object, or a file name (given either as Python -# string or a PyQt string object). - -class ImageQt(QImage): - - def __init__(self, im): - - data = None - colortable = None - - # handle filename, if given instead of image name - if hasattr(im, "toUtf8"): - # FIXME - is this really the best way to do this? - im = unicode(im.toUtf8(), "utf-8") - if isPath(im): - im = Image.open(im) - - if im.mode == "1": - format = QImage.Format_Mono - elif im.mode == "L": - format = QImage.Format_Indexed8 - colortable = [] - for i in range(256): - colortable.append(rgb(i, i, i)) - elif im.mode == "P": - format = QImage.Format_Indexed8 - colortable = [] - palette = im.getpalette() - for i in range(0, len(palette), 3): - colortable.append(rgb(*palette[i:i+3])) - elif im.mode == "RGB": - data = im.tobytes("raw", "BGRX") - format = QImage.Format_RGB32 - elif im.mode == "RGBA": - try: - data = im.tobytes("raw", "BGRA") - except SystemError: - # workaround for earlier versions - r, g, b, a = im.split() - im = Image.merge("RGBA", (b, g, r, a)) - format = QImage.Format_ARGB32 - else: - raise ValueError("unsupported image mode %r" % im.mode) - - # must keep a reference, or Qt will crash! - self.__data = data or im.tobytes() - - QImage.__init__(self, self.__data, im.size[0], im.size[1], format) - - if colortable: - self.setColorTable(colortable) diff --git a/Darwin/lib/python3.4/site-packages/PIL/PalmImagePlugin.py b/Darwin/lib/python3.4/site-packages/PIL/PalmImagePlugin.py deleted file mode 100644 index 203a6d9..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/PalmImagePlugin.py +++ /dev/null @@ -1,225 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# - -## -# Image plugin for Palm pixmap images (output only). -## - -__version__ = "1.0" - -from PIL import Image, ImageFile, _binary - -_Palm8BitColormapValues = ( - ( 255, 255, 255 ), ( 255, 204, 255 ), ( 255, 153, 255 ), ( 255, 102, 255 ), - ( 255, 51, 255 ), ( 255, 0, 255 ), ( 255, 255, 204 ), ( 255, 204, 204 ), - ( 255, 153, 204 ), ( 255, 102, 204 ), ( 255, 51, 204 ), ( 255, 0, 204 ), - ( 255, 255, 153 ), ( 255, 204, 153 ), ( 255, 153, 153 ), ( 255, 102, 153 ), - ( 255, 51, 153 ), ( 255, 0, 153 ), ( 204, 255, 255 ), ( 204, 204, 255 ), - ( 204, 153, 255 ), ( 204, 102, 255 ), ( 204, 51, 255 ), ( 204, 0, 255 ), - ( 204, 255, 204 ), ( 204, 204, 204 ), ( 204, 153, 204 ), ( 204, 102, 204 ), - ( 204, 51, 204 ), ( 204, 0, 204 ), ( 204, 255, 153 ), ( 204, 204, 153 ), - ( 204, 153, 153 ), ( 204, 102, 153 ), ( 204, 51, 153 ), ( 204, 0, 153 ), - ( 153, 255, 255 ), ( 153, 204, 255 ), ( 153, 153, 255 ), ( 153, 102, 255 ), - ( 153, 51, 255 ), ( 153, 0, 255 ), ( 153, 255, 204 ), ( 153, 204, 204 ), - ( 153, 153, 204 ), ( 153, 102, 204 ), ( 153, 51, 204 ), ( 153, 0, 204 ), - ( 153, 255, 153 ), ( 153, 204, 153 ), ( 153, 153, 153 ), ( 153, 102, 153 ), - ( 153, 51, 153 ), ( 153, 0, 153 ), ( 102, 255, 255 ), ( 102, 204, 255 ), - ( 102, 153, 255 ), ( 102, 102, 255 ), ( 102, 51, 255 ), ( 102, 0, 255 ), - ( 102, 255, 204 ), ( 102, 204, 204 ), ( 102, 153, 204 ), ( 102, 102, 204 ), - ( 102, 51, 204 ), ( 102, 0, 204 ), ( 102, 255, 153 ), ( 102, 204, 153 ), - ( 102, 153, 153 ), ( 102, 102, 153 ), ( 102, 51, 153 ), ( 102, 0, 153 ), - ( 51, 255, 255 ), ( 51, 204, 255 ), ( 51, 153, 255 ), ( 51, 102, 255 ), - ( 51, 51, 255 ), ( 51, 0, 255 ), ( 51, 255, 204 ), ( 51, 204, 204 ), - ( 51, 153, 204 ), ( 51, 102, 204 ), ( 51, 51, 204 ), ( 51, 0, 204 ), - ( 51, 255, 153 ), ( 51, 204, 153 ), ( 51, 153, 153 ), ( 51, 102, 153 ), - ( 51, 51, 153 ), ( 51, 0, 153 ), ( 0, 255, 
255 ), ( 0, 204, 255 ), - ( 0, 153, 255 ), ( 0, 102, 255 ), ( 0, 51, 255 ), ( 0, 0, 255 ), - ( 0, 255, 204 ), ( 0, 204, 204 ), ( 0, 153, 204 ), ( 0, 102, 204 ), - ( 0, 51, 204 ), ( 0, 0, 204 ), ( 0, 255, 153 ), ( 0, 204, 153 ), - ( 0, 153, 153 ), ( 0, 102, 153 ), ( 0, 51, 153 ), ( 0, 0, 153 ), - ( 255, 255, 102 ), ( 255, 204, 102 ), ( 255, 153, 102 ), ( 255, 102, 102 ), - ( 255, 51, 102 ), ( 255, 0, 102 ), ( 255, 255, 51 ), ( 255, 204, 51 ), - ( 255, 153, 51 ), ( 255, 102, 51 ), ( 255, 51, 51 ), ( 255, 0, 51 ), - ( 255, 255, 0 ), ( 255, 204, 0 ), ( 255, 153, 0 ), ( 255, 102, 0 ), - ( 255, 51, 0 ), ( 255, 0, 0 ), ( 204, 255, 102 ), ( 204, 204, 102 ), - ( 204, 153, 102 ), ( 204, 102, 102 ), ( 204, 51, 102 ), ( 204, 0, 102 ), - ( 204, 255, 51 ), ( 204, 204, 51 ), ( 204, 153, 51 ), ( 204, 102, 51 ), - ( 204, 51, 51 ), ( 204, 0, 51 ), ( 204, 255, 0 ), ( 204, 204, 0 ), - ( 204, 153, 0 ), ( 204, 102, 0 ), ( 204, 51, 0 ), ( 204, 0, 0 ), - ( 153, 255, 102 ), ( 153, 204, 102 ), ( 153, 153, 102 ), ( 153, 102, 102 ), - ( 153, 51, 102 ), ( 153, 0, 102 ), ( 153, 255, 51 ), ( 153, 204, 51 ), - ( 153, 153, 51 ), ( 153, 102, 51 ), ( 153, 51, 51 ), ( 153, 0, 51 ), - ( 153, 255, 0 ), ( 153, 204, 0 ), ( 153, 153, 0 ), ( 153, 102, 0 ), - ( 153, 51, 0 ), ( 153, 0, 0 ), ( 102, 255, 102 ), ( 102, 204, 102 ), - ( 102, 153, 102 ), ( 102, 102, 102 ), ( 102, 51, 102 ), ( 102, 0, 102 ), - ( 102, 255, 51 ), ( 102, 204, 51 ), ( 102, 153, 51 ), ( 102, 102, 51 ), - ( 102, 51, 51 ), ( 102, 0, 51 ), ( 102, 255, 0 ), ( 102, 204, 0 ), - ( 102, 153, 0 ), ( 102, 102, 0 ), ( 102, 51, 0 ), ( 102, 0, 0 ), - ( 51, 255, 102 ), ( 51, 204, 102 ), ( 51, 153, 102 ), ( 51, 102, 102 ), - ( 51, 51, 102 ), ( 51, 0, 102 ), ( 51, 255, 51 ), ( 51, 204, 51 ), - ( 51, 153, 51 ), ( 51, 102, 51 ), ( 51, 51, 51 ), ( 51, 0, 51 ), - ( 51, 255, 0 ), ( 51, 204, 0 ), ( 51, 153, 0 ), ( 51, 102, 0 ), - ( 51, 51, 0 ), ( 51, 0, 0 ), ( 0, 255, 102 ), ( 0, 204, 102 ), - ( 0, 153, 102 ), ( 0, 102, 102 ), ( 0, 51, 102 ), ( 0, 0, 102 ), - ( 0, 255, 51 ), ( 0, 204, 51 ), ( 0, 153, 51 ), ( 0, 102, 51 ), - ( 0, 51, 51 ), ( 0, 0, 51 ), ( 0, 255, 0 ), ( 0, 204, 0 ), - ( 0, 153, 0 ), ( 0, 102, 0 ), ( 0, 51, 0 ), ( 17, 17, 17 ), - ( 34, 34, 34 ), ( 68, 68, 68 ), ( 85, 85, 85 ), ( 119, 119, 119 ), - ( 136, 136, 136 ), ( 170, 170, 170 ), ( 187, 187, 187 ), ( 221, 221, 221 ), - ( 238, 238, 238 ), ( 192, 192, 192 ), ( 128, 0, 0 ), ( 128, 0, 128 ), - ( 0, 128, 0 ), ( 0, 128, 128 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), - ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 ), ( 0, 0, 0 )) - -# so build a prototype image to be used for palette resampling -def build_prototype_image(): - image = Image.new("L", (1,len(_Palm8BitColormapValues),)) - image.putdata(list(range(len(_Palm8BitColormapValues)))) - palettedata = () - for i in range(len(_Palm8BitColormapValues)): - palettedata = palettedata + _Palm8BitColormapValues[i] - for i in range(256 - len(_Palm8BitColormapValues)): - palettedata = palettedata + (0, 0, 0) - image.putpalette(palettedata) - return image - -Palm8BitColormapImage = build_prototype_image() - -# OK, we now have in Palm8BitColormapImage, a "P"-mode image with the right palette -# -# -------------------------------------------------------------------- - -_FLAGS = { - "custom-colormap": 0x4000, - "is-compressed": 0x8000, 
- "has-transparent": 0x2000, - } - -_COMPRESSION_TYPES = { - "none": 0xFF, - "rle": 0x01, - "scanline": 0x00, - } - -o8 = _binary.o8 -o16b = _binary.o16be - -# -# -------------------------------------------------------------------- - -## -# (Internal) Image save plugin for the Palm format. - -def _save(im, fp, filename, check=0): - - if im.mode == "P": - - # we assume this is a color Palm image with the standard colormap, - # unless the "info" dict has a "custom-colormap" field - - rawmode = "P" - bpp = 8 - version = 1 - - elif im.mode == "L" and "bpp" in im.encoderinfo and im.encoderinfo["bpp"] in (1, 2, 4): - - # this is 8-bit grayscale, so we shift it to get the high-order bits, and invert it because - # Palm does greyscale from white (0) to black (1) - bpp = im.encoderinfo["bpp"] - im = im.point(lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) - # we ignore the palette here - im.mode = "P" - rawmode = "P;" + str(bpp) - version = 1 - - elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): - - # here we assume that even though the inherent mode is 8-bit grayscale, only - # the lower bpp bits are significant. We invert them to match the Palm. - bpp = im.info["bpp"] - im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval)) - # we ignore the palette here - im.mode = "P" - rawmode = "P;" + str(bpp) - version = 1 - - elif im.mode == "1": - - # monochrome -- write it inverted, as is the Palm standard - rawmode = "1;I" - bpp = 1 - version = 0 - - else: - - raise IOError("cannot write mode %s as Palm" % im.mode) - - if check: - return check - - # - # make sure image data is available - im.load() - - # write header - - cols = im.size[0] - rows = im.size[1] - - rowbytes = ((cols + (16//bpp - 1)) / (16 // bpp)) * 2 - transparent_index = 0 - compression_type = _COMPRESSION_TYPES["none"] - - flags = 0 - if im.mode == "P" and "custom-colormap" in im.info: - flags = flags & _FLAGS["custom-colormap"] - colormapsize = 4 * 256 + 2 - colormapmode = im.palette.mode - colormap = im.getdata().getpalette() - else: - colormapsize = 0 - - if "offset" in im.info: - offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 - else: - offset = 0 - - fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) - fp.write(o8(bpp)) - fp.write(o8(version)) - fp.write(o16b(offset)) - fp.write(o8(transparent_index)) - fp.write(o8(compression_type)) - fp.write(o16b(0)) # reserved by Palm - - # now write colormap if necessary - - if colormapsize > 0: - fp.write(o16b(256)) - for i in range(256): - fp.write(o8(i)) - if colormapmode == 'RGB': - fp.write(o8(colormap[3 * i]) + o8(colormap[3 * i + 1]) + o8(colormap[3 * i + 2])) - elif colormapmode == 'RGBA': - fp.write(o8(colormap[4 * i]) + o8(colormap[4 * i + 1]) + o8(colormap[4 * i + 2])) - - # now convert data to raw form - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, rowbytes, 1))]) - - fp.flush() - - -# -# -------------------------------------------------------------------- - -Image.register_save("Palm", _save) - -Image.register_extension("Palm", ".palm") - -Image.register_mime("Palm", "image/palm") diff --git a/Darwin/lib/python3.4/site-packages/PIL/TiffImagePlugin.py b/Darwin/lib/python3.4/site-packages/PIL/TiffImagePlugin.py deleted file mode 100644 index 2e49931..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/TiffImagePlugin.py +++ /dev/null @@ -1,1177 +0,0 @@ -# -# The Python Imaging Library. 
-# $Id$ -# -# TIFF file handling -# -# TIFF is a flexible, if somewhat aged, image file format originally -# defined by Aldus. Although TIFF supports a wide variety of pixel -# layouts and compression methods, the name doesn't really stand for -# "thousands of incompatible file formats," it just feels that way. -# -# To read TIFF data from a stream, the stream must be seekable. For -# progressive decoding, make sure to use TIFF files where the tag -# directory is placed first in the file. -# -# History: -# 1995-09-01 fl Created -# 1996-05-04 fl Handle JPEGTABLES tag -# 1996-05-18 fl Fixed COLORMAP support -# 1997-01-05 fl Fixed PREDICTOR support -# 1997-08-27 fl Added support for rational tags (from Perry Stoll) -# 1998-01-10 fl Fixed seek/tell (from Jan Blom) -# 1998-07-15 fl Use private names for internal variables -# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) -# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) -# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) -# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) -# 2001-12-18 fl Added workaround for broken Matrox library -# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) -# 2003-05-19 fl Check FILLORDER tag -# 2003-09-26 fl Added RGBa support -# 2004-02-24 fl Added DPI support; fixed rational write support -# 2005-02-07 fl Added workaround for broken Corel Draw 10 files -# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) -# -# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. -# Copyright (c) 1995-1997 by Fredrik Lundh -# -# See the README file for information on usage and redistribution. -# - -from __future__ import print_function - -__version__ = "1.3.5" - -from PIL import Image, ImageFile -from PIL import ImagePalette -from PIL import _binary -from PIL._util import isStringType - -import warnings -import array, sys -import collections -import itertools -import os - -# Set these to true to force use of libtiff for reading or writing. 
-READ_LIBTIFF = False -WRITE_LIBTIFF= False - -II = b"II" # little-endian (intel-style) -MM = b"MM" # big-endian (motorola-style) - -i8 = _binary.i8 -o8 = _binary.o8 - -if sys.byteorder == "little": - native_prefix = II -else: - native_prefix = MM - -# -# -------------------------------------------------------------------- -# Read TIFF files - -il16 = _binary.i16le -il32 = _binary.i32le -ol16 = _binary.o16le -ol32 = _binary.o32le - -ib16 = _binary.i16be -ib32 = _binary.i32be -ob16 = _binary.o16be -ob32 = _binary.o32be - -# a few tag names, just to make the code below a bit more readable -IMAGEWIDTH = 256 -IMAGELENGTH = 257 -BITSPERSAMPLE = 258 -COMPRESSION = 259 -PHOTOMETRIC_INTERPRETATION = 262 -FILLORDER = 266 -IMAGEDESCRIPTION = 270 -STRIPOFFSETS = 273 -SAMPLESPERPIXEL = 277 -ROWSPERSTRIP = 278 -STRIPBYTECOUNTS = 279 -X_RESOLUTION = 282 -Y_RESOLUTION = 283 -PLANAR_CONFIGURATION = 284 -RESOLUTION_UNIT = 296 -SOFTWARE = 305 -DATE_TIME = 306 -ARTIST = 315 -PREDICTOR = 317 -COLORMAP = 320 -TILEOFFSETS = 324 -EXTRASAMPLES = 338 -SAMPLEFORMAT = 339 -JPEGTABLES = 347 -COPYRIGHT = 33432 -IPTC_NAA_CHUNK = 33723 # newsphoto properties -PHOTOSHOP_CHUNK = 34377 # photoshop properties -ICCPROFILE = 34675 -EXIFIFD = 34665 -XMP = 700 - -# https://github.com/fiji/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java -IMAGEJ_META_DATA_BYTE_COUNTS = 50838 -IMAGEJ_META_DATA = 50839 - -COMPRESSION_INFO = { - # Compression => pil compression name - 1: "raw", - 2: "tiff_ccitt", - 3: "group3", - 4: "group4", - 5: "tiff_lzw", - 6: "tiff_jpeg", # obsolete - 7: "jpeg", - 8: "tiff_adobe_deflate", - 32771: "tiff_raw_16", # 16-bit padding - 32773: "packbits", - 32809: "tiff_thunderscan", - 32946: "tiff_deflate", - 34676: "tiff_sgilog", - 34677: "tiff_sgilog24", -} - -COMPRESSION_INFO_REV = dict([(v,k) for (k,v) in COMPRESSION_INFO.items()]) - -OPEN_INFO = { - # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, - # ExtraSamples) => mode, rawmode - (II, 0, 1, 1, (1,), ()): ("1", "1;I"), - (II, 0, 1, 2, (1,), ()): ("1", "1;IR"), - (II, 0, 1, 1, (8,), ()): ("L", "L;I"), - (II, 0, 1, 2, (8,), ()): ("L", "L;IR"), - (II, 0, 3, 1, (32,), ()): ("F", "F;32F"), - (II, 1, 1, 1, (1,), ()): ("1", "1"), - (II, 1, 1, 2, (1,), ()): ("1", "1;R"), - (II, 1, 1, 1, (8,), ()): ("L", "L"), - (II, 1, 1, 1, (8,8), (2,)): ("LA", "LA"), - (II, 1, 1, 2, (8,), ()): ("L", "L;R"), - (II, 1, 1, 1, (12,), ()): ("I;16", "I;12"), - (II, 1, 1, 1, (16,), ()): ("I;16", "I;16"), - (II, 1, 2, 1, (16,), ()): ("I;16S", "I;16S"), - (II, 1, 1, 1, (32,), ()): ("I", "I;32N"), - (II, 1, 2, 1, (32,), ()): ("I", "I;32S"), - (II, 1, 3, 1, (32,), ()): ("F", "F;32F"), - (II, 2, 1, 1, (8,8,8), ()): ("RGB", "RGB"), - (II, 2, 1, 2, (8,8,8), ()): ("RGB", "RGB;R"), - (II, 2, 1, 1, (8,8,8,8), ()): ("RGBA", "RGBA"), # missing ExtraSamples - (II, 2, 1, 1, (8,8,8,8), (0,)): ("RGBX", "RGBX"), - (II, 2, 1, 1, (8,8,8,8), (1,)): ("RGBA", "RGBa"), - (II, 2, 1, 1, (8,8,8,8), (2,)): ("RGBA", "RGBA"), - (II, 2, 1, 1, (8,8,8,8), (999,)): ("RGBA", "RGBA"), # corel draw 10 - (II, 3, 1, 1, (1,), ()): ("P", "P;1"), - (II, 3, 1, 2, (1,), ()): ("P", "P;1R"), - (II, 3, 1, 1, (2,), ()): ("P", "P;2"), - (II, 3, 1, 2, (2,), ()): ("P", "P;2R"), - (II, 3, 1, 1, (4,), ()): ("P", "P;4"), - (II, 3, 1, 2, (4,), ()): ("P", "P;4R"), - (II, 3, 1, 1, (8,), ()): ("P", "P"), - (II, 3, 1, 1, (8,8), (2,)): ("PA", "PA"), - (II, 3, 1, 2, (8,), ()): ("P", "P;R"), - (II, 5, 1, 1, (8,8,8,8), ()): ("CMYK", "CMYK"), - (II, 6, 1, 1, (8,8,8), ()): ("YCbCr", "YCbCr"), - (II, 8, 1, 1, 
(8,8,8), ()): ("LAB", "LAB"), - - (MM, 0, 1, 1, (1,), ()): ("1", "1;I"), - (MM, 0, 1, 2, (1,), ()): ("1", "1;IR"), - (MM, 0, 1, 1, (8,), ()): ("L", "L;I"), - (MM, 0, 1, 2, (8,), ()): ("L", "L;IR"), - (MM, 1, 1, 1, (1,), ()): ("1", "1"), - (MM, 1, 1, 2, (1,), ()): ("1", "1;R"), - (MM, 1, 1, 1, (8,), ()): ("L", "L"), - (MM, 1, 1, 1, (8,8), (2,)): ("LA", "LA"), - (MM, 1, 1, 2, (8,), ()): ("L", "L;R"), - (MM, 1, 1, 1, (16,), ()): ("I;16B", "I;16B"), - (MM, 1, 2, 1, (16,), ()): ("I;16BS", "I;16BS"), - (MM, 1, 2, 1, (32,), ()): ("I;32BS", "I;32BS"), - (MM, 1, 3, 1, (32,), ()): ("F", "F;32BF"), - (MM, 2, 1, 1, (8,8,8), ()): ("RGB", "RGB"), - (MM, 2, 1, 2, (8,8,8), ()): ("RGB", "RGB;R"), - (MM, 2, 1, 1, (8,8,8,8), (0,)): ("RGBX", "RGBX"), - (MM, 2, 1, 1, (8,8,8,8), (1,)): ("RGBA", "RGBa"), - (MM, 2, 1, 1, (8,8,8,8), (2,)): ("RGBA", "RGBA"), - (MM, 2, 1, 1, (8,8,8,8), (999,)): ("RGBA", "RGBA"), # corel draw 10 - (MM, 3, 1, 1, (1,), ()): ("P", "P;1"), - (MM, 3, 1, 2, (1,), ()): ("P", "P;1R"), - (MM, 3, 1, 1, (2,), ()): ("P", "P;2"), - (MM, 3, 1, 2, (2,), ()): ("P", "P;2R"), - (MM, 3, 1, 1, (4,), ()): ("P", "P;4"), - (MM, 3, 1, 2, (4,), ()): ("P", "P;4R"), - (MM, 3, 1, 1, (8,), ()): ("P", "P"), - (MM, 3, 1, 1, (8,8), (2,)): ("PA", "PA"), - (MM, 3, 1, 2, (8,), ()): ("P", "P;R"), - (MM, 5, 1, 1, (8,8,8,8), ()): ("CMYK", "CMYK"), - (MM, 6, 1, 1, (8,8,8), ()): ("YCbCr", "YCbCr"), - (MM, 8, 1, 1, (8,8,8), ()): ("LAB", "LAB"), - -} - -PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] - -def _accept(prefix): - return prefix[:4] in PREFIXES - -## -# Wrapper for TIFF IFDs. - -class ImageFileDirectory(collections.MutableMapping): - """ This class represents a TIFF tag directory. To speed things - up, we don't decode tags unless they're asked for. - - Exposes a dictionary interface of the tags in the directory - ImageFileDirectory[key] = value - value = ImageFileDirectory[key] - - Also contains a dictionary of tag types as read from the tiff - image file, 'ImageFileDirectory.tagtype' - - - Data Structures: - 'public' - * self.tagtype = {} Key: numerical tiff tag number - Value: integer corresponding to the data type from - `TiffTags.TYPES` - - 'internal' - * self.tags = {} Key: numerical tiff tag number - Value: Decoded data, Generally a tuple. - * If set from __setval__ -- always a tuple - * Numeric types -- always a tuple - * String type -- not a tuple, returned as string - * Undefined data -- not a tuple, returned as bytes - * Byte -- not a tuple, returned as byte. - * self.tagdata = {} Key: numerical tiff tag number - Value: undecoded byte string from file - - - Tags will be found in either self.tags or self.tagdata, but - not both. The union of the two should contain all the tags - from the Tiff image file. External classes shouldn't - reference these unless they're really sure what they're doing. - """ - - def __init__(self, prefix=II): - """ - :prefix: 'II'|'MM' tiff endianness - """ - self.prefix = prefix[:2] - if self.prefix == MM: - self.i16, self.i32 = ib16, ib32 - self.o16, self.o32 = ob16, ob32 - elif self.prefix == II: - self.i16, self.i32 = il16, il32 - self.o16, self.o32 = ol16, ol32 - else: - raise SyntaxError("not a TIFF IFD") - self.reset() - - def reset(self): - #: Tags is an incomplete dictionary of the tags of the image. - #: For a complete dictionary, use the as_dict method. 
- self.tags = {} - self.tagdata = {} - self.tagtype = {} # added 2008-06-05 by Florian Hoech - self.next = None - - def __str__(self): - return str(self.as_dict()) - - def as_dict(self): - """Return a dictionary of the image's tags.""" - return dict(self.items()) - - def named(self): - """Returns the complete tag dictionary, with named tags where posible.""" - from PIL import TiffTags - result = {} - for tag_code, value in self.items(): - tag_name = TiffTags.TAGS.get(tag_code, tag_code) - result[tag_name] = value - return result - - - # dictionary API - - def __len__(self): - return len(self.tagdata) + len(self.tags) - - def __getitem__(self, tag): - try: - return self.tags[tag] - except KeyError: - data = self.tagdata[tag] # unpack on the fly - type = self.tagtype[tag] - size, handler = self.load_dispatch[type] - self.tags[tag] = data = handler(self, data) - del self.tagdata[tag] - return data - - def getscalar(self, tag, default=None): - try: - value = self[tag] - if len(value) != 1: - if tag == SAMPLEFORMAT: - # work around broken (?) matrox library - # (from Ted Wright, via Bob Klimek) - raise KeyError # use default - raise ValueError("not a scalar") - return value[0] - except KeyError: - if default is None: - raise - return default - - def __contains__(self, tag): - return tag in self.tags or tag in self.tagdata - - if bytes is str: - def has_key(self, tag): - return tag in self - - def __setitem__(self, tag, value): - # tags are tuples for integers - # tags are not tuples for byte, string, and undefined data. - # see load_* - if not isinstance(value, tuple): - value = (value,) - self.tags[tag] = value - - def __delitem__(self, tag): - self.tags.pop(tag, self.tagdata.pop(tag, None)) - - def __iter__(self): - return itertools.chain(self.tags.__iter__(), self.tagdata.__iter__()) - - def items(self): - keys = list(self.__iter__()) - values = [self[key] for key in keys] - return zip(keys, values) - - # load primitives - - load_dispatch = {} - - def load_byte(self, data): - return data - load_dispatch[1] = (1, load_byte) - - def load_string(self, data): - if data[-1:] == b'\0': - data = data[:-1] - return data.decode('latin-1', 'replace') - load_dispatch[2] = (1, load_string) - - def load_short(self, data): - l = [] - for i in range(0, len(data), 2): - l.append(self.i16(data, i)) - return tuple(l) - load_dispatch[3] = (2, load_short) - - def load_long(self, data): - l = [] - for i in range(0, len(data), 4): - l.append(self.i32(data, i)) - return tuple(l) - load_dispatch[4] = (4, load_long) - - def load_rational(self, data): - l = [] - for i in range(0, len(data), 8): - l.append((self.i32(data, i), self.i32(data, i+4))) - return tuple(l) - load_dispatch[5] = (8, load_rational) - - def load_float(self, data): - a = array.array("f", data) - if self.prefix != native_prefix: - a.byteswap() - return tuple(a) - load_dispatch[11] = (4, load_float) - - def load_double(self, data): - a = array.array("d", data) - if self.prefix != native_prefix: - a.byteswap() - return tuple(a) - load_dispatch[12] = (8, load_double) - - def load_undefined(self, data): - # Untyped data - return data - load_dispatch[7] = (1, load_undefined) - - def load(self, fp): - # load tag dictionary - - self.reset() - - i16 = self.i16 - i32 = self.i32 - - for i in range(i16(fp.read(2))): - - ifd = fp.read(12) - - tag, typ = i16(ifd), i16(ifd, 2) - - if Image.DEBUG: - from PIL import TiffTags - tagname = TiffTags.TAGS.get(tag, "unknown") - typname = TiffTags.TYPES.get(typ, "unknown") - print("tag: %s (%d)" % (tagname, tag), end=' 
') - print("- type: %s (%d)" % (typname, typ), end=' ') - - try: - dispatch = self.load_dispatch[typ] - except KeyError: - if Image.DEBUG: - print("- unsupported type", typ) - continue # ignore unsupported type - - size, handler = dispatch - - size = size * i32(ifd, 4) - - # Get and expand tag value - if size > 4: - here = fp.tell() - fp.seek(i32(ifd, 8)) - data = ImageFile._safe_read(fp, size) - fp.seek(here) - else: - data = ifd[8:8+size] - - if len(data) != size: - warnings.warn("Possibly corrupt EXIF data. Expecting to read %d bytes but only got %d. Skipping tag %s" % (size, len(data), tag)) - continue - - self.tagdata[tag] = data - self.tagtype[tag] = typ - - if Image.DEBUG: - if tag in (COLORMAP, IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, ICCPROFILE, XMP): - print("- value: " % size) - else: - print("- value:", self[tag]) - - self.next = i32(fp.read(4)) - - # save primitives - - def save(self, fp): - - o16 = self.o16 - o32 = self.o32 - - fp.write(o16(len(self.tags))) - - # always write in ascending tag order - tags = sorted(self.tags.items()) - - directory = [] - append = directory.append - - offset = fp.tell() + len(self.tags) * 12 + 4 - - stripoffsets = None - - # pass 1: convert tags to binary format - for tag, value in tags: - - typ = None - - if tag in self.tagtype: - typ = self.tagtype[tag] - - if Image.DEBUG: - print ("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) - - if typ == 1: - # byte data - if isinstance(value, tuple): - data = value = value[-1] - else: - data = value - elif typ == 7: - # untyped data - data = value = b"".join(value) - elif isStringType(value[0]): - # string data - if isinstance(value, tuple): - value = value[-1] - typ = 2 - # was b'\0'.join(str), which led to \x00a\x00b sorts - # of strings which I don't see in in the wild tiffs - # and doesn't match the tiff spec: 8-bit byte that - # contains a 7-bit ASCII code; the last byte must be - # NUL (binary zero). Also, I don't think this was well - # excersized before. 
- data = value = b"" + value.encode('ascii', 'replace') + b"\0" - else: - # integer data - if tag == STRIPOFFSETS: - stripoffsets = len(directory) - typ = 4 # to avoid catch-22 - elif tag in (X_RESOLUTION, Y_RESOLUTION) or typ==5: - # identify rational data fields - typ = 5 - if isinstance(value[0], tuple): - # long name for flatten - value = tuple(itertools.chain.from_iterable(value)) - elif not typ: - typ = 3 - for v in value: - if v >= 65536: - typ = 4 - if typ == 3: - data = b"".join(map(o16, value)) - else: - data = b"".join(map(o32, value)) - - if Image.DEBUG: - from PIL import TiffTags - tagname = TiffTags.TAGS.get(tag, "unknown") - typname = TiffTags.TYPES.get(typ, "unknown") - print("save: %s (%d)" % (tagname, tag), end=' ') - print("- type: %s (%d)" % (typname, typ), end=' ') - if tag in (COLORMAP, IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, ICCPROFILE, XMP): - size = len(data) - print("- value: " % size) - else: - print("- value:", value) - - # figure out if data fits into the directory - if len(data) == 4: - append((tag, typ, len(value), data, b"")) - elif len(data) < 4: - append((tag, typ, len(value), data + (4-len(data))*b"\0", b"")) - else: - count = len(value) - if typ == 5: - count = count // 2 # adjust for rational data field - - append((tag, typ, count, o32(offset), data)) - offset += len(data) - if offset & 1: - offset += 1 # word padding - - # update strip offset data to point beyond auxiliary data - if stripoffsets is not None: - tag, typ, count, value, data = directory[stripoffsets] - assert not data, "multistrip support not yet implemented" - value = o32(self.i32(value) + offset) - directory[stripoffsets] = tag, typ, count, value, data - - # pass 2: write directory to file - for tag, typ, count, value, data in directory: - if Image.DEBUG > 1: - print(tag, typ, count, repr(value), repr(data)) - fp.write(o16(tag) + o16(typ) + o32(count) + value) - - # -- overwrite here for multi-page -- - fp.write(b"\0\0\0\0") # end of directory - - # pass 3: write auxiliary data to file - for tag, typ, count, value, data in directory: - fp.write(data) - if len(data) & 1: - fp.write(b"\0") - - return offset - -## -# Image plugin for TIFF files. 
- -class TiffImageFile(ImageFile.ImageFile): - - format = "TIFF" - format_description = "Adobe TIFF" - - def _open(self): - "Open the first image in a TIFF file" - - # Header - ifh = self.fp.read(8) - - if ifh[:4] not in PREFIXES: - raise SyntaxError("not a TIFF file") - - # image file directory (tag dictionary) - self.tag = self.ifd = ImageFileDirectory(ifh[:2]) - - # setup frame pointers - self.__first = self.__next = self.ifd.i32(ifh, 4) - self.__frame = -1 - self.__fp = self.fp - - if Image.DEBUG: - print ("*** TiffImageFile._open ***") - print ("- __first:", self.__first) - print ("- ifh: ", ifh) - - # and load the first frame - self._seek(0) - - def seek(self, frame): - "Select a given frame as current image" - - if frame < 0: - frame = 0 - self._seek(frame) - - def tell(self): - "Return the current frame number" - - return self._tell() - - def _seek(self, frame): - - self.fp = self.__fp - if frame < self.__frame: - # rewind file - self.__frame = -1 - self.__next = self.__first - while self.__frame < frame: - if not self.__next: - raise EOFError("no more images in TIFF file") - self.fp.seek(self.__next) - self.tag.load(self.fp) - self.__next = self.tag.next - self.__frame += 1 - self._setup() - - def _tell(self): - - return self.__frame - - def _decoder(self, rawmode, layer, tile=None): - "Setup decoder contexts" - - args = None - if rawmode == "RGB" and self._planar_configuration == 2: - rawmode = rawmode[layer] - compression = self._compression - if compression == "raw": - args = (rawmode, 0, 1) - elif compression == "jpeg": - args = rawmode, "" - if JPEGTABLES in self.tag: - # Hack to handle abbreviated JPEG headers - self.tile_prefix = self.tag[JPEGTABLES] - elif compression == "packbits": - args = rawmode - elif compression == "tiff_lzw": - args = rawmode - if 317 in self.tag: - # Section 14: Differencing Predictor - self.decoderconfig = (self.tag[PREDICTOR][0],) - - if ICCPROFILE in self.tag: - self.info['icc_profile'] = self.tag[ICCPROFILE] - - return args - - def _load_libtiff(self): - """ Overload method triggered when we detect a compressed tiff - Calls out to libtiff """ - - pixel = Image.Image.load(self) - - if self.tile is None: - raise IOError("cannot load this image") - if not self.tile: - return pixel - - self.load_prepare() - - if not len(self.tile) == 1: - raise IOError("Not exactly one tile") - - # (self._compression, (extents tuple), 0, (rawmode, self._compression, fp)) - ignored, extents, ignored_2, args = self.tile[0] - decoder = Image._getdecoder(self.mode, 'libtiff', args, self.decoderconfig) - try: - decoder.setimage(self.im, extents) - except ValueError: - raise IOError("Couldn't set the image") - - if hasattr(self.fp, "getvalue"): - # We've got a stringio like thing passed in. Yay for all in memory. - # The decoder needs the entire file in one shot, so there's not - # a lot we can do here other than give it the entire file. - # unless we could do something like get the address of the underlying - # string for stringio. - # - # Rearranging for supporting byteio items, since they have a fileno - # that returns an IOError if there's no underlying fp. Easier to deal - # with here by reordering. - if Image.DEBUG: - print ("have getvalue. just sending in a string from getvalue") - n,err = decoder.decode(self.fp.getvalue()) - elif hasattr(self.fp, "fileno"): - # we've got a actual file on disk, pass in the fp. 
- if Image.DEBUG: - print ("have fileno, calling fileno version of the decoder.") - self.fp.seek(0) - n,err = decoder.decode(b"fpfp") # 4 bytes, otherwise the trace might error out - else: - # we have something else. - if Image.DEBUG: - print ("don't have fileno or getvalue. just reading") - # UNDONE -- so much for that buffer size thing. - n,err = decoder.decode(self.fp.read()) - - - self.tile = [] - self.readonly = 0 - # libtiff closed the fp in a, we need to close self.fp, if possible - if hasattr(self.fp, 'close'): - self.fp.close() - self.fp = None # might be shared - - if err < 0: - raise IOError(err) - - self.load_end() - - return Image.Image.load(self) - - def _setup(self): - "Setup this image object based on current tags" - - if 0xBC01 in self.tag: - raise IOError("Windows Media Photo files not yet supported") - - getscalar = self.tag.getscalar - - # extract relevant tags - self._compression = COMPRESSION_INFO[getscalar(COMPRESSION, 1)] - self._planar_configuration = getscalar(PLANAR_CONFIGURATION, 1) - - # photometric is a required tag, but not everyone is reading - # the specification - photo = getscalar(PHOTOMETRIC_INTERPRETATION, 0) - - fillorder = getscalar(FILLORDER, 1) - - if Image.DEBUG: - print("*** Summary ***") - print("- compression:", self._compression) - print("- photometric_interpretation:", photo) - print("- planar_configuration:", self._planar_configuration) - print("- fill_order:", fillorder) - - # size - xsize = getscalar(IMAGEWIDTH) - ysize = getscalar(IMAGELENGTH) - self.size = xsize, ysize - - if Image.DEBUG: - print("- size:", self.size) - - format = getscalar(SAMPLEFORMAT, 1) - - # mode: check photometric interpretation and bits per pixel - key = ( - self.tag.prefix, photo, format, fillorder, - self.tag.get(BITSPERSAMPLE, (1,)), - self.tag.get(EXTRASAMPLES, ()) - ) - if Image.DEBUG: - print("format key:", key) - try: - self.mode, rawmode = OPEN_INFO[key] - except KeyError: - if Image.DEBUG: - print("- unsupported format") - raise SyntaxError("unknown pixel mode") - - if Image.DEBUG: - print("- raw mode:", rawmode) - print("- pil mode:", self.mode) - - self.info["compression"] = self._compression - - xres = getscalar(X_RESOLUTION, (1, 1)) - yres = getscalar(Y_RESOLUTION, (1, 1)) - - if xres and not isinstance(xres, tuple): - xres = (xres, 1.) - if yres and not isinstance(yres, tuple): - yres = (yres, 1.) - if xres and yres: - xres = xres[0] / (xres[1] or 1) - yres = yres[0] / (yres[1] or 1) - resunit = getscalar(RESOLUTION_UNIT, 1) - if resunit == 2: # dots per inch - self.info["dpi"] = xres, yres - elif resunit == 3: # dots per centimeter. convert to dpi - self.info["dpi"] = xres * 2.54, yres * 2.54 - else: # No absolute unit of measurement - self.info["resolution"] = xres, yres - - # build tile descriptors - x = y = l = 0 - self.tile = [] - if STRIPOFFSETS in self.tag: - # striped image - offsets = self.tag[STRIPOFFSETS] - h = getscalar(ROWSPERSTRIP, ysize) - w = self.size[0] - if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", "group4", - "tiff_jpeg", "tiff_adobe_deflate", - "tiff_thunderscan", "tiff_deflate", - "tiff_sgilog", "tiff_sgilog24", - "tiff_raw_16"]: - ## if Image.DEBUG: - ## print "Activating g4 compression for whole file" - - # Decoder expects entire file as one tile. - # There's a buffer size limit in load (64k) - # so large g4 images will fail if we use that - # function. - # - # Setup the one tile for the whole image, then - # replace the existing load function with our - # _load_libtiff function. 
- - self.load = self._load_libtiff - - # To be nice on memory footprint, if there's a - # file descriptor, use that instead of reading - # into a string in python. - - # libtiff closes the file descriptor, so pass in a dup. - try: - fp = hasattr(self.fp, "fileno") and os.dup(self.fp.fileno()) - except IOError: - # io.BytesIO have a fileno, but returns an IOError if - # it doesn't use a file descriptor. - fp = False - - # libtiff handles the fillmode for us, so 1;IR should - # actually be 1;I. Including the R double reverses the - # bits, so stripes of the image are reversed. See - # https://github.com/python-pillow/Pillow/issues/279 - if fillorder == 2: - key = ( - self.tag.prefix, photo, format, 1, - self.tag.get(BITSPERSAMPLE, (1,)), - self.tag.get(EXTRASAMPLES, ()) - ) - if Image.DEBUG: - print("format key:", key) - # this should always work, since all the - # fillorder==2 modes have a corresponding - # fillorder=1 mode - self.mode, rawmode = OPEN_INFO[key] - # libtiff always returns the bytes in native order. - # we're expecting image byte order. So, if the rawmode - # contains I;16, we need to convert from native to image - # byte order. - if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: - rawmode = 'I;16N' - - # Offset in the tile tuple is 0, we go from 0,0 to - # w,h, and we only do this once -- eds - a = (rawmode, self._compression, fp ) - self.tile.append( - (self._compression, - (0, 0, w, ysize), - 0, a)) - a = None - - else: - for i in range(len(offsets)): - a = self._decoder(rawmode, l, i) - self.tile.append( - (self._compression, - (0, min(y, ysize), w, min(y+h, ysize)), - offsets[i], a)) - if Image.DEBUG: - print ("tiles: ", self.tile) - y = y + h - if y >= self.size[1]: - x = y = 0 - l += 1 - a = None - elif TILEOFFSETS in self.tag: - # tiled image - w = getscalar(322) - h = getscalar(323) - a = None - for o in self.tag[TILEOFFSETS]: - if not a: - a = self._decoder(rawmode, l) - # FIXME: this doesn't work if the image size - # is not a multiple of the tile size... 
- self.tile.append( - (self._compression, - (x, y, x+w, y+h), - o, a)) - x = x + w - if x >= self.size[0]: - x, y = 0, y + h - if y >= self.size[1]: - x = y = 0 - l += 1 - a = None - else: - if Image.DEBUG: - print("- unsupported data organization") - raise SyntaxError("unknown data organization") - - # fixup palette descriptor - - if self.mode == "P": - palette = [o8(a // 256) for a in self.tag[COLORMAP]] - self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) -# -# -------------------------------------------------------------------- -# Write TIFF files - -# little endian is default except for image modes with explict big endian byte-order - -SAVE_INFO = { - # mode => rawmode, byteorder, photometrics, sampleformat, bitspersample, extra - "1": ("1", II, 1, 1, (1,), None), - "L": ("L", II, 1, 1, (8,), None), - "LA": ("LA", II, 1, 1, (8,8), 2), - "P": ("P", II, 3, 1, (8,), None), - "PA": ("PA", II, 3, 1, (8,8), 2), - "I": ("I;32S", II, 1, 2, (32,), None), - "I;16": ("I;16", II, 1, 1, (16,), None), - "I;16S": ("I;16S", II, 1, 2, (16,), None), - "F": ("F;32F", II, 1, 3, (32,), None), - "RGB": ("RGB", II, 2, 1, (8,8,8), None), - "RGBX": ("RGBX", II, 2, 1, (8,8,8,8), 0), - "RGBA": ("RGBA", II, 2, 1, (8,8,8,8), 2), - "CMYK": ("CMYK", II, 5, 1, (8,8,8,8), None), - "YCbCr": ("YCbCr", II, 6, 1, (8,8,8), None), - "LAB": ("LAB", II, 8, 1, (8,8,8), None), - - "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), - "I;16B": ("I;16B", MM, 1, 1, (16,), None), - "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), - "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), -} - -def _cvt_res(value): - # convert value to TIFF rational number -- (numerator, denominator) - if isinstance(value, collections.Sequence): - assert(len(value) % 2 == 0) - return value - if isinstance(value, int): - return (value, 1) - value = float(value) - return (int(value * 65536), 65536) - -def _save(im, fp, filename): - - try: - rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] - except KeyError: - raise IOError("cannot write mode %s as TIFF" % im.mode) - - ifd = ImageFileDirectory(prefix) - - compression = im.encoderinfo.get('compression',im.info.get('compression','raw')) - - libtiff = WRITE_LIBTIFF or compression != 'raw' - - # required for color libtiff images - ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) - - # -- multi-page -- skip TIFF header on subsequent pages - if not libtiff and fp.tell() == 0: - # tiff header (write via IFD to get everything right) - # PIL always starts the first IFD at offset 8 - fp.write(ifd.prefix + ifd.o16(42) + ifd.o32(8)) - - ifd[IMAGEWIDTH] = im.size[0] - ifd[IMAGELENGTH] = im.size[1] - - # write any arbitrary tags passed in as an ImageFileDirectory - info = im.encoderinfo.get("tiffinfo",{}) - if Image.DEBUG: - print ("Tiffinfo Keys: %s"% info.keys) - keys = list(info.keys()) - for key in keys: - ifd[key] = info.get(key) - try: - ifd.tagtype[key] = info.tagtype[key] - except: - pass # might not be an IFD, Might not have populated type - - - # additions written by Greg Couch, gregc@cgl.ucsf.edu - # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com - if hasattr(im, 'tag'): - # preserve tags from original TIFF image file - for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, - IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): - if key in im.tag: - ifd[key] = im.tag[key] - ifd.tagtype[key] = im.tag.tagtype.get(key, None) - - # preserve ICC profile (should also work when saving other formats - # which support profiles as TIFF) -- 2008-06-06 Florian Hoech - if "icc_profile" 
in im.info: - ifd[ICCPROFILE] = im.info["icc_profile"] - - if "description" in im.encoderinfo: - ifd[IMAGEDESCRIPTION] = im.encoderinfo["description"] - if "resolution" in im.encoderinfo: - ifd[X_RESOLUTION] = ifd[Y_RESOLUTION] \ - = _cvt_res(im.encoderinfo["resolution"]) - if "x resolution" in im.encoderinfo: - ifd[X_RESOLUTION] = _cvt_res(im.encoderinfo["x resolution"]) - if "y resolution" in im.encoderinfo: - ifd[Y_RESOLUTION] = _cvt_res(im.encoderinfo["y resolution"]) - if "resolution unit" in im.encoderinfo: - unit = im.encoderinfo["resolution unit"] - if unit == "inch": - ifd[RESOLUTION_UNIT] = 2 - elif unit == "cm" or unit == "centimeter": - ifd[RESOLUTION_UNIT] = 3 - else: - ifd[RESOLUTION_UNIT] = 1 - if "software" in im.encoderinfo: - ifd[SOFTWARE] = im.encoderinfo["software"] - if "date time" in im.encoderinfo: - ifd[DATE_TIME] = im.encoderinfo["date time"] - if "artist" in im.encoderinfo: - ifd[ARTIST] = im.encoderinfo["artist"] - if "copyright" in im.encoderinfo: - ifd[COPYRIGHT] = im.encoderinfo["copyright"] - - dpi = im.encoderinfo.get("dpi") - if dpi: - ifd[RESOLUTION_UNIT] = 2 - ifd[X_RESOLUTION] = _cvt_res(dpi[0]) - ifd[Y_RESOLUTION] = _cvt_res(dpi[1]) - - if bits != (1,): - ifd[BITSPERSAMPLE] = bits - if len(bits) != 1: - ifd[SAMPLESPERPIXEL] = len(bits) - if extra is not None: - ifd[EXTRASAMPLES] = extra - if format != 1: - ifd[SAMPLEFORMAT] = format - - ifd[PHOTOMETRIC_INTERPRETATION] = photo - - if im.mode == "P": - lut = im.im.getpalette("RGB", "RGB;L") - ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) - - # data orientation - stride = len(bits) * ((im.size[0]*bits[0]+7)//8) - ifd[ROWSPERSTRIP] = im.size[1] - ifd[STRIPBYTECOUNTS] = stride * im.size[1] - ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer - ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression,1) # no compression by default - - if libtiff: - if Image.DEBUG: - print ("Saving using libtiff encoder") - print (ifd.items()) - _fp = 0 - if hasattr(fp, "fileno"): - fp.seek(0) - _fp = os.dup(fp.fileno()) - - blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS, ROWSPERSTRIP, ICCPROFILE] # ICC Profile crashes. - atts={} - # bits per sample is a single short in the tiff directory, not a list. - atts[BITSPERSAMPLE] = bits[0] - # Merge the ones that we have with (optional) more bits from - # the original file, e.g x,y resolution so that we can - # save(load('')) == original file. - for k,v in itertools.chain(ifd.items(), getattr(im, 'ifd', {}).items()): - if k not in atts and k not in blocklist: - if type(v[0]) == tuple and len(v) > 1: - # A tuple of more than one rational tuples - # flatten to floats, following tiffcp.c->cpTag->TIFF_RATIONAL - atts[k] = [float(elt[0])/float(elt[1]) for elt in v] - continue - if type(v[0]) == tuple and len(v) == 1: - # A tuple of one rational tuples - # flatten to floats, following tiffcp.c->cpTag->TIFF_RATIONAL - atts[k] = float(v[0][0])/float(v[0][1]) - continue - if type(v) == tuple and len(v) > 2: - # List of ints? - if type(v[0]) in (int, float): - atts[k] = list(v) - continue - if type(v) == tuple and len(v) == 2: - # one rational tuple - # flatten to float, following tiffcp.c->cpTag->TIFF_RATIONAL - atts[k] = float(v[0])/float(v[1]) - continue - if type(v) == tuple and len(v) == 1: - v = v[0] - # drop through - if isStringType(v): - atts[k] = bytes(v.encode('ascii', 'replace')) + b"\0" - continue - else: - # int or similar - atts[k] = v - - if Image.DEBUG: - print (atts) - - # libtiff always expects the bytes in native order. - # we're storing image byte order. 
So, if the rawmode - # contains I;16, we need to convert from native to image - # byte order. - if im.mode in ('I;16B', 'I;16'): - rawmode = 'I;16N' - - a = (rawmode, compression, _fp, filename, atts) - # print (im.mode, compression, a, im.encoderconfig) - e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) - e.setimage(im.im, (0,0)+im.size) - while True: - l, s, d = e.encode(16*1024) # undone, change to self.decodermaxblock - if not _fp: - fp.write(d) - if s: - break - if s < 0: - raise IOError("encoder error %d when writing image file" % s) - - else: - offset = ifd.save(fp) - - ImageFile._save(im, fp, [ - ("raw", (0,0)+im.size, offset, (rawmode, stride, 1)) - ]) - - - # -- helper for multi-page save -- - if "_debug_multipage" in im.encoderinfo: - #just to access o32 and o16 (using correct byte order) - im._debug_multipage = ifd - -# -# -------------------------------------------------------------------- -# Register - -Image.register_open("TIFF", TiffImageFile, _accept) -Image.register_save("TIFF", _save) - -Image.register_extension("TIFF", ".tif") -Image.register_extension("TIFF", ".tiff") - -Image.register_mime("TIFF", "image/tiff") diff --git a/Darwin/lib/python3.4/site-packages/PIL/TiffTags.py b/Darwin/lib/python3.4/site-packages/PIL/TiffTags.py deleted file mode 100644 index 92a4b5a..0000000 --- a/Darwin/lib/python3.4/site-packages/PIL/TiffTags.py +++ /dev/null @@ -1,213 +0,0 @@ -# -# The Python Imaging Library. -# $Id$ -# -# TIFF tags -# -# This module provides clear-text names for various well-known -# TIFF tags. the TIFF codec works just fine without it. -# -# Copyright (c) Secret Labs AB 1999. -# -# See the README file for information on usage and redistribution. -# - -## -# This module provides constants and clear-text names for various -# well-known TIFF tags. -## - -## -# Map tag numbers (or tag number, tag value tuples) to tag names. 
- -TAGS = { - - 254: "NewSubfileType", - 255: "SubfileType", - 256: "ImageWidth", - 257: "ImageLength", - 258: "BitsPerSample", - - 259: "Compression", - (259, 1): "Uncompressed", - (259, 2): "CCITT 1d", - (259, 3): "Group 3 Fax", - (259, 4): "Group 4 Fax", - (259, 5): "LZW", - (259, 6): "JPEG", - (259, 32773): "PackBits", - - 262: "PhotometricInterpretation", - (262, 0): "WhiteIsZero", - (262, 1): "BlackIsZero", - (262, 2): "RGB", - (262, 3): "RGB Palette", - (262, 4): "Transparency Mask", - (262, 5): "CMYK", - (262, 6): "YCbCr", - (262, 8): "CieLAB", - (262, 32803): "CFA", # TIFF/EP, Adobe DNG - (262, 32892): "LinearRaw", # Adobe DNG - - 263: "Thresholding", - 264: "CellWidth", - 265: "CellHeight", - 266: "FillOrder", - 269: "DocumentName", - - 270: "ImageDescription", - 271: "Make", - 272: "Model", - 273: "StripOffsets", - 274: "Orientation", - 277: "SamplesPerPixel", - 278: "RowsPerStrip", - 279: "StripByteCounts", - - 280: "MinSampleValue", - 281: "MaxSampleValue", - 282: "XResolution", - 283: "YResolution", - 284: "PlanarConfiguration", - (284, 1): "Contigous", - (284, 2): "Separate", - - 285: "PageName", - 286: "XPosition", - 287: "YPosition", - 288: "FreeOffsets", - 289: "FreeByteCounts", - - 290: "GrayResponseUnit", - 291: "GrayResponseCurve", - 292: "T4Options", - 293: "T6Options", - 296: "ResolutionUnit", - 297: "PageNumber", - - 301: "TransferFunction", - 305: "Software", - 306: "DateTime", - - 315: "Artist", - 316: "HostComputer", - 317: "Predictor", - 318: "WhitePoint", - 319: "PrimaryChromaticies", - - 320: "ColorMap", - 321: "HalftoneHints", - 322: "TileWidth", - 323: "TileLength", - 324: "TileOffsets", - 325: "TileByteCounts", - - 332: "InkSet", - 333: "InkNames", - 334: "NumberOfInks", - 336: "DotRange", - 337: "TargetPrinter", - 338: "ExtraSamples", - 339: "SampleFormat", - - 340: "SMinSampleValue", - 341: "SMaxSampleValue", - 342: "TransferRange", - - 347: "JPEGTables", - - # obsolete JPEG tags - 512: "JPEGProc", - 513: "JPEGInterchangeFormat", - 514: "JPEGInterchangeFormatLength", - 515: "JPEGRestartInterval", - 517: "JPEGLosslessPredictors", - 518: "JPEGPointTransforms", - 519: "JPEGQTables", - 520: "JPEGDCTables", - 521: "JPEGACTables", - - 529: "YCbCrCoefficients", - 530: "YCbCrSubSampling", - 531: "YCbCrPositioning", - 532: "ReferenceBlackWhite", - - # XMP - 700: "XMP", - - 33432: "Copyright", - - # various extensions (should check specs for "official" names) - 33723: "IptcNaaInfo", - 34377: "PhotoshopInfo", - - # Exif IFD - 34665: "ExifIFD", - - # ICC Profile - 34675: "ICCProfile", - - # Adobe DNG - 50706: "DNGVersion", - 50707: "DNGBackwardVersion", - 50708: "UniqueCameraModel", - 50709: "LocalizedCameraModel", - 50710: "CFAPlaneColor", - 50711: "CFALayout", - 50712: "LinearizationTable", - 50713: "BlackLevelRepeatDim", - 50714: "BlackLevel", - 50715: "BlackLevelDeltaH", - 50716: "BlackLevelDeltaV", - 50717: "WhiteLevel", - 50718: "DefaultScale", - 50741: "BestQualityScale", # FIXME! 
Dictionary contains duplicate keys 50741 - 50719: "DefaultCropOrigin", - 50720: "DefaultCropSize", - 50778: "CalibrationIlluminant1", - 50779: "CalibrationIlluminant2", - 50721: "ColorMatrix1", - 50722: "ColorMatrix2", - 50723: "CameraCalibration1", - 50724: "CameraCalibration2", - 50725: "ReductionMatrix1", - 50726: "ReductionMatrix2", - 50727: "AnalogBalance", - 50728: "AsShotNeutral", - 50729: "AsShotWhiteXY", - 50730: "BaselineExposure", - 50731: "BaselineNoise", - 50732: "BaselineSharpness", - 50733: "BayerGreenSplit", - 50734: "LinearResponseLimit", - 50735: "CameraSerialNumber", - 50736: "LensInfo", - 50737: "ChromaBlurRadius", - 50738: "AntiAliasStrength", - 50740: "DNGPrivateData", - 50741: "MakerNoteSafety", # FIXME! Dictionary contains duplicate keys 50741 - - #ImageJ - 50838: "ImageJMetaDataByteCounts", # private tag registered with Adobe - 50839: "ImageJMetaData", # private tag registered with Adobe -} - -## -# Map type numbers to type names. - -TYPES = { - - 1: "byte", - 2: "ascii", - 3: "short", - 4: "long", - 5: "rational", - 6: "signed byte", - 7: "undefined", - 8: "signed short", - 9: "signed long", - 10: "signed rational", - 11: "float", - 12: "double", - -} diff --git a/Darwin/lib/python3.4/site-packages/PIL/_imaging.so b/Darwin/lib/python3.4/site-packages/PIL/_imaging.so deleted file mode 100755 index 60376fd..0000000 Binary files a/Darwin/lib/python3.4/site-packages/PIL/_imaging.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/PIL/_imagingft.so b/Darwin/lib/python3.4/site-packages/PIL/_imagingft.so deleted file mode 100755 index c7693b7..0000000 Binary files a/Darwin/lib/python3.4/site-packages/PIL/_imagingft.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/PIL/_imagingmath.so b/Darwin/lib/python3.4/site-packages/PIL/_imagingmath.so deleted file mode 100755 index 568939d..0000000 Binary files a/Darwin/lib/python3.4/site-packages/PIL/_imagingmath.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/PIL/_imagingmorph.so b/Darwin/lib/python3.4/site-packages/PIL/_imagingmorph.so deleted file mode 100755 index de4d7f0..0000000 Binary files a/Darwin/lib/python3.4/site-packages/PIL/_imagingmorph.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/PIL/_imagingtk.so b/Darwin/lib/python3.4/site-packages/PIL/_imagingtk.so deleted file mode 100755 index 9d23237..0000000 Binary files a/Darwin/lib/python3.4/site-packages/PIL/_imagingtk.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/PKG-INFO deleted file mode 100644 index 0974e84..0000000 --- a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,2491 +0,0 @@ -Metadata-Version: 1.1 -Name: Pillow -Version: 2.5.3 -Summary: Python Imaging Library (Fork) -Home-page: http://python-pillow.github.io/ -Author: Alex Clark (fork author) -Author-email: aclark@aclark.net -License: Standard PIL License -Description: Pillow - ====== - - *Python Imaging Library (Fork)* - - Pillow is the "friendly" PIL fork by Alex Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. For more information, please `read the documentation `_. - - .. image:: https://travis-ci.org/python-pillow/Pillow.svg?branch=master - :target: https://travis-ci.org/python-pillow/Pillow - :alt: Travis CI build status - - .. 
image:: https://pypip.in/v/Pillow/badge.png - :target: https://pypi.python.org/pypi/Pillow/ - :alt: Latest PyPI version - - .. image:: https://pypip.in/d/Pillow/badge.png - :target: https://pypi.python.org/pypi/Pillow/ - :alt: Number of PyPI downloads - - .. image:: https://coveralls.io/repos/python-pillow/Pillow/badge.png?branch=master - :target: https://coveralls.io/r/python-pillow/Pillow?branch=master - - - Changelog (Pillow) - ================== - - 2.5.3 (2014-08-18) - ------------------ - - - Fixed CVE-2014-3598, a DOS in the Jpeg2KImagePlugin - [Andrew Drake] - - - 2.5.2 (2014-08-13) - ------------------ - - - Fixed CVE-2014-3589, a DOS in the IcnsImagePlugin - [Andrew Drake] - - 2.5.1 (2014-07-10) - ------------------ - - - Fixed install issue if Multiprocessing.Pool is not available - [wiredfool] - - - 32bit mult overflow fix #782 - [wiredfool] - - 2.5.0 (2014-07-01) - ------------------ - - - Imagedraw rewrite - [terseus, wiredfool] - - - Add support for multithreaded test execution - [wiredfool] - - - Prevent shell injection #748 - [mbrown1413, wiredfool] - - - Support for Resolution in BMP files #734 - [gcq] - - - Fix error in setup.py for Python 3 - [matthew-brett] - - - Pyroma fix and add Python 3.4 to setup metadata #742 - [wirefool] - - - Top level flake8 fixes #741 - [aclark] - - - Remove obsolete Animated Raster Graphics (ARG) support - [hugovk] - - - Fix test_imagedraw failures #727 - [cgohlke] - - - Fix AttributeError: class Image has no attribute 'DEBUG' #726 - [cgohlke] - - - Fix msvc warning: 'inline' : macro redefinition #725 - [cgohlke] - - - Cleanup #654 - [dvska, hugovk, wiredfool] - - - 16-bit monochrome support for JPEG2000 - [videan42] - - - Fixed ImagePalette.save - [brightpisces] - - - Support JPEG qtables - [csinchok] - - - Add binary morphology addon - [dov, wiredfool] - - - Decompression bomb protection - [hugovk] - - - Put images in a single directory - [hugovk] - - - Support OpenJpeg 2.1 - [al45tair] - - - Remove unistd.h #include for all platforms - [wiredfool] - - - Use unittest for tests - [hugovk] - - - ImageCms fixes - [hugovk] - - - Added more ImageDraw tests - [hugovk] - - - Added tests for Spider files - [hugovk] - - - Use libtiff to write any compressed tiff files - [wiredfool] - - - Support for pickling Image objects - [hugovk] - - - Fixed resolution handling for EPS thumbnails - [eliempje] - - - Fixed rendering of some binary EPS files (Issue #302) - [eliempje] - - - Rename variables not to use built-in function names - [hugovk] - - - Ignore junk JPEG markers - [hugovk] - - - Change default interpolation for Image.thumbnail to Image.ANTIALIAS - [hugovk] - - - Add tests and fixes for saving PDFs - [hugovk] - - - Remove transparency resource after P->RGBA conversion - [hugovk] - - - Clean up preprocessor cruft for Windows - [CounterPillow] - - - Adjust Homebrew freetype detection logic - [jacknagel] - - - Added Image.close, context manager support. - [wiredfool] - - - Added support for 16 bit PGM files. - [wiredfool] - - - Updated OleFileIO to version 0.30 from upstream - [hugovk] - - - Added support for additional TIFF floating point format - [Hijackal] - - - Have the tempfile use a suffix with a dot - [wiredfool] - - - Fix variable name used for transparency manipulations - [nijel] - - 2.4.0 (2014-04-01) - ------------------ - - - Indexed Transparency handled for conversions between L, RGB, and P modes. 
Fixes #510 - [wiredfool] - - - Conversions enabled from RGBA->P, Fixes #544 - [wiredfool] - - - Improved icns support - [al45tair] - - - Fix libtiff leaking open files, fixes #580 - [wiredfool] - - - Fixes for Jpeg encoding in Python 3, fixes #577 - [wiredfool] - - - Added support for JPEG 2000 - [al45tair] - - - Add more detailed error messages to Image.py - [larsmans] - - - Avoid conflicting _expand functions in PIL & MINGW, fixes #538 - [aclark] - - - Merge from Philippe Lagadec’s OleFileIO_PL fork - [vadmium] - - - Fix ImageColor.getcolor - [homm] - - - Make ICO files work with the ImageFile.Parser interface, fixes #522 - [wiredfool] - - - Handle 32bit compiled python on 64bit architecture - [choppsv1] - - - Fix support for characters >128 using .pcf or .pil fonts in Py3k. Fixes #505 - [wiredfool] - - - Skip CFFI test earlier if it's not installed - [wiredfool] - - - Fixed opening and saving odd sized .pcx files, fixes #523 - [wiredfool] - - - Fixed palette handling when converting from mode P->RGB->P - [d_schmidt] - - - Fixed saving mode P image as a PNG with transparency = palette color 0 - [d-schmidt] - - - Improve heuristic used when saving progressive and optimized JPEGs with high quality values - [e98cuenc] - - - Fixed DOS with invalid palette size or invalid image size in BMP file - [wiredfool] - - - Added support for BMP version 4 and 5 - [eddwardo, wiredfool] - - - Fix segfault in getfont when passed a memory resident font - [wiredfool] - - - Fix crash on Saving a PNG when icc-profile is None - [brutasse] - - - Cffi+Python implementation of the PixelAccess object - [wiredfool] - - - PixelAccess returns unsigned ints for I16 mode - [wiredfool] - - - Minor patch on booleans + Travis - [sciunto] - - - Look in multiarch paths in GNU platforms - [pinotree] - - - Add arch support for pcc64, s390, s390x, armv7l, aarch64 - [manisandro] - - - Add arch support for ppc - [wiredfool] - - - Correctly quote file names for WindowsViewer command - [cgohlke] - - - Prefer homebrew freetype over X11 freetype (but still allow both) - [dmckeone] - - 2.3.1 (2014-03-14) - ------------------ - - - Fix insecure use of tempfile.mktemp (CVE-2014-1932 CVE-2014-1933) - [wiredfool] - - 2.3.0 (2014-01-01) - ------------------ - - - Stop leaking filename parameter passed to getfont - [jpharvey] - - - Report availability of LIBTIFF during setup and selftest - [cgohlke] - - - Fix msvc build error C1189: "No Target Architecture" - [cgohlke] - - - Fix memory leak in font_getsize - [wiredfool] - - - Correctly prioritize include and library paths - [ohanar] - - - Image.point fixes for numpy.array and docs - [wiredfool] - - - Save the transparency header by default for PNGs - [wiredfool] - - - Support for PNG tRNS header when converting from RGB->RGBA - [wiredfool] - - - PyQT5 Support - [wiredfool] - - - Updates for saving color tiffs w/compression using libtiff - [wiredfool] - - - 2gigapix image fixes and redux - [wiredfool] - - - Save arbitrary tags in Tiff image files - [wiredfool] - - - Quote filenames and title before using on command line - [tmccombs] - - - Fixed Viewer.show to return properly - [tmccombs] - - - Documentation fixes - [wiredfool] - - - Fixed memory leak saving images as webp when webpmux is available - [cezarsa] - - - Fix compiling with FreeType 2.5.1 - [stromnov] - - - Adds directories for NetBSD. 
- [deepy] - - - Support RGBA TIFF with missing ExtraSamples tag - [cgohlke] - - - Lossless WEBP Support - [wiredfool] - - - Take compression as an option in the save call for tiffs - [wiredfool] - - - Add support for saving lossless WebP. Just pass 'lossless=True' to save() - [liftoff] - - - LCMS support upgraded from version 1 to version 2, fixes #343 - [wiredfool] - - - Added more raw decoder 16 bit pixel formats - [svanheulen] - - - Document remaining Image* modules listed in PIL handbook - [irksep] - - - Document ImageEnhance, ImageFile, ImageFilter, ImageFont, ImageGrab, ImageMath, and ImageOps - [irksep] - - - Port and update docs for Image, ImageChops, ImageColor, and ImageDraw - [irksep] - - - Move or copy content from README.rst to docs/ - [irksep] - - - Respect CFLAGS/LDFLAGS when searching for headers/libs - [iElectric] - - - Port PIL Handbook tutorial and appendices - [irksep] - - - Alpha Premultiplication support for transform and resize - [wiredfool] - - - Fixes to make Pypy 2.1.0 work on Ubuntu 12.04/64 - [wiredfool] - - 2.2.2 (2013-12-11) - ------------------ - - - Fix #427: compiling with FreeType 2.5.1 - [stromnov] - - 2.2.1 (2013-10-02) - ------------------ - - - Fix #356: Error installing Pillow 2.2.0 on Mac OS X (due to hard dep on brew) - [wiredfool] - - 2.2.0 (2013-10-02) - ------------------ - - - Fix #254: Bug in image transformations resulting from uninitialized memory - [nikmolnar] - - - Fix for encoding of b_whitespace, similar to closed issue #272 - [mhogg] - - - Fix #273: Add numpy array interface support for 16 and 32 bit integer modes - [cgohlke] - - - Partial fix for #290: Add preliminary support for TIFF tags. - [wiredfool] - - - Fix #251 and #326: circumvent classification of pngtest_bad.png as malware - [cgohlke] - - - Add typedef uint64_t for MSVC. - [cgohlke] - - - Fix #329: setup.py: better support for C_INCLUDE_PATH, LD_RUN_PATH, etc. - [nu774] - - - Fix #328: _imagingcms.c: include windef.h to fix build issue on MSVC - [nu774] - - - Automatically discover homebrew include/ and lib/ paths on OSX - [donspaulding] - - - Fix bytes which should be bytearray - [manisandro] - - - Add respective paths for C_INCLUDE_PATH, LD_RUN_PATH (rpath) to build - if specified as environment variables. - [seanupton] - - - Fix #312 + gif optimize improvement - [d-schmidt] - - - Be more tolerant of tag read failures - [ericbuehl] - - - Fix #318: Catch truncated zTXt errors. - [vytisb] - - - Fix IOError when saving progressive JPEGs. - [e98cuenc] - - - Add RGBA support to ImageColor - [yoavweiss] - - - Fix #304: test for `str`, not `"utf-8"`. - [mjpieters] - - - Fix missing import os in _util.py. - [mnowotka] - - - Added missing exif tags. - [freyes] - - - Fail on all import errors, fixes #298. - [macfreek, wiredfool] - - - Fixed Windows fallback (wasn't using correct file in Windows fonts). - [lmollea] - - - Moved ImageFile and ImageFileIO comments to docstrings. - [freyes] - - - Restore compatibility with ISO C. - [cgohlke] - - - Use correct format character for C int type. - [cgohlke] - - - Allocate enough memory to hold pointers in encode.c. - [cgohlke] - - - Fix #279, fillorder double shuffling bug when FillOrder ==2 and decoding using libtiff. - [wiredfool] - - - Moved Image module comments to docstrings. - [freyes] - - - Add 16-bit TIFF support, fixes #274. - [wiredfool] - - - Ignore high ascii characters in string.whitespace, fixes #272. - [wiredfool] - - - Added clean/build to tox to make it behave like travis. 
- [freyes] - - - Adding support for metadata in webp images. - [heynemann] - - 2.1.0 (2013-07-02) - ------------------ - - - Add /usr/bin/env python shebangs to all scripts in /Scripts. - - - Add several TIFF decoders and encoders. - - - Added support for alpha transparent webp images. - - - Adding Python 3 support for StringIO. - - - Adding Python3 basestring compatibility without changing basestring. - - - Fix webp encode errors on win-amd64. - - - Better fix for ZeroDivisionError in ImageOps.fit for image.size height is 1. - - - Better support for ICO images. - - - Changed PY_VERSION_HEX, fixes #166. - - - Changes to put everything under the PIL namespace. - [wiredfool] - - - Changing StringIO to BytesIO. - - - Cleanup whitespace. - [Arfrever] - - - Don't skip 'import site' on initialization when running tests for inplace builds. - [cgohlke] - - - Enable warnings for test suite. - - - Fix for ZeroDivisionError in ImageOps.fit for image.size == (1,1) - - - Fix for if isinstance(filter, collections.Callable) crash. Python bug #7624 on <2.6.6 - - - Fix #193: remove double typedef declaration. - - - Fix msvc compile errors (#230). - - - Fix rendered characters have been chipped for some TrueType fonts. - - - Fix usage of pilfont.py script. - - - Fresh start for docs, generated by sphinx-apidoc. - - - Introduce --enable-x and fail if it is given and x is not available. - - - Partial work to add a wrapper for WebPGetFeatures to correctly support #204. - - - Significant performance improvement of `alpha_composite` function. - - - Support explicitly disabling features via --disable-* options. - - - Support selftest.py --installed, fixes #263. - - - Transparent WebP Support, #204 - - - Use PyCapsule for py3.1, fixes #237. - - - Workaround for: http://bugs.python.org/16754 in 3.2.x < 3.2.4 and 3.3.0. - - 2.0.0 (2013-03-15) - ------------------ - - .. Note:: Special thanks to Christoph Gohlke and Eric Soroos for assisting with a pre-PyCon 2013 release! - - - Many other bug fixes and enhancements by many other people. - - - Add Python 3 support. (Pillow >= 2.0.0 supports Python 2.6, 2.7, 3.2, 3.3. Pillow < 2.0.0 supports Python 2.4, 2.5, 2.6, 2.7.) - [fluggo] - - - Add PyPy support (experimental, please see: https://github.com/python-pillow/Pillow/issues/67) - - - Add WebP support. - [lqs] - - - Add Tiff G3/G4 support (experimental) - [wiredfool] - - - Backport PIL's PNG/Zip improvements. - [olt] - - - Various 64 bit and Windows fixes. - [cgohlke] - - - Add testing suite. - [cgohlke, fluggo] - - - Added support for PNG images with transparency palette. - [d-schmidt] - - 1.7.8 (2012-11-01) - ------------------ - - - Removed doctests.py that made tests of other packages fail. - [thomasdesvenain] - - - Fix opening psd files with RGBA layers when A mode is not of type 65535 but 3. - Fixes #3 - [thomasdesvenain] - - - 1.7.7 (2012-04-04) - ------------------ - - - UNDEF more types before including windows headers - [mattip] - - 1.7.6 (2012-01-20) - ------------------ - - - Bug fix: freetype not found on Mac OS X with case-sensitive filesystem - [gjo] - - - Bug fix: Backport fix to split() after open() (regression introduced in PIL 1.1.7). 
- [sfllaw] - - 1.7.5 (2011-09-07) - ------------------ - - - Fix for sys.platform = "linux3" - [blueyed] - - - Package cleanup and additional documentation - [aclark] - - 1.7.4 (2011-07-21) - ------------------ - - - Fix brown bag release - [aclark] - - 1.7.3 (2011-07-20) - ------------------ - - - Fix : resize need int values, append int conversion in thumbnail method - [harobed] - - 1.7.2 (2011-06-02) - ------------------ - - - Bug fix: Python 2.4 compat - [aclark] - - 1.7.1 (2011-05-31) - ------------------ - - - More multi-arch support - [SteveM, regebro, barry, aclark] - - 1.7.0 (2011-05-27) - ------------------ - - - Add support for multi-arch library directory /usr/lib/x86_64-linux-gnu - [aclark] - - 1.6 (12/01/2010) - ---------------- - - - Bug fix: /usr/x11/include should be added to include_dirs not library_dirs - [elro] - - - Doc fixes - [aclark] - - 1.5 (11/28/2010) - ---------------- - - - Module and package fixes - [aclark] - - 1.4 (11/28/2010) - ---------------- - - - Doc fixes - [aclark] - - 1.3 (11/28/2010) - ---------------- - - - Add support for /lib64 and /usr/lib64 library directories on Linux - [aclark] - - - Doc fixes - [aclark] - - 1.2 (08/02/2010) - ---------------- - - - On OS X also check for freetype2 in the X11 path - [jezdez] - - - Doc fixes - [aclark] - - 1.1 (07/31/2010) - ---------------- - - - Removed setuptools_hg requirement - [aclark] - - - Doc fixes - [aclark] - - 1.0 (07/30/2010) - ---------------- - - - Remove support for ``import Image``, etc. from the standard namespace. ``from PIL import Image`` etc. now required. - - Forked PIL based on `Hanno Schlichting's re-packaging `_ - [aclark] - - .. Note:: What follows is the original PIL 1.1.7 CHANGES - - :: - - -*- coding: utf-8 -*- - - The Python Imaging Library - $Id$ - - ACKNOWLEDGEMENTS: PIL wouldn't be what it is without the help of: - David Ascher, Phil Austin, Douglas Bagnall, Larry Bates, Anthony - Baxter, William Baxter, Denis Benoit, Jan Blom, Duncan Booth, Alexey - Borzenkov, Jeff Breidenbach, Roger Burnham, Zac Burns, Gene Cash, - Kevin Cazabon, Fred Clare, Greg Coats, Chris Cogdon, Greg Couch, Bill - Crutchfield, Abel Deuring, Tim Docker, Fred Drake, Graham Dumpleton, - Matthew Ellis, Eric Etheridge, Daniel Fetchinson, Robin Friedrich, - Pier Paolo Glave, Federico Di Gregorio, Markus Gritsch, Daniel - Haertle, Greg Hamilton, Mark Hammond, Bernhard Herzog, Rob Hooft, Bob - Ippolito, Jack Jansen, Bill Janssen, Edward Jones, Richard Jones, - Håkan Karlsson, Robert Kern, David Kirtley, Bob Klimek, Matthias - Klose, Andrew Kuchling, Magnus Källström, Victor Lacina, Ben Last, - Hamish Lawson, Cesare Leonardi, Andrew MacIntyre, Jan Matejek, Naveen - Michaud-Agrawal, Gordon McMillan, Skip Montanaro, Fredrik Nehr, - Russell Nelson, Luciano Nocera, Travis Oliphant, Piet van Oostrum, - Richard Oudkerk, Paul Pharr, Andres Polit, Conrado Porto Lopes Gouvêa, - Eric Raymond, Victor Reijs, Bertil Reinhammar, Nicholas Riley, Don - Rozenberg, Toby Sargeant, Barry Scott, Les Schaffer, Joel Shprentz, - Klamer Shutte, Gene Skonicki, Niki Spahiev, D. Alan Stewart, Perry - Stoll, Paul Svensson, Ulrik Svensson, Miki Tebeka, Michael van - Tellingen, Ivan Tkatchev, Dan Torop, Adam Twardoch, Rune Uhlin, Dmitry - Vasiliev, Sasha Voynow, Charles Waldman, Collin Winter, Dan Wolfe, - Ka-Ping Yee, and many others (if your name should be on this list, let - me know.) - - *** Changes from release 1.1.6 to 1.1.7 *** - - This section may not be fully complete. 
For changes since this file - was last updated, see the repository revision history: - - http://bitbucket.org/effbot/pil-2009-raclette/changesets/ - - (1.1.7 final) - - + Set GIF loop info property to the number of iterations if a NETSCAPE - loop extension is present, instead of always setting it to 1 (from - Valentino Volonghi). - - (1.1.7c1 released) - - + Improved PNG compression (from Alexey Borzenkov). - - + Read interlaced PNG files (from Conrado Porto Lopes Gouvêa) - - + Added various TGA improvements from Alexey Borzenkov, including - support for specifying image orientation. - - + Bumped block threshold to 16 megabytes, made size estimation a bit - more accurate. This speeds up allocation of large images. - - + Fixed rounding error in ImagingDrawWideLine. - - "gormish" writes: ImagingDrawWideLine() in Draw.c has a bug in every - version I've seen, which leads to different width lines depending on - the order of the points in the line. This is especially bad at some - angles where a 'width=2' line can completely disappear. - - + Added support for RGBA mode to the SGI module (based on code by - Karsten Hiddemann). - - + Handle repeated IPTC tags (adapted from a patch by Eric Bruning). - - Eric writes: According to the specification, some IPTC tags can be - repeated, e.g., tag 2:25 (keywords). PIL 1.1.6 only retained the last - instance of that tag. Below is a patch to store all tags. If there are - multiple tag instances, they are stored in a (python) list. Single tag - instances remain as strings. - - + Fixed potential crash in ImageFilter for small target images - (reported by Zac Burns and Daniel Fetchinson). - - + Use BMP instead of JPEG as temporary show format on Mac OS X. - - + Fixed putpixel/new for I;16 with colors > 255. - - + Added integer power support to ImagingMath. - - + Added limited support for I;16L mode (explicit little endian). - - + Moved WMF support into Image.core; enable WMF rendering by default - if renderer is available. - - + Mark the ARG plugin as obsolete. - - + Added version query mechanism to ImageCms and ImageFont, for - debugging. - - + Added (experimental) ImageCms function for fetching the ICC profile - for the current display (currently Windows only). - - Added HWND/HDC support to ImageCms.get_display_profile(). - - + Added WMF renderer (Windows only). - - + Added ImagePointHandler and ImageTransformHandler mixins; made - ImageCmsTransform work with im.point. - - + Fixed potential endless loop in the XVThumbnail reader (from Nikolai - Ugelvik). - - + Added Kevin Cazabon's pyCMS package. - - The C code has been moved to _imagingcms.c, the Python interface - module is installed as PIL.ImageCMS. - - Added support for in-memory ICC profiles. - - Unified buildTransform and buildTransformFromOpenProfiles. - - The profile can now be either a filename, a profile object, or a - file-like object containing an in-memory profile. - - Additional fixes from Florian Böch: - - Very nice - it just needs LCMS flags support so we can use black - point compensation and softproofing :) See attached patches. They - also fix a naming issue which could cause confusion - display - profile (ImageCms wording) actually means proof profile (lcms - wording), so I changed variable names and docstrings where - applicable. Patches are tested under Python 2.6. - - + Improved support for layer names in PSD files (from Sylvain Baubeau) - - Sylvain writes: I needed to be able to retrieve the names of the - layers in a PSD files. 
But PsdImagePlugin.py didn't do the job so I - wrote this very small patch. - - + Improved RGBA support for ImageTk for 8.4 and newer (from Con - Radchenko). - - This replaces the slow run-length based encoding model with true - compositing at the Tk level. - - + Added support for 16- and 32-bit images to McIdas loader. - - Based on file samples and stand-alone reader code provided by Craig - Swank. - - + Added ImagePalette support to putpalette. - - + Fixed problem with incremental parsing of PNG files. - - + Make selftest.py report non-zero status on failure (from Mark - Sienkiewicz) - - + Add big endian save support and multipage infrastructure to the TIFF - writer (from Sebastian Haase). - - + Handle files with GPS IFD but no basic EXIF IFD (reported by Kurt - Schwehr). - - + Added zTXT support (from Andrew Kuchling via Lowell Alleman). - - + Fixed potential infinite loop bug in ImageFont (from Guilherme Polo). - - + Added sample ICC profiles (from Kevin Cazabon) - - + Fixed array interface for I, F, and RGBA/RGBX images. - - + Added Chroma subsampling support for JPEG (from Justin Huff). - - Justin writes: Attached is a patch (against PIL 1.1.6) to provide - control over the chroma subsampling done by the JPEG encoder. This - is often useful for reducing compression artifacts around edges of - clipart and text. - - + Added USM/Gaussian Blur code from Kevin Cazabon. - - + Fixed bug w. uninitialized image data when cropping outside the - source image. - - + Use ImageShow to implement the Image.show method. - - Most notably, this picks the 'display' utility when available. It - also allows application code to register new display utilities via - the ImageShow registry. - - + Release the GIL in the PNG compressor (from Michael van Tellingen). - - + Revised JPEG CMYK handling. - - Always assume Adobe behaviour, both when reading and writing (based on - a patch by Kevin Cazabon, and test data by Tim V. and Charlie Clark, and - additional debugging by Michael van Tellingen). - - + Support for preserving ICC profiles (by Florian Böch via Tim Hatch). - - Florian writes: - - It's a beta, so still needs some testing, but should allow you to: - - retain embedded ICC profiles when saving from/to JPEG, PNG, TIFF. - Existing code doesn't need to be changed. - - access embedded profiles in JPEG, PNG, PSD, TIFF. - - It also includes patches for TIFF to retain IPTC, Photoshop and XMP - metadata when saving as TIFF again, read/write TIFF resolution - information correctly, and to correct inverted CMYK JPEG files. - - + Fixed potential memory leak in median cut quantizer (from Evgeny Salmin). - - + Fixed OverflowError when reading upside-down BMP images. - - + Added resolution save option for PDF files. - - Andreas Kostyrka writes: I've included a patched PdfImagePlugin.py - based on 1.1.6 as included in Ubuntu, that supports a "resolution" - save option. Not great, but it makes the PDF saving more useful by - allowing PDFs that are not exactly 72dpi. - - + Look for Tcl/Tk include files in version-specific include directory - (from Encolpe Degoute). - - + Fixed grayscale rounding error in ImageColor.getcolor (from Tim - Hatch). - - + Fixed calculation of mean value in ImageEnhance.Contrast (reported - by "roop" and Scott David Daniels). - - + Fixed truetype positioning when first character has a negative left - bearing (from Ned Batchelder): - - Ned writes: In PIL 1.1.6, ImageDraw.text will position the string - incorrectly if the first character has a negative left bearing. 
To - see the problem, show a string like "///" in an italic font. The - first slash will be clipped at the left, and the string will be - mis-positioned. - - + Fixed resolution unit bug in tiff reader/writer (based on code by - Florian Höch, Gary Bloom, and others). - - + Added simple transparency support for RGB images (reported by - Sebastian Spaeth). - - + Added support for Unicode filenames in ImageFont.truetype (from Donn - Ingle). - - + Fixed potential crash in ImageFont.getname method (from Donn Ingle). - - + Fixed encoding issue in PIL/WalImageFile (from Santiago M. Mola). - - *** Changes from release 1.1.5 to 1.1.6 *** - - (1.1.6 released) - - + Fixed some 64-bit compatibility warnings for Python 2.5. - - + Added threading support for the Sane driver (from Abel Deuring). - - (1.1.6b2 released) - - + Added experimental "floodfill" function to the ImageDraw module - (based on code by Eric Raymond). - - + The default arguments for "frombuffer" doesn't match "fromstring" - and the documentation; this is a bug, and will most likely be fixed - in a future version. In this release, PIL prints a warning message - instead. To silence the warning, change any calls of the form - "frombuffer(mode, size, data)" to - - frombuffer(mode, size, data, "raw", mode, 0, 1) - - + Added "fromarray" function, which takes an object implementing the - NumPy array interface and creates a PIL Image from it. (from Travis - Oliphant). - - + Added NumPy array interface support (__array_interface__) to the - Image class (based on code by Travis Oliphant). - - This allows you to easily convert between PIL image memories and - NumPy arrays: - - import numpy, Image - - im = Image.open('lena.jpg') - - a = numpy.asarray(im) # a is readonly - - im = Image.fromarray(a) - - + Fixed CMYK polarity for JPEG images, by treating all images as - "Adobe CMYK" images. (thanks to Cesare Leonardi and Kevin Cazabon - for samples, debugging, and patches). - - (1.1.6b1 released) - - + Added 'expand' option to the Image 'rotate' method. If true, the - output image is made large enough to hold the entire rotated image. - - + Changed the ImageDraw 'line' method to always draw the last pixel in - a polyline, independent of line angle. - - + Fixed bearing calculation and clipping in the ImageFont truetype - renderer; this could lead to clipped text, or crashes in the low- - level _imagingft module. (based on input from Adam Twardoch and - others). - - + Added ImageQt wrapper module, for converting PIL Image objects to - QImage objects in an efficient way. - - + Fixed 'getmodebands' to return the number of bands also for "PA" - and "LA" modes. Added 'getmodebandnames' helper that return the - band names. - - (1.1.6a2 released) - - + Added float/double support to the TIFF loader (from Russell - Nelson). - - + Fixed broken use of realloc() in path.c (from Jan Matejek) - - + Added save support for Spider images (from William Baxter). - - + Fixed broken 'paste' and 'resize' operations in pildriver - (from Bill Janssen). - - + Added support for duplex scanning to the Sane interface (Abel - Deuring). - - (1.1.6a1 released) - - + Fixed a memory leak in "convert(mode)", when converting from - L to P. - - + Added pixel access object. The "load" method now returns a - access object that can be used to directly get and set pixel - values, using ordinary [x, y] notation: - - pixel = im.load() - v = pixel[x, y] - pixel[x, y] = v - - If you're accessing more than a few pixels, this is a lot - faster than using getpixel/putpixel. 
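For reference, a minimal sketch of the pixel access object described above; the file name is purely illustrative and the packaged "from PIL import Image" form is used:

    from PIL import Image

    im = Image.open("example.png").convert("L")   # hypothetical input file
    pix = im.load()                               # pixel access object

    # Read and write pixels directly with [x, y] indexing.
    width, height = im.size
    for y in range(height):
        for x in range(width):
            pix[x, y] = 255 - pix[x, y]

    # The slower equivalent uses getpixel/putpixel:
    #     im.putpixel((x, y), 255 - im.getpixel((x, y)))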
- - + Fixed building on Cygwin (from Miki Tebeka). - - + Fixed "point(callable)" on unloaded images (reported by Håkan - Karlsson). - - + Fixed size bug in ImageWin.ImageWindow constructor (from Victor - Reijs) - - + Fixed ImageMath float() and int() operations for Python 2.4 - (reported by Don Rozenberg). - - + Fixed "RuntimeError: encoder error -8 in tostring" problem for - wide "RGB", "I", and "F" images. - - + Fixed line width calculation. - - (1.1.6a0 released) - - + Fixed byte order issue in Image.paste(ink) (from Ka-Ping Yee). - - + Fixed off-by-0.5 errors in the ANTIALIAS code (based on input - from Douglas Bagnall). - - + Added buffer interface support to the Path constructor. If - a buffer is provided, it is assumed to contain a flat array - of float coordinates (e.g. array.array('f', seq)). - - + Added new ImageMath module. - - + Fixed ImageOps.equalize when used with a small number of distinct - values (reported by David Kirtley). - - + Fixed potential integer division in PSDraw.image (from Eric Etheridge). - - *** Changes from release 1.1 to 1.1.5 *** - - (1.1.5c2 and 1.1.5 final released) - - + Added experimental PERSPECTIVE transform method (from Jeff Breiden- - bach). - - (1.1.5c1 released) - - + Make sure "thumbnail" never generates zero-wide or zero-high images - (reported by Gene Skonicki) - - + Fixed a "getcolors" bug that could result in a zero count for some - colors (reported by Richard Oudkerk). - - + Changed default "convert" palette to avoid "rounding errors" when - round-tripping white source pixels (reported by Henryk Gerlach and - Jeff Epler). - - (1.1.5b3 released) - - + Don't crash in "quantize" method if the number of colors requested - is larger than 256. This release raises a ValueError exception; - future versions may return a mode "RGB" image instead (reported - by Richard Oudkerk). - - + Added WBMP read/write support (based on code by Duncan Booth). - - (1.1.5b2 released) - - + Added DPI read/write support to the PNG codec. The decoder sets - the info["dpi"] attribute for PNG files with appropriate resolution - settings. The encoder uses the "dpi" option (based on code by Niki - Spahiev). - - + Added limited support for "point" mappings from mode "I" to mode "L". - Only 16-bit values are supported (other values are clipped), the lookup - table must contain exactly 65536 entries, and the mode argument must be - set to "L". - - + Added support for Mac OS X icns files (based on code by Bob Ippolito). - - + Added "ModeFilter" support to the ImageFilter module. - - + Added support for Spider images (from William Baxter). See the - comments in PIL/SpiderImagePlugin.py for more information on this - format. - - (1.1.5b1 released) - - + Added new Sane release (from Ralph Heinkel). See the Sane/README - and Sane/CHANGES files for more information. - - + Added experimental PngInfo chunk container to the PngImageFile - module. This can be used to add arbitrary chunks to a PNG file. - Create a PngInfo instance, use "add" or "add_text" to add chunks, - and pass the instance as the "pnginfo" option when saving the - file. - - + Added "getpalette" method. This returns the palette as a list, - or None if the image has no palette. To modify the palette, use - "getpalette" to fetch the current palette, modify the list, and - put it back using "putpalette". - - + Added optional flattening to the ImagePath "tolist" method. - tolist() or tolist(0) returns a list of 2-tuples, as before. - tolist(1) returns a flattened list instead. 
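A sketch of the "getpalette"/"putpalette" round trip described in the 1.1.5b1 notes above; the file name is illustrative:

    from PIL import Image

    im = Image.open("paletted.gif")      # hypothetical mode "P" input file

    palette = im.getpalette()            # flat [r, g, b, r, g, b, ...] list, or None
    if palette is not None:
        palette = [value // 2 for value in palette]   # darken every entry
        im.putpalette(palette)           # write the modified palette back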
- - (1.1.5a5 released) - - + Fixed BILINEAR/BICUBIC/ANTIALIAS filtering for mode "LA". - - + Added "getcolors()" method. This is similar to the existing histo- - gram method, but looks at color values instead of individual layers, - and returns an unsorted list of (count, color) tuples. - - By default, the method returns None if finds more than 256 colors. - If you need to look for more colors, you can pass in a limit (this - is used to allocate internal tables, so you probably don't want to - pass in too large values). - - + Build improvements: Fixed building under AIX, improved detection of - FreeType2 and Mac OS X framework libraries, and more. Many thanks - to everyone who helped test the new "setup.py" script! - - (1.1.5a4 released) - - + The "save" method now looks for a file format driver before - creating the file. - - + Don't use antialiased truetype fonts when drawing in mode "P", "I", - and "F" images. - - + Rewrote the "setup.py" file. The new version scans for available - support libraries, and configures both the libImaging core library - and the bindings in one step. - - To use specific versions of the libraries, edit the ROOT variables - in the setup.py file. - - + Removed threaded "show" viewer; use the old "show" implementation - instead (Windows). - - + Added deprecation warnings to Image.offset, ImageDraw.setink, and - ImageDraw.setfill. - - + Added width option to ImageDraw.line(). The current implementation - works best for straight lines; it does not support line joins, so - polylines won't look good. - - + ImageDraw.Draw is now a factory function instead of a class. If - you need to create custom draw classes, inherit from the ImageDraw - class. All other code should use the factory function. - - + Fixed loading of certain PCX files (problem reported by Greg - Hamilton, who also provided samples). - - + Changed _imagingft.c to require FreeType 2.1 or newer. The - module can still be built with earlier versions; see comments - in _imagingft.c for details. - - (1.1.5a3 released) - - + Added 'getim' method, which returns a PyCObject wrapping an - Imaging pointer. The description string is set to IMAGING_MAGIC. - See Imaging.h for pointer and string declarations. - - + Fixed reading of TIFF JPEG images (problem reported by Ulrik - Svensson). - - + Made ImageColor work under Python 1.5.2 - - + Fixed division by zero "equalize" on very small images (from - Douglas Bagnall). - - (1.1.5a2 released) - - + The "paste" method now supports the alternative "paste(im, mask)" - syntax (in this case, the box defaults to im's bounding box). - - + The "ImageFile.Parser" class now works also for PNG files with - more than one IDAT block. - - + Added DPI read/write to the TIFF codec, and fixed writing of - rational values. The decoder sets the info["dpi"] attribute - for TIFF files with appropriate resolution settings. The - encoder uses the "dpi" option. - - + Disable interlacing for small (or narrow) GIF images, to - work around what appears to be a hard-to-find bug in PIL's - GIF encoder. - - + Fixed writing of mode "P" PDF images. Made mode "1" PDF - images smaller. - - + Made the XBM reader a bit more robust; the file may now start - with a few whitespace characters. - - + Added support for enhanced metafiles to the WMF driver. The - separate PILWMF kit lets you render both placeable WMF files - and EMF files as raster images. See - - http://effbot.org/downloads#pilwmf - - (1.1.5a1 released) - - + Replaced broken WMF driver with a WMF stub plugin (see below). 
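A sketch of the "getcolors" method from the 1.1.5a5 notes above; the file name is illustrative:

    from PIL import Image

    im = Image.open("photo.jpg")         # hypothetical input file

    colors = im.getcolors()              # None if more than 256 distinct colours
    if colors is None:
        # Pass a larger limit; it sizes the internal tables.
        colors = im.getcolors(maxcolors=im.size[0] * im.size[1])

    # Entries are unsorted (count, colour) tuples.
    count, colour = max(colors, key=lambda entry: entry[0])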
- - + Fixed writing of mode "1", "L", and "CMYK" PDF images (based on - input from Nicholas Riley and others). - - + Fixed adaptive palette conversion for zero-width or zero-height - images (from Chris Cogdon) - - + Fixed reading of PNG images from QuickTime 6 (from Paul Pharr) - - + Added support for StubImageFile plugins, including stub plugins - for BUFR, FITS, GRIB, and HDF5 files. A stub plugin can identify - a given file format, but relies on application code to open and - save files in that format. - - + Added optional "encoding" argument to the ImageFont.truetype - factory. This argument can be used to specify non-Unicode character - maps for fonts that support that. For example, to draw text using - the Microsoft Symbol font, use: - - font = ImageFont.truetype("symbol.ttf", 16, encoding="symb") - draw.text((0, 0), unichr(0xF000 + 0xAA)) - - (note that the symbol font uses characters in the 0xF000-0xF0FF - range) - - Common encodings are "unic" (Unicode), "symb" (Microsoft Symbol), - "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), and "armn" (Apple - Roman). See the FreeType documentation for more information. - - + Made "putalpha" a bit more robust; you can now attach an alpha - layer to a plain "L" or "RGB" image, and you can also specify - constant alphas instead of alpha layers (using integers or colour - names). - - + Added experimental "LA" mode support. - - An "LA" image is an "L" image with an attached transparency layer. - Note that support for "LA" is not complete; some operations may - fail or produce unexpected results. - - + Added "RankFilter", "MinFilter", "MedianFilter", and "MaxFilter" - classes to the ImageFilter module. - - + Improved support for applications using multiple threads; PIL - now releases the global interpreter lock for many CPU-intensive - operations (based on work by Kevin Cazabon). - - + Ignore Unicode characters in the PCF loader (from Andres Polit) - - + Fixed typo in OleFileIO.loadfat, which could affect loading of - FlashPix and Image Composer images (Daniel Haertle) - - + Fixed building on platforms that have Freetype but don't have - Tcl/Tk (Jack Jansen, Luciano Nocera, Piet van Oostrum and others) - - + Added EXIF GPSInfo read support for JPEG files. To extract - GPSInfo information, open the file, extract the exif dictionary, - and check for the key 0x8825 (GPSInfo). If present, it contains - a dictionary mapping GPS keys to GPS values. For a list of keys, - see the EXIF specification. - - The "ExifTags" module contains a GPSTAGS dictionary mapping GPS - tags to tag names. - - + Added DPI read support to the PCX and DCX codecs (info["dpi"]). - - + The "show" methods now uses a built-in image viewer on Windows. - This viewer creates an instance of the ImageWindow class (see - below) and keeps it running in a separate thread. NOTE: This - was disabled in 1.1.5a4. - - + Added experimental "Window" and "ImageWindow" classes to the - ImageWin module. These classes allow you to create a WCK-style - toplevel window, and use it to display raster data. - - + Fixed some Python 1.5.2 issues (to build under 1.5.2, use the - Makefile.pre.in/Setup.in approach) - - + Added support for the TIFF FillOrder tag. PIL can read mode "1", - "L", "P" and "RGB" images with non-standard FillOrder (based on - input from Jeff Breidenbach). - - (1.1.4 final released) - - + Fixed ImageTk build problem on Unix. - - (1.1.4b2 released) - - + Improved building on Mac OS X (from Jack Jansen). - - + Improved building on Windows with MinGW (from Klamer Shutte). 
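A sketch of the more flexible "putalpha" described above, which accepts a constant alpha as well as an alpha layer; the file names are illustrative:

    from PIL import Image

    im = Image.open("photo.jpg").convert("RGB")   # hypothetical input file

    # Attaching an alpha band converts the image to "RGBA" in place;
    # a constant integer gives a uniform alpha across the whole image.
    im.putalpha(128)

    # A same-sized "L" image works as a per-pixel alpha layer instead:
    #     im.putalpha(Image.open("mask.png").convert("L"))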
- - + If no font is specified, ImageDraw now uses the embedded default - font. Use the "load" or "truetype" methods to load a real font. - - + Added embedded default font to the ImageFont module (currently - an 8-pixel Courier font, taken from the X window distribution). - - (1.1.4b1 released) - - + Added experimental EXIF support for JPEG files. To extract EXIF - information from a JPEG file, open the file as usual, and call the - "_getexif" method. If successful, this method returns a dictionary - mapping EXIF TIFF tags to values. If the file does not contain EXIF - data, the "_getexif" method returns None. - - The "ExifTags" module contains a dictionary mapping tags to tag - names. - - This interface will most likely change in future versions. - - + Fixed a bug when using the "transparency" option with the GIF - writer. - - + Added limited support for "bitfield compression" in BMP files - and DIB buffers, for 15-bit, 16-bit, and 32-bit images. This - also fixes a problem with ImageGrab module when copying screen- - dumps from the clipboard on 15/16/32-bit displays. - - + Added experimental WAL (Quake 2 textures) loader. To use this - loader, import WalImageFile and call the "open" method in that - module. - - (1.1.4a4 released) - - + Added updated SANE driver (Andrew Kuchling, Abel Deuring) - - + Use Python's "mmap" module on non-Windows platforms to read some - uncompressed formats using memory mapping. Also added a "frombuffer" - function that allows you to access the contents of an existing string - or buffer object as if it were an image object. - - + Fixed a memory leak that could appear when processing mode "P" - images (from Pier Paolo Glave) - - + Ignore Unicode characters in the BDF loader (from Graham Dumpleton) - - (1.1.4a3 released; windows only) - - + Added experimental RGBA-on-RGB drawing support. To use RGBA - colours on an RGB image, pass "RGBA" as the second string to - the ImageDraw.Draw constructor. - - + Added support for non-ASCII strings (Latin-1) and Unicode - to the truetype font renderer. - - + The ImageWin "Dib" object can now be constructed directly from - an image object. - - + The ImageWin module now allows you use window handles as well - as device contexts. To use a window handle, wrap the handle in - an ImageWin.HWND object, and pass in this object instead of the - device context. - - (1.1.4a2 released) - - + Improved support for 16-bit unsigned integer images (mode "I;16"). - This includes TIFF reader support, and support for "getextrema" - and "point" (from Klamer Shutte). - - + Made the BdfFontFile reader a bit more robust (from Kevin Cazabon - and Dmitry Vasiliev) - - + Changed TIFF writer to always write Compression tag, even when - using the default compression (from Greg Couch). - - + Added "show" support for Mac OS X (from Dan Wolfe). - - + Added clipboard support to the "ImageGrab" module (Windows only). - The "grabclipboard" function returns an Image object, a list of - filenames (not in 1.1.4), or None if neither was found. - - (1.1.4a1 released) - - + Improved support for drawing RGB data in palette images. You can - now use RGB tuples or colour names (see below) when drawing in a - mode "P" image. The drawing layer automatically assigns color - indexes, as long as you don't use more than 256 unique colours. - - + Moved self test from MiniTest/test.py to ./selftest.py. - - + Added support for CSS3-style color strings to most places that - accept colour codes/tuples. 
This includes the "ImageDraw" module, - the Image "new" function, and the Image "paste" method. - - Colour strings can use one of the following formats: "#f00", - "#ff0000", "rgb(255,0,0)", "rgb(100%,0%,0%)", "hsl(0, 100%, 50%)", - or "red" (most X11-style colour names are supported). See the - documentation for the "ImageColor" module for more information. - - + Fixed DCX decoder (based on input from Larry Bates) - - + Added "IptcImagePlugin.getiptcinfo" helper to extract IPTC/NAA - newsphoto properties from JPEG, TIFF, or IPTC files. - - + Support for TrueType/OpenType fonts has been added to - the standard distribution. You need the freetype 2.0 - library. - - + Made the PCX reader a bit more robust when reading 2-bit - and 4-bit PCX images with odd image sizes. - - + Added "Kernel" class to the ImageFilter module. This class - allows you to filter images with user-defined 3x3 and 5x5 - convolution kernels. - - + Added "putdata" support for mode "I", "F" and "RGB". - - + The GIF writer now supports the transparency option (from - Denis Benoit). - - + A HTML version of the module documentation is now shipped - with the source code distribution. You'll find the files in - the Doc subdirectory. - - + Added support for Palm pixmaps (from Bill Janssen). This - change was listed for 1.1.3, but the "PalmImagePlugin" driver - didn't make it into the distribution. - - + Improved decoder error messages. - - (1.1.3 final released) - - + Made setup.py look for old versions of zlib. For some back- - ground, see: http://www.gzip.org/zlib/advisory-2002-03-11.txt - - (1.1.3c2 released) - - + Added setup.py file (tested on Unix and Windows). You still - need to build libImaging/imaging.lib in the traditional way, - but the setup.py script takes care of the rest. - - The old Setup.in/Makefile.pre.in build method is still - supported. - - + Fixed segmentation violation in ANTIALIAS filter (an internal - buffer wasn't properly allocated). - - (1.1.3c1 released) - - + Added ANTIALIAS downsampling filter for high-quality "resize" - and "thumbnail" operations. Also added filter option to the - "thumbnail" operation; the default value is NEAREST, but this - will most likely change in future versions. - - + Fixed plugin loader to be more robust if the __file__ - variable isn't set. - - + Added seek/tell support (for layers) to the PhotoShop - loader. Layer 0 is the main image. - - + Added new (but experimental) "ImageOps" module, which provides - shortcuts for commonly used operations on entire images. - - + Don't mess up when loading PNG images if the decoder leaves - data in the output buffer. This could cause internal errors - on some PNG images, with some versions of ZLIB. (Bug report - and patch provided by Bernhard Herzog.) - - + Don't mess up on Unicode filenames. - - + Don't mess up when drawing on big endian platforms. - - + Made the TIFF loader a bit more robust; it can now read some - more slightly broken TIFF files (based on input from Ted Wright, - Bob Klimek, and D. Alan Stewart) - - + Added OS/2 EMX build files (from Andrew MacIntyre) - - + Change "ImageFont" to reject image files if they don't have the - right mode. Older versions could leak memory for "P" images. - (Bug reported by Markus Gritsch). - - + Renamed some internal functions to avoid potential build - problem on Mac OS X. 
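A sketch of a user-defined convolution using the ImageFilter "Kernel" class mentioned above; the file name and coefficients are illustrative:

    from PIL import Image, ImageFilter

    im = Image.open("photo.jpg").convert("L")     # hypothetical input file

    # 3x3 sharpening kernel: size, flat coefficient list, optional scale
    # (defaults to the sum of the coefficients) and offset.
    sharpen = ImageFilter.Kernel((3, 3),
                                 [ 0, -1,  0,
                                  -1,  5, -1,
                                   0, -1,  0], scale=1)
    out = im.filter(sharpen)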
- - + Added DL_EXPORT where relevant (for Cygwin, based on input - from Robert Yodlowski) - - + (re)moved bogus __init__ call in BdfFontFile (bug spotted - by Fred Clare) - - + Added "ImageGrab" support (Windows only) - - + Added support for XBM hotspots (based on code contributed by - Bernhard Herzog). - - + Added write support for more TIFF tags, namely the Artist, - Copyright, DateTime, ResolutionUnit, Software, XResolution and - YResolution tags (from Greg Couch) - - + Added TransposedFont wrapper to ImageFont module - - + Added "optimize" flag to GIF encoder. If optimize is present - and non-zero, PIL will work harder to create a small file. - - + Raise "EOFError" (not IndexError) when reading beyond the - end of a TIFF sequence. - - + Support rewind ("seek(0)") for GIF and TIFF sequences. - - + Load grayscale GIF images as mode "L" - - + Added DPI read/write support to the JPEG codec. The decoder - sets the info["dpi"] attribute for JPEG files with JFIF dpi - settings. The encoder uses the "dpi" option: - - im = Image.open("file.jpg") - dpi = im.info["dpi"] # raises KeyError if DPI not known - im.save("out.jpg", dpi=dpi) - - Note that PIL doesn't always preserve the "info" attribute - for normal image operations. - - (1.1.2c1 and 1.1.2 final released) - - + Adapted to Python 2.1. Among other things, all uses of the - "regex" module has been repleased with "re". - - + Fixed attribute error when reading large PNG files (this bug - was introduced in maintenance code released after the 1.1.1 - release) - - + Ignore non-string objects in sys.path - - + Fixed Image.transform(EXTENT) for negative xoffsets - - + Fixed loading of image plugins if PIL is installed as a package. - (The plugin loader now always looks in the directory where the - Image.py module itself is found, even if that directory isn't on - the standard search path) - - + The Png plugin has been added to the list of preloaded standard - formats - - + Fixed bitmap/text drawing in fill mode. - - + Fixed "getextrema" to work also for multiband images. - - + Added transparency support for L and P images to the PNG codec. - - + Improved support for read-only images. The "load" method now - sets the "readonly" attribute for memory-mapped images. Operations - that modifies an image in place (such as "paste" and drawing operations) - creates an in-memory copy of the image, if necessary. (before this - change, any attempt to modify a memory-mapped image resulted in a - core dump...) - - + Added special cases for lists everywhere PIL expects a sequence. - This should speed up things like "putdata" and drawing operations. - - + The Image.offset method is deprecated. Use the ImageChops.offset - function instead. - - + Changed ImageChops operators to copy palette and info dictionary - from the first image argument. - - (1.1.1 released) - - + Additional fixes for Python 1.6/2.0, including TIFF "save" bug. - - + Changed "init" to properly load plugins when PIL is used as a - package. - - + Fixed broken "show" method (on Unix) - - *** Changes from release 1.0 to 1.1 *** - - + Adapted to Python 1.6 ("append" and other method changes) - - + Fixed Image.paste when pasting with solid colour and matte - layers ("L" or "RGBA" masks) (bug reported by Robert Kern) - - + To make it easier to distribute prebuilt versions of PIL, - the tkinit binding stuff has been moved to a separate - extension module, named "_imagingtk". 
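A sketch of the sequence handling noted above, where seeking past the last frame raises EOFError and "seek(0)" rewinds GIF and TIFF sequences; the file name is illustrative:

    from PIL import Image

    im = Image.open("animation.gif")     # hypothetical multi-frame file

    frames = 0
    try:
        while True:
            im.seek(frames)              # EOFError past the last frame
            frames += 1
    except EOFError:
        pass

    im.seek(0)                           # rewind to the first frame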
- - *** Changes from release 0.3b2 to 1.0 final *** - - + If there's no 16-bit integer (like on a Cray T3E), set - INT16 to the smallest integer available. Most of the - library works just fine anyway (from Bill Crutchfield) - - + Tweaks to make drawing work on big-endian platforms. - - (1.0c2 released) - - + If PIL is built with the WITH_TKINTER flag, ImageTk can - automatically hook into a standard Tkinter build. You - no longer need to build your own Tkinter to use the - ImageTk module. - - The old way still works, though. For more information, - see Tk/install.txt. - - + Some tweaks to ImageTk to support multiple Tk interpreters - (from Greg Couch). - - + ImageFont "load_path" now scans directory mentioned in .pth - files (from Richard Jones). - - (1.0c1 released) - - + The TIFF plugin has been rewritten. The new plugin fully - supports all major PIL image modes (including F and I). - - + The ImageFile module now includes a Parser class, which can - be used to incrementally decode an image file (while down- - loading it from the net, for example). See the handbook for - details. - - + "show" now converts non-standard modes to "L" or "RGB" (as - appropriate), rather than writing weird things to disk for - "xv" to choke upon. (bug reported by Les Schaffer). - - (1.0b2 released) - - + Major speedups for rotate, transform(EXTENT), and transform(AFFINE) - when using nearest neighbour resampling. - - + Modified ImageDraw to be compatible with the Arrow graphics - interface. See the handbook for details. - - + PIL now automatically loads file codecs when used as a package - (from The Dragon De Monsyne). Also included an __init__.py file - in the standard distribution. - - + The GIF encoder has been modified to produce much smaller files. - - PIL now uses a run-length encoding method to encode GIF files. - On a random selection of GIF images grabbed from the web, this - version makes the images about twice as large as the original - LZW files, where the earlier version made them over 5 times - larger. YMMV, of course. - - + Added PCX write support (works with "1", "P", "L", and "RGB") - - + Added "bitmap" and "textsize" methods to ImageDraw. - - + Improved font rendering code. Fixed a bug or two, and moved - most of the time critical stuff to C. - - + Removed "bdf2pil.py". Use "pilfont.py" instead! - - + Improved 16-bit support (still experimental, though). - - The following methods now support "I;16" and "I;16B" images: - "getpixel", "copy", "convert" (to and from mode "I"), "resize", - "rotate", and "transform" with nearest neighbour filters, and - "save" using the IM format. The "new" and "open" functions - also work as expected. On Windows, 16-bit files are memory - mapped. - - NOTE: ALL other operations are still UNDEFINED on 16-bit images. - - + The "paste" method now supports constant sources. - - Just pass a colour value (a number or a tuple, depending on - the target image mode) instead of the source image. - - This was in fact implemented in an inefficient way in - earlier versions (the "paste" method generated a temporary - source image if you passed it a colour instead of an image). - In this version, this is handled on the C level instead. - - + Added experimental "RGBa" mode support. - - An "RGBa" image is an RGBA image where the colour components - have have been premultipled with the alpha value. PIL allows - you to convert an RGBA image to an RGBa image, and to paste - RGBa images on top of RGB images. 
Since this saves a bunch - of multiplications and shifts, it is typically about twice - as fast an ordinary RGBA paste. - - + Eliminated extra conversion step when pasting "RGBA" or "RGBa" - images on top of "RGB" images. - - + Fixed Image.BICUBIC resampling for "RGB" images. - - + Fixed PCX image file handler to properly read 8-bit PCX - files (bug introduced in 1.0b1, reported by Bernhard - Herzog) - - + Fixed PSDraw "image" method to restore the coordinate - system. - - + Fixed "blend" problem when applied to images that was - not already loaded (reported by Edward C. Jones) - - + Fixed -f option to "pilconvert.py" (from Anthony Baxter) - - (1.0b1 released) - - + Added Toby J. Sargeant's quantization package. To enable - quantization, use the "palette" option to "convert": - - imOut = im.convert("P", palette=Image.ADAPTIVE) - - This can be used with "L", "P", and "RGB" images. In this - version, dithering cannot be used with adaptive palettes. - - Note: ADAPTIVE currently maps to median cut quantization - with 256 colours. The quantization package also contains - a maximum coverage quantizer, which will be supported by - future versions of PIL. - - + Added Eric S. Raymond's "pildriver" image calculator to the - distribution. See the docstring for more information. - - + The "offset" method no longer dumps core if given positive - offsets (from Charles Waldman). - - + Fixed a resource leak that could cause ImageWin to run out of - GDI resources (from Roger Burnham). - - + Added "arc", "chord", and "pieslice" methods to ImageDraw (inspired - by code contributed by Richard Jones). - - + Added experimental 16-bit support, via modes "I;16" (little endian - data) and "I;16B" (big endian). Only a few methods properly support - such images (see above). - - + Added XV thumbnail file handler (from Gene Cash). - - + Fixed BMP image file handler to handle palette images with small - palettes (from Rob Hooft). - - + Fixed Sun raster file handler for palette images (from Charles - Waldman). - - + Improved various internal error messages. - - + Fixed Path constructor to handle arbitrary sequence objects. This - also affects the ImageDraw class (from Richard Jones). - - + Fixed a bug in JpegDecode that caused PIL to report "decoder error - -2" for some progressive JPEG files (reported by Magnus Källström, - who also provided samples). - - + Fixed a bug in JpegImagePlugin that caused PIL to hang when loading - JPEG files using 16-bit quantization tables. - - + The Image "transform" method now supports Image.QUAD transforms. - The data argument is an 8-tuple giving the upper left, lower - left, lower right, and upper right corner of the source quadri- - lateral. Also added Image.MESH transform which takes a list - of quadrilaterals. - - + The Image "resize", "rotate", and "transform" methods now support - Image.BILINEAR (2x2) and Image.BICUBIC (4x4) resampling filters. - Filters can be used with all transform methods. - - + The ImageDraw "rectangle" method now includes both the right - and the bottom edges when drawing filled rectangles. - - + The TGA decoder now works properly for runlength encoded images - which have more than one byte per pixel. - - + "getbands" on an YCbCr image now returns ("Y", "Cb", "Cr") - - + Some file drivers didn't handle the optional "modify" argument - to the load method. This resulted in exceptions when you used - "paste" (and other methods that modify an image in place) on a - newly opened file. 
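A sketch of the Image.QUAD transform described above, with the 8-tuple giving the upper left, lower left, lower right and upper right corners of the source quadrilateral; the file name and output size are illustrative:

    from PIL import Image

    im = Image.open("photo.jpg")         # hypothetical input file
    w, h = im.size

    quad = (0, 0,                        # upper left
            0, h,                        # lower left
            w, h,                        # lower right
            w, 0)                        # upper right
    out = im.transform((400, 300), Image.QUAD, quad, Image.BILINEAR)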
- - *** Changes from release 0.2 (b5) to 0.3 (b2) *** - - (0.3b2 released) - - The test suite includes 825 individual tests. - - + An Image "getbands" method has been added. It returns a tuple - containing the individual band names for this image. To figure - out how many bands an image has, use "len(im.getbands())". - - + An Image "putpixel" method has been added. - - + The Image "point" method can now be used to convert "L" images - to any other format, via a lookup table. That table should - contain 256 values for each band in the output image. - - + Some file drivers (including FLI/FLC, GIF, and IM) accidently - overwrote the offset method with an internal attribute. All - drivers have been updated to use private attributes where - possible. - - + The Image "histogram" method now works for "I" and "F" images. - For these modes, PIL divides the range between the min and - max values used in the image into 256 bins. You can also - pass in your own min and max values via the "extrema" option: - - h = im.histogram(extrema=(0, 255)) - - + An Image "getextrema" method has been added. It returns the - min and max values used in the image. In this release, this - works for single band images only. - - + Changed the PNG driver to load and save mode "I" images as - 16-bit images. When saving, values outside the range 0..65535 - are clipped. - - + Fixed ImageFont.py to work with the new "pilfont" compiler. - - + Added JPEG "save" and "draft" support for mode "YCbCr" images. - Note that if you save an "YCbCr" image as a JPEG file and read - it back, it is read as an RGB file. To get around this, you - can use the "draft" method: - - im = Image.open("color.jpg") - im.draft("YCbCr", im.size) - - + Read "RGBA" TGA images. Also fixed the orientation bug; all - images should now come out the right way. - - + Changed mode name (and internal representation) from "YCrCb" - to "YCbCr" (!) - *** WARNING: MAY BREAK EXISTING CODE *** - - (0.3b1 released) - - The test suite includes 750 individual tests. - - + The "pilfont" package is now included in the standard PIL - distribution. The pilfont utility can be used to convert - X BDF and PCF raster font files to a format understood by - the ImageFont module. - - + GIF files are now interlaced by default. To write a - non-interlaced file, pass interlace=0 to the "save" - method. - - + The default string format has changed for the "fromstring" - and "tostring" methods. - *** WARNING: MAY BREAK EXISTING CODE *** - - NOTE: If no extra arguments are given, the first line in - the string buffer is the top line of the image, instead of - the bottom line. For RGB images, the string now contains - 3 bytes per pixel instead of 4. These changes were made - to make the methods compatible with the "fromstring" - factory function. - - To get the old behaviour, use the following syntax: - - data = im.tostring("raw", "RGBX", 0, -1) - im.fromstring(data, "raw", "RGBX", 0, -1) - - + "new" no longer gives a MemoryError if the width or height - is zero (this only happened on platforms where malloc(0) - or calloc(0) returns NULL). - - + "new" now adds a default palette object to "P" images. - - + You can now convert directly between all modes supported by - PIL. When converting colour images to "P", PIL defaults to - a "web" palette and dithering. When converting greyscale - images to "1", PIL uses a thresholding and dithering. - - + Added a "dither" option to "convert". 
By default, "convert" - uses floyd-steinberg error diffusion for "P" and "1" targets, - so this option is only used to *disable* dithering. Allowed - values are NONE (no dithering) or FLOYDSTEINBERG (default). - - imOut = im.convert("P", dither=Image.NONE) - - + Added a full set of "I" decoders. You can use "fromstring" - (and file decoders) to read any standard integer type as an - "I" image. - - + Added some support for "YCbCr" images (creation, conversion - from/to "L" and "RGB", IM YCC load/save) - - + "getpixel" now works properly with fractional coordinates. - - + ImageDraw "setink" now works with "I", "F", "RGB", "RGBA", - "RGBX", "CMYK", and "YCbCr" images. - - + ImImagePlugin no longer attaches palettes to "RGB" images. - - + Various minor fixes. - - (0.3a4 released) - - + Added experimental IPTC/NAA support. - - + Eliminated AttributeError exceptions after "crop" (from - Skip Montanaro) - - + Reads some uncompressed formats via memory mapping (this - is currently supported on Win32 only) - - + Fixed some last minute glitches in the last alpha release - (Types instead of types in Image.py, version numbers, etc.) - - + Eliminated some more bogus compiler warnings. - - + Various fixes to make PIL compile and run smoother on Macs - (from Jack Jansen). - - + Fixed "fromstring" and "tostring" for mode "I" images. - - (0.3a3 released) - - The test suite includes 530 individual tests. - - + Eliminated unexpected side-effect in "paste" with matte. "paste" - now works properly also if compiled with "gcc". - - + Adapted to Python 1.5 (build issues only) - - + Fixed the ImageDraw "point" method to draw also the last - point (!). - - + Added "I" and "RGBX" support to Image.new. - - + The plugin path is now properly prepended to the module search - path when a plugin module is imported. - - + Added "draw" method to the ImageWin.Dib class. This is used by - Topaz to print images on Windows printers. - - + "convert" now supports conversions from "P" to "1" and "F". - - + "paste" can now take a colour instead of an image as the first argument. - The colour must match the colour argument given to the new function, and - match the mode of the target image. - - + Fixed "paste" to allow a mask also for mode "F" images. - - + The BMP driver now saves mode "1" images. When loading images, the mode - is set to "L" for 8-bit files with greyscale palettes, and to "P" for - other 8-bit files. - - + The IM driver now reads and saves "1" images (file modes "0 1" or "L 1"). - - + The JPEG and GIF drivers now saves "1" images. For JPEG, the image - is saved as 8-bit greyscale (it will load as mode "L"). For GIF, the - image will be loaded as a "P" image. - - + Fixed a potential buffer overrun in the GIF encoder. - - (0.3a2 released) - - The test suite includes 400 individual tests. - - + Improvements to the test suite revealed a number of minor bugs, which - are all fixed. Note that crop/paste, 32-bit ImageDraw, and ImageFont - are still weak spots in this release. - - + Added "putpalette" method to the Image class. You can use this - to add or modify the palette for "P" and "L" images. If a palette - is added to an "L" image, it is automatically converted to a "P" - image. - - + Fixed ImageDraw to properly handle 32-bit image memories - ("RGB", "RGBA", "CMYK", "F") - - + Fixed "fromstring" and "tostring" not to mess up the mode attribute - in default mode. - - + Changed ImPlatform.h to work on CRAY's (don't have one at home, so I - haven't tried it). 
The previous version assumed that either "short" - or "int" were 16-bit wide. PIL still won't compile on platforms where - neither "short", "int" nor "long" are 32-bit wide. - - + Added file= and data= keyword arguments to PhotoImage and BitmapImage. - This allows you to use them as drop-in replacements for the corre- - sponding Tkinter classes. - - + Removed bogus references to the crack coder (ImagingCrack). - - (0.3a1 released) - - + Make sure image is loaded in "tostring". - - + Added floating point packer (native 32-bit floats only). - - *** Changes from release 0.1b1 to 0.2 (b5) *** - - + Modified "fromstring" and "tostring" methods to use file codecs. - Also added "fromstring" factory method to create an image directly - from data in a string. - - + Added support for 32-bit floating point images (mode "F"). You - can convert between "L" and "F" images, and apply a subset of the - available image processing methods on the "F" image. You can also - read virtually any data format into a floating point image memory; - see the section on "Decoding Floating Point Data" in the handbook - for more information. - - (0.2b5 released; on windows only) - - + Fixed the tobitmap() method to work properly for small bitmaps. - - + Added RMS and standard deviation to the ImageStat.Stat class. Also - modified the constructor to take an optional feature mask, and also - to accept either an image or a list containing the histogram data. - - + The BitmapImage code in ImageTk can now use a special bitmap - decoder, which has to be patched into Tk. See the "Tk/pilbitmap.txt" - file for details. If not installed, bitmaps are transferred to Tk as - XBM strings. - - + The PhotoImage code in ImageTk now uses a Tcl command ("PyImagingPaste") - instead of a special image type. This gives somewhat better performance, - and also allows PIL to support transparency. - *** WARNING: TKAPPINIT MUST BE MODIFIED *** - - + ImageTk now honours the alpha layer in RGBA images. Only fully - transparent pixels are made transparent (that is, the alpha layer - is treated as a mask). To treat the alpha laters as a matte, you - must paste the image on the background before handing it over to - ImageTk. - - + Added McIdas reader (supports 8-bit images only). - - + PIL now preloads drivers for BMP, GIF, JPEG, PPM, and TIFF. As - long as you only load and save these formats, you don't have to - wait for a full scan for drivers. To force scanning, call the - Image.init() function. - - + The "seek" and "tell" methods are now always available, also for - single-frame images. - - + Added optional mask argument to histogram method. The mask may - be an "1" or "L" image with the same size as the original image. - Only pixels where the mask is non-zero are included in the - histogram. - - + The "paste" method now allows you to specify only the lower left - corner (a 2-tuple), instead of the full region (a 4-tuple). - - + Reverted to old plugin scanning model; now scans all directory - names in the path when looking for plugins. - - + Added PIXAR raster support. Only uncompressed ("dumped") RGB - images can currently be read (based on information provided - by Greg Coats). - - + Added FlashPix (FPX) read support. Reads all pixel formats, but - only the highest resolution is read, and the viewing transform is - currently ignored. - - + Made PNG encoding somewhat more efficient in "optimize" mode; a - bug in 0.2b4 didn't enable all predictor filters when optimized - storage were requested. 
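A sketch of the optional histogram mask described above; only pixels where the same-sized "L" (or "1") mask is non-zero are counted, and the file name is illustrative:

    from PIL import Image

    im = Image.open("photo.jpg").convert("L")     # hypothetical input file

    # Count only the left half of the image.
    mask = Image.new("L", im.size, 0)
    mask.paste(255, (0, 0, im.size[0] // 2, im.size[1]))

    hist = im.histogram(mask)                     # 256 counts for mode "L"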
- - + Added Microsoft Image Composer (MIC) read support. When opened, - the first sprite in the file is loaded. You can use the seek method - to load additional sprites from the file. - - + Properly reads "P" and "CMYK" PSD images. - - + "pilconvert" no longer optimizes by default; use the -o option to - make the file as small as possible (at the expense of speed); use - the -q option to set the quality when compressing to JPEG. - - + Fixed "crop" not to drop the palette for "P" images. - - + Added and verified FLC support. - - + Paste with "L" or "RGBA" alpha is now several times faster on most - platforms. - - + Changed Image.new() to initialize the image to black, as described - in the handbook. To get an uninitialized image, use None as the - colour. - - + Fixed the PDF encoder to produce a valid header; Acrobat no longer - complains when you load PDF images created by PIL. - - + PIL only scans fully-qualified directory names in the path when - looking for plugins. - *** WARNING: MAY BREAK EXISTING CODE *** - - + Faster implementation of "save" used when filename is given, - or when file object has "fileno" and "flush" methods. - - + Don't crash in "crop" if region extends outside the source image. - - + Eliminated a massive memory leak in the "save" function. - - + The GIF decoder doesn't crash if the code size is set to an illegal - value. This could happen since another bug didn't handle local - palettes properly if they didn't have the same size as the - global palette (not very common). - - + Added predictor support (TIFF 6.0 section 14) to the TIFF decoder. - - + Fixed palette and padding problems in BMP driver. Now properly - writes "1", "L", "P" and "RGB" images. - - + Fixed getpixel()/getdata() to return correct pixel values. - - + Added PSD (PhotoShop) read support. Reads both uncompressed - and compressed images of most types. - - + Added GIF write support (writes "uncompressed" GIF files only, - due to unresolvable licensing issues). The "gifmaker.py" script - can be used to create GIF animations. - - + Reads 8-bit "L" and "P" TGA images. Also reads 16-bit "RGB" - images. - - + Added FLI read support. This driver has only been tested - on a few FLI samples. - - + Reads 2-bit and 4-bit PCX images. - - + Added MSP read and write support. Both version 1 and 2 can be - read, but only version 1 (uncompressed) files are written. - - + Fixed a bug in the FLI/FLC identification code that caused the - driver to raise an exception when parsing valid FLI/FLC files. - - + Improved performance when loading file format plugins, and when - opening files. - - + Added GIF animation support, via the "seek" and "tell" methods. - You can use "player.py" to play an animated GIF file. - - + Removed MNG support, since the spec is changing faster than I - can change the code. I've added support for the experimental - ARG format instead. Contact me for more information on this - format. - - + Added keyword options to the "save" method. The following options - are currently supported: - - format option description - -------------------------------------------------------- - JPEG optimize minimize output file at the - expense of compression speed. - - JPEG progressive enable progressive output. the - option value is ignored. - - JPEG quality set compression quality (1-100). - the default value is 75. - - JPEG smooth smooth dithered images. value - is strengh (1-100). default is - off (0). - - PNG optimize minimize output file at the - expense of compression speed. 
- - Expect more options in future releases. Also note that - file writers silently ignore unknown options. - - + Plugged memory leaks in the PNG and TIFF decoders. - - + Added PNG write support. - - + (internal) RGB unpackers and converters now set the pad byte - to 255 (full opacity). - - + Properly handles the "transparency" property for GIF, PNG - and XPM files. - - + Added a "putalpha" method, allowing you to attach a "1" or "L" - image as the alpha layer to an "RGBA" image. - - + Various improvements to the sample scripts: - - "pilconvert" Carries out some extra tricks in order to make - the resulting file as small as possible. - - "explode" (NEW) Split an image sequence into individual frames. - - "gifmaker" (NEW) Convert a sequence file into a GIF animation. - Note that the GIF encoder create "uncompressed" GIF - files, so animations created by this script are - rather large (typically 2-5 times the compressed - sizes). - - "image2py" (NEW) Convert a single image to a python module. See - comments in this script for details. - - "player" If multiple images are given on the command line, - they are interpreted as frames in a sequence. The - script assumes that they all have the same size. - Also note that this script now can play FLI/FLC - and GIF animations. - - This player can also execute embedded Python - animation applets (ARG format only). - - "viewer" Transparent images ("P" with transparency property, - and "RGBA") are superimposed on the standard Tk back- - ground. - - + Fixed colour argument to "new". For multilayer images, pass a - tuple: (Red, Green, Blue), (Red, Green, Blue, Alpha), or (Cyan, - Magenta, Yellow, Black). - - + Added XPM (X pixmap) read support. - - (0.2b3 released) - - + Added MNG (multi-image network graphics) read support. "Ming" - is a proposed animation standard, based on the PNG file format. - - You can use the "player" sample script to display some flavours - of this format. The MNG standard is still under development, - as is this driver. More information, including sample files, - can be found at - - + Added a "verify" method to images loaded from file. This method - scans the file for errors, without actually decoding the image - data, and raises a suitable exception if it finds any problems. - Currently implemented for PNG and MNG files only. - - + Added support for interlaced GIF images. - - + Added PNG read support -- if linked with the ZLIB compression library, - PIL reads all kinds of PNG images, except interlaced files. - - + Improved PNG identification support -- doesn't mess up on unknown - chunks, identifies all possible PNG modes, and verifies checksum - on PNG header chunks. - - + Added an experimental reader for placable Windows Meta Files (WMF). - This reader is still very incomplete, but it illustrates how PIL's - drawing capabilities can be used to render vector and metafile - formats. - - + Added restricted drivers for images from Image Tools (greyscale - only) and LabEye/IFUNC (common interchange modes only). - - + Some minor improvements to the sample scripts provided in the - "Scripts" directory. - - + The test images have been moved to the "Images" directory. - - (0.2b2 released) - (0.2b1 released; Windows only) - - + Fixed filling of complex polygons. The ImageDraw "line" and - "polygon" methods also accept Path objects. - - + The ImageTk "PhotoImage" object can now be constructed directly - from an image. You can also pass the object itself to Tkinter, - instead of using the "image" attribute. 
Finally, using "paste" - on a displayed image automatically updates the display. - - + The ImageTk "BitmapImage" object allows you to create transparent - overlays from 1-bit images. You can pass the object itself to - Tkinter. The constructor takes the same arguments as the Tkinter - BitmapImage class; use the "foreground" option to set the colour - of the overlay. - - + Added a "putdata" method to the Image class. This can be used to - load a 1-layer image with data from a sequence object or a string. - An optional floating point scale and offset can be used to adjust - the data to fit into the 8-bit pixel range. Also see the "getdata" - method. - - + Added the EXTENT method to the Image "transform" method. This can - be used to quickly crop, stretch, shrink, or mirror a subregion - from another image. - - + Adapted to Python 1.4. - - + Added a project makefile for Visual C++ 4.x. This allows you to - easily build a dynamically linked version of PIL for Windows 95 - and NT. - - + A Tk "booster" patch for Windows is available. It gives dramatic - performance improvements for some displays. Has been tested with - Tk 4.2 only, but is likely to work with Tk 4.1 as well. See the Tk - subdirectory for details. - - + You can now save 1-bit images in the XBM format. In addition, the - Image class now provides a "tobitmap" method which returns a string - containing an XBM representation of the image. Quite handy to use - with Tk. - - + More conversions, including "RGB" to "1" and more. - - (0.2a1 released) - - + Where earlier versions accepted lists, this version accepts arbitrary - Python sequences (including strings, in some cases). A few resource - leaks were plugged in the process. - - + The Image "paste" method now allows the box to extend outside - the target image. The size of the box, the image to be pasted, - and the optional mask must still match. - - + The ImageDraw module now supports filled polygons, outlined and - filled ellipses, and text. Font support is rudimentary, though. - - + The Image "point" method now takes an optional mode argument, - allowing you to convert the image while translating it. Currently, - this can only be used to convert "L" or "P" images to "1" images - (creating thresholded images or "matte" masks). - - + An Image "getpixel" method has been added. For single band images, - it returns the pixel value at a given position as an integer. - For n-band images, it returns an n-tuple of integers. - - + An Image "getdata" method has been added. It returns a sequence - object representing the image as a 1-dimensional array. Only len() - and [] can be used with this sequence. This method returns a - reference to the existing image data, so changes in the image - will be immediately reflected in the sequence object. - - + Fixed alignment problems in the Windows BMP writer. - - + If converting an "RGB" image to "RGB" or "L", you can give a second - argument containing a colour conversion matrix. - - + An Image "getbbox" method has been added. It returns the bounding - box of data in an image, considering the value 0 as background. - - + An Image "offset" method has been added. It returns a new image - where the contents of the image have been offset the given distance - in X and/or Y direction. Data wraps between edges. - - + Saves PDF images. The driver creates a binary PDF 1.1 files, using - JPEG compression for "L", "RGB", and "CMYK" images, and hex encoding - (same as for PostScript) for other formats. - - + The "paste" method now accepts "1" masks. 
Zero means transparent, - any other pixel value means opaque. This is faster than using an - "L" transparency mask. - - + Properly writes EPS files (and properly prints images to postscript - printers as well). - - + Reads 4-bit BMP files, as well as 4 and 8-bit Windows ICO and CUR - files. Cursor animations are not supported. - - + Fixed alignment problems in the Sun raster loader. - - + Added "draft" and "thumbnail" methods. The draft method is used - to optimize loading of JPEG and PCD files, the thumbnail method is - used to create a thumbnail representation of an image. - - + Added Windows display support, via the ImageWin class (see the - handbook for details). - - + Added raster conversion for EPS files. This requires GNU or Aladdin - Ghostscript, and probably works on UNIX only. - - + Reads PhotoCD (PCD) images. The base resolution (768x512) can be - read from a PhotoCD file. - - + Eliminated some compiler warnings. Bindings now compile cleanly in C++ - mode. Note that the Imaging library itself must be compiled in C mode. - - + Added "bdf2pil.py", which converts BDF fonts into images with associated - metrics. This is definitely work in progress. For info, see description - in script for details. - - + Fixed a bug in the "ImageEnhance.py" module. - - + Fixed a bug in the netpbm save hack in "GifImagePlugin.py" - - + Fixed 90 and 270 degree rotation of rectangular images. - - + Properly reads 8-bit TIFF palette-color images. - - + Reads plane separated RGB and CMYK TIFF images. - - + Added driver debug mode. This is enabled by setting Image.DEBUG - to a non-zero value. Try the -D option to "pilfile.py" and see what - happens. - - + Don't crash on "atend" constructs in PostScript files. - - + Only the Image module imports _imaging directly. Other modules - should refer to the binding module as "Image.core". - - *** Changes from release 0.0 to 0.1 (b1) *** - - + A handbook is available (distributed separately). - - + The coordinate system is changed so that (0,0) is now located - in the upper left corner. This is in compliancy with ISO 12087 - and 90% of all other image processing and graphics libraries. - - + Modes "1" (bilevel) and "P" (palette) have been introduced. Note - that bilevel images are stored with one byte per pixel. - - + The Image "crop" and "paste" methods now accepts None as the - box argument, to refer to the full image (self, that is). - - + The Image "crop" method now works properly. - - + The Image "point" method is now available. You can use either a - lookup table or a function taking one argument. - - + The Image join function has been renamed to "merge". - - + An Image "composite" function has been added. It is identical - to copy() followed by paste(mask). - - + An Image "eval" function has been added. It is currently identical - to point(function); that is, only a single image can be processed. - - + A set of channel operations has been added. See the "ImageChops" - module, test_chops.py, and the handbook for details. - - + Added the "pilconvert" utility, which converts image files. Note - that the number of output formats are still quite restricted. - - + Added the "pilfile" utility, which quickly identifies image files - (without loading them, in most cases). - - + Added the "pilprint" utility, which prints image files to Postscript - printers. - - + Added a rudimentary version of the "pilview" utility, which is - simple image viewer based on Tk. Only File/Exit and Image/Next - works properly. - - + An interface to Tk has been added. 
See "Lib/ImageTk.py" and README - for details. - - + An interface to Jack Jansen's Img library has been added (thanks to - Jack). This allows you to read images through the Img extensions file - format handlers. See the file "Lib/ImgExtImagePlugin.py" for details. - - + Postscript printing is provided through the PSDraw module. See the - handbook for details. - -Keywords: Imaging -Platform: UNKNOWN -Classifier: Development Status :: 6 - Mature -Classifier: Topic :: Multimedia :: Graphics -Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera -Classifier: Topic :: Multimedia :: Graphics :: Capture :: Scanners -Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture -Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion -Classifier: Topic :: Multimedia :: Graphics :: Viewers -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 diff --git a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index b8fd432..0000000 --- a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,593 +0,0 @@ -.coveragerc -.gitattributes -.travis.yml -CHANGES.rst -MANIFEST.in -Makefile -README.rst -_imaging.c -_imagingcms.c -_imagingft.c -_imagingmath.c -_imagingmorph.c -_imagingtk.c -_webp.c -decode.c -display.c -encode.c -map.c -mp_compile.py -outline.c -path.c -py3.h -requirements.txt -selftest.py -setup.cfg -setup.py -test-installed.py -tox.ini -PIL/BdfFontFile.py -PIL/BmpImagePlugin.py -PIL/BufrStubImagePlugin.py -PIL/ContainerIO.py -PIL/CurImagePlugin.py -PIL/DcxImagePlugin.py -PIL/EpsImagePlugin.py -PIL/ExifTags.py -PIL/FitsStubImagePlugin.py -PIL/FliImagePlugin.py -PIL/FontFile.py -PIL/FpxImagePlugin.py -PIL/GbrImagePlugin.py -PIL/GdImageFile.py -PIL/GifImagePlugin.py -PIL/GimpGradientFile.py -PIL/GimpPaletteFile.py -PIL/GribStubImagePlugin.py -PIL/Hdf5StubImagePlugin.py -PIL/IcnsImagePlugin.py -PIL/IcoImagePlugin.py -PIL/ImImagePlugin.py -PIL/Image.py -PIL/ImageChops.py -PIL/ImageCms.py -PIL/ImageColor.py -PIL/ImageDraw.py -PIL/ImageDraw2.py -PIL/ImageEnhance.py -PIL/ImageFile.py -PIL/ImageFileIO.py -PIL/ImageFilter.py -PIL/ImageFont.py -PIL/ImageGrab.py -PIL/ImageMath.py -PIL/ImageMode.py -PIL/ImageMorph.py -PIL/ImageOps.py -PIL/ImagePalette.py -PIL/ImagePath.py -PIL/ImageQt.py -PIL/ImageSequence.py -PIL/ImageShow.py -PIL/ImageStat.py -PIL/ImageTk.py -PIL/ImageTransform.py -PIL/ImageWin.py -PIL/ImtImagePlugin.py -PIL/IptcImagePlugin.py -PIL/Jpeg2KImagePlugin.py -PIL/JpegImagePlugin.py -PIL/JpegPresets.py -PIL/McIdasImagePlugin.py -PIL/MicImagePlugin.py -PIL/MpegImagePlugin.py -PIL/MspImagePlugin.py -PIL/OleFileIO-README.md -PIL/OleFileIO.py -PIL/PSDraw.py -PIL/PaletteFile.py -PIL/PalmImagePlugin.py -PIL/PcdImagePlugin.py -PIL/PcfFontFile.py -PIL/PcxImagePlugin.py -PIL/PdfImagePlugin.py -PIL/PixarImagePlugin.py -PIL/PngImagePlugin.py -PIL/PpmImagePlugin.py -PIL/PsdImagePlugin.py -PIL/PyAccess.py -PIL/SgiImagePlugin.py -PIL/SpiderImagePlugin.py -PIL/SunImagePlugin.py -PIL/TarIO.py -PIL/TgaImagePlugin.py -PIL/TiffImagePlugin.py -PIL/TiffTags.py -PIL/WalImageFile.py -PIL/WebPImagePlugin.py 
-PIL/WmfImagePlugin.py -PIL/XVThumbImagePlugin.py -PIL/XbmImagePlugin.py -PIL/XpmImagePlugin.py -PIL/__init__.py -PIL/_binary.py -PIL/_util.py -Pillow.egg-info/PKG-INFO -Pillow.egg-info/SOURCES.txt -Pillow.egg-info/dependency_links.txt -Pillow.egg-info/top_level.txt -Pillow.egg-info/zip-safe -Sane/CHANGES -Sane/README.rst -Sane/_sane.c -Sane/demo_numarray.py -Sane/demo_pil.py -Sane/sane.py -Sane/sanedoc.txt -Sane/setup.py -Scripts/README.rst -Scripts/enhancer.py -Scripts/explode.py -Scripts/gifmaker.py -Scripts/painter.py -Scripts/pilconvert.py -Scripts/pildriver.py -Scripts/pilfile.py -Scripts/pilfont.py -Scripts/pilprint.py -Scripts/player.py -Scripts/thresholder.py -Scripts/viewer.py -Tests/README.rst -Tests/bench_cffi_access.py -Tests/bench_get.py -Tests/check_icns_dos.py -Tests/check_j2k_dos.py -Tests/crash_ttf_memory_error.py -Tests/helper.py -Tests/import_all.py -Tests/large_memory_numpy_test.py -Tests/large_memory_test.py -Tests/make_hash.py -Tests/show_icc.py -Tests/show_mcidas.py -Tests/test_000_sanity.py -Tests/test_bmp_reference.py -Tests/test_cffi.py -Tests/test_decompression_bomb.py -Tests/test_file_bmp.py -Tests/test_file_eps.py -Tests/test_file_fli.py -Tests/test_file_gif.py -Tests/test_file_icns.py -Tests/test_file_ico.py -Tests/test_file_jpeg.py -Tests/test_file_jpeg2k.py -Tests/test_file_libtiff.py -Tests/test_file_libtiff_small.py -Tests/test_file_msp.py -Tests/test_file_pcx.py -Tests/test_file_pdf.py -Tests/test_file_png.py -Tests/test_file_ppm.py -Tests/test_file_psd.py -Tests/test_file_spider.py -Tests/test_file_tar.py -Tests/test_file_tiff.py -Tests/test_file_tiff_metadata.py -Tests/test_file_webp.py -Tests/test_file_webp_alpha.py -Tests/test_file_webp_lossless.py -Tests/test_file_webp_metadata.py -Tests/test_file_xbm.py -Tests/test_file_xpm.py -Tests/test_font_bdf.py -Tests/test_font_pcf.py -Tests/test_format_lab.py -Tests/test_image.py -Tests/test_image_array.py -Tests/test_image_convert.py -Tests/test_image_copy.py -Tests/test_image_crop.py -Tests/test_image_draft.py -Tests/test_image_filter.py -Tests/test_image_frombytes.py -Tests/test_image_getbands.py -Tests/test_image_getbbox.py -Tests/test_image_getcolors.py -Tests/test_image_getdata.py -Tests/test_image_getextrema.py -Tests/test_image_getim.py -Tests/test_image_getpalette.py -Tests/test_image_getpixel.py -Tests/test_image_getprojection.py -Tests/test_image_histogram.py -Tests/test_image_load.py -Tests/test_image_mode.py -Tests/test_image_offset.py -Tests/test_image_point.py -Tests/test_image_putalpha.py -Tests/test_image_putdata.py -Tests/test_image_putpalette.py -Tests/test_image_putpixel.py -Tests/test_image_quantize.py -Tests/test_image_resize.py -Tests/test_image_rotate.py -Tests/test_image_split.py -Tests/test_image_thumbnail.py -Tests/test_image_tobitmap.py -Tests/test_image_tobytes.py -Tests/test_image_transform.py -Tests/test_image_transpose.py -Tests/test_imagechops.py -Tests/test_imagecms.py -Tests/test_imagecolor.py -Tests/test_imagedraw.py -Tests/test_imageenhance.py -Tests/test_imagefile.py -Tests/test_imagefileio.py -Tests/test_imagefilter.py -Tests/test_imagefont.py -Tests/test_imagegrab.py -Tests/test_imagemath.py -Tests/test_imagemode.py -Tests/test_imagemorph.py -Tests/test_imageops.py -Tests/test_imageops_usm.py -Tests/test_imagepalette.py -Tests/test_imagepath.py -Tests/test_imageqt.py -Tests/test_imagesequence.py -Tests/test_imageshow.py -Tests/test_imagestat.py -Tests/test_imagetk.py -Tests/test_imagetransform.py -Tests/test_imagewin.py -Tests/test_lib_image.py -Tests/test_lib_pack.py 
-Tests/test_locale.py -Tests/test_mode_i16.py -Tests/test_numpy.py -Tests/test_olefileio.py -Tests/test_pickle.py -Tests/test_pyroma.py -Tests/test_shell_injection.py -Tests/threaded_save.py -Tests/versions.py -Tests/fonts/FreeMono.ttf -Tests/fonts/helvO18.pcf -Tests/icc/CMY.icm -Tests/icc/YCC709.icm -Tests/icc/sRGB.icm -Tests/images/10ct_32bit_128.tiff -Tests/images/12bit.cropped.tif -Tests/images/12in16bit.tif -Tests/images/16_bit_binary.pgm -Tests/images/16_bit_binary_pgm.png -Tests/images/16bit.MM.cropped.tif -Tests/images/16bit.MM.deflate.tif -Tests/images/16bit.cropped.j2k -Tests/images/16bit.cropped.jp2 -Tests/images/16bit.cropped.tif -Tests/images/16bit.deflate.tif -Tests/images/binary_preview_map.eps -Tests/images/broken.png -Tests/images/caption_6_33_22.png -Tests/images/corner.lut -Tests/images/courB08.bdf -Tests/images/courB08.pbm -Tests/images/courB08.pil -Tests/images/create_eps.gnuplot -Tests/images/dilation4.lut -Tests/images/dilation8.lut -Tests/images/edge.lut -Tests/images/erosion4.lut -Tests/images/erosion8.lut -Tests/images/flower.jpg -Tests/images/flower.webp -Tests/images/flower2.jpg -Tests/images/flower2.webp -Tests/images/g4-fillorder-test.png -Tests/images/g4-fillorder-test.tif -Tests/images/high_ascii_chars.png -Tests/images/icc_profile_none.png -Tests/images/imagedraw_arc.png -Tests/images/imagedraw_bitmap.png -Tests/images/imagedraw_chord.png -Tests/images/imagedraw_ellipse.png -Tests/images/imagedraw_floodfill.png -Tests/images/imagedraw_floodfill2.png -Tests/images/imagedraw_line.png -Tests/images/imagedraw_pieslice.png -Tests/images/imagedraw_point.png -Tests/images/imagedraw_polygon.png -Tests/images/imagedraw_rectangle.png -Tests/images/junk_jpeg_header.jpg -Tests/images/l_trns.png -Tests/images/lab-green.tif -Tests/images/lab-red.tif -Tests/images/lab.tif -Tests/images/lena.Lab.tif -Tests/images/lena.fli -Tests/images/lena.gif -Tests/images/lena.ico -Tests/images/lena.jpg -Tests/images/lena.png -Tests/images/lena.ppm -Tests/images/lena.psd -Tests/images/lena.spider -Tests/images/lena.tar -Tests/images/lena.tif -Tests/images/lena.webp -Tests/images/lena.xpm -Tests/images/lena_bw.png -Tests/images/lena_bw_500.png -Tests/images/lena_g4.tif -Tests/images/lena_g4_500.tif -Tests/images/lena_webp_bits.ppm -Tests/images/lena_webp_write.ppm -Tests/images/morph_a.png -Tests/images/multiline_text.png -Tests/images/non_zero_bb.eps -Tests/images/non_zero_bb.png -Tests/images/non_zero_bb_scale2.png -Tests/images/p_trns_single.png -Tests/images/pil123p.png -Tests/images/pil123rgba.png -Tests/images/pil136.tiff -Tests/images/pil168.tif -Tests/images/pil184.pcx -Tests/images/pil_sample_cmyk.jpg -Tests/images/pil_sample_rgb.jpg -Tests/images/pillow.icns -Tests/images/pillow.ico -Tests/images/pillow2.icns -Tests/images/pillow3.icns -Tests/images/pngtest_bad.png.bin -Tests/images/pport_g4.tif -Tests/images/python.ico -Tests/images/rgb.jpg -Tests/images/rgb_trns.png -Tests/images/rgb_trns_ycbc.j2k -Tests/images/rgb_trns_ycbc.jp2 -Tests/images/test-card-lossless.jp2 -Tests/images/test-card-lossy-tiled.jp2 -Tests/images/test-card.png -Tests/images/test-ole-file.doc -Tests/images/test.colors.gif -Tests/images/tiff_adobe_deflate.tif -Tests/images/transparent.png -Tests/images/transparent.webp -Tests/images/zero_bb.eps -Tests/images/zero_bb.png -Tests/images/zero_bb_scale2.png -Tests/images/bmp/README.txt -Tests/images/bmp/b/badbitcount.bmp -Tests/images/bmp/b/badbitssize.bmp -Tests/images/bmp/b/baddens1.bmp -Tests/images/bmp/b/baddens2.bmp -Tests/images/bmp/b/badfilesize.bmp 
-Tests/images/bmp/b/badheadersize.bmp -Tests/images/bmp/b/badpalettesize.bmp -Tests/images/bmp/b/badplanes.bmp -Tests/images/bmp/b/badrle.bmp -Tests/images/bmp/b/badwidth.bmp -Tests/images/bmp/b/pal8badindex.bmp -Tests/images/bmp/b/reallybig.bmp -Tests/images/bmp/b/rletopdown.bmp -Tests/images/bmp/b/shortfile.bmp -Tests/images/bmp/g/pal1.bmp -Tests/images/bmp/g/pal1bg.bmp -Tests/images/bmp/g/pal1wb.bmp -Tests/images/bmp/g/pal4.bmp -Tests/images/bmp/g/pal4rle.bmp -Tests/images/bmp/g/pal8-0.bmp -Tests/images/bmp/g/pal8.bmp -Tests/images/bmp/g/pal8nonsquare.bmp -Tests/images/bmp/g/pal8os2.bmp -Tests/images/bmp/g/pal8rle.bmp -Tests/images/bmp/g/pal8topdown.bmp -Tests/images/bmp/g/pal8v4.bmp -Tests/images/bmp/g/pal8v5.bmp -Tests/images/bmp/g/pal8w124.bmp -Tests/images/bmp/g/pal8w125.bmp -Tests/images/bmp/g/pal8w126.bmp -Tests/images/bmp/g/rgb16-565.bmp -Tests/images/bmp/g/rgb16-565pal.bmp -Tests/images/bmp/g/rgb16.bmp -Tests/images/bmp/g/rgb24.bmp -Tests/images/bmp/g/rgb24pal.bmp -Tests/images/bmp/g/rgb32.bmp -Tests/images/bmp/g/rgb32bf.bmp -Tests/images/bmp/html/bkgd.png -Tests/images/bmp/html/bmpsuite.html -Tests/images/bmp/html/fakealpha.png -Tests/images/bmp/html/pal1.png -Tests/images/bmp/html/pal1bg.png -Tests/images/bmp/html/pal1p1.png -Tests/images/bmp/html/pal2.png -Tests/images/bmp/html/pal4.png -Tests/images/bmp/html/pal4rletrns-0.png -Tests/images/bmp/html/pal4rletrns-b.png -Tests/images/bmp/html/pal4rletrns.png -Tests/images/bmp/html/pal8.png -Tests/images/bmp/html/pal8nonsquare-e.png -Tests/images/bmp/html/pal8nonsquare-v.png -Tests/images/bmp/html/pal8rletrns-0.png -Tests/images/bmp/html/pal8rletrns-b.png -Tests/images/bmp/html/pal8rletrns.png -Tests/images/bmp/html/pal8w124.png -Tests/images/bmp/html/pal8w125.png -Tests/images/bmp/html/pal8w126.png -Tests/images/bmp/html/rgb16-231.png -Tests/images/bmp/html/rgb16-565.png -Tests/images/bmp/html/rgb16.png -Tests/images/bmp/html/rgb24.jpg -Tests/images/bmp/html/rgb24.png -Tests/images/bmp/html/rgba16-4444.png -Tests/images/bmp/html/rgba32.png -Tests/images/bmp/q/pal1p1.bmp -Tests/images/bmp/q/pal2.bmp -Tests/images/bmp/q/pal4rletrns.bmp -Tests/images/bmp/q/pal8offs.bmp -Tests/images/bmp/q/pal8os2sp.bmp -Tests/images/bmp/q/pal8os2v2-16.bmp -Tests/images/bmp/q/pal8os2v2.bmp -Tests/images/bmp/q/pal8oversizepal.bmp -Tests/images/bmp/q/pal8rletrns.bmp -Tests/images/bmp/q/rgb16-231.bmp -Tests/images/bmp/q/rgb24jpeg.bmp -Tests/images/bmp/q/rgb24largepal.bmp -Tests/images/bmp/q/rgb24lprof.bmp -Tests/images/bmp/q/rgb24png.bmp -Tests/images/bmp/q/rgb24prof.bmp -Tests/images/bmp/q/rgb32-111110.bmp -Tests/images/bmp/q/rgb32fakealpha.bmp -Tests/images/bmp/q/rgba16-4444.bmp -Tests/images/bmp/q/rgba32.bmp -Tests/images/bmp/q/rgba32abf.bmp -Tests/images/imagedraw/line_horizontal_slope1px_w2px.png -Tests/images/imagedraw/line_horizontal_w101px.png -Tests/images/imagedraw/line_horizontal_w2px_inverted.png -Tests/images/imagedraw/line_horizontal_w2px_normal.png -Tests/images/imagedraw/line_horizontal_w3px.png -Tests/images/imagedraw/line_oblique_45_w3px_a.png -Tests/images/imagedraw/line_oblique_45_w3px_b.png -Tests/images/imagedraw/line_vertical_slope1px_w2px.png -Tests/images/imagedraw/line_vertical_w101px.png -Tests/images/imagedraw/line_vertical_w2px_inverted.png -Tests/images/imagedraw/line_vertical_w2px_normal.png -Tests/images/imagedraw/line_vertical_w3px.png -Tests/images/imagedraw/square.png -Tests/images/imagedraw/triangle_right.png -Tk/README.rst -Tk/tkImaging.c -depends/README.rst -depends/install_openjpeg.sh -depends/install_webp.sh 
-docs/BUILDME -docs/COPYING -docs/Guardfile -docs/LICENSE -docs/Makefile -docs/PIL.rst -docs/about.rst -docs/conf.py -docs/developer.rst -docs/guides.rst -docs/index.rst -docs/installation.rst -docs/make.bat -docs/original-readme.rst -docs/plugins.rst -docs/porting-pil-to-pillow.rst -docs/requirements.txt -docs/_build/.gitignore -docs/_static/.gitignore -docs/_templates/.gitignore -docs/_templates/sidebarhelp.html -docs/handbook/appendices.rst -docs/handbook/concepts.rst -docs/handbook/image-file-formats.rst -docs/handbook/overview.rst -docs/handbook/tutorial.rst -docs/handbook/writing-your-own-file-decoder.rst -docs/reference/Image.rst -docs/reference/ImageChops.rst -docs/reference/ImageColor.rst -docs/reference/ImageDraw.rst -docs/reference/ImageEnhance.rst -docs/reference/ImageFile.rst -docs/reference/ImageFilter.rst -docs/reference/ImageFont.rst -docs/reference/ImageGrab.rst -docs/reference/ImageMath.rst -docs/reference/ImageOps.rst -docs/reference/ImagePalette.rst -docs/reference/ImagePath.rst -docs/reference/ImageQt.rst -docs/reference/ImageSequence.rst -docs/reference/ImageStat.rst -docs/reference/ImageTk.rst -docs/reference/ImageWin.rst -docs/reference/PSDraw.rst -docs/reference/index.rst -libImaging/Access.c -libImaging/AlphaComposite.c -libImaging/Antialias.c -libImaging/Bands.c -libImaging/Bit.h -libImaging/BitDecode.c -libImaging/Blend.c -libImaging/Chops.c -libImaging/Convert.c -libImaging/ConvertYCbCr.c -libImaging/Copy.c -libImaging/Crc32.c -libImaging/Crop.c -libImaging/Dib.c -libImaging/Draw.c -libImaging/Effects.c -libImaging/EpsEncode.c -libImaging/Except.c -libImaging/File.c -libImaging/Fill.c -libImaging/Filter.c -libImaging/FliDecode.c -libImaging/Geometry.c -libImaging/GetBBox.c -libImaging/Gif.h -libImaging/GifDecode.c -libImaging/GifEncode.c -libImaging/HexDecode.c -libImaging/Histo.c -libImaging/ImDib.h -libImaging/ImPlatform.h -libImaging/Imaging.h -libImaging/Incremental.c -libImaging/Jpeg.h -libImaging/Jpeg2K.h -libImaging/Jpeg2KDecode.c -libImaging/Jpeg2KEncode.c -libImaging/JpegDecode.c -libImaging/JpegEncode.c -libImaging/Lzw.h -libImaging/LzwDecode.c -libImaging/Matrix.c -libImaging/ModeFilter.c -libImaging/MspDecode.c -libImaging/Negative.c -libImaging/Offset.c -libImaging/Pack.c -libImaging/PackDecode.c -libImaging/Palette.c -libImaging/Paste.c -libImaging/PcdDecode.c -libImaging/PcxDecode.c -libImaging/PcxEncode.c -libImaging/Point.c -libImaging/Quant.c -libImaging/QuantHash.c -libImaging/QuantHash.h -libImaging/QuantHeap.c -libImaging/QuantHeap.h -libImaging/QuantOctree.c -libImaging/QuantOctree.h -libImaging/QuantTypes.h -libImaging/RankFilter.c -libImaging/Raw.h -libImaging/RawDecode.c -libImaging/RawEncode.c -libImaging/Storage.c -libImaging/SunRleDecode.c -libImaging/TgaRleDecode.c -libImaging/TiffDecode.c -libImaging/TiffDecode.h -libImaging/Unpack.c -libImaging/UnpackYCC.c -libImaging/UnsharpMask.c -libImaging/XbmDecode.c -libImaging/XbmEncode.c -libImaging/Zip.h -libImaging/ZipDecode.c -libImaging/ZipEncode.c \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 88e0955..0000000 --- a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,187 +0,0 @@ -../PIL/__init__.py -../PIL/_binary.py -../PIL/_util.py -../PIL/BdfFontFile.py -../PIL/BmpImagePlugin.py -../PIL/BufrStubImagePlugin.py -../PIL/ContainerIO.py 
-../PIL/CurImagePlugin.py -../PIL/DcxImagePlugin.py -../PIL/EpsImagePlugin.py -../PIL/ExifTags.py -../PIL/FitsStubImagePlugin.py -../PIL/FliImagePlugin.py -../PIL/FontFile.py -../PIL/FpxImagePlugin.py -../PIL/GbrImagePlugin.py -../PIL/GdImageFile.py -../PIL/GifImagePlugin.py -../PIL/GimpGradientFile.py -../PIL/GimpPaletteFile.py -../PIL/GribStubImagePlugin.py -../PIL/Hdf5StubImagePlugin.py -../PIL/IcnsImagePlugin.py -../PIL/IcoImagePlugin.py -../PIL/Image.py -../PIL/ImageChops.py -../PIL/ImageCms.py -../PIL/ImageColor.py -../PIL/ImageDraw.py -../PIL/ImageDraw2.py -../PIL/ImageEnhance.py -../PIL/ImageFile.py -../PIL/ImageFileIO.py -../PIL/ImageFilter.py -../PIL/ImageFont.py -../PIL/ImageGrab.py -../PIL/ImageMath.py -../PIL/ImageMode.py -../PIL/ImageMorph.py -../PIL/ImageOps.py -../PIL/ImagePalette.py -../PIL/ImagePath.py -../PIL/ImageQt.py -../PIL/ImageSequence.py -../PIL/ImageShow.py -../PIL/ImageStat.py -../PIL/ImageTk.py -../PIL/ImageTransform.py -../PIL/ImageWin.py -../PIL/ImImagePlugin.py -../PIL/ImtImagePlugin.py -../PIL/IptcImagePlugin.py -../PIL/Jpeg2KImagePlugin.py -../PIL/JpegImagePlugin.py -../PIL/JpegPresets.py -../PIL/McIdasImagePlugin.py -../PIL/MicImagePlugin.py -../PIL/MpegImagePlugin.py -../PIL/MspImagePlugin.py -../PIL/OleFileIO.py -../PIL/PaletteFile.py -../PIL/PalmImagePlugin.py -../PIL/PcdImagePlugin.py -../PIL/PcfFontFile.py -../PIL/PcxImagePlugin.py -../PIL/PdfImagePlugin.py -../PIL/PixarImagePlugin.py -../PIL/PngImagePlugin.py -../PIL/PpmImagePlugin.py -../PIL/PsdImagePlugin.py -../PIL/PSDraw.py -../PIL/PyAccess.py -../PIL/SgiImagePlugin.py -../PIL/SpiderImagePlugin.py -../PIL/SunImagePlugin.py -../PIL/TarIO.py -../PIL/TgaImagePlugin.py -../PIL/TiffImagePlugin.py -../PIL/TiffTags.py -../PIL/WalImageFile.py -../PIL/WebPImagePlugin.py -../PIL/WmfImagePlugin.py -../PIL/XbmImagePlugin.py -../PIL/XpmImagePlugin.py -../PIL/XVThumbImagePlugin.py -../PIL/OleFileIO-README.md -../PIL/__init__.pyc -../PIL/_binary.pyc -../PIL/_util.pyc -../PIL/BdfFontFile.pyc -../PIL/BmpImagePlugin.pyc -../PIL/BufrStubImagePlugin.pyc -../PIL/ContainerIO.pyc -../PIL/CurImagePlugin.pyc -../PIL/DcxImagePlugin.pyc -../PIL/EpsImagePlugin.pyc -../PIL/ExifTags.pyc -../PIL/FitsStubImagePlugin.pyc -../PIL/FliImagePlugin.pyc -../PIL/FontFile.pyc -../PIL/FpxImagePlugin.pyc -../PIL/GbrImagePlugin.pyc -../PIL/GdImageFile.pyc -../PIL/GifImagePlugin.pyc -../PIL/GimpGradientFile.pyc -../PIL/GimpPaletteFile.pyc -../PIL/GribStubImagePlugin.pyc -../PIL/Hdf5StubImagePlugin.pyc -../PIL/IcnsImagePlugin.pyc -../PIL/IcoImagePlugin.pyc -../PIL/Image.pyc -../PIL/ImageChops.pyc -../PIL/ImageCms.pyc -../PIL/ImageColor.pyc -../PIL/ImageDraw.pyc -../PIL/ImageDraw2.pyc -../PIL/ImageEnhance.pyc -../PIL/ImageFile.pyc -../PIL/ImageFileIO.pyc -../PIL/ImageFilter.pyc -../PIL/ImageFont.pyc -../PIL/ImageGrab.pyc -../PIL/ImageMath.pyc -../PIL/ImageMode.pyc -../PIL/ImageMorph.pyc -../PIL/ImageOps.pyc -../PIL/ImagePalette.pyc -../PIL/ImagePath.pyc -../PIL/ImageQt.pyc -../PIL/ImageSequence.pyc -../PIL/ImageShow.pyc -../PIL/ImageStat.pyc -../PIL/ImageTk.pyc -../PIL/ImageTransform.pyc -../PIL/ImageWin.pyc -../PIL/ImImagePlugin.pyc -../PIL/ImtImagePlugin.pyc -../PIL/IptcImagePlugin.pyc -../PIL/Jpeg2KImagePlugin.pyc -../PIL/JpegImagePlugin.pyc -../PIL/JpegPresets.pyc -../PIL/McIdasImagePlugin.pyc -../PIL/MicImagePlugin.pyc -../PIL/MpegImagePlugin.pyc -../PIL/MspImagePlugin.pyc -../PIL/OleFileIO.pyc -../PIL/PaletteFile.pyc -../PIL/PalmImagePlugin.pyc -../PIL/PcdImagePlugin.pyc -../PIL/PcfFontFile.pyc -../PIL/PcxImagePlugin.pyc 
-../PIL/PdfImagePlugin.pyc -../PIL/PixarImagePlugin.pyc -../PIL/PngImagePlugin.pyc -../PIL/PpmImagePlugin.pyc -../PIL/PsdImagePlugin.pyc -../PIL/PSDraw.pyc -../PIL/PyAccess.pyc -../PIL/SgiImagePlugin.pyc -../PIL/SpiderImagePlugin.pyc -../PIL/SunImagePlugin.pyc -../PIL/TarIO.pyc -../PIL/TgaImagePlugin.pyc -../PIL/TiffImagePlugin.pyc -../PIL/TiffTags.pyc -../PIL/WalImageFile.pyc -../PIL/WebPImagePlugin.pyc -../PIL/WmfImagePlugin.pyc -../PIL/XbmImagePlugin.pyc -../PIL/XpmImagePlugin.pyc -../PIL/XVThumbImagePlugin.pyc -../PIL/_imaging.so -../PIL/_imagingft.so -../PIL/_imagingtk.so -../PIL/_imagingmath.so -../PIL/_imagingmorph.so -./ -dependency_links.txt -PKG-INFO -SOURCES.txt -top_level.txt -zip-safe -../../../../bin/pilconvert.py -../../../../bin/pildriver.py -../../../../bin/pilfile.py -../../../../bin/pilfont.py -../../../../bin/pilprint.py diff --git a/Darwin/lib/python3.4/site-packages/README b/Darwin/lib/python3.4/site-packages/README deleted file mode 100644 index 273f625..0000000 --- a/Darwin/lib/python3.4/site-packages/README +++ /dev/null @@ -1,2 +0,0 @@ -This directory exists so that 3rd party packages can be installed -here. Read the source for site.py for more details. diff --git a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/PKG-INFO deleted file mode 100644 index 0a46d7a..0000000 --- a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,155 +0,0 @@ -Metadata-Version: 1.1 -Name: SQLAlchemy -Version: 0.9.7 -Summary: Database Abstraction Library -Home-page: http://www.sqlalchemy.org -Author: Mike Bayer -Author-email: mike_mp@zzzcomputing.com -License: MIT License -Description: SQLAlchemy - ========== - - The Python SQL Toolkit and Object Relational Mapper - - Introduction - ------------- - - SQLAlchemy is the Python SQL toolkit and Object Relational Mapper - that gives application developers the full power and - flexibility of SQL. SQLAlchemy provides a full suite - of well known enterprise-level persistence patterns, - designed for efficient and high-performing database - access, adapted into a simple and Pythonic domain - language. - - Major SQLAlchemy features include: - - * An industrial strength ORM, built - from the core on the identity map, unit of work, - and data mapper patterns. These patterns - allow transparent persistence of objects - using a declarative configuration system. - Domain models - can be constructed and manipulated naturally, - and changes are synchronized with the - current transaction automatically. - * A relationally-oriented query system, exposing - the full range of SQL's capabilities - explicitly, including joins, subqueries, - correlation, and most everything else, - in terms of the object model. - Writing queries with the ORM uses the same - techniques of relational composition you use - when writing SQL. While you can drop into - literal SQL at any time, it's virtually never - needed. - * A comprehensive and flexible system - of eager loading for related collections and objects. - Collections are cached within a session, - and can be loaded on individual access, all - at once using joins, or by query per collection - across the full result set. - * A Core SQL construction system and DBAPI - interaction layer. 
The SQLAlchemy Core is - separate from the ORM and is a full database - abstraction layer in its own right, and includes - an extensible Python-based SQL expression - language, schema metadata, connection pooling, - type coercion, and custom types. - * All primary and foreign key constraints are - assumed to be composite and natural. Surrogate - integer primary keys are of course still the - norm, but SQLAlchemy never assumes or hardcodes - to this model. - * Database introspection and generation. Database - schemas can be "reflected" in one step into - Python structures representing database metadata; - those same structures can then generate - CREATE statements right back out - all within - the Core, independent of the ORM. - - SQLAlchemy's philosophy: - - * SQL databases behave less and less like object - collections the more size and performance start to - matter; object collections behave less and less like - tables and rows the more abstraction starts to matter. - SQLAlchemy aims to accommodate both of these - principles. - * An ORM doesn't need to hide the "R". A relational - database provides rich, set-based functionality - that should be fully exposed. SQLAlchemy's - ORM provides an open-ended set of patterns - that allow a developer to construct a custom - mediation layer between a domain model and - a relational schema, turning the so-called - "object relational impedance" issue into - a distant memory. - * The developer, in all cases, makes all decisions - regarding the design, structure, and naming conventions - of both the object model as well as the relational - schema. SQLAlchemy only provides the means - to automate the execution of these decisions. - * With SQLAlchemy, there's no such thing as - "the ORM generated a bad query" - you - retain full control over the structure of - queries, including how joins are organized, - how subqueries and correlation is used, what - columns are requested. Everything SQLAlchemy - does is ultimately the result of a developer- - initiated decision. - * Don't use an ORM if the problem doesn't need one. - SQLAlchemy consists of a Core and separate ORM - component. The Core offers a full SQL expression - language that allows Pythonic construction - of SQL constructs that render directly to SQL - strings for a target database, returning - result sets that are essentially enhanced DBAPI - cursors. - * Transactions should be the norm. With SQLAlchemy's - ORM, nothing goes to permanent storage until - commit() is called. SQLAlchemy encourages applications - to create a consistent means of delineating - the start and end of a series of operations. - * Never render a literal value in a SQL statement. - Bound parameters are used to the greatest degree - possible, allowing query optimizers to cache - query plans effectively and making SQL injection - attacks a non-issue. - - Documentation - ------------- - - Latest documentation is at: - - http://www.sqlalchemy.org/docs/ - - Installation / Requirements - --------------------------- - - Full documentation for installation is at - `Installation `_. - - Getting Help / Development / Bug reporting - ------------------------------------------ - - Please refer to the `SQLAlchemy Community Guide `_. - - License - ------- - - SQLAlchemy is distributed under the `MIT license - `_. 
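The Core-expression-language and bound-parameter points in the description above are easiest to see in a short sketch. The following is a minimal illustration only, not part of the packaged metadata being removed here; it assumes the SQLAlchemy 0.9.x API with an in-memory SQLite database, and the "users" table and its columns are invented for the example.

    from sqlalchemy import (Column, Integer, MetaData, String, Table,
                            create_engine, select)

    # Core only: no ORM involved.  An Engine plus Table metadata is enough.
    engine = create_engine("sqlite:///:memory:")
    metadata = MetaData()
    users = Table("users", metadata,
                  Column("id", Integer, primary_key=True),
                  Column("name", String(50)))
    metadata.create_all(engine)

    conn = engine.connect()
    conn.execute(users.insert(), [{"name": "ed"}, {"name": "wendy"}])

    # The WHERE clause renders as a bound parameter (":name_1"), never as a
    # literal value spliced into the SQL string.
    stmt = select([users]).where(users.c.name == "ed")
    print(stmt)                          # SELECT ... WHERE users.name = :name_1
    print(conn.execute(stmt).fetchall()) # [(1, 'ed')]

Swapping the engine URL for another backend leaves the statement construction unchanged; the expression is rendered to that backend's SQL at execution time, which is the portability the description above is claiming.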
- - -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: Jython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends -Classifier: Operating System :: OS Independent diff --git a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 50d65c8..0000000 --- a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,682 +0,0 @@ -AUTHORS -CHANGES -LICENSE -MANIFEST.in -README.dialects.rst -README.rst -README.unittests.rst -setup.cfg -setup.py -sqla_nose.py -doc/contents.html -doc/copyright.html -doc/faq.html -doc/genindex.html -doc/glossary.html -doc/index.html -doc/intro.html -doc/search.html -doc/searchindex.js -doc/_images/sqla_arch_small.png -doc/_images/sqla_engine_arch.png -doc/_modules/index.html -doc/_modules/examples/adjacency_list/adjacency_list.html -doc/_modules/examples/association/basic_association.html -doc/_modules/examples/association/dict_of_sets_with_default.html -doc/_modules/examples/association/proxied_association.html -doc/_modules/examples/custom_attributes/custom_management.html -doc/_modules/examples/custom_attributes/listen_for_events.html -doc/_modules/examples/dogpile_caching/advanced.html -doc/_modules/examples/dogpile_caching/caching_query.html -doc/_modules/examples/dogpile_caching/environment.html -doc/_modules/examples/dogpile_caching/fixture_data.html -doc/_modules/examples/dogpile_caching/helloworld.html -doc/_modules/examples/dogpile_caching/local_session_caching.html -doc/_modules/examples/dogpile_caching/model.html -doc/_modules/examples/dogpile_caching/relationship_caching.html -doc/_modules/examples/dynamic_dict/dynamic_dict.html -doc/_modules/examples/elementtree/adjacency_list.html -doc/_modules/examples/elementtree/optimized_al.html -doc/_modules/examples/elementtree/pickle.html -doc/_modules/examples/generic_associations/discriminator_on_association.html -doc/_modules/examples/generic_associations/generic_fk.html -doc/_modules/examples/generic_associations/table_per_association.html -doc/_modules/examples/generic_associations/table_per_related.html -doc/_modules/examples/graphs/directed_graph.html -doc/_modules/examples/inheritance/concrete.html -doc/_modules/examples/inheritance/joined.html -doc/_modules/examples/inheritance/single.html -doc/_modules/examples/join_conditions/cast.html -doc/_modules/examples/join_conditions/threeway.html -doc/_modules/examples/large_collection/large_collection.html -doc/_modules/examples/materialized_paths/materialized_paths.html -doc/_modules/examples/nested_sets/nested_sets.html -doc/_modules/examples/postgis/postgis.html -doc/_modules/examples/sharding/attribute_shard.html -doc/_modules/examples/versioned_history/history_meta.html -doc/_modules/examples/versioned_history/test_versioning.html -doc/_modules/examples/versioned_rows/versioned_map.html -doc/_modules/examples/versioned_rows/versioned_rows.html -doc/_modules/examples/vertical/dictlike-polymorphic.html -doc/_modules/examples/vertical/dictlike.html -doc/_static/basic.css 
-doc/_static/changelog.css -doc/_static/comment-bright.png -doc/_static/comment-close.png -doc/_static/comment.png -doc/_static/default.css -doc/_static/detectmobile.js -doc/_static/docs.css -doc/_static/doctools.js -doc/_static/down-pressed.png -doc/_static/down.png -doc/_static/file.png -doc/_static/init.js -doc/_static/jquery.js -doc/_static/minus.png -doc/_static/plus.png -doc/_static/pygments.css -doc/_static/searchtools.js -doc/_static/sidebar.js -doc/_static/sphinx_paramlinks.css -doc/_static/underscore.js -doc/_static/up-pressed.png -doc/_static/up.png -doc/_static/websupport.js -doc/build/Makefile -doc/build/conf.py -doc/build/contents.rst -doc/build/copyright.rst -doc/build/faq.rst -doc/build/glossary.rst -doc/build/index.rst -doc/build/intro.rst -doc/build/requirements.txt -doc/build/sqla_arch_small.png -doc/build/testdocs.py -doc/build/builder/__init__.py -doc/build/builder/autodoc_mods.py -doc/build/builder/dialect_info.py -doc/build/builder/mako.py -doc/build/builder/sqlformatter.py -doc/build/builder/util.py -doc/build/builder/viewsource.py -doc/build/changelog/changelog_01.rst -doc/build/changelog/changelog_02.rst -doc/build/changelog/changelog_03.rst -doc/build/changelog/changelog_04.rst -doc/build/changelog/changelog_05.rst -doc/build/changelog/changelog_06.rst -doc/build/changelog/changelog_07.rst -doc/build/changelog/changelog_08.rst -doc/build/changelog/changelog_09.rst -doc/build/changelog/index.rst -doc/build/changelog/migration_04.rst -doc/build/changelog/migration_05.rst -doc/build/changelog/migration_06.rst -doc/build/changelog/migration_07.rst -doc/build/changelog/migration_08.rst -doc/build/changelog/migration_09.rst -doc/build/core/compiler.rst -doc/build/core/connections.rst -doc/build/core/constraints.rst -doc/build/core/ddl.rst -doc/build/core/defaults.rst -doc/build/core/dml.rst -doc/build/core/engines.rst -doc/build/core/event.rst -doc/build/core/events.rst -doc/build/core/exceptions.rst -doc/build/core/expression_api.rst -doc/build/core/functions.rst -doc/build/core/index.rst -doc/build/core/inspection.rst -doc/build/core/interfaces.rst -doc/build/core/internals.rst -doc/build/core/metadata.rst -doc/build/core/pooling.rst -doc/build/core/reflection.rst -doc/build/core/schema.rst -doc/build/core/selectable.rst -doc/build/core/serializer.rst -doc/build/core/sqla_engine_arch.png -doc/build/core/sqlelement.rst -doc/build/core/tutorial.rst -doc/build/core/types.rst -doc/build/dialects/drizzle.rst -doc/build/dialects/firebird.rst -doc/build/dialects/index.rst -doc/build/dialects/mssql.rst -doc/build/dialects/mysql.rst -doc/build/dialects/oracle.rst -doc/build/dialects/postgresql.rst -doc/build/dialects/sqlite.rst -doc/build/dialects/sybase.rst -doc/build/orm/collections.rst -doc/build/orm/deprecated.rst -doc/build/orm/events.rst -doc/build/orm/examples.rst -doc/build/orm/exceptions.rst -doc/build/orm/index.rst -doc/build/orm/inheritance.rst -doc/build/orm/internals.rst -doc/build/orm/loading.rst -doc/build/orm/mapper_config.rst -doc/build/orm/query.rst -doc/build/orm/relationships.rst -doc/build/orm/session.rst -doc/build/orm/tutorial.rst -doc/build/orm/extensions/associationproxy.rst -doc/build/orm/extensions/automap.rst -doc/build/orm/extensions/declarative.rst -doc/build/orm/extensions/horizontal_shard.rst -doc/build/orm/extensions/hybrid.rst -doc/build/orm/extensions/index.rst -doc/build/orm/extensions/instrumentation.rst -doc/build/orm/extensions/mutable.rst -doc/build/orm/extensions/orderinglist.rst -doc/build/static/detectmobile.js 
-doc/build/static/docs.css -doc/build/static/init.js -doc/build/templates/genindex.mako -doc/build/templates/layout.mako -doc/build/templates/page.mako -doc/build/templates/search.mako -doc/build/templates/static_base.mako -doc/build/texinputs/Makefile -doc/build/texinputs/sphinx.sty -doc/changelog/changelog_01.html -doc/changelog/changelog_02.html -doc/changelog/changelog_03.html -doc/changelog/changelog_04.html -doc/changelog/changelog_05.html -doc/changelog/changelog_06.html -doc/changelog/changelog_07.html -doc/changelog/changelog_08.html -doc/changelog/changelog_09.html -doc/changelog/index.html -doc/changelog/migration_04.html -doc/changelog/migration_05.html -doc/changelog/migration_06.html -doc/changelog/migration_07.html -doc/changelog/migration_08.html -doc/changelog/migration_09.html -doc/core/compiler.html -doc/core/connections.html -doc/core/constraints.html -doc/core/ddl.html -doc/core/defaults.html -doc/core/dml.html -doc/core/engines.html -doc/core/event.html -doc/core/events.html -doc/core/exceptions.html -doc/core/expression_api.html -doc/core/functions.html -doc/core/index.html -doc/core/inspection.html -doc/core/interfaces.html -doc/core/internals.html -doc/core/metadata.html -doc/core/pooling.html -doc/core/reflection.html -doc/core/schema.html -doc/core/selectable.html -doc/core/serializer.html -doc/core/sqlelement.html -doc/core/tutorial.html -doc/core/types.html -doc/dialects/drizzle.html -doc/dialects/firebird.html -doc/dialects/index.html -doc/dialects/mssql.html -doc/dialects/mysql.html -doc/dialects/oracle.html -doc/dialects/postgresql.html -doc/dialects/sqlite.html -doc/dialects/sybase.html -doc/orm/collections.html -doc/orm/deprecated.html -doc/orm/events.html -doc/orm/examples.html -doc/orm/exceptions.html -doc/orm/index.html -doc/orm/inheritance.html -doc/orm/internals.html -doc/orm/loading.html -doc/orm/mapper_config.html -doc/orm/query.html -doc/orm/relationships.html -doc/orm/session.html -doc/orm/tutorial.html -doc/orm/extensions/associationproxy.html -doc/orm/extensions/automap.html -doc/orm/extensions/declarative.html -doc/orm/extensions/horizontal_shard.html -doc/orm/extensions/hybrid.html -doc/orm/extensions/index.html -doc/orm/extensions/instrumentation.html -doc/orm/extensions/mutable.html -doc/orm/extensions/orderinglist.html -examples/__init__.py -examples/adjacency_list/__init__.py -examples/adjacency_list/adjacency_list.py -examples/association/__init__.py -examples/association/basic_association.py -examples/association/dict_of_sets_with_default.py -examples/association/proxied_association.py -examples/custom_attributes/__init__.py -examples/custom_attributes/custom_management.py -examples/custom_attributes/listen_for_events.py -examples/dogpile_caching/__init__.py -examples/dogpile_caching/advanced.py -examples/dogpile_caching/caching_query.py -examples/dogpile_caching/environment.py -examples/dogpile_caching/fixture_data.py -examples/dogpile_caching/helloworld.py -examples/dogpile_caching/local_session_caching.py -examples/dogpile_caching/model.py -examples/dogpile_caching/relationship_caching.py -examples/dynamic_dict/__init__.py -examples/dynamic_dict/dynamic_dict.py -examples/elementtree/__init__.py -examples/elementtree/adjacency_list.py -examples/elementtree/optimized_al.py -examples/elementtree/pickle.py -examples/elementtree/test.xml -examples/elementtree/test2.xml -examples/elementtree/test3.xml -examples/generic_associations/__init__.py -examples/generic_associations/discriminator_on_association.py 
-examples/generic_associations/generic_fk.py -examples/generic_associations/table_per_association.py -examples/generic_associations/table_per_related.py -examples/graphs/__init__.py -examples/graphs/directed_graph.py -examples/inheritance/__init__.py -examples/inheritance/concrete.py -examples/inheritance/joined.py -examples/inheritance/single.py -examples/join_conditions/__init__.py -examples/join_conditions/cast.py -examples/join_conditions/threeway.py -examples/large_collection/__init__.py -examples/large_collection/large_collection.py -examples/materialized_paths/__init__.py -examples/materialized_paths/materialized_paths.py -examples/nested_sets/__init__.py -examples/nested_sets/nested_sets.py -examples/postgis/__init__.py -examples/postgis/postgis.py -examples/sharding/__init__.py -examples/sharding/attribute_shard.py -examples/versioned_history/__init__.py -examples/versioned_history/history_meta.py -examples/versioned_history/test_versioning.py -examples/versioned_rows/__init__.py -examples/versioned_rows/versioned_map.py -examples/versioned_rows/versioned_rows.py -examples/vertical/__init__.py -examples/vertical/dictlike-polymorphic.py -examples/vertical/dictlike.py -lib/SQLAlchemy.egg-info/PKG-INFO -lib/SQLAlchemy.egg-info/SOURCES.txt -lib/SQLAlchemy.egg-info/dependency_links.txt -lib/SQLAlchemy.egg-info/top_level.txt -lib/sqlalchemy/__init__.py -lib/sqlalchemy/events.py -lib/sqlalchemy/exc.py -lib/sqlalchemy/inspection.py -lib/sqlalchemy/interfaces.py -lib/sqlalchemy/log.py -lib/sqlalchemy/pool.py -lib/sqlalchemy/processors.py -lib/sqlalchemy/schema.py -lib/sqlalchemy/types.py -lib/sqlalchemy/cextension/processors.c -lib/sqlalchemy/cextension/resultproxy.c -lib/sqlalchemy/cextension/utils.c -lib/sqlalchemy/connectors/__init__.py -lib/sqlalchemy/connectors/mxodbc.py -lib/sqlalchemy/connectors/mysqldb.py -lib/sqlalchemy/connectors/pyodbc.py -lib/sqlalchemy/connectors/zxJDBC.py -lib/sqlalchemy/databases/__init__.py -lib/sqlalchemy/dialects/__init__.py -lib/sqlalchemy/dialects/postgres.py -lib/sqlalchemy/dialects/type_migration_guidelines.txt -lib/sqlalchemy/dialects/drizzle/__init__.py -lib/sqlalchemy/dialects/drizzle/base.py -lib/sqlalchemy/dialects/drizzle/mysqldb.py -lib/sqlalchemy/dialects/firebird/__init__.py -lib/sqlalchemy/dialects/firebird/base.py -lib/sqlalchemy/dialects/firebird/fdb.py -lib/sqlalchemy/dialects/firebird/kinterbasdb.py -lib/sqlalchemy/dialects/mssql/__init__.py -lib/sqlalchemy/dialects/mssql/adodbapi.py -lib/sqlalchemy/dialects/mssql/base.py -lib/sqlalchemy/dialects/mssql/information_schema.py -lib/sqlalchemy/dialects/mssql/mxodbc.py -lib/sqlalchemy/dialects/mssql/pymssql.py -lib/sqlalchemy/dialects/mssql/pyodbc.py -lib/sqlalchemy/dialects/mssql/zxjdbc.py -lib/sqlalchemy/dialects/mysql/__init__.py -lib/sqlalchemy/dialects/mysql/base.py -lib/sqlalchemy/dialects/mysql/cymysql.py -lib/sqlalchemy/dialects/mysql/gaerdbms.py -lib/sqlalchemy/dialects/mysql/mysqlconnector.py -lib/sqlalchemy/dialects/mysql/mysqldb.py -lib/sqlalchemy/dialects/mysql/oursql.py -lib/sqlalchemy/dialects/mysql/pymysql.py -lib/sqlalchemy/dialects/mysql/pyodbc.py -lib/sqlalchemy/dialects/mysql/zxjdbc.py -lib/sqlalchemy/dialects/oracle/__init__.py -lib/sqlalchemy/dialects/oracle/base.py -lib/sqlalchemy/dialects/oracle/cx_oracle.py -lib/sqlalchemy/dialects/oracle/zxjdbc.py -lib/sqlalchemy/dialects/postgresql/__init__.py -lib/sqlalchemy/dialects/postgresql/base.py -lib/sqlalchemy/dialects/postgresql/constraints.py -lib/sqlalchemy/dialects/postgresql/hstore.py 
-lib/sqlalchemy/dialects/postgresql/json.py -lib/sqlalchemy/dialects/postgresql/pg8000.py -lib/sqlalchemy/dialects/postgresql/psycopg2.py -lib/sqlalchemy/dialects/postgresql/pypostgresql.py -lib/sqlalchemy/dialects/postgresql/ranges.py -lib/sqlalchemy/dialects/postgresql/zxjdbc.py -lib/sqlalchemy/dialects/sqlite/__init__.py -lib/sqlalchemy/dialects/sqlite/base.py -lib/sqlalchemy/dialects/sqlite/pysqlite.py -lib/sqlalchemy/dialects/sybase/__init__.py -lib/sqlalchemy/dialects/sybase/base.py -lib/sqlalchemy/dialects/sybase/mxodbc.py -lib/sqlalchemy/dialects/sybase/pyodbc.py -lib/sqlalchemy/dialects/sybase/pysybase.py -lib/sqlalchemy/engine/__init__.py -lib/sqlalchemy/engine/base.py -lib/sqlalchemy/engine/default.py -lib/sqlalchemy/engine/interfaces.py -lib/sqlalchemy/engine/reflection.py -lib/sqlalchemy/engine/result.py -lib/sqlalchemy/engine/strategies.py -lib/sqlalchemy/engine/threadlocal.py -lib/sqlalchemy/engine/url.py -lib/sqlalchemy/engine/util.py -lib/sqlalchemy/event/__init__.py -lib/sqlalchemy/event/api.py -lib/sqlalchemy/event/attr.py -lib/sqlalchemy/event/base.py -lib/sqlalchemy/event/legacy.py -lib/sqlalchemy/event/registry.py -lib/sqlalchemy/ext/__init__.py -lib/sqlalchemy/ext/associationproxy.py -lib/sqlalchemy/ext/automap.py -lib/sqlalchemy/ext/compiler.py -lib/sqlalchemy/ext/horizontal_shard.py -lib/sqlalchemy/ext/hybrid.py -lib/sqlalchemy/ext/instrumentation.py -lib/sqlalchemy/ext/mutable.py -lib/sqlalchemy/ext/orderinglist.py -lib/sqlalchemy/ext/serializer.py -lib/sqlalchemy/ext/declarative/__init__.py -lib/sqlalchemy/ext/declarative/api.py -lib/sqlalchemy/ext/declarative/base.py -lib/sqlalchemy/ext/declarative/clsregistry.py -lib/sqlalchemy/orm/__init__.py -lib/sqlalchemy/orm/attributes.py -lib/sqlalchemy/orm/base.py -lib/sqlalchemy/orm/collections.py -lib/sqlalchemy/orm/dependency.py -lib/sqlalchemy/orm/deprecated_interfaces.py -lib/sqlalchemy/orm/descriptor_props.py -lib/sqlalchemy/orm/dynamic.py -lib/sqlalchemy/orm/evaluator.py -lib/sqlalchemy/orm/events.py -lib/sqlalchemy/orm/exc.py -lib/sqlalchemy/orm/identity.py -lib/sqlalchemy/orm/instrumentation.py -lib/sqlalchemy/orm/interfaces.py -lib/sqlalchemy/orm/loading.py -lib/sqlalchemy/orm/mapper.py -lib/sqlalchemy/orm/path_registry.py -lib/sqlalchemy/orm/persistence.py -lib/sqlalchemy/orm/properties.py -lib/sqlalchemy/orm/query.py -lib/sqlalchemy/orm/relationships.py -lib/sqlalchemy/orm/scoping.py -lib/sqlalchemy/orm/session.py -lib/sqlalchemy/orm/state.py -lib/sqlalchemy/orm/strategies.py -lib/sqlalchemy/orm/strategy_options.py -lib/sqlalchemy/orm/sync.py -lib/sqlalchemy/orm/unitofwork.py -lib/sqlalchemy/orm/util.py -lib/sqlalchemy/sql/__init__.py -lib/sqlalchemy/sql/annotation.py -lib/sqlalchemy/sql/base.py -lib/sqlalchemy/sql/compiler.py -lib/sqlalchemy/sql/ddl.py -lib/sqlalchemy/sql/default_comparator.py -lib/sqlalchemy/sql/dml.py -lib/sqlalchemy/sql/elements.py -lib/sqlalchemy/sql/expression.py -lib/sqlalchemy/sql/functions.py -lib/sqlalchemy/sql/naming.py -lib/sqlalchemy/sql/operators.py -lib/sqlalchemy/sql/schema.py -lib/sqlalchemy/sql/selectable.py -lib/sqlalchemy/sql/sqltypes.py -lib/sqlalchemy/sql/type_api.py -lib/sqlalchemy/sql/util.py -lib/sqlalchemy/sql/visitors.py -lib/sqlalchemy/testing/__init__.py -lib/sqlalchemy/testing/assertions.py -lib/sqlalchemy/testing/assertsql.py -lib/sqlalchemy/testing/config.py -lib/sqlalchemy/testing/distutils_run.py -lib/sqlalchemy/testing/engines.py -lib/sqlalchemy/testing/entities.py -lib/sqlalchemy/testing/exclusions.py -lib/sqlalchemy/testing/fixtures.py 
-lib/sqlalchemy/testing/mock.py -lib/sqlalchemy/testing/pickleable.py -lib/sqlalchemy/testing/profiling.py -lib/sqlalchemy/testing/requirements.py -lib/sqlalchemy/testing/runner.py -lib/sqlalchemy/testing/schema.py -lib/sqlalchemy/testing/util.py -lib/sqlalchemy/testing/warnings.py -lib/sqlalchemy/testing/plugin/__init__.py -lib/sqlalchemy/testing/plugin/noseplugin.py -lib/sqlalchemy/testing/plugin/plugin_base.py -lib/sqlalchemy/testing/plugin/pytestplugin.py -lib/sqlalchemy/testing/suite/__init__.py -lib/sqlalchemy/testing/suite/test_ddl.py -lib/sqlalchemy/testing/suite/test_insert.py -lib/sqlalchemy/testing/suite/test_reflection.py -lib/sqlalchemy/testing/suite/test_results.py -lib/sqlalchemy/testing/suite/test_select.py -lib/sqlalchemy/testing/suite/test_sequence.py -lib/sqlalchemy/testing/suite/test_types.py -lib/sqlalchemy/testing/suite/test_update_delete.py -lib/sqlalchemy/util/__init__.py -lib/sqlalchemy/util/_collections.py -lib/sqlalchemy/util/compat.py -lib/sqlalchemy/util/deprecations.py -lib/sqlalchemy/util/langhelpers.py -lib/sqlalchemy/util/queue.py -lib/sqlalchemy/util/topological.py -test/__init__.py -test/binary_data_one.dat -test/binary_data_two.dat -test/conftest.py -test/requirements.py -test/aaa_profiling/__init__.py -test/aaa_profiling/test_compiler.py -test/aaa_profiling/test_memusage.py -test/aaa_profiling/test_orm.py -test/aaa_profiling/test_pool.py -test/aaa_profiling/test_resultset.py -test/aaa_profiling/test_zoomark.py -test/aaa_profiling/test_zoomark_orm.py -test/base/__init__.py -test/base/test_dependency.py -test/base/test_events.py -test/base/test_except.py -test/base/test_inspect.py -test/base/test_utils.py -test/dialect/__init__.py -test/dialect/test_firebird.py -test/dialect/test_mxodbc.py -test/dialect/test_oracle.py -test/dialect/test_pyodbc.py -test/dialect/test_sqlite.py -test/dialect/test_suite.py -test/dialect/test_sybase.py -test/dialect/mssql/__init__.py -test/dialect/mssql/test_compiler.py -test/dialect/mssql/test_engine.py -test/dialect/mssql/test_query.py -test/dialect/mssql/test_reflection.py -test/dialect/mssql/test_types.py -test/dialect/mysql/__init__.py -test/dialect/mysql/test_compiler.py -test/dialect/mysql/test_dialect.py -test/dialect/mysql/test_query.py -test/dialect/mysql/test_reflection.py -test/dialect/mysql/test_types.py -test/dialect/postgresql/__init__.py -test/dialect/postgresql/test_compiler.py -test/dialect/postgresql/test_dialect.py -test/dialect/postgresql/test_query.py -test/dialect/postgresql/test_reflection.py -test/dialect/postgresql/test_types.py -test/engine/__init__.py -test/engine/test_bind.py -test/engine/test_ddlevents.py -test/engine/test_execute.py -test/engine/test_logging.py -test/engine/test_parseconnect.py -test/engine/test_pool.py -test/engine/test_processors.py -test/engine/test_reconnect.py -test/engine/test_reflection.py -test/engine/test_transaction.py -test/ext/__init__.py -test/ext/test_associationproxy.py -test/ext/test_automap.py -test/ext/test_compiler.py -test/ext/test_extendedattr.py -test/ext/test_horizontal_shard.py -test/ext/test_hybrid.py -test/ext/test_mutable.py -test/ext/test_orderinglist.py -test/ext/test_serializer.py -test/ext/declarative/__init__.py -test/ext/declarative/test_basic.py -test/ext/declarative/test_clsregistry.py -test/ext/declarative/test_inheritance.py -test/ext/declarative/test_mixin.py -test/ext/declarative/test_reflection.py -test/orm/__init__.py -test/orm/_fixtures.py -test/orm/test_association.py -test/orm/test_assorted_eager.py -test/orm/test_attributes.py 
-test/orm/test_backref_mutations.py -test/orm/test_bind.py -test/orm/test_bundle.py -test/orm/test_cascade.py -test/orm/test_collection.py -test/orm/test_compile.py -test/orm/test_composites.py -test/orm/test_cycles.py -test/orm/test_default_strategies.py -test/orm/test_defaults.py -test/orm/test_deferred.py -test/orm/test_deprecations.py -test/orm/test_descriptor.py -test/orm/test_dynamic.py -test/orm/test_eager_relations.py -test/orm/test_evaluator.py -test/orm/test_events.py -test/orm/test_expire.py -test/orm/test_froms.py -test/orm/test_generative.py -test/orm/test_hasparent.py -test/orm/test_immediate_load.py -test/orm/test_inspect.py -test/orm/test_instrumentation.py -test/orm/test_joins.py -test/orm/test_lazy_relations.py -test/orm/test_load_on_fks.py -test/orm/test_loading.py -test/orm/test_lockmode.py -test/orm/test_manytomany.py -test/orm/test_mapper.py -test/orm/test_merge.py -test/orm/test_naturalpks.py -test/orm/test_of_type.py -test/orm/test_onetoone.py -test/orm/test_options.py -test/orm/test_pickled.py -test/orm/test_query.py -test/orm/test_rel_fn.py -test/orm/test_relationships.py -test/orm/test_scoping.py -test/orm/test_selectable.py -test/orm/test_session.py -test/orm/test_subquery_relations.py -test/orm/test_sync.py -test/orm/test_transaction.py -test/orm/test_unitofwork.py -test/orm/test_unitofworkv2.py -test/orm/test_update_delete.py -test/orm/test_utils.py -test/orm/test_validators.py -test/orm/test_versioning.py -test/orm/inheritance/__init__.py -test/orm/inheritance/_poly_fixtures.py -test/orm/inheritance/test_abc_inheritance.py -test/orm/inheritance/test_abc_polymorphic.py -test/orm/inheritance/test_assorted_poly.py -test/orm/inheritance/test_basic.py -test/orm/inheritance/test_concrete.py -test/orm/inheritance/test_magazine.py -test/orm/inheritance/test_manytomany.py -test/orm/inheritance/test_poly_linked_list.py -test/orm/inheritance/test_poly_persistence.py -test/orm/inheritance/test_polymorphic_rel.py -test/orm/inheritance/test_productspec.py -test/orm/inheritance/test_relationship.py -test/orm/inheritance/test_selects.py -test/orm/inheritance/test_single.py -test/orm/inheritance/test_with_poly.py -test/perf/invalidate_stresstest.py -test/perf/orm2010.py -test/sql/__init__.py -test/sql/test_case_statement.py -test/sql/test_compiler.py -test/sql/test_constraints.py -test/sql/test_cte.py -test/sql/test_ddlemit.py -test/sql/test_defaults.py -test/sql/test_delete.py -test/sql/test_functions.py -test/sql/test_generative.py -test/sql/test_insert.py -test/sql/test_inspect.py -test/sql/test_join_rewriting.py -test/sql/test_labels.py -test/sql/test_metadata.py -test/sql/test_operators.py -test/sql/test_query.py -test/sql/test_quote.py -test/sql/test_returning.py -test/sql/test_rowcount.py -test/sql/test_selectable.py -test/sql/test_text.py -test/sql/test_type_expressions.py -test/sql/test_types.py -test/sql/test_unicode.py -test/sql/test_update.py \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/installed-files.txt deleted file mode 100644 index efda185..0000000 --- a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,366 +0,0 @@ -../sqlalchemy/__init__.py -../sqlalchemy/events.py -../sqlalchemy/exc.py -../sqlalchemy/inspection.py -../sqlalchemy/interfaces.py -../sqlalchemy/log.py -../sqlalchemy/pool.py -../sqlalchemy/processors.py -../sqlalchemy/schema.py 
-../sqlalchemy/types.py -../sqlalchemy/connectors/__init__.py -../sqlalchemy/connectors/mxodbc.py -../sqlalchemy/connectors/mysqldb.py -../sqlalchemy/connectors/pyodbc.py -../sqlalchemy/connectors/zxJDBC.py -../sqlalchemy/databases/__init__.py -../sqlalchemy/dialects/__init__.py -../sqlalchemy/dialects/postgres.py -../sqlalchemy/dialects/drizzle/__init__.py -../sqlalchemy/dialects/drizzle/base.py -../sqlalchemy/dialects/drizzle/mysqldb.py -../sqlalchemy/dialects/firebird/__init__.py -../sqlalchemy/dialects/firebird/base.py -../sqlalchemy/dialects/firebird/fdb.py -../sqlalchemy/dialects/firebird/kinterbasdb.py -../sqlalchemy/dialects/mssql/__init__.py -../sqlalchemy/dialects/mssql/adodbapi.py -../sqlalchemy/dialects/mssql/base.py -../sqlalchemy/dialects/mssql/information_schema.py -../sqlalchemy/dialects/mssql/mxodbc.py -../sqlalchemy/dialects/mssql/pymssql.py -../sqlalchemy/dialects/mssql/pyodbc.py -../sqlalchemy/dialects/mssql/zxjdbc.py -../sqlalchemy/dialects/mysql/__init__.py -../sqlalchemy/dialects/mysql/base.py -../sqlalchemy/dialects/mysql/cymysql.py -../sqlalchemy/dialects/mysql/gaerdbms.py -../sqlalchemy/dialects/mysql/mysqlconnector.py -../sqlalchemy/dialects/mysql/mysqldb.py -../sqlalchemy/dialects/mysql/oursql.py -../sqlalchemy/dialects/mysql/pymysql.py -../sqlalchemy/dialects/mysql/pyodbc.py -../sqlalchemy/dialects/mysql/zxjdbc.py -../sqlalchemy/dialects/oracle/__init__.py -../sqlalchemy/dialects/oracle/base.py -../sqlalchemy/dialects/oracle/cx_oracle.py -../sqlalchemy/dialects/oracle/zxjdbc.py -../sqlalchemy/dialects/postgresql/__init__.py -../sqlalchemy/dialects/postgresql/base.py -../sqlalchemy/dialects/postgresql/constraints.py -../sqlalchemy/dialects/postgresql/hstore.py -../sqlalchemy/dialects/postgresql/json.py -../sqlalchemy/dialects/postgresql/pg8000.py -../sqlalchemy/dialects/postgresql/psycopg2.py -../sqlalchemy/dialects/postgresql/pypostgresql.py -../sqlalchemy/dialects/postgresql/ranges.py -../sqlalchemy/dialects/postgresql/zxjdbc.py -../sqlalchemy/dialects/sqlite/__init__.py -../sqlalchemy/dialects/sqlite/base.py -../sqlalchemy/dialects/sqlite/pysqlite.py -../sqlalchemy/dialects/sybase/__init__.py -../sqlalchemy/dialects/sybase/base.py -../sqlalchemy/dialects/sybase/mxodbc.py -../sqlalchemy/dialects/sybase/pyodbc.py -../sqlalchemy/dialects/sybase/pysybase.py -../sqlalchemy/engine/__init__.py -../sqlalchemy/engine/base.py -../sqlalchemy/engine/default.py -../sqlalchemy/engine/interfaces.py -../sqlalchemy/engine/reflection.py -../sqlalchemy/engine/result.py -../sqlalchemy/engine/strategies.py -../sqlalchemy/engine/threadlocal.py -../sqlalchemy/engine/url.py -../sqlalchemy/engine/util.py -../sqlalchemy/event/__init__.py -../sqlalchemy/event/api.py -../sqlalchemy/event/attr.py -../sqlalchemy/event/base.py -../sqlalchemy/event/legacy.py -../sqlalchemy/event/registry.py -../sqlalchemy/ext/__init__.py -../sqlalchemy/ext/associationproxy.py -../sqlalchemy/ext/automap.py -../sqlalchemy/ext/compiler.py -../sqlalchemy/ext/horizontal_shard.py -../sqlalchemy/ext/hybrid.py -../sqlalchemy/ext/instrumentation.py -../sqlalchemy/ext/mutable.py -../sqlalchemy/ext/orderinglist.py -../sqlalchemy/ext/serializer.py -../sqlalchemy/ext/declarative/__init__.py -../sqlalchemy/ext/declarative/api.py -../sqlalchemy/ext/declarative/base.py -../sqlalchemy/ext/declarative/clsregistry.py -../sqlalchemy/orm/__init__.py -../sqlalchemy/orm/attributes.py -../sqlalchemy/orm/base.py -../sqlalchemy/orm/collections.py -../sqlalchemy/orm/dependency.py -../sqlalchemy/orm/deprecated_interfaces.py 
-../sqlalchemy/orm/descriptor_props.py -../sqlalchemy/orm/dynamic.py -../sqlalchemy/orm/evaluator.py -../sqlalchemy/orm/events.py -../sqlalchemy/orm/exc.py -../sqlalchemy/orm/identity.py -../sqlalchemy/orm/instrumentation.py -../sqlalchemy/orm/interfaces.py -../sqlalchemy/orm/loading.py -../sqlalchemy/orm/mapper.py -../sqlalchemy/orm/path_registry.py -../sqlalchemy/orm/persistence.py -../sqlalchemy/orm/properties.py -../sqlalchemy/orm/query.py -../sqlalchemy/orm/relationships.py -../sqlalchemy/orm/scoping.py -../sqlalchemy/orm/session.py -../sqlalchemy/orm/state.py -../sqlalchemy/orm/strategies.py -../sqlalchemy/orm/strategy_options.py -../sqlalchemy/orm/sync.py -../sqlalchemy/orm/unitofwork.py -../sqlalchemy/orm/util.py -../sqlalchemy/sql/__init__.py -../sqlalchemy/sql/annotation.py -../sqlalchemy/sql/base.py -../sqlalchemy/sql/compiler.py -../sqlalchemy/sql/ddl.py -../sqlalchemy/sql/default_comparator.py -../sqlalchemy/sql/dml.py -../sqlalchemy/sql/elements.py -../sqlalchemy/sql/expression.py -../sqlalchemy/sql/functions.py -../sqlalchemy/sql/naming.py -../sqlalchemy/sql/operators.py -../sqlalchemy/sql/schema.py -../sqlalchemy/sql/selectable.py -../sqlalchemy/sql/sqltypes.py -../sqlalchemy/sql/type_api.py -../sqlalchemy/sql/util.py -../sqlalchemy/sql/visitors.py -../sqlalchemy/testing/__init__.py -../sqlalchemy/testing/assertions.py -../sqlalchemy/testing/assertsql.py -../sqlalchemy/testing/config.py -../sqlalchemy/testing/distutils_run.py -../sqlalchemy/testing/engines.py -../sqlalchemy/testing/entities.py -../sqlalchemy/testing/exclusions.py -../sqlalchemy/testing/fixtures.py -../sqlalchemy/testing/mock.py -../sqlalchemy/testing/pickleable.py -../sqlalchemy/testing/profiling.py -../sqlalchemy/testing/requirements.py -../sqlalchemy/testing/runner.py -../sqlalchemy/testing/schema.py -../sqlalchemy/testing/util.py -../sqlalchemy/testing/warnings.py -../sqlalchemy/testing/plugin/__init__.py -../sqlalchemy/testing/plugin/noseplugin.py -../sqlalchemy/testing/plugin/plugin_base.py -../sqlalchemy/testing/plugin/pytestplugin.py -../sqlalchemy/testing/suite/__init__.py -../sqlalchemy/testing/suite/test_ddl.py -../sqlalchemy/testing/suite/test_insert.py -../sqlalchemy/testing/suite/test_reflection.py -../sqlalchemy/testing/suite/test_results.py -../sqlalchemy/testing/suite/test_select.py -../sqlalchemy/testing/suite/test_sequence.py -../sqlalchemy/testing/suite/test_types.py -../sqlalchemy/testing/suite/test_update_delete.py -../sqlalchemy/util/__init__.py -../sqlalchemy/util/_collections.py -../sqlalchemy/util/compat.py -../sqlalchemy/util/deprecations.py -../sqlalchemy/util/langhelpers.py -../sqlalchemy/util/queue.py -../sqlalchemy/util/topological.py -../sqlalchemy/__init__.pyc -../sqlalchemy/events.pyc -../sqlalchemy/exc.pyc -../sqlalchemy/inspection.pyc -../sqlalchemy/interfaces.pyc -../sqlalchemy/log.pyc -../sqlalchemy/pool.pyc -../sqlalchemy/processors.pyc -../sqlalchemy/schema.pyc -../sqlalchemy/types.pyc -../sqlalchemy/connectors/__init__.pyc -../sqlalchemy/connectors/mxodbc.pyc -../sqlalchemy/connectors/mysqldb.pyc -../sqlalchemy/connectors/pyodbc.pyc -../sqlalchemy/connectors/zxJDBC.pyc -../sqlalchemy/databases/__init__.pyc -../sqlalchemy/dialects/__init__.pyc -../sqlalchemy/dialects/postgres.pyc -../sqlalchemy/dialects/drizzle/__init__.pyc -../sqlalchemy/dialects/drizzle/base.pyc -../sqlalchemy/dialects/drizzle/mysqldb.pyc -../sqlalchemy/dialects/firebird/__init__.pyc -../sqlalchemy/dialects/firebird/base.pyc -../sqlalchemy/dialects/firebird/fdb.pyc 
-../sqlalchemy/dialects/firebird/kinterbasdb.pyc -../sqlalchemy/dialects/mssql/__init__.pyc -../sqlalchemy/dialects/mssql/adodbapi.pyc -../sqlalchemy/dialects/mssql/base.pyc -../sqlalchemy/dialects/mssql/information_schema.pyc -../sqlalchemy/dialects/mssql/mxodbc.pyc -../sqlalchemy/dialects/mssql/pymssql.pyc -../sqlalchemy/dialects/mssql/pyodbc.pyc -../sqlalchemy/dialects/mssql/zxjdbc.pyc -../sqlalchemy/dialects/mysql/__init__.pyc -../sqlalchemy/dialects/mysql/base.pyc -../sqlalchemy/dialects/mysql/cymysql.pyc -../sqlalchemy/dialects/mysql/gaerdbms.pyc -../sqlalchemy/dialects/mysql/mysqlconnector.pyc -../sqlalchemy/dialects/mysql/mysqldb.pyc -../sqlalchemy/dialects/mysql/oursql.pyc -../sqlalchemy/dialects/mysql/pymysql.pyc -../sqlalchemy/dialects/mysql/pyodbc.pyc -../sqlalchemy/dialects/mysql/zxjdbc.pyc -../sqlalchemy/dialects/oracle/__init__.pyc -../sqlalchemy/dialects/oracle/base.pyc -../sqlalchemy/dialects/oracle/cx_oracle.pyc -../sqlalchemy/dialects/oracle/zxjdbc.pyc -../sqlalchemy/dialects/postgresql/__init__.pyc -../sqlalchemy/dialects/postgresql/base.pyc -../sqlalchemy/dialects/postgresql/constraints.pyc -../sqlalchemy/dialects/postgresql/hstore.pyc -../sqlalchemy/dialects/postgresql/json.pyc -../sqlalchemy/dialects/postgresql/pg8000.pyc -../sqlalchemy/dialects/postgresql/psycopg2.pyc -../sqlalchemy/dialects/postgresql/pypostgresql.pyc -../sqlalchemy/dialects/postgresql/ranges.pyc -../sqlalchemy/dialects/postgresql/zxjdbc.pyc -../sqlalchemy/dialects/sqlite/__init__.pyc -../sqlalchemy/dialects/sqlite/base.pyc -../sqlalchemy/dialects/sqlite/pysqlite.pyc -../sqlalchemy/dialects/sybase/__init__.pyc -../sqlalchemy/dialects/sybase/base.pyc -../sqlalchemy/dialects/sybase/mxodbc.pyc -../sqlalchemy/dialects/sybase/pyodbc.pyc -../sqlalchemy/dialects/sybase/pysybase.pyc -../sqlalchemy/engine/__init__.pyc -../sqlalchemy/engine/base.pyc -../sqlalchemy/engine/default.pyc -../sqlalchemy/engine/interfaces.pyc -../sqlalchemy/engine/reflection.pyc -../sqlalchemy/engine/result.pyc -../sqlalchemy/engine/strategies.pyc -../sqlalchemy/engine/threadlocal.pyc -../sqlalchemy/engine/url.pyc -../sqlalchemy/engine/util.pyc -../sqlalchemy/event/__init__.pyc -../sqlalchemy/event/api.pyc -../sqlalchemy/event/attr.pyc -../sqlalchemy/event/base.pyc -../sqlalchemy/event/legacy.pyc -../sqlalchemy/event/registry.pyc -../sqlalchemy/ext/__init__.pyc -../sqlalchemy/ext/associationproxy.pyc -../sqlalchemy/ext/automap.pyc -../sqlalchemy/ext/compiler.pyc -../sqlalchemy/ext/horizontal_shard.pyc -../sqlalchemy/ext/hybrid.pyc -../sqlalchemy/ext/instrumentation.pyc -../sqlalchemy/ext/mutable.pyc -../sqlalchemy/ext/orderinglist.pyc -../sqlalchemy/ext/serializer.pyc -../sqlalchemy/ext/declarative/__init__.pyc -../sqlalchemy/ext/declarative/api.pyc -../sqlalchemy/ext/declarative/base.pyc -../sqlalchemy/ext/declarative/clsregistry.pyc -../sqlalchemy/orm/__init__.pyc -../sqlalchemy/orm/attributes.pyc -../sqlalchemy/orm/base.pyc -../sqlalchemy/orm/collections.pyc -../sqlalchemy/orm/dependency.pyc -../sqlalchemy/orm/deprecated_interfaces.pyc -../sqlalchemy/orm/descriptor_props.pyc -../sqlalchemy/orm/dynamic.pyc -../sqlalchemy/orm/evaluator.pyc -../sqlalchemy/orm/events.pyc -../sqlalchemy/orm/exc.pyc -../sqlalchemy/orm/identity.pyc -../sqlalchemy/orm/instrumentation.pyc -../sqlalchemy/orm/interfaces.pyc -../sqlalchemy/orm/loading.pyc -../sqlalchemy/orm/mapper.pyc -../sqlalchemy/orm/path_registry.pyc -../sqlalchemy/orm/persistence.pyc -../sqlalchemy/orm/properties.pyc -../sqlalchemy/orm/query.pyc -../sqlalchemy/orm/relationships.pyc 
-../sqlalchemy/orm/scoping.pyc -../sqlalchemy/orm/session.pyc -../sqlalchemy/orm/state.pyc -../sqlalchemy/orm/strategies.pyc -../sqlalchemy/orm/strategy_options.pyc -../sqlalchemy/orm/sync.pyc -../sqlalchemy/orm/unitofwork.pyc -../sqlalchemy/orm/util.pyc -../sqlalchemy/sql/__init__.pyc -../sqlalchemy/sql/annotation.pyc -../sqlalchemy/sql/base.pyc -../sqlalchemy/sql/compiler.pyc -../sqlalchemy/sql/ddl.pyc -../sqlalchemy/sql/default_comparator.pyc -../sqlalchemy/sql/dml.pyc -../sqlalchemy/sql/elements.pyc -../sqlalchemy/sql/expression.pyc -../sqlalchemy/sql/functions.pyc -../sqlalchemy/sql/naming.pyc -../sqlalchemy/sql/operators.pyc -../sqlalchemy/sql/schema.pyc -../sqlalchemy/sql/selectable.pyc -../sqlalchemy/sql/sqltypes.pyc -../sqlalchemy/sql/type_api.pyc -../sqlalchemy/sql/util.pyc -../sqlalchemy/sql/visitors.pyc -../sqlalchemy/testing/__init__.pyc -../sqlalchemy/testing/assertions.pyc -../sqlalchemy/testing/assertsql.pyc -../sqlalchemy/testing/config.pyc -../sqlalchemy/testing/distutils_run.pyc -../sqlalchemy/testing/engines.pyc -../sqlalchemy/testing/entities.pyc -../sqlalchemy/testing/exclusions.pyc -../sqlalchemy/testing/fixtures.pyc -../sqlalchemy/testing/mock.pyc -../sqlalchemy/testing/pickleable.pyc -../sqlalchemy/testing/profiling.pyc -../sqlalchemy/testing/requirements.pyc -../sqlalchemy/testing/runner.pyc -../sqlalchemy/testing/schema.pyc -../sqlalchemy/testing/util.pyc -../sqlalchemy/testing/warnings.pyc -../sqlalchemy/testing/plugin/__init__.pyc -../sqlalchemy/testing/plugin/noseplugin.pyc -../sqlalchemy/testing/plugin/plugin_base.pyc -../sqlalchemy/testing/plugin/pytestplugin.pyc -../sqlalchemy/testing/suite/__init__.pyc -../sqlalchemy/testing/suite/test_ddl.pyc -../sqlalchemy/testing/suite/test_insert.pyc -../sqlalchemy/testing/suite/test_reflection.pyc -../sqlalchemy/testing/suite/test_results.pyc -../sqlalchemy/testing/suite/test_select.pyc -../sqlalchemy/testing/suite/test_sequence.pyc -../sqlalchemy/testing/suite/test_types.pyc -../sqlalchemy/testing/suite/test_update_delete.pyc -../sqlalchemy/util/__init__.pyc -../sqlalchemy/util/_collections.pyc -../sqlalchemy/util/compat.pyc -../sqlalchemy/util/deprecations.pyc -../sqlalchemy/util/langhelpers.pyc -../sqlalchemy/util/queue.pyc -../sqlalchemy/util/topological.pyc -../sqlalchemy/cprocessors.so -../sqlalchemy/cresultproxy.so -../sqlalchemy/cutils.so -./ -dependency_links.txt -PKG-INFO -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/_cffi_backend.so b/Darwin/lib/python3.4/site-packages/_cffi_backend.so deleted file mode 100755 index 24d209a..0000000 Binary files a/Darwin/lib/python3.4/site-packages/_cffi_backend.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 599d5cc..0000000 --- a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,104 +0,0 @@ -LICENSE -MANIFEST.in -setup.cfg -setup.py -setup_base.py -c/_cffi_backend.c -c/file_emulator.h -c/malloc_closure.h -c/minibuffer.h -c/misc_thread.h -c/misc_win32.h -c/test_c.py -c/wchar_helper.h -c/libffi_msvc/ffi.c -c/libffi_msvc/ffi.h -c/libffi_msvc/ffi_common.h -c/libffi_msvc/fficonfig.h -c/libffi_msvc/ffitarget.h -c/libffi_msvc/prep_cif.c -c/libffi_msvc/types.c -c/libffi_msvc/win32.c -c/libffi_msvc/win64.asm -c/libffi_msvc/win64.obj -cffi/__init__.py -cffi/api.py -cffi/backend_ctypes.py -cffi/commontypes.py -cffi/cparser.py -cffi/ffiplatform.py 
-cffi/gc_weakref.py -cffi/lock.py -cffi/model.py -cffi/vengine_cpy.py -cffi/vengine_gen.py -cffi/verifier.py -cffi.egg-info/PKG-INFO -cffi.egg-info/SOURCES.txt -cffi.egg-info/dependency_links.txt -cffi.egg-info/not-zip-safe -cffi.egg-info/requires.txt -cffi.egg-info/top_level.txt -demo/_curses.py -demo/api.py -demo/bsdopendirtype.py -demo/btrfs-snap.py -demo/cffi-cocoa.py -demo/fastcsv.py -demo/gmp.py -demo/pwuid.py -demo/py.cleanup -demo/pyobj.py -demo/readdir.py -demo/readdir2.py -demo/readdir_ctypes.py -demo/setup.py -demo/winclipboard.py -demo/xclient.py -doc/Makefile -doc/design.rst -doc/make.bat -doc/source/conf.py -doc/source/index.rst -testing/__init__.py -testing/backend_tests.py -testing/callback_in_thread.py -testing/support.py -testing/test_cdata.py -testing/test_ctypes.py -testing/test_ffi_backend.py -testing/test_function.py -testing/test_model.py -testing/test_ownlib.py -testing/test_parsing.py -testing/test_platform.py -testing/test_unicode_literals.py -testing/test_verify.py -testing/test_verify2.py -testing/test_version.py -testing/test_vgen.py -testing/test_vgen2.py -testing/test_zdistutils.py -testing/test_zintegration.py -testing/udir.py -testing/snippets/distutils_module/setup.py -testing/snippets/distutils_module/snip_basic_verify.py -testing/snippets/distutils_module/build/lib.linux-x86_64-2.7/snip_basic_verify.py -testing/snippets/distutils_package_1/setup.py -testing/snippets/distutils_package_1/build/lib.linux-x86_64-2.7/snip_basic_verify1/__init__.py -testing/snippets/distutils_package_1/snip_basic_verify1/__init__.py -testing/snippets/distutils_package_2/setup.py -testing/snippets/distutils_package_2/build/lib.linux-x86_64-2.7/snip_basic_verify2/__init__.py -testing/snippets/distutils_package_2/snip_basic_verify2/__init__.py -testing/snippets/infrastructure/setup.py -testing/snippets/infrastructure/build/lib.linux-x86_64-2.7/snip_infrastructure/__init__.py -testing/snippets/infrastructure/snip_infrastructure/__init__.py -testing/snippets/setuptools_module/setup.py -testing/snippets/setuptools_module/snip_setuptools_verify.py -testing/snippets/setuptools_module/build/lib.linux-x86_64-2.7/snip_setuptools_verify.py -testing/snippets/setuptools_package_1/setup.py -testing/snippets/setuptools_package_1/build/lib.linux-x86_64-2.7/snip_setuptools_verify1/__init__.py -testing/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py -testing/snippets/setuptools_package_2/setup.py -testing/snippets/setuptools_package_2/build/lib.linux-x86_64-2.7/snip_setuptools_verify2/__init__.py -testing/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 903b597..0000000 --- a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,32 +0,0 @@ -../cffi/__init__.py -../cffi/api.py -../cffi/backend_ctypes.py -../cffi/commontypes.py -../cffi/cparser.py -../cffi/ffiplatform.py -../cffi/gc_weakref.py -../cffi/lock.py -../cffi/model.py -../cffi/vengine_cpy.py -../cffi/vengine_gen.py -../cffi/verifier.py -../cffi/__init__.pyc -../cffi/api.pyc -../cffi/backend_ctypes.pyc -../cffi/commontypes.pyc -../cffi/cparser.pyc -../cffi/ffiplatform.pyc -../cffi/gc_weakref.pyc -../cffi/lock.pyc -../cffi/model.pyc -../cffi/vengine_cpy.pyc -../cffi/vengine_gen.pyc -../cffi/verifier.pyc -../_cffi_backend.so -./ 
-dependency_links.txt -not-zip-safe -PKG-INFO -requires.txt -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/requires.txt b/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/requires.txt deleted file mode 100644 index 203143c..0000000 --- a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -pycparser \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/cffi/__init__.py b/Darwin/lib/python3.4/site-packages/cffi/__init__.py deleted file mode 100644 index df981fd..0000000 --- a/Darwin/lib/python3.4/site-packages/cffi/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', - 'FFIError'] - -from .api import FFI, CDefError, FFIError -from .ffiplatform import VerificationError, VerificationMissing - -__version__ = "0.8.6" -__version_info__ = (0, 8, 6) diff --git a/Darwin/lib/python3.4/site-packages/cffi/commontypes.py b/Darwin/lib/python3.4/site-packages/cffi/commontypes.py deleted file mode 100644 index 9daf2ff..0000000 --- a/Darwin/lib/python3.4/site-packages/cffi/commontypes.py +++ /dev/null @@ -1,248 +0,0 @@ -import sys -from . import api, model - - -COMMON_TYPES = { - 'FILE': model.unknown_type('FILE', '_IO_FILE'), - 'bool': '_Bool', - } - -for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - if _type.endswith('_t'): - COMMON_TYPES[_type] = _type -del _type - -_CACHE = {} - -def resolve_common_type(commontype): - try: - return _CACHE[commontype] - except KeyError: - result = COMMON_TYPES.get(commontype, commontype) - if not isinstance(result, str): - pass # result is already a BaseType - elif result.endswith(' *'): - if result.startswith('const '): - result = model.ConstPointerType( - resolve_common_type(result[6:-2])) - else: - result = model.PointerType(resolve_common_type(result[:-2])) - elif result in model.PrimitiveType.ALL_PRIMITIVE_TYPES: - result = model.PrimitiveType(result) - else: - if commontype == result: - raise api.FFIError("Unsupported type: %r. Please file a bug " - "if you think it should be." 
% (commontype,)) - result = resolve_common_type(result) # recursively - assert isinstance(result, model.BaseTypeByIdentity) - _CACHE[commontype] = result - return result - - -# ____________________________________________________________ -# Windows common types - - -def win_common_types(maxsize): - result = {} - if maxsize < (1<<32): - result.update({ # Windows 32-bits - 'HALF_PTR': 'short', - 'INT_PTR': 'int', - 'LONG_PTR': 'long', - 'UHALF_PTR': 'unsigned short', - 'UINT_PTR': 'unsigned int', - 'ULONG_PTR': 'unsigned long', - }) - else: - result.update({ # Windows 64-bits - 'HALF_PTR': 'int', - 'INT_PTR': 'long long', - 'LONG_PTR': 'long long', - 'UHALF_PTR': 'unsigned int', - 'UINT_PTR': 'unsigned long long', - 'ULONG_PTR': 'unsigned long long', - }) - result.update({ - "BYTE": "unsigned char", - "BOOL": "int", - "CCHAR": "char", - "CHAR": "char", - "DWORD": "unsigned long", - "DWORD32": "unsigned int", - "DWORD64": "unsigned long long", - "FLOAT": "float", - "INT": "int", - "INT8": "signed char", - "INT16": "short", - "INT32": "int", - "INT64": "long long", - "LONG": "long", - "LONGLONG": "long long", - "LONG32": "int", - "LONG64": "long long", - "WORD": "unsigned short", - "PVOID": model.voidp_type, - "ULONGLONG": "unsigned long long", - "WCHAR": "wchar_t", - "SHORT": "short", - "TBYTE": "WCHAR", - "TCHAR": "WCHAR", - "UCHAR": "unsigned char", - "UINT": "unsigned int", - "UINT8": "unsigned char", - "UINT16": "unsigned short", - "UINT32": "unsigned int", - "UINT64": "unsigned long long", - "ULONG": "unsigned long", - "ULONG32": "unsigned int", - "ULONG64": "unsigned long long", - "USHORT": "unsigned short", - - "SIZE_T": "ULONG_PTR", - "SSIZE_T": "LONG_PTR", - "ATOM": "WORD", - "BOOLEAN": "BYTE", - "COLORREF": "DWORD", - - "HANDLE": "PVOID", - "DWORDLONG": "ULONGLONG", - "DWORD_PTR": "ULONG_PTR", - "HACCEL": "HANDLE", - - "HBITMAP": "HANDLE", - "HBRUSH": "HANDLE", - "HCOLORSPACE": "HANDLE", - "HCONV": "HANDLE", - "HCONVLIST": "HANDLE", - "HDC": "HANDLE", - "HDDEDATA": "HANDLE", - "HDESK": "HANDLE", - "HDROP": "HANDLE", - "HDWP": "HANDLE", - "HENHMETAFILE": "HANDLE", - "HFILE": "int", - "HFONT": "HANDLE", - "HGDIOBJ": "HANDLE", - "HGLOBAL": "HANDLE", - "HHOOK": "HANDLE", - "HICON": "HANDLE", - "HCURSOR": "HICON", - "HINSTANCE": "HANDLE", - "HKEY": "HANDLE", - "HKL": "HANDLE", - "HLOCAL": "HANDLE", - "HMENU": "HANDLE", - "HMETAFILE": "HANDLE", - "HMODULE": "HINSTANCE", - "HMONITOR": "HANDLE", - "HPALETTE": "HANDLE", - "HPEN": "HANDLE", - "HRESULT": "LONG", - "HRGN": "HANDLE", - "HRSRC": "HANDLE", - "HSZ": "HANDLE", - "WINSTA": "HANDLE", - "HWND": "HANDLE", - - "LANGID": "WORD", - "LCID": "DWORD", - "LCTYPE": "DWORD", - "LGRPID": "DWORD", - "LPARAM": "LONG_PTR", - "LPBOOL": "BOOL *", - "LPBYTE": "BYTE *", - "LPCOLORREF": "DWORD *", - "LPCSTR": "const char *", - - "LPCVOID": model.const_voidp_type, - "LPCWSTR": "const WCHAR *", - "LPCTSTR": "LPCWSTR", - "LPDWORD": "DWORD *", - "LPHANDLE": "HANDLE *", - "LPINT": "int *", - "LPLONG": "long *", - "LPSTR": "CHAR *", - "LPWSTR": "WCHAR *", - "LPTSTR": "LPWSTR", - "LPVOID": model.voidp_type, - "LPWORD": "WORD *", - "LRESULT": "LONG_PTR", - "PBOOL": "BOOL *", - "PBOOLEAN": "BOOLEAN *", - "PBYTE": "BYTE *", - "PCHAR": "CHAR *", - "PCSTR": "const CHAR *", - "PCTSTR": "LPCWSTR", - "PCWSTR": "const WCHAR *", - "PDWORD": "DWORD *", - "PDWORDLONG": "DWORDLONG *", - "PDWORD_PTR": "DWORD_PTR *", - "PDWORD32": "DWORD32 *", - "PDWORD64": "DWORD64 *", - "PFLOAT": "FLOAT *", - "PHALF_PTR": "HALF_PTR *", - "PHANDLE": "HANDLE *", - "PHKEY": "HKEY *", - "PINT": 
"int *", - "PINT_PTR": "INT_PTR *", - "PINT8": "INT8 *", - "PINT16": "INT16 *", - "PINT32": "INT32 *", - "PINT64": "INT64 *", - "PLCID": "PDWORD", - "PLONG": "LONG *", - "PLONGLONG": "LONGLONG *", - "PLONG_PTR": "LONG_PTR *", - "PLONG32": "LONG32 *", - "PLONG64": "LONG64 *", - "PSHORT": "SHORT *", - "PSIZE_T": "SIZE_T *", - "PSSIZE_T": "SSIZE_T *", - "PSTR": "CHAR *", - "PTBYTE": "TBYTE *", - "PTCHAR": "TCHAR *", - "PTSTR": "LPWSTR", - "PUCHAR": "UCHAR *", - "PUHALF_PTR": "UHALF_PTR *", - "PUINT": "UINT *", - "PUINT_PTR": "UINT_PTR *", - "PUINT8": "UINT8 *", - "PUINT16": "UINT16 *", - "PUINT32": "UINT32 *", - "PUINT64": "UINT64 *", - "PULONG": "ULONG *", - "PULONGLONG": "ULONGLONG *", - "PULONG_PTR": "ULONG_PTR *", - "PULONG32": "ULONG32 *", - "PULONG64": "ULONG64 *", - "PUSHORT": "USHORT *", - "PWCHAR": "WCHAR *", - "PWORD": "WORD *", - "PWSTR": "WCHAR *", - "QWORD": "unsigned long long", - "SC_HANDLE": "HANDLE", - "SC_LOCK": "LPVOID", - "SERVICE_STATUS_HANDLE": "HANDLE", - - "UNICODE_STRING": model.StructType( - "_UNICODE_STRING", - ["Length", - "MaximumLength", - "Buffer"], - [model.PrimitiveType("unsigned short"), - model.PrimitiveType("unsigned short"), - model.PointerType(model.PrimitiveType("wchar_t"))], - [-1, -1, -1]), - "PUNICODE_STRING": "UNICODE_STRING *", - "PCUNICODE_STRING": "const UNICODE_STRING *", - - "USN": "LONGLONG", - "VOID": model.void_type, - "WPARAM": "UINT_PTR", - }) - return result - - -if sys.platform == 'win32': - COMMON_TYPES.update(win_common_types(sys.maxsize)) diff --git a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/PKG-INFO deleted file mode 100644 index 40d0e13..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,80 +0,0 @@ -Metadata-Version: 1.1 -Name: cryptography -Version: 0.5.4 -Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. -Home-page: https://github.com/pyca/cryptography -Author: The cryptography developers -Author-email: cryptography-dev@python.org -License: Apache License, Version 2.0 -Description: Cryptography - ============ - - .. image:: https://pypip.in/version/cryptography/badge.svg - :target: https://pypi.python.org/pypi/cryptography/ - :alt: Latest Version - - .. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master - :target: https://travis-ci.org/pyca/cryptography - - .. image:: https://coveralls.io/repos/pyca/cryptography/badge.png?branch=master - :target: https://coveralls.io/r/pyca/cryptography?branch=master - - - ``cryptography`` is a package which provides cryptographic recipes and - primitives to Python developers. Our goal is for it to be your "cryptographic - standard library". It supports Python 2.6-2.7, Python 3.2+, and PyPy. - - ``cryptography`` includes both high level recipes, and low level interfaces to - common cryptographic algorithms such as symmetric ciphers, message digests and - key derivation functions. For example, to encrypt something with - ``cryptography``'s high level symmetric encryption recipe: - - .. code-block:: pycon - - >>> from cryptography.fernet import Fernet - >>> # Put this somewhere safe! - >>> key = Fernet.generate_key() - >>> f = Fernet(key) - >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") - >>> token - '...' - >>> f.decrypt(token) - 'A really secret message. Not for prying eyes.' - - You can find more information in the `documentation`_. 
- - Discussion - ~~~~~~~~~~ - - If you run into bugs, you can file them in our `issue tracker`_. - - We maintain a `cryptography-dev`_ mailing list for development discussion. - - You can also join ``#cryptography-dev`` on Freenode to ask questions or get - involved. - - - .. _`documentation`: https://cryptography.io/ - .. _`issue tracker`: https://github.com/pyca/cryptography/issues - .. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev - -Platform: UNKNOWN -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Natural Language :: English -Classifier: Operating System :: MacOS :: MacOS X -Classifier: Operating System :: POSIX -Classifier: Operating System :: POSIX :: BSD -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Microsoft :: Windows -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Security :: Cryptography diff --git a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 6b5ad18..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,218 +0,0 @@ -AUTHORS.rst -CHANGELOG.rst -CONTRIBUTING.rst -LICENSE -MANIFEST.in -README.rst -setup.cfg -setup.py -cryptography/__about__.py -cryptography/__init__.py -cryptography/exceptions.py -cryptography/fernet.py -cryptography/utils.py -cryptography.egg-info/PKG-INFO -cryptography.egg-info/SOURCES.txt -cryptography.egg-info/dependency_links.txt -cryptography.egg-info/not-zip-safe -cryptography.egg-info/requires.txt -cryptography.egg-info/top_level.txt -cryptography/hazmat/__init__.py -cryptography/hazmat/backends/__init__.py -cryptography/hazmat/backends/interfaces.py -cryptography/hazmat/backends/multibackend.py -cryptography/hazmat/backends/commoncrypto/__init__.py -cryptography/hazmat/backends/commoncrypto/backend.py -cryptography/hazmat/backends/commoncrypto/ciphers.py -cryptography/hazmat/backends/commoncrypto/hashes.py -cryptography/hazmat/backends/commoncrypto/hmac.py -cryptography/hazmat/backends/openssl/__init__.py -cryptography/hazmat/backends/openssl/backend.py -cryptography/hazmat/backends/openssl/ciphers.py -cryptography/hazmat/backends/openssl/cmac.py -cryptography/hazmat/backends/openssl/dsa.py -cryptography/hazmat/backends/openssl/ec.py -cryptography/hazmat/backends/openssl/hashes.py -cryptography/hazmat/backends/openssl/hmac.py -cryptography/hazmat/backends/openssl/rsa.py -cryptography/hazmat/bindings/__init__.py -cryptography/hazmat/bindings/utils.py -cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_3e31f141x4000d087.c -cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_4ed9e37dx4000d087.c -cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_d62b3d91x972e1c0b.c -cryptography/hazmat/bindings/commoncrypto/__init__.py -cryptography/hazmat/bindings/commoncrypto/binding.py 
-cryptography/hazmat/bindings/commoncrypto/cf.py -cryptography/hazmat/bindings/commoncrypto/common_cryptor.py -cryptography/hazmat/bindings/commoncrypto/common_digest.py -cryptography/hazmat/bindings/commoncrypto/common_hmac.py -cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py -cryptography/hazmat/bindings/commoncrypto/secimport.py -cryptography/hazmat/bindings/commoncrypto/secitem.py -cryptography/hazmat/bindings/commoncrypto/seckey.py -cryptography/hazmat/bindings/commoncrypto/seckeychain.py -cryptography/hazmat/bindings/commoncrypto/sectransform.py -cryptography/hazmat/bindings/openssl/__init__.py -cryptography/hazmat/bindings/openssl/aes.py -cryptography/hazmat/bindings/openssl/asn1.py -cryptography/hazmat/bindings/openssl/bignum.py -cryptography/hazmat/bindings/openssl/binding.py -cryptography/hazmat/bindings/openssl/bio.py -cryptography/hazmat/bindings/openssl/cmac.py -cryptography/hazmat/bindings/openssl/cms.py -cryptography/hazmat/bindings/openssl/conf.py -cryptography/hazmat/bindings/openssl/crypto.py -cryptography/hazmat/bindings/openssl/dh.py -cryptography/hazmat/bindings/openssl/dsa.py -cryptography/hazmat/bindings/openssl/ec.py -cryptography/hazmat/bindings/openssl/ecdh.py -cryptography/hazmat/bindings/openssl/ecdsa.py -cryptography/hazmat/bindings/openssl/engine.py -cryptography/hazmat/bindings/openssl/err.py -cryptography/hazmat/bindings/openssl/evp.py -cryptography/hazmat/bindings/openssl/hmac.py -cryptography/hazmat/bindings/openssl/nid.py -cryptography/hazmat/bindings/openssl/objects.py -cryptography/hazmat/bindings/openssl/opensslv.py -cryptography/hazmat/bindings/openssl/osrandom_engine.py -cryptography/hazmat/bindings/openssl/pem.py -cryptography/hazmat/bindings/openssl/pkcs12.py -cryptography/hazmat/bindings/openssl/pkcs7.py -cryptography/hazmat/bindings/openssl/rand.py -cryptography/hazmat/bindings/openssl/rsa.py -cryptography/hazmat/bindings/openssl/ssl.py -cryptography/hazmat/bindings/openssl/x509.py -cryptography/hazmat/bindings/openssl/x509_vfy.py -cryptography/hazmat/bindings/openssl/x509name.py -cryptography/hazmat/bindings/openssl/x509v3.py -cryptography/hazmat/primitives/__init__.py -cryptography/hazmat/primitives/cmac.py -cryptography/hazmat/primitives/constant_time.py -cryptography/hazmat/primitives/hashes.py -cryptography/hazmat/primitives/hmac.py -cryptography/hazmat/primitives/interfaces.py -cryptography/hazmat/primitives/padding.py -cryptography/hazmat/primitives/serialization.py -cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_684bb40axf342507b.c -cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_8f86901cxc1767c5a.c -cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_c61834dexf342507b.c -cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_dd416c1exc1767c5a.c -cryptography/hazmat/primitives/asymmetric/__init__.py -cryptography/hazmat/primitives/asymmetric/dsa.py -cryptography/hazmat/primitives/asymmetric/ec.py -cryptography/hazmat/primitives/asymmetric/padding.py -cryptography/hazmat/primitives/asymmetric/rsa.py -cryptography/hazmat/primitives/ciphers/__init__.py -cryptography/hazmat/primitives/ciphers/algorithms.py -cryptography/hazmat/primitives/ciphers/base.py -cryptography/hazmat/primitives/ciphers/modes.py -cryptography/hazmat/primitives/kdf/__init__.py -cryptography/hazmat/primitives/kdf/hkdf.py -cryptography/hazmat/primitives/kdf/pbkdf2.py -cryptography/hazmat/primitives/twofactor/__init__.py -cryptography/hazmat/primitives/twofactor/hotp.py 
-cryptography/hazmat/primitives/twofactor/totp.py -docs/Makefile -docs/api-stability.rst -docs/changelog.rst -docs/community.rst -docs/conf.py -docs/cryptography-docs.py -docs/doing-a-release.rst -docs/exceptions.rst -docs/faq.rst -docs/fernet.rst -docs/glossary.rst -docs/index.rst -docs/installation.rst -docs/limitations.rst -docs/make.bat -docs/random-numbers.rst -docs/security.rst -docs/spelling_wordlist.txt -docs/_static/.keep -docs/development/c-bindings.rst -docs/development/getting-started.rst -docs/development/index.rst -docs/development/reviewing-patches.rst -docs/development/submitting-patches.rst -docs/development/test-vectors.rst -docs/development/custom-vectors/cast5.rst -docs/development/custom-vectors/idea.rst -docs/development/custom-vectors/seed.rst -docs/development/custom-vectors/cast5/generate_cast5.py -docs/development/custom-vectors/cast5/verify_cast5.go -docs/development/custom-vectors/idea/generate_idea.py -docs/development/custom-vectors/idea/verify_idea.py -docs/development/custom-vectors/seed/generate_seed.py -docs/development/custom-vectors/seed/verify_seed.py -docs/hazmat/backends/commoncrypto.rst -docs/hazmat/backends/index.rst -docs/hazmat/backends/interfaces.rst -docs/hazmat/backends/multibackend.rst -docs/hazmat/backends/openssl.rst -docs/hazmat/bindings/commoncrypto.rst -docs/hazmat/bindings/index.rst -docs/hazmat/bindings/openssl.rst -docs/hazmat/primitives/constant-time.rst -docs/hazmat/primitives/cryptographic-hashes.rst -docs/hazmat/primitives/index.rst -docs/hazmat/primitives/interfaces.rst -docs/hazmat/primitives/key-derivation-functions.rst -docs/hazmat/primitives/padding.rst -docs/hazmat/primitives/symmetric-encryption.rst -docs/hazmat/primitives/twofactor.rst -docs/hazmat/primitives/asymmetric/dsa.rst -docs/hazmat/primitives/asymmetric/ec.rst -docs/hazmat/primitives/asymmetric/index.rst -docs/hazmat/primitives/asymmetric/padding.rst -docs/hazmat/primitives/asymmetric/rsa.rst -docs/hazmat/primitives/asymmetric/serialization.rst -docs/hazmat/primitives/mac/cmac.rst -docs/hazmat/primitives/mac/hmac.rst -docs/hazmat/primitives/mac/index.rst -tests/__init__.py -tests/conftest.py -tests/test_fernet.py -tests/test_utils.py -tests/utils.py -tests/hazmat/__init__.py -tests/hazmat/backends/__init__.py -tests/hazmat/backends/test_commoncrypto.py -tests/hazmat/backends/test_multibackend.py -tests/hazmat/backends/test_openssl.py -tests/hazmat/bindings/test_commoncrypto.py -tests/hazmat/bindings/test_openssl.py -tests/hazmat/bindings/test_utils.py -tests/hazmat/primitives/__init__.py -tests/hazmat/primitives/fixtures_dsa.py -tests/hazmat/primitives/fixtures_rsa.py -tests/hazmat/primitives/test_3des.py -tests/hazmat/primitives/test_aes.py -tests/hazmat/primitives/test_arc4.py -tests/hazmat/primitives/test_block.py -tests/hazmat/primitives/test_blowfish.py -tests/hazmat/primitives/test_camellia.py -tests/hazmat/primitives/test_cast5.py -tests/hazmat/primitives/test_ciphers.py -tests/hazmat/primitives/test_cmac.py -tests/hazmat/primitives/test_constant_time.py -tests/hazmat/primitives/test_dsa.py -tests/hazmat/primitives/test_ec.py -tests/hazmat/primitives/test_hash_vectors.py -tests/hazmat/primitives/test_hashes.py -tests/hazmat/primitives/test_hkdf.py -tests/hazmat/primitives/test_hkdf_vectors.py -tests/hazmat/primitives/test_hmac.py -tests/hazmat/primitives/test_hmac_vectors.py -tests/hazmat/primitives/test_idea.py -tests/hazmat/primitives/test_padding.py -tests/hazmat/primitives/test_pbkdf2hmac.py -tests/hazmat/primitives/test_pbkdf2hmac_vectors.py 
-tests/hazmat/primitives/test_rsa.py -tests/hazmat/primitives/test_seed.py -tests/hazmat/primitives/test_serialization.py -tests/hazmat/primitives/utils.py -tests/hazmat/primitives/twofactor/__init__.py -tests/hazmat/primitives/twofactor/test_hotp.py -tests/hazmat/primitives/twofactor/test_totp.py \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 12e2880..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,196 +0,0 @@ -../cryptography/__about__.py -../cryptography/__init__.py -../cryptography/exceptions.py -../cryptography/fernet.py -../cryptography/utils.py -../cryptography/hazmat/__init__.py -../cryptography/hazmat/backends/__init__.py -../cryptography/hazmat/backends/interfaces.py -../cryptography/hazmat/backends/multibackend.py -../cryptography/hazmat/bindings/__init__.py -../cryptography/hazmat/bindings/utils.py -../cryptography/hazmat/primitives/__init__.py -../cryptography/hazmat/primitives/cmac.py -../cryptography/hazmat/primitives/constant_time.py -../cryptography/hazmat/primitives/hashes.py -../cryptography/hazmat/primitives/hmac.py -../cryptography/hazmat/primitives/interfaces.py -../cryptography/hazmat/primitives/padding.py -../cryptography/hazmat/primitives/serialization.py -../cryptography/hazmat/backends/commoncrypto/__init__.py -../cryptography/hazmat/backends/commoncrypto/backend.py -../cryptography/hazmat/backends/commoncrypto/ciphers.py -../cryptography/hazmat/backends/commoncrypto/hashes.py -../cryptography/hazmat/backends/commoncrypto/hmac.py -../cryptography/hazmat/backends/openssl/__init__.py -../cryptography/hazmat/backends/openssl/backend.py -../cryptography/hazmat/backends/openssl/ciphers.py -../cryptography/hazmat/backends/openssl/cmac.py -../cryptography/hazmat/backends/openssl/dsa.py -../cryptography/hazmat/backends/openssl/ec.py -../cryptography/hazmat/backends/openssl/hashes.py -../cryptography/hazmat/backends/openssl/hmac.py -../cryptography/hazmat/backends/openssl/rsa.py -../cryptography/hazmat/bindings/commoncrypto/__init__.py -../cryptography/hazmat/bindings/commoncrypto/binding.py -../cryptography/hazmat/bindings/commoncrypto/cf.py -../cryptography/hazmat/bindings/commoncrypto/common_cryptor.py -../cryptography/hazmat/bindings/commoncrypto/common_digest.py -../cryptography/hazmat/bindings/commoncrypto/common_hmac.py -../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py -../cryptography/hazmat/bindings/commoncrypto/secimport.py -../cryptography/hazmat/bindings/commoncrypto/secitem.py -../cryptography/hazmat/bindings/commoncrypto/seckey.py -../cryptography/hazmat/bindings/commoncrypto/seckeychain.py -../cryptography/hazmat/bindings/commoncrypto/sectransform.py -../cryptography/hazmat/bindings/openssl/__init__.py -../cryptography/hazmat/bindings/openssl/aes.py -../cryptography/hazmat/bindings/openssl/asn1.py -../cryptography/hazmat/bindings/openssl/bignum.py -../cryptography/hazmat/bindings/openssl/binding.py -../cryptography/hazmat/bindings/openssl/bio.py -../cryptography/hazmat/bindings/openssl/cmac.py -../cryptography/hazmat/bindings/openssl/cms.py -../cryptography/hazmat/bindings/openssl/conf.py -../cryptography/hazmat/bindings/openssl/crypto.py -../cryptography/hazmat/bindings/openssl/dh.py -../cryptography/hazmat/bindings/openssl/dsa.py 
-../cryptography/hazmat/bindings/openssl/ec.py -../cryptography/hazmat/bindings/openssl/ecdh.py -../cryptography/hazmat/bindings/openssl/ecdsa.py -../cryptography/hazmat/bindings/openssl/engine.py -../cryptography/hazmat/bindings/openssl/err.py -../cryptography/hazmat/bindings/openssl/evp.py -../cryptography/hazmat/bindings/openssl/hmac.py -../cryptography/hazmat/bindings/openssl/nid.py -../cryptography/hazmat/bindings/openssl/objects.py -../cryptography/hazmat/bindings/openssl/opensslv.py -../cryptography/hazmat/bindings/openssl/osrandom_engine.py -../cryptography/hazmat/bindings/openssl/pem.py -../cryptography/hazmat/bindings/openssl/pkcs12.py -../cryptography/hazmat/bindings/openssl/pkcs7.py -../cryptography/hazmat/bindings/openssl/rand.py -../cryptography/hazmat/bindings/openssl/rsa.py -../cryptography/hazmat/bindings/openssl/ssl.py -../cryptography/hazmat/bindings/openssl/x509.py -../cryptography/hazmat/bindings/openssl/x509_vfy.py -../cryptography/hazmat/bindings/openssl/x509name.py -../cryptography/hazmat/bindings/openssl/x509v3.py -../cryptography/hazmat/primitives/asymmetric/__init__.py -../cryptography/hazmat/primitives/asymmetric/dsa.py -../cryptography/hazmat/primitives/asymmetric/ec.py -../cryptography/hazmat/primitives/asymmetric/padding.py -../cryptography/hazmat/primitives/asymmetric/rsa.py -../cryptography/hazmat/primitives/ciphers/__init__.py -../cryptography/hazmat/primitives/ciphers/algorithms.py -../cryptography/hazmat/primitives/ciphers/base.py -../cryptography/hazmat/primitives/ciphers/modes.py -../cryptography/hazmat/primitives/kdf/__init__.py -../cryptography/hazmat/primitives/kdf/hkdf.py -../cryptography/hazmat/primitives/kdf/pbkdf2.py -../cryptography/hazmat/primitives/twofactor/__init__.py -../cryptography/hazmat/primitives/twofactor/hotp.py -../cryptography/hazmat/primitives/twofactor/totp.py -../cryptography/__about__.pyc -../cryptography/__init__.pyc -../cryptography/exceptions.pyc -../cryptography/fernet.pyc -../cryptography/utils.pyc -../cryptography/hazmat/__init__.pyc -../cryptography/hazmat/backends/__init__.pyc -../cryptography/hazmat/backends/interfaces.pyc -../cryptography/hazmat/backends/multibackend.pyc -../cryptography/hazmat/bindings/__init__.pyc -../cryptography/hazmat/bindings/utils.pyc -../cryptography/hazmat/primitives/__init__.pyc -../cryptography/hazmat/primitives/cmac.pyc -../cryptography/hazmat/primitives/constant_time.pyc -../cryptography/hazmat/primitives/hashes.pyc -../cryptography/hazmat/primitives/hmac.pyc -../cryptography/hazmat/primitives/interfaces.pyc -../cryptography/hazmat/primitives/padding.pyc -../cryptography/hazmat/primitives/serialization.pyc -../cryptography/hazmat/backends/commoncrypto/__init__.pyc -../cryptography/hazmat/backends/commoncrypto/backend.pyc -../cryptography/hazmat/backends/commoncrypto/ciphers.pyc -../cryptography/hazmat/backends/commoncrypto/hashes.pyc -../cryptography/hazmat/backends/commoncrypto/hmac.pyc -../cryptography/hazmat/backends/openssl/__init__.pyc -../cryptography/hazmat/backends/openssl/backend.pyc -../cryptography/hazmat/backends/openssl/ciphers.pyc -../cryptography/hazmat/backends/openssl/cmac.pyc -../cryptography/hazmat/backends/openssl/dsa.pyc -../cryptography/hazmat/backends/openssl/ec.pyc -../cryptography/hazmat/backends/openssl/hashes.pyc -../cryptography/hazmat/backends/openssl/hmac.pyc -../cryptography/hazmat/backends/openssl/rsa.pyc -../cryptography/hazmat/bindings/commoncrypto/__init__.pyc -../cryptography/hazmat/bindings/commoncrypto/binding.pyc 
-../cryptography/hazmat/bindings/commoncrypto/cf.pyc -../cryptography/hazmat/bindings/commoncrypto/common_cryptor.pyc -../cryptography/hazmat/bindings/commoncrypto/common_digest.pyc -../cryptography/hazmat/bindings/commoncrypto/common_hmac.pyc -../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.pyc -../cryptography/hazmat/bindings/commoncrypto/secimport.pyc -../cryptography/hazmat/bindings/commoncrypto/secitem.pyc -../cryptography/hazmat/bindings/commoncrypto/seckey.pyc -../cryptography/hazmat/bindings/commoncrypto/seckeychain.pyc -../cryptography/hazmat/bindings/commoncrypto/sectransform.pyc -../cryptography/hazmat/bindings/openssl/__init__.pyc -../cryptography/hazmat/bindings/openssl/aes.pyc -../cryptography/hazmat/bindings/openssl/asn1.pyc -../cryptography/hazmat/bindings/openssl/bignum.pyc -../cryptography/hazmat/bindings/openssl/binding.pyc -../cryptography/hazmat/bindings/openssl/bio.pyc -../cryptography/hazmat/bindings/openssl/cmac.pyc -../cryptography/hazmat/bindings/openssl/cms.pyc -../cryptography/hazmat/bindings/openssl/conf.pyc -../cryptography/hazmat/bindings/openssl/crypto.pyc -../cryptography/hazmat/bindings/openssl/dh.pyc -../cryptography/hazmat/bindings/openssl/dsa.pyc -../cryptography/hazmat/bindings/openssl/ec.pyc -../cryptography/hazmat/bindings/openssl/ecdh.pyc -../cryptography/hazmat/bindings/openssl/ecdsa.pyc -../cryptography/hazmat/bindings/openssl/engine.pyc -../cryptography/hazmat/bindings/openssl/err.pyc -../cryptography/hazmat/bindings/openssl/evp.pyc -../cryptography/hazmat/bindings/openssl/hmac.pyc -../cryptography/hazmat/bindings/openssl/nid.pyc -../cryptography/hazmat/bindings/openssl/objects.pyc -../cryptography/hazmat/bindings/openssl/opensslv.pyc -../cryptography/hazmat/bindings/openssl/osrandom_engine.pyc -../cryptography/hazmat/bindings/openssl/pem.pyc -../cryptography/hazmat/bindings/openssl/pkcs12.pyc -../cryptography/hazmat/bindings/openssl/pkcs7.pyc -../cryptography/hazmat/bindings/openssl/rand.pyc -../cryptography/hazmat/bindings/openssl/rsa.pyc -../cryptography/hazmat/bindings/openssl/ssl.pyc -../cryptography/hazmat/bindings/openssl/x509.pyc -../cryptography/hazmat/bindings/openssl/x509_vfy.pyc -../cryptography/hazmat/bindings/openssl/x509name.pyc -../cryptography/hazmat/bindings/openssl/x509v3.pyc -../cryptography/hazmat/primitives/asymmetric/__init__.pyc -../cryptography/hazmat/primitives/asymmetric/dsa.pyc -../cryptography/hazmat/primitives/asymmetric/ec.pyc -../cryptography/hazmat/primitives/asymmetric/padding.pyc -../cryptography/hazmat/primitives/asymmetric/rsa.pyc -../cryptography/hazmat/primitives/ciphers/__init__.pyc -../cryptography/hazmat/primitives/ciphers/algorithms.pyc -../cryptography/hazmat/primitives/ciphers/base.pyc -../cryptography/hazmat/primitives/ciphers/modes.pyc -../cryptography/hazmat/primitives/kdf/__init__.pyc -../cryptography/hazmat/primitives/kdf/hkdf.pyc -../cryptography/hazmat/primitives/kdf/pbkdf2.pyc -../cryptography/hazmat/primitives/twofactor/__init__.pyc -../cryptography/hazmat/primitives/twofactor/hotp.pyc -../cryptography/hazmat/primitives/twofactor/totp.pyc -../cryptography/_Cryptography_cffi_3e31f141x4000d087.so -../cryptography/_Cryptography_cffi_c61834dexf342507b.so -../cryptography/_Cryptography_cffi_dd416c1exc1767c5a.so -./ -dependency_links.txt -not-zip-safe -PKG-INFO -requires.txt -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/requires.txt b/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/requires.txt 
deleted file mode 100644 index aeb5cd7..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/requires.txt +++ /dev/null @@ -1,2 +0,0 @@ -cffi>=0.8 -six>=1.4.1 \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/top_level.txt deleted file mode 100644 index 717ffe8..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography-0.5.4-py3.4.egg-info/top_level.txt +++ /dev/null @@ -1,4 +0,0 @@ -_Cryptography_cffi_c61834dexf342507b -cryptography -_Cryptography_cffi_dd416c1exc1767c5a -_Cryptography_cffi_3e31f141x4000d087 diff --git a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_3e31f141x4000d087.so b/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_3e31f141x4000d087.so deleted file mode 100755 index ba428e4..0000000 Binary files a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_3e31f141x4000d087.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_c61834dexf342507b.so b/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_c61834dexf342507b.so deleted file mode 100755 index b7086ec..0000000 Binary files a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_c61834dexf342507b.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_dd416c1exc1767c5a.so b/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_dd416c1exc1767c5a.so deleted file mode 100755 index 28de2de..0000000 Binary files a/Darwin/lib/python3.4/site-packages/cryptography/_Cryptography_cffi_dd416c1exc1767c5a.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/cryptography/exceptions.py b/Darwin/lib/python3.4/site-packages/cryptography/exceptions.py deleted file mode 100644 index c14763f..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/exceptions.py +++ /dev/null @@ -1,63 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - - -class _Reasons(object): - BACKEND_MISSING_INTERFACE = object() - UNSUPPORTED_HASH = object() - UNSUPPORTED_CIPHER = object() - UNSUPPORTED_PADDING = object() - UNSUPPORTED_MGF = object() - UNSUPPORTED_PUBLIC_KEY_ALGORITHM = object() - UNSUPPORTED_ELLIPTIC_CURVE = object() - UNSUPPORTED_SERIALIZATION = object() - - -class UnsupportedAlgorithm(Exception): - def __init__(self, message, reason=None): - super(UnsupportedAlgorithm, self).__init__(message) - self._reason = reason - - -class AlreadyFinalized(Exception): - pass - - -class AlreadyUpdated(Exception): - pass - - -class NotYetFinalized(Exception): - pass - - -class InvalidTag(Exception): - pass - - -class InvalidSignature(Exception): - pass - - -class InternalError(Exception): - pass - - -class InvalidKey(Exception): - pass - - -class InvalidToken(Exception): - pass diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py deleted file mode 100644 index ae78822..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
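The exceptions module removed in the hunk above defines the package's shared error vocabulary (UnsupportedAlgorithm, AlreadyFinalized, InvalidSignature, and so on). As a hedged sketch only, this is how code using the vendored copy would typically encounter one of them; the import paths match the deleted modules, and the SHA-256 hash is just an illustrative primitive:

from cryptography.exceptions import AlreadyFinalized
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"payload")
digest.finalize()
try:
    digest.update(b"more")   # contexts are single-use; a second update raises
except AlreadyFinalized:
    print("hash context was already finalized")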
- -from __future__ import absolute_import, division, print_function - -from cryptography.hazmat.backends.multibackend import MultiBackend -from cryptography.hazmat.bindings.commoncrypto.binding import ( - Binding as CommonCryptoBinding -) -from cryptography.hazmat.bindings.openssl.binding import ( - Binding as OpenSSLBinding -) - - -_available_backends_list = None - - -def _available_backends(): - global _available_backends_list - - if _available_backends_list is None: - _available_backends_list = [] - - if CommonCryptoBinding.is_available(): - from cryptography.hazmat.backends import commoncrypto - _available_backends_list.append(commoncrypto.backend) - - if OpenSSLBinding.is_available(): - from cryptography.hazmat.backends import openssl - _available_backends_list.append(openssl.backend) - - return _available_backends_list - - -_default_backend = None - - -def default_backend(): - global _default_backend - - if _default_backend is None: - _default_backend = MultiBackend(_available_backends()) - - return _default_backend diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py deleted file mode 100644 index f080394..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -from cryptography.hazmat.backends.commoncrypto.backend import backend - - -__all__ = ["backend"] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py deleted file mode 100644 index 25885e1..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
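For orientation on the hunk above: _available_backends() probes the CommonCrypto and OpenSSL bindings, and default_backend() memoizes a MultiBackend built from whichever of them are usable. A minimal sketch of the consumer side, assuming only the import path shown in the deleted module:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

backend = default_backend()              # MultiBackend over CommonCrypto and/or OpenSSL
h = hashes.Hash(hashes.SHA256(), backend=backend)
h.update(b"data")
print(h.finalize())                      # 32-byte SHA-256 digest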
- -from __future__ import absolute_import, division, print_function - -from cryptography.hazmat.backends.openssl.backend import backend - - -__all__ = ["backend"] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py deleted file mode 100644 index 4991177..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/backend.py +++ /dev/null @@ -1,1061 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import collections -import itertools -import warnings - -import six - -from cryptography import utils -from cryptography.exceptions import ( - InternalError, UnsupportedAlgorithm, _Reasons -) -from cryptography.hazmat.backends.interfaces import ( - CMACBackend, CipherBackend, DSABackend, EllipticCurveBackend, HMACBackend, - HashBackend, PBKDF2HMACBackend, PKCS8SerializationBackend, RSABackend, - TraditionalOpenSSLSerializationBackend -) -from cryptography.hazmat.backends.openssl.ciphers import ( - _AESCTRCipherContext, _CipherContext -) -from cryptography.hazmat.backends.openssl.cmac import _CMACContext -from cryptography.hazmat.backends.openssl.dsa import ( - _DSAParameters, _DSAPrivateKey, _DSAPublicKey, - _DSASignatureContext, _DSAVerificationContext -) -from cryptography.hazmat.backends.openssl.ec import ( - _EllipticCurvePrivateKey, _EllipticCurvePublicKey -) -from cryptography.hazmat.backends.openssl.hashes import _HashContext -from cryptography.hazmat.backends.openssl.hmac import _HMACContext -from cryptography.hazmat.backends.openssl.rsa import ( - _RSAPrivateKey, _RSAPublicKey, _RSASignatureContext, - _RSAVerificationContext -) -from cryptography.hazmat.bindings.openssl.binding import Binding -from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa -from cryptography.hazmat.primitives.asymmetric.padding import ( - MGF1, OAEP, PKCS1v15, PSS -) -from cryptography.hazmat.primitives.ciphers.algorithms import ( - AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES -) -from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, CFB, CFB8, CTR, ECB, GCM, OFB -) - - -_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) -_OpenSSLError = collections.namedtuple("_OpenSSLError", - ["code", "lib", "func", "reason"]) - - -@utils.register_interface(CipherBackend) -@utils.register_interface(CMACBackend) -@utils.register_interface(DSABackend) -@utils.register_interface(EllipticCurveBackend) -@utils.register_interface(HashBackend) -@utils.register_interface(HMACBackend) -@utils.register_interface(PBKDF2HMACBackend) -@utils.register_interface(PKCS8SerializationBackend) -@utils.register_interface(RSABackend) -@utils.register_interface(TraditionalOpenSSLSerializationBackend) -class Backend(object): - """ - OpenSSL API binding interfaces. 
- """ - name = "openssl" - - def __init__(self): - self._binding = Binding() - self._ffi = self._binding.ffi - self._lib = self._binding.lib - - self._binding.init_static_locks() - - # adds all ciphers/digests for EVP - self._lib.OpenSSL_add_all_algorithms() - # registers available SSL/TLS ciphers and digests - self._lib.SSL_library_init() - # loads error strings for libcrypto and libssl functions - self._lib.SSL_load_error_strings() - - self._cipher_registry = {} - self._register_default_ciphers() - self.activate_osrandom_engine() - - def activate_builtin_random(self): - # Obtain a new structural reference. - e = self._lib.ENGINE_get_default_RAND() - if e != self._ffi.NULL: - self._lib.ENGINE_unregister_RAND(e) - # Reset the RNG to use the new engine. - self._lib.RAND_cleanup() - # decrement the structural reference from get_default_RAND - res = self._lib.ENGINE_finish(e) - assert res == 1 - - def activate_osrandom_engine(self): - # Unregister and free the current engine. - self.activate_builtin_random() - # Fetches an engine by id and returns it. This creates a structural - # reference. - e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id) - assert e != self._ffi.NULL - # Initialize the engine for use. This adds a functional reference. - res = self._lib.ENGINE_init(e) - assert res == 1 - # Set the engine as the default RAND provider. - res = self._lib.ENGINE_set_default_RAND(e) - assert res == 1 - # Decrement the structural ref incremented by ENGINE_by_id. - res = self._lib.ENGINE_free(e) - assert res == 1 - # Decrement the functional ref incremented by ENGINE_init. - res = self._lib.ENGINE_finish(e) - assert res == 1 - # Reset the RNG to use the new engine. - self._lib.RAND_cleanup() - - def openssl_version_text(self): - """ - Friendly string name of the loaded OpenSSL library. This is not - necessarily the same version as it was compiled against. 
- - Example: OpenSSL 1.0.1e 11 Feb 2013 - """ - return self._ffi.string( - self._lib.SSLeay_version(self._lib.SSLEAY_VERSION) - ).decode("ascii") - - def create_hmac_ctx(self, key, algorithm): - return _HMACContext(self, key, algorithm) - - def hash_supported(self, algorithm): - digest = self._lib.EVP_get_digestbyname(algorithm.name.encode("ascii")) - return digest != self._ffi.NULL - - def hmac_supported(self, algorithm): - return self.hash_supported(algorithm) - - def create_hash_ctx(self, algorithm): - return _HashContext(self, algorithm) - - def cipher_supported(self, cipher, mode): - if self._evp_cipher_supported(cipher, mode): - return True - elif isinstance(mode, CTR) and isinstance(cipher, AES): - return True - else: - return False - - def _evp_cipher_supported(self, cipher, mode): - try: - adapter = self._cipher_registry[type(cipher), type(mode)] - except KeyError: - return False - evp_cipher = adapter(self, cipher, mode) - return self._ffi.NULL != evp_cipher - - def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): - if (cipher_cls, mode_cls) in self._cipher_registry: - raise ValueError("Duplicate registration for: {0} {1}.".format( - cipher_cls, mode_cls) - ) - self._cipher_registry[cipher_cls, mode_cls] = adapter - - def _register_default_ciphers(self): - for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8]: - self.register_cipher_adapter( - AES, - mode_cls, - GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") - ) - for mode_cls in [CBC, CTR, ECB, OFB, CFB]: - self.register_cipher_adapter( - Camellia, - mode_cls, - GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") - ) - for mode_cls in [CBC, CFB, CFB8, OFB]: - self.register_cipher_adapter( - TripleDES, - mode_cls, - GetCipherByName("des-ede3-{mode.name}") - ) - self.register_cipher_adapter( - TripleDES, - ECB, - GetCipherByName("des-ede3") - ) - for mode_cls in [CBC, CFB, OFB, ECB]: - self.register_cipher_adapter( - Blowfish, - mode_cls, - GetCipherByName("bf-{mode.name}") - ) - for mode_cls in [CBC, CFB, OFB, ECB]: - self.register_cipher_adapter( - SEED, - mode_cls, - GetCipherByName("seed-{mode.name}") - ) - for cipher_cls, mode_cls in itertools.product( - [CAST5, IDEA], - [CBC, OFB, CFB, ECB], - ): - self.register_cipher_adapter( - cipher_cls, - mode_cls, - GetCipherByName("{cipher.name}-{mode.name}") - ) - self.register_cipher_adapter( - ARC4, - type(None), - GetCipherByName("rc4") - ) - self.register_cipher_adapter( - AES, - GCM, - GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") - ) - - def create_symmetric_encryption_ctx(self, cipher, mode): - if (isinstance(mode, CTR) and isinstance(cipher, AES) - and not self._evp_cipher_supported(cipher, mode)): - # This is needed to provide support for AES CTR mode in OpenSSL - # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 - # extended life ends 2020). - return _AESCTRCipherContext(self, cipher, mode) - else: - return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) - - def create_symmetric_decryption_ctx(self, cipher, mode): - if (isinstance(mode, CTR) and isinstance(cipher, AES) - and not self._evp_cipher_supported(cipher, mode)): - # This is needed to provide support for AES CTR mode in OpenSSL - # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 - # extended life ends 2020). 
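The registration table above maps (cipher, mode) pairs to OpenSSL EVP names such as "aes-128-cbc", and create_symmetric_encryption_ctx/create_symmetric_decryption_ctx are what the public Cipher API calls into. A hedged sketch of that front-end usage; key and IV are throwaway values from os.urandom, and the plaintext is kept block-aligned because bare CBC does no padding:

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(32)                     # AES-256
iv = os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())

encryptor = cipher.encryptor()
ciphertext = encryptor.update(b"sixteen byte msg") + encryptor.finalize()

decryptor = cipher.decryptor()
print(decryptor.update(ciphertext) + decryptor.finalize())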
- return _AESCTRCipherContext(self, cipher, mode) - else: - return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) - - def pbkdf2_hmac_supported(self, algorithm): - if self._lib.Cryptography_HAS_PBKDF2_HMAC: - return self.hmac_supported(algorithm) - else: - # OpenSSL < 1.0.0 has an explicit PBKDF2-HMAC-SHA1 function, - # so if the PBKDF2_HMAC function is missing we only support - # SHA1 via PBKDF2_HMAC_SHA1. - return isinstance(algorithm, hashes.SHA1) - - def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, - key_material): - buf = self._ffi.new("char[]", length) - if self._lib.Cryptography_HAS_PBKDF2_HMAC: - evp_md = self._lib.EVP_get_digestbyname( - algorithm.name.encode("ascii")) - assert evp_md != self._ffi.NULL - res = self._lib.PKCS5_PBKDF2_HMAC( - key_material, - len(key_material), - salt, - len(salt), - iterations, - evp_md, - length, - buf - ) - assert res == 1 - else: - if not isinstance(algorithm, hashes.SHA1): - raise UnsupportedAlgorithm( - "This version of OpenSSL only supports PBKDF2HMAC with " - "SHA1.", - _Reasons.UNSUPPORTED_HASH - ) - res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( - key_material, - len(key_material), - salt, - len(salt), - iterations, - length, - buf - ) - assert res == 1 - - return self._ffi.buffer(buf)[:] - - def _err_string(self, code): - err_buf = self._ffi.new("char[]", 256) - self._lib.ERR_error_string_n(code, err_buf, 256) - return self._ffi.string(err_buf, 256)[:] - - def _consume_errors(self): - errors = [] - while True: - code = self._lib.ERR_get_error() - if code == 0: - break - - lib = self._lib.ERR_GET_LIB(code) - func = self._lib.ERR_GET_FUNC(code) - reason = self._lib.ERR_GET_REASON(code) - - errors.append(_OpenSSLError(code, lib, func, reason)) - return errors - - def _unknown_error(self, error): - return InternalError( - "Unknown error code {0} from OpenSSL, " - "you should probably file a bug. {1}.".format( - error.code, self._err_string(error.code) - ) - ) - - def _bn_to_int(self, bn): - if six.PY3: - # Python 3 has constant time from_bytes, so use that. - - bn_num_bytes = (self._lib.BN_num_bits(bn) + 7) // 8 - bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) - bin_len = self._lib.BN_bn2bin(bn, bin_ptr) - assert bin_len > 0 - assert bin_ptr != self._ffi.NULL - return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") - - else: - # Under Python 2 the best we can do is hex() - - hex_cdata = self._lib.BN_bn2hex(bn) - assert hex_cdata != self._ffi.NULL - hex_str = self._ffi.string(hex_cdata) - self._lib.OPENSSL_free(hex_cdata) - return int(hex_str, 16) - - def _int_to_bn(self, num, bn=None): - """ - Converts a python integer to a BIGNUM. The returned BIGNUM will not - be garbage collected (to support adding them to structs that take - ownership of the object). Be sure to register it for GC if it will - be discarded after use. - """ - - if bn is None: - bn = self._ffi.NULL - - if six.PY3: - # Python 3 has constant time to_bytes, so use that. 
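derive_pbkdf2_hmac above is the backend half of PBKDF2; the installed-files list earlier in the diff shows the matching front end (hazmat/primitives/kdf/pbkdf2.pyc) shipped in the same package. A sketch of the usual call, hedged to the constructor signature of this era:

import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,
    salt=os.urandom(16),
    iterations=100000,
    backend=default_backend(),
)
key = kdf.derive(b"correct horse battery staple")
print(len(key))                          # 32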
- - binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") - bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) - assert bn_ptr != self._ffi.NULL - return bn_ptr - - else: - # Under Python 2 the best we can do is hex() - - hex_num = hex(num).rstrip("L").lstrip("0x").encode("ascii") or b"0" - bn_ptr = self._ffi.new("BIGNUM **") - bn_ptr[0] = bn - res = self._lib.BN_hex2bn(bn_ptr, hex_num) - assert res != 0 - assert bn_ptr[0] != self._ffi.NULL - return bn_ptr[0] - - def generate_rsa_private_key(self, public_exponent, key_size): - rsa._verify_rsa_parameters(public_exponent, key_size) - - rsa_cdata = self._lib.RSA_new() - assert rsa_cdata != self._ffi.NULL - rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) - - bn = self._int_to_bn(public_exponent) - bn = self._ffi.gc(bn, self._lib.BN_free) - - res = self._lib.RSA_generate_key_ex( - rsa_cdata, key_size, bn, self._ffi.NULL - ) - assert res == 1 - - return _RSAPrivateKey(self, rsa_cdata) - - def generate_rsa_parameters_supported(self, public_exponent, key_size): - return (public_exponent >= 3 and public_exponent & 1 != 0 and - key_size >= 512) - - def load_rsa_private_numbers(self, numbers): - rsa._check_private_key_components( - numbers.p, - numbers.q, - numbers.d, - numbers.dmp1, - numbers.dmq1, - numbers.iqmp, - numbers.public_numbers.e, - numbers.public_numbers.n - ) - rsa_cdata = self._lib.RSA_new() - assert rsa_cdata != self._ffi.NULL - rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) - rsa_cdata.p = self._int_to_bn(numbers.p) - rsa_cdata.q = self._int_to_bn(numbers.q) - rsa_cdata.d = self._int_to_bn(numbers.d) - rsa_cdata.dmp1 = self._int_to_bn(numbers.dmp1) - rsa_cdata.dmq1 = self._int_to_bn(numbers.dmq1) - rsa_cdata.iqmp = self._int_to_bn(numbers.iqmp) - rsa_cdata.e = self._int_to_bn(numbers.public_numbers.e) - rsa_cdata.n = self._int_to_bn(numbers.public_numbers.n) - res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) - assert res == 1 - - return _RSAPrivateKey(self, rsa_cdata) - - def load_rsa_public_numbers(self, numbers): - rsa._check_public_key_components(numbers.e, numbers.n) - rsa_cdata = self._lib.RSA_new() - assert rsa_cdata != self._ffi.NULL - rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) - rsa_cdata.e = self._int_to_bn(numbers.e) - rsa_cdata.n = self._int_to_bn(numbers.n) - res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) - assert res == 1 - - return _RSAPublicKey(self, rsa_cdata) - - def _bytes_to_bio(self, data): - """ - Return a _MemoryBIO namedtuple of (BIO, char*). - - The char* is the storage for the BIO and it must stay alive until the - BIO is finished with. - """ - data_char_p = self._ffi.new("char[]", data) - bio = self._lib.BIO_new_mem_buf( - data_char_p, len(data) - ) - assert bio != self._ffi.NULL - - return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) - - def _evp_pkey_to_private_key(self, evp_pkey): - """ - Return the appropriate type of PrivateKey given an evp_pkey cdata - pointer. 
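generate_rsa_private_key and the load_rsa_*_numbers methods above are normally reached through the asymmetric RSA primitives, but they can also be called on the backend object directly, which keeps this sketch grounded in the method names visible in the hunk (the exponent and key size are conventional example values):

from cryptography.hazmat.backends import default_backend

backend = default_backend()
private_key = backend.generate_rsa_private_key(public_exponent=65537, key_size=2048)
# Returns the backend's RSA private key wrapper (_RSAPrivateKey in the code above),
# which the high-level rsa primitives then hand to callers.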
- """ - - type = evp_pkey.type - - if type == self._lib.EVP_PKEY_RSA: - rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) - assert rsa_cdata != self._ffi.NULL - rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) - return _RSAPrivateKey(self, rsa_cdata) - elif type == self._lib.EVP_PKEY_DSA: - dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) - assert dsa_cdata != self._ffi.NULL - dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) - return _DSAPrivateKey(self, dsa_cdata) - else: - raise UnsupportedAlgorithm("Unsupported key type.") - - def _pem_password_cb(self, password): - """ - Generate a pem_password_cb function pointer that copied the password to - OpenSSL as required and returns the number of bytes copied. - - typedef int pem_password_cb(char *buf, int size, - int rwflag, void *userdata); - - Useful for decrypting PKCS8 files and so on. - - Returns a tuple of (cdata function pointer, callback function). - """ - - def pem_password_cb(buf, size, writing, userdata): - pem_password_cb.called += 1 - - if not password or len(password) >= size: - return 0 - else: - pw_buf = self._ffi.buffer(buf, size) - pw_buf[:len(password)] = password - return len(password) - - pem_password_cb.called = 0 - - return ( - self._ffi.callback("int (char *, int, int, void *)", - pem_password_cb), - pem_password_cb - ) - - def _rsa_cdata_from_private_key(self, private_key): - ctx = self._lib.RSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.RSA_free) - - ctx.p = self._int_to_bn(private_key.p) - ctx.q = self._int_to_bn(private_key.q) - ctx.d = self._int_to_bn(private_key.d) - ctx.e = self._int_to_bn(private_key.e) - ctx.n = self._int_to_bn(private_key.n) - ctx.dmp1 = self._int_to_bn(private_key.dmp1) - ctx.dmq1 = self._int_to_bn(private_key.dmq1) - ctx.iqmp = self._int_to_bn(private_key.iqmp) - res = self._lib.RSA_blinding_on(ctx, self._ffi.NULL) - assert res == 1 - - return ctx - - def _rsa_cdata_from_public_key(self, public_key): - ctx = self._lib.RSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.RSA_free) - - ctx.e = self._int_to_bn(public_key.e) - ctx.n = self._int_to_bn(public_key.n) - res = self._lib.RSA_blinding_on(ctx, self._ffi.NULL) - assert res == 1 - - return ctx - - def create_rsa_signature_ctx(self, private_key, padding, algorithm): - warnings.warn( - "create_rsa_signature_ctx is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - rsa_cdata = self._rsa_cdata_from_private_key(private_key) - key = _RSAPrivateKey(self, rsa_cdata) - return _RSASignatureContext(self, key, padding, algorithm) - - def create_rsa_verification_ctx(self, public_key, signature, padding, - algorithm): - warnings.warn( - "create_rsa_verification_ctx is deprecated and will be removed in " - "a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - rsa_cdata = self._rsa_cdata_from_public_key(public_key) - key = _RSAPublicKey(self, rsa_cdata) - return _RSAVerificationContext(self, key, signature, padding, - algorithm) - - def mgf1_hash_supported(self, algorithm): - warnings.warn( - "mgf1_hash_supported is deprecated and will be removed in " - "a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - return self._mgf1_hash_supported(algorithm) - - def _mgf1_hash_supported(self, algorithm): - if self._lib.Cryptography_HAS_MGF1_MD: - return self.hash_supported(algorithm) - else: - return isinstance(algorithm, hashes.SHA1) - - def rsa_padding_supported(self, padding): - if isinstance(padding, PKCS1v15): - 
return True - elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): - return self._mgf1_hash_supported(padding._mgf._algorithm) - elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): - return isinstance(padding._mgf._algorithm, hashes.SHA1) - else: - return False - - def generate_dsa_parameters(self, key_size): - if key_size not in (1024, 2048, 3072): - raise ValueError( - "Key size must be 1024 or 2048 or 3072 bits.") - - if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and - key_size > 1024): - raise ValueError( - "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " - "support larger key sizes.") - - ctx = self._lib.DSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.DSA_free) - - res = self._lib.DSA_generate_parameters_ex( - ctx, key_size, self._ffi.NULL, 0, - self._ffi.NULL, self._ffi.NULL, self._ffi.NULL - ) - - assert res == 1 - - return _DSAParameters(self, ctx) - - def generate_dsa_private_key(self, parameters): - ctx = self._lib.DSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.DSA_free) - if isinstance(parameters, dsa.DSAParameters): - ctx.p = self._int_to_bn(parameters.p) - ctx.q = self._int_to_bn(parameters.q) - ctx.g = self._int_to_bn(parameters.g) - else: - ctx.p = self._lib.BN_dup(parameters._dsa_cdata.p) - ctx.q = self._lib.BN_dup(parameters._dsa_cdata.q) - ctx.g = self._lib.BN_dup(parameters._dsa_cdata.g) - - self._lib.DSA_generate_key(ctx) - - return _DSAPrivateKey(self, ctx) - - def generate_dsa_private_key_and_parameters(self, key_size): - parameters = self.generate_dsa_parameters(key_size) - return self.generate_dsa_private_key(parameters) - - def create_dsa_signature_ctx(self, private_key, algorithm): - warnings.warn( - "create_dsa_signature_ctx is deprecated and will be removed in " - "a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - dsa_cdata = self._dsa_cdata_from_private_key(private_key) - key = _DSAPrivateKey(self, dsa_cdata) - return _DSASignatureContext(self, key, algorithm) - - def create_dsa_verification_ctx(self, public_key, signature, - algorithm): - warnings.warn( - "create_dsa_verification_ctx is deprecated and will be removed in " - "a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - dsa_cdata = self._dsa_cdata_from_public_key(public_key) - key = _DSAPublicKey(self, dsa_cdata) - return _DSAVerificationContext(self, key, signature, algorithm) - - def load_dsa_private_numbers(self, numbers): - dsa._check_dsa_private_numbers(numbers) - parameter_numbers = numbers.public_numbers.parameter_numbers - - dsa_cdata = self._lib.DSA_new() - assert dsa_cdata != self._ffi.NULL - dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) - - dsa_cdata.p = self._int_to_bn(parameter_numbers.p) - dsa_cdata.q = self._int_to_bn(parameter_numbers.q) - dsa_cdata.g = self._int_to_bn(parameter_numbers.g) - dsa_cdata.pub_key = self._int_to_bn(numbers.public_numbers.y) - dsa_cdata.priv_key = self._int_to_bn(numbers.x) - - return _DSAPrivateKey(self, dsa_cdata) - - def load_dsa_public_numbers(self, numbers): - dsa._check_dsa_parameters(numbers.parameter_numbers) - dsa_cdata = self._lib.DSA_new() - assert dsa_cdata != self._ffi.NULL - dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) - - dsa_cdata.p = self._int_to_bn(numbers.parameter_numbers.p) - dsa_cdata.q = self._int_to_bn(numbers.parameter_numbers.q) - dsa_cdata.g = self._int_to_bn(numbers.parameter_numbers.g) - dsa_cdata.pub_key = self._int_to_bn(numbers.y) - - return _DSAPublicKey(self, dsa_cdata) - - def 
load_dsa_parameter_numbers(self, numbers): - dsa._check_dsa_parameters(numbers) - dsa_cdata = self._lib.DSA_new() - assert dsa_cdata != self._ffi.NULL - dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) - - dsa_cdata.p = self._int_to_bn(numbers.p) - dsa_cdata.q = self._int_to_bn(numbers.q) - dsa_cdata.g = self._int_to_bn(numbers.g) - - return _DSAParameters(self, dsa_cdata) - - def _dsa_cdata_from_public_key(self, public_key): - ctx = self._lib.DSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.DSA_free) - parameters = public_key.parameters() - ctx.p = self._int_to_bn(parameters.p) - ctx.q = self._int_to_bn(parameters.q) - ctx.g = self._int_to_bn(parameters.g) - ctx.pub_key = self._int_to_bn(public_key.y) - return ctx - - def _dsa_cdata_from_private_key(self, private_key): - ctx = self._lib.DSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.DSA_free) - parameters = private_key.parameters() - ctx.p = self._int_to_bn(parameters.p) - ctx.q = self._int_to_bn(parameters.q) - ctx.g = self._int_to_bn(parameters.g) - ctx.priv_key = self._int_to_bn(private_key.x) - ctx.pub_key = self._int_to_bn(private_key.y) - return ctx - - def dsa_hash_supported(self, algorithm): - if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: - return isinstance(algorithm, hashes.SHA1) - else: - return self.hash_supported(algorithm) - - def dsa_parameters_supported(self, p, q, g): - if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: - return (utils.bit_length(p) <= 1024 and utils.bit_length(q) <= 160) - else: - return True - - def decrypt_rsa(self, private_key, ciphertext, padding): - warnings.warn( - "decrypt_rsa is deprecated and will be removed in a future " - "version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - rsa_cdata = self._rsa_cdata_from_private_key(private_key) - key = _RSAPrivateKey(self, rsa_cdata) - return key.decrypt(ciphertext, padding) - - def encrypt_rsa(self, public_key, plaintext, padding): - warnings.warn( - "encrypt_rsa is deprecated and will be removed in a future " - "version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - rsa_cdata = self._rsa_cdata_from_public_key(public_key) - key = _RSAPublicKey(self, rsa_cdata) - return key.encrypt(plaintext, padding) - - def cmac_algorithm_supported(self, algorithm): - return ( - self._lib.Cryptography_HAS_CMAC == 1 - and self.cipher_supported(algorithm, CBC( - b"\x00" * algorithm.block_size)) - ) - - def create_cmac_ctx(self, algorithm): - return _CMACContext(self, algorithm) - - def load_traditional_openssl_pem_private_key(self, data, password): - # OpenSSLs API for loading PKCS#8 certs can also load the traditional - # format so we just use that for both of them. 
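As the comment above notes, the traditional-OpenSSL loader just delegates to the PKCS#8 path because PEM_read_bio_PrivateKey accepts both encodings. A sketch of the corresponding front-end call; the helper name load_pem_pkcs8_private_key is my assumption about the serialization module of this era and does not appear in this diff, and key.pem is a hypothetical path:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_pkcs8_private_key

with open("key.pem", "rb") as f:         # hypothetical file
    pem_data = f.read()

# password=None loads an unencrypted key; per the error handling in the deleted
# loader, an encrypted key with no password raises TypeError and a wrong
# password raises ValueError.
private_key = load_pem_pkcs8_private_key(pem_data, None, default_backend())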
- - return self.load_pkcs8_pem_private_key(data, password) - - def load_pkcs8_pem_private_key(self, data, password): - mem_bio = self._bytes_to_bio(data) - - password_callback, password_func = self._pem_password_cb(password) - - evp_pkey = self._lib.PEM_read_bio_PrivateKey( - mem_bio.bio, - self._ffi.NULL, - password_callback, - self._ffi.NULL - ) - - if evp_pkey == self._ffi.NULL: - errors = self._consume_errors() - if not errors: - raise ValueError("Could not unserialize key data.") - - if ( - errors[0][1:] == ( - self._lib.ERR_LIB_PEM, - self._lib.PEM_F_PEM_DO_HEADER, - self._lib.PEM_R_BAD_PASSWORD_READ - ) - ) or ( - errors[0][1:] == ( - self._lib.ERR_LIB_PEM, - self._lib.PEM_F_PEM_READ_BIO_PRIVATEKEY, - self._lib.PEM_R_BAD_PASSWORD_READ - ) - ): - assert not password - raise TypeError( - "Password was not given but private key is encrypted.") - - elif errors[0][1:] == ( - self._lib.ERR_LIB_EVP, - self._lib.EVP_F_EVP_DECRYPTFINAL_EX, - self._lib.EVP_R_BAD_DECRYPT - ): - raise ValueError( - "Bad decrypt. Incorrect password?" - ) - - elif errors[0][1:] in ( - ( - self._lib.ERR_LIB_PEM, - self._lib.PEM_F_PEM_GET_EVP_CIPHER_INFO, - self._lib.PEM_R_UNSUPPORTED_ENCRYPTION - ), - - ( - self._lib.ERR_LIB_EVP, - self._lib.EVP_F_EVP_PBE_CIPHERINIT, - self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM - ) - ): - raise UnsupportedAlgorithm( - "PEM data is encrypted with an unsupported cipher", - _Reasons.UNSUPPORTED_CIPHER - ) - - elif any( - error[1:] == ( - self._lib.ERR_LIB_EVP, - self._lib.EVP_F_EVP_PKCS82PKEY, - self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM - ) - for error in errors - ): - raise UnsupportedAlgorithm( - "Unsupported public key algorithm.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM - ) - - else: - assert errors[0][1] in ( - self._lib.ERR_LIB_EVP, - self._lib.ERR_LIB_PEM, - self._lib.ERR_LIB_ASN1, - ) - raise ValueError("Could not unserialize key data.") - - evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) - - if password is not None and password_func.called == 0: - raise TypeError( - "Password was given but private key is not encrypted.") - - assert ( - (password is not None and password_func.called == 1) or - password is None - ) - - return self._evp_pkey_to_private_key(evp_pkey) - - def elliptic_curve_supported(self, curve): - if self._lib.Cryptography_HAS_EC != 1: - return False - - try: - curve_nid = self._elliptic_curve_to_nid(curve) - except UnsupportedAlgorithm: - curve_nid = self._lib.NID_undef - - ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid) - - if ctx == self._ffi.NULL: - errors = self._consume_errors() - assert ( - curve_nid == self._lib.NID_undef or - errors[0][1:] == ( - self._lib.ERR_LIB_EC, - self._lib.EC_F_EC_GROUP_NEW_BY_CURVE_NAME, - self._lib.EC_R_UNKNOWN_GROUP - ) - ) - return False - else: - assert curve_nid != self._lib.NID_undef - self._lib.EC_GROUP_free(ctx) - return True - - def elliptic_curve_signature_algorithm_supported( - self, signature_algorithm, curve - ): - if self._lib.Cryptography_HAS_EC != 1: - return False - - # We only support ECDSA right now. - if not isinstance(signature_algorithm, ec.ECDSA): - return False - - # Before 0.9.8m OpenSSL can't cope with digests longer than the curve. - if ( - self._lib.OPENSSL_VERSION_NUMBER < 0x009080df and - curve.key_size < signature_algorithm.algorithm.digest_size * 8 - ): - return False - - return self.elliptic_curve_supported(curve) - - def generate_elliptic_curve_private_key(self, curve): - """ - Generate a new private key on the named curve. 
- """ - - if self.elliptic_curve_supported(curve): - curve_nid = self._elliptic_curve_to_nid(curve) - - ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) - - res = self._lib.EC_KEY_generate_key(ctx) - assert res == 1 - - res = self._lib.EC_KEY_check_key(ctx) - assert res == 1 - - return _EllipticCurvePrivateKey(self, ctx, curve) - else: - raise UnsupportedAlgorithm( - "Backend object does not support {0}.".format(curve.name), - _Reasons.UNSUPPORTED_ELLIPTIC_CURVE - ) - - def elliptic_curve_private_key_from_numbers(self, numbers): - public = numbers.public_numbers - - curve_nid = self._elliptic_curve_to_nid(public.curve) - - ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) - - ctx = self._ec_key_set_public_key_affine_coordinates( - ctx, public.x, public.y) - - res = self._lib.EC_KEY_set_private_key( - ctx, self._int_to_bn(numbers.private_value)) - assert res == 1 - - return _EllipticCurvePrivateKey(self, ctx, - numbers.public_numbers.curve) - - def elliptic_curve_public_key_from_numbers(self, numbers): - curve_nid = self._elliptic_curve_to_nid(numbers.curve) - - ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) - - ctx = self._ec_key_set_public_key_affine_coordinates( - ctx, numbers.x, numbers.y) - - return _EllipticCurvePublicKey(self, ctx, numbers.curve) - - def _elliptic_curve_to_nid(self, curve): - """ - Get the NID for a curve name. - """ - - curve_aliases = { - "secp192r1": "prime192v1", - "secp256r1": "prime256v1" - } - - curve_name = curve_aliases.get(curve.name, curve.name) - - curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) - if curve_nid == self._lib.NID_undef: - raise UnsupportedAlgorithm( - "{0} is not a supported elliptic curve".format(curve.name), - _Reasons.UNSUPPORTED_ELLIPTIC_CURVE - ) - return curve_nid - - def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): - """ - This is a port of EC_KEY_set_public_key_affine_coordinates that was - added in 1.0.1. - - Sets the public key point in the EC_KEY context to the affine x and y - values. 
- """ - - assert ctx != self._ffi.NULL - - bn_x = self._int_to_bn(x) - bn_y = self._int_to_bn(y) - - nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") - assert nid_two_field != self._lib.NID_undef - - bn_ctx = self._lib.BN_CTX_new() - assert bn_ctx != self._ffi.NULL - bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) - - group = self._lib.EC_KEY_get0_group(ctx) - assert group != self._ffi.NULL - - point = self._lib.EC_POINT_new(group) - assert point != self._ffi.NULL - point = self._ffi.gc(point, self._lib.EC_POINT_free) - - method = self._lib.EC_GROUP_method_of(group) - assert method != self._ffi.NULL - - nid = self._lib.EC_METHOD_get_field_type(method) - assert nid != self._lib.NID_undef - - check_x = self._lib.BN_CTX_get(bn_ctx) - check_y = self._lib.BN_CTX_get(bn_ctx) - - if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: - set_func = self._lib.EC_POINT_set_affine_coordinates_GF2m - get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m - else: - set_func = self._lib.EC_POINT_set_affine_coordinates_GFp - get_func = self._lib.EC_POINT_get_affine_coordinates_GFp - - assert set_func and get_func - - res = set_func(group, point, bn_x, bn_y, bn_ctx) - assert res == 1 - - res = get_func(group, point, check_x, check_y, bn_ctx) - assert res == 1 - - assert ( - self._lib.BN_cmp(bn_x, check_x) == 0 and - self._lib.BN_cmp(bn_y, check_y) == 0 - ) - - res = self._lib.EC_KEY_set_public_key(ctx, point) - assert res == 1 - - res = self._lib.EC_KEY_check_key(ctx) - assert res == 1 - - return ctx - - -class GetCipherByName(object): - def __init__(self, fmt): - self._fmt = fmt - - def __call__(self, backend, cipher, mode): - cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() - return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) - - -backend = Backend() diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py deleted file mode 100644 index b7cd980..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ec.py +++ /dev/null @@ -1,191 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - -import six - -from cryptography import utils -from cryptography.exceptions import ( - InvalidSignature, UnsupportedAlgorithm, _Reasons -) -from cryptography.hazmat.primitives import hashes, interfaces -from cryptography.hazmat.primitives.asymmetric import ec - - -def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend): - _lib = backend._lib - _ffi = backend._ffi - - digest_len = len(digest) - - group = _lib.EC_KEY_get0_group(ec_key_cdata) - - bn_ctx = _lib.BN_CTX_new() - assert bn_ctx != _ffi.NULL - bn_ctx = _ffi.gc(bn_ctx, _lib.BN_CTX_free) - - order = _lib.BN_CTX_get(bn_ctx) - assert order != _ffi.NULL - - res = _lib.EC_GROUP_get_order(group, order, bn_ctx) - assert res == 1 - - order_bits = _lib.BN_num_bits(order) - - if 8 * digest_len > order_bits: - digest_len = (order_bits + 7) // 8 - digest = digest[:digest_len] - - if 8 * digest_len > order_bits: - rshift = 8 - (order_bits & 0x7) - assert rshift > 0 and rshift < 8 - - mask = 0xFF >> rshift << rshift - - # Set the bottom rshift bits to 0 - digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask) - - return digest - - -@utils.register_interface(interfaces.AsymmetricSignatureContext) -class _ECDSASignatureContext(object): - def __init__(self, backend, private_key, algorithm): - self._backend = backend - self._private_key = private_key - self._digest = hashes.Hash(algorithm, backend) - - def update(self, data): - self._digest.update(data) - - def finalize(self): - ec_key = self._private_key._ec_key - - digest = self._digest.finalize() - - digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) - - max_size = self._backend._lib.ECDSA_size(ec_key) - assert max_size > 0 - - sigbuf = self._backend._ffi.new("char[]", max_size) - siglen_ptr = self._backend._ffi.new("unsigned int[]", 1) - res = self._backend._lib.ECDSA_sign( - 0, - digest, - len(digest), - sigbuf, - siglen_ptr, - ec_key - ) - assert res == 1 - return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] - - -@utils.register_interface(interfaces.AsymmetricVerificationContext) -class _ECDSAVerificationContext(object): - def __init__(self, backend, public_key, signature, algorithm): - self._backend = backend - self._public_key = public_key - self._signature = signature - self._digest = hashes.Hash(algorithm, backend) - - def update(self, data): - self._digest.update(data) - - def verify(self): - ec_key = self._public_key._ec_key - - digest = self._digest.finalize() - - digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) - - res = self._backend._lib.ECDSA_verify( - 0, - digest, - len(digest), - self._signature, - len(self._signature), - ec_key - ) - if res != 1: - self._backend._consume_errors() - raise InvalidSignature - return True - - -@utils.register_interface(interfaces.EllipticCurvePrivateKey) -class _EllipticCurvePrivateKey(object): - def __init__(self, backend, ec_key_cdata, curve): - self._backend = backend - self._ec_key = ec_key_cdata - self._curve = curve - - @property - def curve(self): - return self._curve - - def signer(self, signature_algorithm): - if isinstance(signature_algorithm, ec.ECDSA): - return _ECDSASignatureContext( - self._backend, self, signature_algorithm.algorithm - ) - else: - raise UnsupportedAlgorithm( - "Unsupported elliptic curve signature algorithm.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def public_key(self): - group = self._backend._lib.EC_KEY_get0_group(self._ec_key) - assert group != self._backend._ffi.NULL - - 
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) - - public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) - assert public_ec_key != self._backend._ffi.NULL - public_ec_key = self._backend._ffi.gc( - public_ec_key, self._backend._lib.EC_KEY_free - ) - - point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) - assert point != self._backend._ffi.NULL - - res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) - assert res == 1 - - return _EllipticCurvePublicKey( - self._backend, public_ec_key, self._curve - ) - - -@utils.register_interface(interfaces.EllipticCurvePublicKey) -class _EllipticCurvePublicKey(object): - def __init__(self, backend, ec_key_cdata, curve): - self._backend = backend - self._ec_key = ec_key_cdata - self._curve = curve - - @property - def curve(self): - return self._curve - - def verifier(self, signature, signature_algorithm): - if isinstance(signature_algorithm, ec.ECDSA): - return _ECDSAVerificationContext( - self._backend, self, signature, signature_algorithm.algorithm - ) - else: - raise UnsupportedAlgorithm( - "Unsupported elliptic curve signature algorithm.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
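The _ECDSASignatureContext and _ECDSAVerificationContext classes above back the signer()/verifier() methods on the EC key wrappers, with _truncate_digest_for_ecdsa trimming the digest to the curve order first. A hedged sketch of that flow, generated through the OpenSSL backend object so each call maps onto a method visible in this diff:

from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

private_key = openssl_backend.generate_elliptic_curve_private_key(ec.SECP256R1())

signer = private_key.signer(ec.ECDSA(hashes.SHA256()))
signer.update(b"message to sign")
signature = signer.finalize()

verifier = private_key.public_key().verifier(signature, ec.ECDSA(hashes.SHA256()))
verifier.update(b"message to sign")
verifier.verify()                        # raises InvalidSignature on a mismatch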
- -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py deleted file mode 100644 index ee7378a..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py +++ /dev/null @@ -1,60 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import platform -import sys - -from cryptography.hazmat.bindings.utils import build_ffi - - -class Binding(object): - """ - CommonCrypto API wrapper. - """ - _module_prefix = "cryptography.hazmat.bindings.commoncrypto." - _modules = [ - "cf", - "common_digest", - "common_hmac", - "common_key_derivation", - "common_cryptor", - "secimport", - "secitem", - "seckey", - "seckeychain", - "sectransform", - ] - - ffi = None - lib = None - - def __init__(self): - self._ensure_ffi_initialized() - - @classmethod - def _ensure_ffi_initialized(cls): - if cls.ffi is not None and cls.lib is not None: - return - - cls.ffi, cls.lib = build_ffi( - module_prefix=cls._module_prefix, - modules=cls._modules, - extra_link_args=["-framework", "Security"] - ) - - @classmethod - def is_available(cls): - return sys.platform == "darwin" and list(map( - int, platform.mac_ver()[0].split("."))) >= [10, 8, 0] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py deleted file mode 100644 index 671963a..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py +++ /dev/null @@ -1,114 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef bool Boolean; -typedef signed long OSStatus; -typedef unsigned char UInt8; -typedef uint32_t UInt32; - -typedef const void * CFAllocatorRef; -const CFAllocatorRef kCFAllocatorDefault; -typedef const void * CFDataRef; -typedef signed long long CFIndex; -typedef ... *CFStringRef; -typedef ... *CFArrayRef; -typedef ... *CFBooleanRef; -typedef ... *CFErrorRef; -typedef ... *CFNumberRef; -typedef ... *CFTypeRef; -typedef ... *CFDictionaryRef; -typedef ... 
*CFMutableDictionaryRef; -typedef struct { - ...; -} CFDictionaryKeyCallBacks; -typedef struct { - ...; -} CFDictionaryValueCallBacks; -typedef struct { - ...; -} CFRange; - -typedef UInt32 CFStringEncoding; -enum { - kCFStringEncodingASCII = 0x0600 -}; - -enum { - kCFNumberSInt8Type = 1, - kCFNumberSInt16Type = 2, - kCFNumberSInt32Type = 3, - kCFNumberSInt64Type = 4, - kCFNumberFloat32Type = 5, - kCFNumberFloat64Type = 6, - kCFNumberCharType = 7, - kCFNumberShortType = 8, - kCFNumberIntType = 9, - kCFNumberLongType = 10, - kCFNumberLongLongType = 11, - kCFNumberFloatType = 12, - kCFNumberDoubleType = 13, - kCFNumberCFIndexType = 14, - kCFNumberNSIntegerType = 15, - kCFNumberCGFloatType = 16, - kCFNumberMaxType = 16 -}; -typedef int CFNumberType; - -const CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; -const CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; - -const CFBooleanRef kCFBooleanTrue; -const CFBooleanRef kCFBooleanFalse; -""" - -FUNCTIONS = """ -CFDataRef CFDataCreate(CFAllocatorRef, const UInt8 *, CFIndex); -CFStringRef CFStringCreateWithCString(CFAllocatorRef, const char *, - CFStringEncoding); -CFDictionaryRef CFDictionaryCreate(CFAllocatorRef, const void **, - const void **, CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks *); -CFMutableDictionaryRef CFDictionaryCreateMutable( - CFAllocatorRef, - CFIndex, - const CFDictionaryKeyCallBacks *, - const CFDictionaryValueCallBacks * -); -void CFDictionarySetValue(CFMutableDictionaryRef, const void *, const void *); -CFIndex CFArrayGetCount(CFArrayRef); -const void *CFArrayGetValueAtIndex(CFArrayRef, CFIndex); -CFIndex CFDataGetLength(CFDataRef); -void CFDataGetBytes(CFDataRef, CFRange, UInt8 *); -CFRange CFRangeMake(CFIndex, CFIndex); -void CFShow(CFTypeRef); -Boolean CFBooleanGetValue(CFBooleanRef); -CFNumberRef CFNumberCreate(CFAllocatorRef, CFNumberType, const void *); -void CFRelease(CFTypeRef); -CFTypeRef CFRetain(CFTypeRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py deleted file mode 100644 index 713bc56..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py +++ /dev/null @@ -1,110 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCAlgorithmAES128 = 0, - kCCAlgorithmDES, - kCCAlgorithm3DES, - kCCAlgorithmCAST, - kCCAlgorithmRC4, - kCCAlgorithmRC2, - kCCAlgorithmBlowfish -}; -typedef uint32_t CCAlgorithm; -enum { - kCCSuccess = 0, - kCCParamError = -4300, - kCCBufferTooSmall = -4301, - kCCMemoryFailure = -4302, - kCCAlignmentError = -4303, - kCCDecodeError = -4304, - kCCUnimplemented = -4305 -}; -typedef int32_t CCCryptorStatus; -typedef uint32_t CCOptions; -enum { - kCCEncrypt = 0, - kCCDecrypt, -}; -typedef uint32_t CCOperation; -typedef ... *CCCryptorRef; - -enum { - kCCModeOptionCTR_LE = 0x0001, - kCCModeOptionCTR_BE = 0x0002 -}; - -typedef uint32_t CCModeOptions; - -enum { - kCCModeECB = 1, - kCCModeCBC = 2, - kCCModeCFB = 3, - kCCModeCTR = 4, - kCCModeF8 = 5, - kCCModeLRW = 6, - kCCModeOFB = 7, - kCCModeXTS = 8, - kCCModeRC4 = 9, - kCCModeCFB8 = 10, - kCCModeGCM = 11 -}; -typedef uint32_t CCMode; -enum { - ccNoPadding = 0, - ccPKCS7Padding = 1, -}; -typedef uint32_t CCPadding; -""" - -FUNCTIONS = """ -CCCryptorStatus CCCryptorCreateWithMode(CCOperation, CCMode, CCAlgorithm, - CCPadding, const void *, const void *, - size_t, const void *, size_t, int, - CCModeOptions, CCCryptorRef *); -CCCryptorStatus CCCryptorCreate(CCOperation, CCAlgorithm, CCOptions, - const void *, size_t, const void *, - CCCryptorRef *); -CCCryptorStatus CCCryptorUpdate(CCCryptorRef, const void *, size_t, void *, - size_t, size_t *); -CCCryptorStatus CCCryptorFinal(CCCryptorRef, void *, size_t, size_t *); -CCCryptorStatus CCCryptorRelease(CCCryptorRef); - -CCCryptorStatus CCCryptorGCMAddIV(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMAddAAD(CCCryptorRef, const void *, size_t); -CCCryptorStatus CCCryptorGCMEncrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMDecrypt(CCCryptorRef, const void *, size_t, - void *); -CCCryptorStatus CCCryptorGCMFinal(CCCryptorRef, const void *, size_t *); -CCCryptorStatus CCCryptorGCMReset(CCCryptorRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -/* Not defined in the public header */ -enum { - kCCModeGCM = 11 -}; -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py deleted file mode 100644 index c59200c..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py +++ /dev/null @@ -1,69 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef uint32_t CC_LONG; -typedef uint64_t CC_LONG64; -typedef struct CC_MD5state_st { - ...; -} CC_MD5_CTX; -typedef struct CC_SHA1state_st { - ...; -} CC_SHA1_CTX; -typedef struct CC_SHA256state_st { - ...; -} CC_SHA256_CTX; -typedef struct CC_SHA512state_st { - ...; -} CC_SHA512_CTX; -""" - -FUNCTIONS = """ -int CC_MD5_Init(CC_MD5_CTX *); -int CC_MD5_Update(CC_MD5_CTX *, const void *, CC_LONG); -int CC_MD5_Final(unsigned char *, CC_MD5_CTX *); - -int CC_SHA1_Init(CC_SHA1_CTX *); -int CC_SHA1_Update(CC_SHA1_CTX *, const void *, CC_LONG); -int CC_SHA1_Final(unsigned char *, CC_SHA1_CTX *); - -int CC_SHA224_Init(CC_SHA256_CTX *); -int CC_SHA224_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA224_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA256_Init(CC_SHA256_CTX *); -int CC_SHA256_Update(CC_SHA256_CTX *, const void *, CC_LONG); -int CC_SHA256_Final(unsigned char *, CC_SHA256_CTX *); - -int CC_SHA384_Init(CC_SHA512_CTX *); -int CC_SHA384_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA384_Final(unsigned char *, CC_SHA512_CTX *); - -int CC_SHA512_Init(CC_SHA512_CTX *); -int CC_SHA512_Update(CC_SHA512_CTX *, const void *, CC_LONG); -int CC_SHA512_Final(unsigned char *, CC_SHA512_CTX *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py deleted file mode 100644 index 4f54b62..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py +++ /dev/null @@ -1,48 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { - ...; -} CCHmacContext; -enum { - kCCHmacAlgSHA1, - kCCHmacAlgMD5, - kCCHmacAlgSHA256, - kCCHmacAlgSHA384, - kCCHmacAlgSHA512, - kCCHmacAlgSHA224 -}; -typedef uint32_t CCHmacAlgorithm; -""" - -FUNCTIONS = """ -void CCHmacInit(CCHmacContext *, CCHmacAlgorithm, const void *, size_t); -void CCHmacUpdate(CCHmacContext *, const void *, size_t); -void CCHmacFinal(CCHmacContext *, void *); - -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py deleted file mode 100644 index e8cc03e..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py +++ /dev/null @@ -1,50 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
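The digest and HMAC declarations above are what the CommonCrypto cffi binding compiles; on OS X 10.8+ the resulting backend can serve the same hash/HMAC primitives as the OpenSSL one. A hedged sketch using only import paths and the is_available() check shown in the deleted modules (the all-zero key is a placeholder, not real key material):

from cryptography.hazmat.bindings.commoncrypto.binding import Binding as CommonCryptoBinding

if CommonCryptoBinding.is_available():   # darwin and mac_ver >= 10.8, per the deleted binding.py
    from cryptography.hazmat.backends.commoncrypto import backend as cc_backend
    from cryptography.hazmat.primitives import hashes, hmac

    h = hmac.HMAC(b"\x00" * 32, hashes.SHA256(), backend=cc_backend)
    h.update(b"message")
    print(h.finalize())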
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -enum { - kCCPBKDF2 = 2, -}; -typedef uint32_t CCPBKDFAlgorithm; -enum { - kCCPRFHmacAlgSHA1 = 1, - kCCPRFHmacAlgSHA224 = 2, - kCCPRFHmacAlgSHA256 = 3, - kCCPRFHmacAlgSHA384 = 4, - kCCPRFHmacAlgSHA512 = 5, -}; -typedef uint32_t CCPseudoRandomAlgorithm; -typedef unsigned int uint; -""" - -FUNCTIONS = """ -int CCKeyDerivationPBKDF(CCPBKDFAlgorithm, const char *, size_t, - const uint8_t *, size_t, CCPseudoRandomAlgorithm, - uint, uint8_t *, size_t); -uint CCCalibratePBKDF(CCPBKDFAlgorithm, size_t, size_t, - CCPseudoRandomAlgorithm, size_t, uint32_t); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py deleted file mode 100644 index add62c7..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py +++ /dev/null @@ -1,95 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
*SecAccessRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -typedef uint32_t SecExternalItemType; -enum { - kSecItemTypeUnknown, - kSecItemTypePrivateKey, - kSecItemTypePublicKey, - kSecItemTypeSessionKey, - kSecItemTypeCertificate, - kSecItemTypeAggregate -}; - - -typedef uint32_t SecExternalFormat; -enum { - kSecFormatUnknown = 0, - kSecFormatOpenSSL, - kSecFormatSSH, - kSecFormatBSAFE, - kSecFormatRawKey, - kSecFormatWrappedPKCS8, - kSecFormatWrappedOpenSSL, - kSecFormatWrappedSSH, - kSecFormatWrappedLSH, - kSecFormatX509Cert, - kSecFormatPEMSequence, - kSecFormatPKCS7, - kSecFormatPKCS12, - kSecFormatNetscapeCertSequence, - kSecFormatSSHv2 -}; - -typedef uint32_t SecItemImportExportFlags; -enum { - kSecKeyImportOnlyOne = 0x00000001, - kSecKeySecurePassphrase = 0x00000002, - kSecKeyNoAccessControl = 0x00000004 -}; -typedef uint32_t SecKeyImportExportFlags; - -typedef struct { - /* for import and export */ - uint32_t version; - SecKeyImportExportFlags flags; - CFTypeRef passphrase; - CFStringRef alertTitle; - CFStringRef alertPrompt; - - /* for import only */ - SecAccessRef accessRef; - CFArrayRef keyUsage; - - CFArrayRef keyAttributes; -} SecItemImportExportKeyParameters; -""" - -FUNCTIONS = """ -OSStatus SecItemImport(CFDataRef, CFStringRef, SecExternalFormat *, - SecExternalItemType *, SecItemImportExportFlags, - const SecItemImportExportKeyParameters *, - SecKeychainRef, CFArrayRef *); -OSStatus SecPKCS12Import(CFDataRef, CFDictionaryRef, CFArrayRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py deleted file mode 100644 index 5e4b6da..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecKeyRef; -""" - -FUNCTIONS = """ -OSStatus SecKeyGeneratePair(CFDictionaryRef, SecKeyRef *, SecKeyRef *); -size_t SecKeyGetBlockSize(SecKeyRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py deleted file mode 100644 index c045c34..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... *SecKeychainRef; -""" - -FUNCTIONS = """ -OSStatus SecKeychainCreate(const char *, UInt32, const void *, Boolean, - SecAccessRef, SecKeychainRef *); -OSStatus SecKeychainDelete(SecKeychainRef); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py deleted file mode 100644 index d6dbc5f..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py +++ /dev/null @@ -1,79 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -#include -#include -""" - -TYPES = """ -typedef ... 
*SecTransformRef; - -CFStringRef kSecImportExportPassphrase; -CFStringRef kSecImportExportKeychain; -CFStringRef kSecImportExportAccess; - -CFStringRef kSecEncryptionMode; -CFStringRef kSecEncryptKey; -CFStringRef kSecIVKey; -CFStringRef kSecModeCBCKey; -CFStringRef kSecModeCFBKey; -CFStringRef kSecModeECBKey; -CFStringRef kSecModeNoneKey; -CFStringRef kSecModeOFBKey; -CFStringRef kSecOAEPEncodingParametersAttributeName; -CFStringRef kSecPaddingKey; -CFStringRef kSecPaddingNoneKey; -CFStringRef kSecPaddingOAEPKey; -CFStringRef kSecPaddingPKCS1Key; -CFStringRef kSecPaddingPKCS5Key; -CFStringRef kSecPaddingPKCS7Key; - -const CFStringRef kSecTransformInputAttributeName; -const CFStringRef kSecTransformOutputAttributeName; -const CFStringRef kSecTransformDebugAttributeName; -const CFStringRef kSecTransformTransformName; -const CFStringRef kSecTransformAbortAttributeName; - -CFStringRef kSecInputIsAttributeName; -CFStringRef kSecInputIsPlainText; -CFStringRef kSecInputIsDigest; -CFStringRef kSecInputIsRaw; - -const CFStringRef kSecDigestTypeAttribute; -const CFStringRef kSecDigestLengthAttribute; -const CFStringRef kSecDigestMD5; -const CFStringRef kSecDigestSHA1; -const CFStringRef kSecDigestSHA2; -""" - -FUNCTIONS = """ -Boolean SecTransformSetAttribute(SecTransformRef, CFStringRef, CFTypeRef, - CFErrorRef *); -SecTransformRef SecDecryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecEncryptTransformCreate(SecKeyRef, CFErrorRef *); -SecTransformRef SecVerifyTransformCreate(SecKeyRef, CFDataRef, CFErrorRef *); -SecTransformRef SecSignTransformCreate(SecKeyRef, CFErrorRef *) ; -CFTypeRef SecTransformExecute(SecTransformRef, CFErrorRef *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/aes.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/aes.py deleted file mode 100644 index e407152..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/aes.py +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
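# ----------------------------------------------------------------------------
# A small sketch (illustrative names only) of the two opaque cdef styles used
# above.  "typedef ... *SecTransformRef;" names an opaque *pointer*, so an
# out-parameter slot for it can be allocated; "typedef ... BUF_MEM;" (used in
# the bio bindings further down) leaves the struct itself opaque, so only
# pointers to it can ever be handled.
import cffi

ffi = cffi.FFI()
ffi.cdef("""
    typedef ... *OpaqueHandle;   /* opaque pointer type */
    typedef ... OpaqueStruct;    /* opaque struct type  */
""")
handle_slot = ffi.new("OpaqueHandle *")  # fine: storage for one pointer
# ffi.new("OpaqueStruct *") would raise: cffi does not know the struct's size
# ----------------------------------------------------------------------------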
-# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -static const int Cryptography_HAS_AES_WRAP; - -struct aes_key_st { - ...; -}; -typedef struct aes_key_st AES_KEY; -""" - -FUNCTIONS = """ -int AES_set_encrypt_key(const unsigned char *, const int, AES_KEY *); -int AES_set_decrypt_key(const unsigned char *, const int, AES_KEY *); -""" - -MACROS = """ -/* these can be moved back to FUNCTIONS once we drop support for 0.9.8h. - This should be when we drop RHEL/CentOS 5, which is on 0.9.8e. */ -int AES_wrap_key(AES_KEY *, const unsigned char *, unsigned char *, - const unsigned char *, unsigned int); -int AES_unwrap_key(AES_KEY *, const unsigned char *, unsigned char *, - const unsigned char *, unsigned int); - -/* The ctr128_encrypt function is only useful in 0.9.8. You should use EVP for - this in 1.0.0+. It is defined in macros because the function signature - changed after 0.9.8 */ -void AES_ctr128_encrypt(const unsigned char *, unsigned char *, - const size_t, const AES_KEY *, - unsigned char[], unsigned char[], unsigned int *); - -""" - -CUSTOMIZATIONS = """ -/* OpenSSL 0.9.8h+ */ -#if OPENSSL_VERSION_NUMBER >= 0x0090808fL -static const long Cryptography_HAS_AES_WRAP = 1; -#else -static const long Cryptography_HAS_AES_WRAP = 0; -int (*AES_wrap_key)(AES_KEY *, const unsigned char *, unsigned char *, - const unsigned char *, unsigned int) = NULL; -int (*AES_unwrap_key)(AES_KEY *, const unsigned char *, unsigned char *, - const unsigned char *, unsigned int) = NULL; -#endif - -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_AES_WRAP": [ - "AES_wrap_key", - "AES_unwrap_key", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/asn1.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/asn1.py deleted file mode 100644 index 2edfd2d..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/asn1.py +++ /dev/null @@ -1,152 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -/* - * TODO: This typedef is wrong. - * - * This is due to limitations of cffi. - * See https://bitbucket.org/cffi/cffi/issue/69 - * - * For another possible work-around (not used here because it involves more - * complicated use of the cffi API which falls outside the general pattern used - * by this package), see - * http://paste.pound-python.org/show/iJcTUMkKeBeS6yXpZWUU/ - * - * The work-around used here is to just be sure to declare a type that is at - * least as large as the real type. Maciej explains: - * - * I think you want to declare your value too large (e.g. long) - * that way you'll never pass garbage - */ -typedef intptr_t time_t; - -typedef int ASN1_BOOLEAN; -typedef ... 
ASN1_INTEGER; - -struct asn1_string_st { - int length; - int type; - unsigned char *data; - long flags; -}; - -typedef struct asn1_string_st ASN1_OCTET_STRING; -typedef struct asn1_string_st ASN1_IA5STRING; -typedef ... ASN1_OBJECT; -typedef ... ASN1_STRING; -typedef ... ASN1_TYPE; -typedef ... ASN1_GENERALIZEDTIME; -typedef ... ASN1_ENUMERATED; -typedef ... ASN1_ITEM; -typedef ... ASN1_VALUE; - -typedef struct { - ...; -} ASN1_TIME; -typedef ... ASN1_ITEM_EXP; - -typedef ... ASN1_UTCTIME; - -static const int V_ASN1_GENERALIZEDTIME; - -static const int MBSTRING_UTF8; -""" - -FUNCTIONS = """ -ASN1_OBJECT *ASN1_OBJECT_new(void); -void ASN1_OBJECT_free(ASN1_OBJECT *); - -/* ASN1 OBJECT IDENTIFIER */ -ASN1_OBJECT *d2i_ASN1_OBJECT(ASN1_OBJECT **, const unsigned char **, long); -int i2d_ASN1_OBJECT(ASN1_OBJECT *, unsigned char **); - -/* ASN1 STRING */ -ASN1_STRING *ASN1_STRING_new(void); -ASN1_STRING *ASN1_STRING_type_new(int); -void ASN1_STRING_free(ASN1_STRING *); -unsigned char *ASN1_STRING_data(ASN1_STRING *); -int ASN1_STRING_set(ASN1_STRING *, const void *, int); -int ASN1_STRING_type(ASN1_STRING *); -int ASN1_STRING_to_UTF8(unsigned char **, ASN1_STRING *); - -/* ASN1 OCTET STRING */ -ASN1_OCTET_STRING *ASN1_OCTET_STRING_new(void); -void ASN1_OCTET_STRING_free(ASN1_OCTET_STRING *); -int ASN1_OCTET_STRING_set(ASN1_OCTET_STRING *, const unsigned char *, int); - -/* ASN1 INTEGER */ -ASN1_INTEGER *ASN1_INTEGER_new(void); -void ASN1_INTEGER_free(ASN1_INTEGER *); -int ASN1_INTEGER_set(ASN1_INTEGER *, long); -int i2a_ASN1_INTEGER(BIO *, ASN1_INTEGER *); - -/* ASN1 TIME */ -ASN1_TIME *ASN1_TIME_new(void); -void ASN1_TIME_free(ASN1_TIME *); -ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, - ASN1_GENERALIZEDTIME **); - -/* ASN1 UTCTIME */ -int ASN1_UTCTIME_cmp_time_t(const ASN1_UTCTIME *, time_t); - -/* ASN1 GENERALIZEDTIME */ -int ASN1_GENERALIZEDTIME_set_string(ASN1_GENERALIZEDTIME *, const char *); -void ASN1_GENERALIZEDTIME_free(ASN1_GENERALIZEDTIME *); - -/* ASN1 ENUMERATED */ -ASN1_ENUMERATED *ASN1_ENUMERATED_new(void); -void ASN1_ENUMERATED_free(ASN1_ENUMERATED *); -int ASN1_ENUMERATED_set(ASN1_ENUMERATED *, long); - -ASN1_VALUE *ASN1_item_d2i(ASN1_VALUE **, const unsigned char **, long, - const ASN1_ITEM *); -""" - -MACROS = """ -ASN1_TIME *M_ASN1_TIME_dup(void *); -const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); - -/* These aren't macros these arguments are all const X on openssl > 1.0.x */ - -int ASN1_STRING_length(ASN1_STRING *); -ASN1_STRING *ASN1_STRING_dup(ASN1_STRING *); -int ASN1_STRING_cmp(ASN1_STRING *, ASN1_STRING *); - -ASN1_OCTET_STRING *ASN1_OCTET_STRING_dup(ASN1_OCTET_STRING *); -int ASN1_OCTET_STRING_cmp(ASN1_OCTET_STRING *, ASN1_OCTET_STRING *); - -ASN1_INTEGER *ASN1_INTEGER_dup(ASN1_INTEGER *); -int ASN1_INTEGER_cmp(ASN1_INTEGER *, ASN1_INTEGER *); -long ASN1_INTEGER_get(ASN1_INTEGER *); - -BIGNUM *ASN1_INTEGER_to_BN(ASN1_INTEGER *, BIGNUM *); -ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *); - -/* These isn't a macro the arg is const on openssl 1.0.2+ */ -int ASN1_GENERALIZEDTIME_check(ASN1_GENERALIZEDTIME *); - -/* Not a macro, const on openssl 1.0 */ -int ASN1_STRING_set_default_mask_asc(char *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bignum.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bignum.py deleted file mode 100644 index 1d944ee..0000000 --- 
a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bignum.py +++ /dev/null @@ -1,114 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... BN_CTX; -typedef ... BIGNUM; -/* - * TODO: This typedef is wrong. - * - * This is due to limitations of cffi. - * See https://bitbucket.org/cffi/cffi/issue/69 - * - * For another possible work-around (not used here because it involves more - * complicated use of the cffi API which falls outside the general pattern used - * by this package), see - * http://paste.pound-python.org/show/iJcTUMkKeBeS6yXpZWUU/ - * - * The work-around used here is to just be sure to declare a type that is at - * least as large as the real type. Maciej explains: - * - * I think you want to declare your value too large (e.g. long) - * that way you'll never pass garbage - */ -typedef uintptr_t BN_ULONG; -""" - -FUNCTIONS = """ -BIGNUM *BN_new(void); -void BN_free(BIGNUM *); - -BN_CTX *BN_CTX_new(void); -void BN_CTX_free(BN_CTX *); - -void BN_CTX_start(BN_CTX *); -BIGNUM *BN_CTX_get(BN_CTX *); -void BN_CTX_end(BN_CTX *); - -BIGNUM *BN_copy(BIGNUM *, const BIGNUM *); -BIGNUM *BN_dup(const BIGNUM *); - -int BN_set_word(BIGNUM *, BN_ULONG); -BN_ULONG BN_get_word(const BIGNUM *); - -const BIGNUM *BN_value_one(void); - -char *BN_bn2hex(const BIGNUM *); -int BN_hex2bn(BIGNUM **, const char *); -int BN_dec2bn(BIGNUM **, const char *); - -int BN_bn2bin(const BIGNUM *, unsigned char *); -BIGNUM *BN_bin2bn(const unsigned char *, int, BIGNUM *); - -int BN_num_bits(const BIGNUM *); - -int BN_cmp(const BIGNUM *, const BIGNUM *); -int BN_add(BIGNUM *, const BIGNUM *, const BIGNUM *); -int BN_sub(BIGNUM *, const BIGNUM *, const BIGNUM *); -int BN_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_sqr(BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_div(BIGNUM *, BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_nnmod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_mod_add(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, - BN_CTX *); -int BN_mod_sub(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, - BN_CTX *); -int BN_mod_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, - BN_CTX *); -int BN_mod_sqr(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int BN_mod_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, - BN_CTX *); -int BN_gcd(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -BIGNUM *BN_mod_inverse(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -int BN_set_bit(BIGNUM *, int); -int BN_clear_bit(BIGNUM *, int); - -int BN_is_bit_set(const BIGNUM *, int); - -int BN_mask_bits(BIGNUM *, int); -""" - -MACROS = """ -int BN_zero(BIGNUM *); -int BN_one(BIGNUM *); -int BN_mod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -int BN_lshift(BIGNUM *, const BIGNUM *, int); -int BN_lshift1(BIGNUM 
*, BIGNUM *); - -int BN_rshift(BIGNUM *, BIGNUM *, int); -int BN_rshift1(BIGNUM *, BIGNUM *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py deleted file mode 100644 index 4cd1b89..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/binding.py +++ /dev/null @@ -1,170 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import os -import sys -import threading - -from cryptography.hazmat.bindings.utils import build_ffi - - -_OSX_PRE_INCLUDE = """ -#ifdef __APPLE__ -#include -#define __ORIG_DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER \ - DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER -#undef DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER -#define DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER -#endif -""" - -_OSX_POST_INCLUDE = """ -#ifdef __APPLE__ -#undef DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER -#define DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER \ - __ORIG_DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER -#endif -""" - - -class Binding(object): - """ - OpenSSL API wrapper. - """ - _module_prefix = "cryptography.hazmat.bindings.openssl." - _modules = [ - "aes", - "asn1", - "bignum", - "bio", - "cmac", - "cms", - "conf", - "crypto", - "dh", - "dsa", - "ec", - "ecdh", - "ecdsa", - "engine", - "err", - "evp", - "hmac", - "nid", - "objects", - "opensslv", - "osrandom_engine", - "pem", - "pkcs7", - "pkcs12", - "rand", - "rsa", - "ssl", - "x509", - "x509name", - "x509v3", - "x509_vfy" - ] - - _locks = None - _lock_cb_handle = None - _lock_init_lock = threading.Lock() - - ffi = None - lib = None - - def __init__(self): - self._ensure_ffi_initialized() - - @classmethod - def _ensure_ffi_initialized(cls): - if cls.ffi is not None and cls.lib is not None: - return - - # OpenSSL goes by a different library name on different operating - # systems. - if sys.platform != "win32": - libraries = ["crypto", "ssl"] - else: # pragma: no cover - link_type = os.environ.get("PYCA_WINDOWS_LINK_TYPE", "static") - libraries = _get_windows_libraries(link_type) - - cls.ffi, cls.lib = build_ffi( - module_prefix=cls._module_prefix, - modules=cls._modules, - pre_include=_OSX_PRE_INCLUDE, - post_include=_OSX_POST_INCLUDE, - libraries=libraries, - ) - res = cls.lib.Cryptography_add_osrandom_engine() - assert res != 0 - - @classmethod - def is_available(cls): - # For now, OpenSSL is considered our "default" binding, so we treat it - # as always available. - return True - - @classmethod - def init_static_locks(cls): - with cls._lock_init_lock: - cls._ensure_ffi_initialized() - - if not cls._lock_cb_handle: - cls._lock_cb_handle = cls.ffi.callback( - "void(int, int, const char *, int)", - cls._lock_cb - ) - - # Use Python's implementation if available, importing _ssl triggers - # the setup for this. 
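# ----------------------------------------------------------------------------
# A rough sketch (an assumption, not the library's actual helper) of what
# build_ffi() does with the per-module INCLUDES / TYPES / FUNCTIONS / MACROS /
# CUSTOMIZATIONS strings listed by the Binding class above: import each module
# by name and concatenate the pieces into one cdef string and one C source
# string before handing them to cffi.
import importlib

def collect_binding_sources(module_prefix, modules):
    """Gather the cdef text and the C verify() source from binding modules."""
    cdef_parts, source_parts = [], []
    for name in modules:
        mod = importlib.import_module(module_prefix + name)
        cdef_parts += [mod.TYPES, mod.FUNCTIONS, mod.MACROS]
        source_parts += [mod.INCLUDES, mod.CUSTOMIZATIONS]
    return "\n".join(cdef_parts), "\n".join(source_parts)

# e.g. collect_binding_sources("cryptography.hazmat.bindings.openssl.",
#                              ["aes", "bio", "evp"])
# ----------------------------------------------------------------------------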
- __import__("_ssl") - - if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL: - return - - # If nothing else has setup a locking callback already, we set up - # our own - num_locks = cls.lib.CRYPTO_num_locks() - cls._locks = [threading.Lock() for n in range(num_locks)] - - cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle) - - @classmethod - def _lock_cb(cls, mode, n, file, line): - lock = cls._locks[n] - - if mode & cls.lib.CRYPTO_LOCK: - lock.acquire() - elif mode & cls.lib.CRYPTO_UNLOCK: - lock.release() - else: - raise RuntimeError( - "Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format( - mode, n, file, line - ) - ) - - -def _get_windows_libraries(link_type): - if link_type == "dynamic": - return ["libeay32", "ssleay32", "advapi32"] - elif link_type == "static" or link_type == "": - return ["libeay32mt", "ssleay32mt", "advapi32", - "crypt32", "gdi32", "user32", "ws2_32"] - else: - raise ValueError( - "PYCA_WINDOWS_LINK_TYPE must be 'static' or 'dynamic'" - ) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bio.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bio.py deleted file mode 100644 index cfe6034..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/bio.py +++ /dev/null @@ -1,181 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct bio_st BIO; -typedef void bio_info_cb(BIO *, int, const char *, int, long, long); -struct bio_method_st { - int type; - const char *name; - int (*bwrite)(BIO *, const char *, int); - int (*bread)(BIO *, char *, int); - int (*bputs)(BIO *, const char *); - int (*bgets)(BIO *, char*, int); - long (*ctrl)(BIO *, int, long, void *); - int (*create)(BIO *); - int (*destroy)(BIO *); - long (*callback_ctrl)(BIO *, int, bio_info_cb *); - ...; -}; -typedef struct bio_method_st BIO_METHOD; -struct bio_st { - BIO_METHOD *method; - long (*callback)(struct bio_st*, int, const char*, int, long, long); - char *cb_arg; - int init; - int shutdown; - int flags; - int retry_reason; - int num; - void *ptr; - struct bio_st *next_bio; - struct bio_st *prev_bio; - int references; - unsigned long num_read; - unsigned long num_write; - ...; -}; -typedef ... 
BUF_MEM; - -static const int BIO_TYPE_MEM; -static const int BIO_TYPE_FILE; -static const int BIO_TYPE_FD; -static const int BIO_TYPE_SOCKET; -static const int BIO_TYPE_CONNECT; -static const int BIO_TYPE_ACCEPT; -static const int BIO_TYPE_NULL; -static const int BIO_CLOSE; -static const int BIO_NOCLOSE; -static const int BIO_TYPE_SOURCE_SINK; -static const int BIO_CTRL_RESET; -static const int BIO_CTRL_EOF; -static const int BIO_CTRL_SET; -static const int BIO_CTRL_SET_CLOSE; -static const int BIO_CTRL_FLUSH; -static const int BIO_CTRL_DUP; -static const int BIO_CTRL_GET_CLOSE; -static const int BIO_CTRL_INFO; -static const int BIO_CTRL_GET; -static const int BIO_CTRL_PENDING; -static const int BIO_CTRL_WPENDING; -static const int BIO_C_FILE_SEEK; -static const int BIO_C_FILE_TELL; -static const int BIO_TYPE_NONE; -static const int BIO_TYPE_PROXY_CLIENT; -static const int BIO_TYPE_PROXY_SERVER; -static const int BIO_TYPE_NBIO_TEST; -static const int BIO_TYPE_BER; -static const int BIO_TYPE_BIO; -static const int BIO_TYPE_DESCRIPTOR; -static const int BIO_FLAGS_READ; -static const int BIO_FLAGS_WRITE; -static const int BIO_FLAGS_IO_SPECIAL; -static const int BIO_FLAGS_RWS; -static const int BIO_FLAGS_SHOULD_RETRY; -static const int BIO_TYPE_NULL_FILTER; -static const int BIO_TYPE_SSL; -static const int BIO_TYPE_MD; -static const int BIO_TYPE_BUFFER; -static const int BIO_TYPE_CIPHER; -static const int BIO_TYPE_BASE64; -static const int BIO_TYPE_FILTER; -""" - -FUNCTIONS = """ -BIO* BIO_new(BIO_METHOD *); -int BIO_set(BIO *, BIO_METHOD *); -int BIO_free(BIO *); -void BIO_vfree(BIO *); -void BIO_free_all(BIO *); -BIO *BIO_push(BIO *, BIO *); -BIO *BIO_pop(BIO *); -BIO *BIO_next(BIO *); -BIO *BIO_find_type(BIO *, int); -BIO_METHOD *BIO_s_mem(void); -BIO *BIO_new_mem_buf(void *, int); -BIO_METHOD *BIO_s_file(void); -BIO *BIO_new_file(const char *, const char *); -BIO *BIO_new_fp(FILE *, int); -BIO_METHOD *BIO_s_fd(void); -BIO *BIO_new_fd(int, int); -BIO_METHOD *BIO_s_socket(void); -BIO *BIO_new_socket(int, int); -BIO_METHOD *BIO_s_null(void); -long BIO_ctrl(BIO *, int, long, void *); -long BIO_callback_ctrl( - BIO *, - int, - void (*)(struct bio_st *, int, const char *, int, long, long) -); -char *BIO_ptr_ctrl(BIO *, int, long); -long BIO_int_ctrl(BIO *, int, long, int); -size_t BIO_ctrl_pending(BIO *); -size_t BIO_ctrl_wpending(BIO *); -int BIO_read(BIO *, void *, int); -int BIO_gets(BIO *, char *, int); -int BIO_write(BIO *, const void *, int); -int BIO_puts(BIO *, const char *); -BIO_METHOD *BIO_f_null(void); -BIO_METHOD *BIO_f_buffer(void); -""" - -MACROS = """ -long BIO_set_fd(BIO *, long, int); -long BIO_get_fd(BIO *, char *); -long BIO_set_mem_eof_return(BIO *, int); -long BIO_get_mem_data(BIO *, char **); -long BIO_set_mem_buf(BIO *, BUF_MEM *, int); -long BIO_get_mem_ptr(BIO *, BUF_MEM **); -long BIO_set_fp(BIO *, FILE *, int); -long BIO_get_fp(BIO *, FILE **); -long BIO_read_filename(BIO *, char *); -long BIO_write_filename(BIO *, char *); -long BIO_append_filename(BIO *, char *); -long BIO_rw_filename(BIO *, char *); -int BIO_should_read(BIO *); -int BIO_should_write(BIO *); -int BIO_should_io_special(BIO *); -int BIO_retry_type(BIO *); -int BIO_should_retry(BIO *); -int BIO_reset(BIO *); -int BIO_seek(BIO *, int); -int BIO_tell(BIO *); -int BIO_flush(BIO *); -int BIO_eof(BIO *); -int BIO_set_close(BIO *,long); -int BIO_get_close(BIO *); -int BIO_pending(BIO *); -int BIO_wpending(BIO *); -int BIO_get_info_callback(BIO *, bio_info_cb **); -int BIO_set_info_callback(BIO *, bio_info_cb 
*); -long BIO_get_buffer_num_lines(BIO *); -long BIO_set_read_buffer_size(BIO *, long); -long BIO_set_write_buffer_size(BIO *, long); -long BIO_set_buffer_size(BIO *, long); -long BIO_set_buffer_read_data(BIO *, void *, long); - -/* The following was a macro in 0.9.8e. Once we drop support for RHEL/CentOS 5 - we should move this back to FUNCTIONS. */ -int BIO_method_type(const BIO *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cmac.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cmac.py deleted file mode 100644 index c8bcc82..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cmac.py +++ /dev/null @@ -1,65 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#if OPENSSL_VERSION_NUMBER >= 0x10001000L -#include -#endif -""" - -TYPES = """ -static const int Cryptography_HAS_CMAC; -typedef ... CMAC_CTX; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -CMAC_CTX *CMAC_CTX_new(void); -int CMAC_Init(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, ENGINE *); -int CMAC_Update(CMAC_CTX *, const void *, size_t); -int CMAC_Final(CMAC_CTX *, unsigned char *, size_t *); -int CMAC_CTX_copy(CMAC_CTX *, const CMAC_CTX *); -void CMAC_CTX_free(CMAC_CTX *); -""" - -CUSTOMIZATIONS = """ -#if OPENSSL_VERSION_NUMBER < 0x10001000L - -static const long Cryptography_HAS_CMAC = 0; -typedef void CMAC_CTX; -CMAC_CTX *(*CMAC_CTX_new)(void) = NULL; -int (*CMAC_Init)(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, - ENGINE *) = NULL; -int (*CMAC_Update)(CMAC_CTX *, const void *, size_t) = NULL; -int (*CMAC_Final)(CMAC_CTX *, unsigned char *, size_t *) = NULL; -int (*CMAC_CTX_copy)(CMAC_CTX *, const CMAC_CTX *) = NULL; -void (*CMAC_CTX_free)(CMAC_CTX *) = NULL; -#else -static const long Cryptography_HAS_CMAC = 1; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_CMAC": [ - "CMAC_CTX_new", - "CMAC_Init", - "CMAC_Update", - "CMAC_Final", - "CMAC_CTX_copy", - "CMAC_CTX_free", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cms.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cms.py deleted file mode 100644 index cbf4b28..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/cms.py +++ /dev/null @@ -1,100 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
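# ----------------------------------------------------------------------------
# A minimal sketch of how the Cryptography_HAS_CMAC flag above is meant to be
# consumed: feature-test it before touching any guarded symbol, since on
# OpenSSL < 1.0.1 those names are NULL stubs.  It assumes the removed OpenSSL
# Binding wrapper plus EVP_get_cipherbyname / OpenSSL_add_all_algorithms from
# the evp bindings.
from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()
ffi, lib = binding.ffi, binding.lib

if lib.Cryptography_HAS_CMAC:
    lib.OpenSSL_add_all_algorithms()  # register cipher names for lookup
    cipher = lib.EVP_get_cipherbyname(b"aes-128-cbc")
    assert cipher != ffi.NULL
    ctx = lib.CMAC_CTX_new()
    try:
        lib.CMAC_Init(ctx, b"\x00" * 16, 16, cipher, ffi.NULL)
        lib.CMAC_Update(ctx, b"message", len(b"message"))
        mac = ffi.new("unsigned char[16]")
        mac_len = ffi.new("size_t *")
        lib.CMAC_Final(ctx, mac, mac_len)
    finally:
        lib.CMAC_CTX_free(ctx)
# ----------------------------------------------------------------------------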
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#if !defined(OPENSSL_NO_CMS) && OPENSSL_VERSION_NUMBER >= 0x0090808fL -/* The next define should really be in the OpenSSL header, but it is missing. - Failing to include this on Windows causes compilation failures. */ -#if defined(OPENSSL_SYS_WINDOWS) -#include -#endif -#include -#endif -""" - -TYPES = """ -static const long Cryptography_HAS_CMS; - -typedef ... CMS_ContentInfo; -typedef ... CMS_SignerInfo; -typedef ... CMS_CertificateChoices; -typedef ... CMS_RevocationInfoChoice; -typedef ... CMS_RecipientInfo; -typedef ... CMS_ReceiptRequest; -typedef ... CMS_Receipt; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -BIO *BIO_new_CMS(BIO *, CMS_ContentInfo *); -int i2d_CMS_bio_stream(BIO *, CMS_ContentInfo *, BIO *, int); -int PEM_write_bio_CMS_stream(BIO *, CMS_ContentInfo *, BIO *, int); -int CMS_final(CMS_ContentInfo *, BIO *, BIO *, unsigned int); -CMS_ContentInfo *CMS_sign(X509 *, EVP_PKEY *, Cryptography_STACK_OF_X509 *, - BIO *, unsigned int); -int CMS_verify(CMS_ContentInfo *, Cryptography_STACK_OF_X509 *, X509_STORE *, - BIO *, BIO *, unsigned int); -CMS_ContentInfo *CMS_encrypt(Cryptography_STACK_OF_X509 *, BIO *, - const EVP_CIPHER *, unsigned int); -int CMS_decrypt(CMS_ContentInfo *, EVP_PKEY *, X509 *, BIO *, BIO *, - unsigned int); -CMS_SignerInfo *CMS_add1_signer(CMS_ContentInfo *, X509 *, EVP_PKEY *, - const EVP_MD *, unsigned int); -""" - -CUSTOMIZATIONS = """ -#if !defined(OPENSSL_NO_CMS) && OPENSSL_VERSION_NUMBER >= 0x0090808fL -static const long Cryptography_HAS_CMS = 1; -#else -static const long Cryptography_HAS_CMS = 0; -typedef void CMS_ContentInfo; -typedef void CMS_SignerInfo; -typedef void CMS_CertificateChoices; -typedef void CMS_RevocationInfoChoice; -typedef void CMS_RecipientInfo; -typedef void CMS_ReceiptRequest; -typedef void CMS_Receipt; -BIO *(*BIO_new_CMS)(BIO *, CMS_ContentInfo *) = NULL; -int (*i2d_CMS_bio_stream)(BIO *, CMS_ContentInfo *, BIO *, int) = NULL; -int (*PEM_write_bio_CMS_stream)(BIO *, CMS_ContentInfo *, BIO *, int) = NULL; -int (*CMS_final)(CMS_ContentInfo *, BIO *, BIO *, unsigned int) = NULL; -CMS_ContentInfo *(*CMS_sign)(X509 *, EVP_PKEY *, Cryptography_STACK_OF_X509 *, - BIO *, unsigned int) = NULL; -int (*CMS_verify)(CMS_ContentInfo *, Cryptography_STACK_OF_X509 *, - X509_STORE *, BIO *, BIO *, unsigned int) = NULL; -CMS_ContentInfo *(*CMS_encrypt)(Cryptography_STACK_OF_X509 *, BIO *, - const EVP_CIPHER *, unsigned int) = NULL; -int (*CMS_decrypt)(CMS_ContentInfo *, EVP_PKEY *, X509 *, BIO *, BIO *, - unsigned int) = NULL; -CMS_SignerInfo *(*CMS_add1_signer)(CMS_ContentInfo *, X509 *, EVP_PKEY *, - const EVP_MD *, unsigned int) = NULL; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_CMS": [ - "BIO_new_CMS", - "i2d_CMS_bio_stream", - "PEM_write_bio_CMS_stream", - "CMS_final", - "CMS_sign", - "CMS_verify", - "CMS_encrypt", - "CMS_decrypt", - "CMS_add1_signer", - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/conf.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/conf.py deleted file mode 100644 index 001a070..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/conf.py +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
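# ----------------------------------------------------------------------------
# A sketch of one plausible consumer of CONDITIONAL_NAMES (an assumption about
# intent, not code from the library): each key is a Cryptography_HAS_* flag
# and its values are the symbols that are only real when that flag is nonzero,
# so availability can be computed up front.
def available_names(lib, conditional_names):
    """Map each guarded name to whether the linked OpenSSL provides it."""
    availability = {}
    for flag, names in conditional_names.items():
        present = bool(getattr(lib, flag))
        for name in names:
            availability[name] = present
    return availability
# ----------------------------------------------------------------------------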
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... CONF; -""" - -FUNCTIONS = """ -void OPENSSL_config(const char *); -void OPENSSL_no_config(void); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/crypto.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/crypto.py deleted file mode 100644 index 99e1a61..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/crypto.py +++ /dev/null @@ -1,67 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... CRYPTO_THREADID; - -static const int SSLEAY_VERSION; -static const int SSLEAY_CFLAGS; -static const int SSLEAY_PLATFORM; -static const int SSLEAY_DIR; -static const int SSLEAY_BUILT_ON; -static const int CRYPTO_MEM_CHECK_ON; -static const int CRYPTO_MEM_CHECK_OFF; -static const int CRYPTO_MEM_CHECK_ENABLE; -static const int CRYPTO_MEM_CHECK_DISABLE; -static const int CRYPTO_LOCK; -static const int CRYPTO_UNLOCK; -static const int CRYPTO_READ; -static const int CRYPTO_WRITE; -static const int CRYPTO_LOCK_SSL; -""" - -FUNCTIONS = """ -unsigned long SSLeay(void); -const char *SSLeay_version(int); - -void CRYPTO_free(void *); -int CRYPTO_mem_ctrl(int); -int CRYPTO_is_mem_check_on(void); -void CRYPTO_mem_leaks(struct bio_st *); -void CRYPTO_cleanup_all_ex_data(void); -int CRYPTO_num_locks(void); -void CRYPTO_set_locking_callback(void(*)(int, int, const char *, int)); -void CRYPTO_set_id_callback(unsigned long (*)(void)); -unsigned long (*CRYPTO_get_id_callback(void))(void); -void (*CRYPTO_get_locking_callback(void))(int, int, const char *, int); -void CRYPTO_lock(int, int, const char *, int); - -void OPENSSL_free(void *); -""" - -MACROS = """ -void CRYPTO_add(int *, int, int); -void CRYPTO_malloc_init(void); -void CRYPTO_malloc_debug_init(void); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dh.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dh.py deleted file mode 100644 index e2e8976..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dh.py +++ /dev/null @@ -1,57 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
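# ----------------------------------------------------------------------------
# A minimal sketch showing that the crypto.py declarations above are already
# enough to ask the linked OpenSSL for its version, assuming the removed
# Binding wrapper.
from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()
version = binding.ffi.string(binding.lib.SSLeay_version(binding.lib.SSLEAY_VERSION))
print(version.decode("ascii"), hex(binding.lib.SSLeay()))
# ----------------------------------------------------------------------------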
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct dh_st { - /* Prime number (shared) */ - BIGNUM *p; - /* Generator of Z_p (shared) */ - BIGNUM *g; - /* Private DH value x */ - BIGNUM *priv_key; - /* Public DH value g^x */ - BIGNUM *pub_key; - ...; -} DH; -""" - -FUNCTIONS = """ -DH *DH_new(void); -void DH_free(DH *); -int DH_size(const DH *); -DH *DH_generate_parameters(int, int, void (*)(int, int, void *), void *); -int DH_check(const DH *, int *); -int DH_generate_key(DH *); -int DH_compute_key(unsigned char *, const BIGNUM *, DH *); -int DH_set_ex_data(DH *, int, void *); -void *DH_get_ex_data(DH *, int); -DH *d2i_DHparams(DH **, const unsigned char **, long); -int i2d_DHparams(const DH *, unsigned char **); -int DHparams_print_fp(FILE *, const DH *); -int DHparams_print(BIO *, const DH *); -""" - -MACROS = """ -int DH_generate_parameters_ex(DH *, int, int, BN_GENCB *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dsa.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dsa.py deleted file mode 100644 index c9aa888..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/dsa.py +++ /dev/null @@ -1,65 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
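# ----------------------------------------------------------------------------
# A toy Diffie-Hellman sketch driven through the dh.py declarations above,
# assuming the removed Binding wrapper.  512-bit parameters keep generation
# fast here but are far too small for real use.
from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()
ffi, lib = binding.ffi, binding.lib

dh = lib.DH_new()
assert lib.DH_generate_parameters_ex(dh, 512, 2, ffi.NULL) == 1
assert lib.DH_generate_key(dh) == 1

shared = ffi.new("unsigned char[]", lib.DH_size(dh))
# Computing against our own public key just demonstrates the call shape.
n = lib.DH_compute_key(shared, dh.pub_key, dh)
print(bytes(ffi.buffer(shared, n)).hex())
lib.DH_free(dh)
# ----------------------------------------------------------------------------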
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct dsa_st { - /* Prime number (public) */ - BIGNUM *p; - /* Subprime (160-bit, q | p-1, public) */ - BIGNUM *q; - /* Generator of subgroup (public) */ - BIGNUM *g; - /* Private key x */ - BIGNUM *priv_key; - /* Public key y = g^x */ - BIGNUM *pub_key; - ...; -} DSA; -typedef struct { - BIGNUM *r; - BIGNUM *s; -} DSA_SIG; -""" - -FUNCTIONS = """ -DSA *DSA_generate_parameters(int, unsigned char *, int, int *, unsigned long *, - void (*)(int, int, void *), void *); -int DSA_generate_key(DSA *); -DSA *DSA_new(void); -void DSA_free(DSA *); -DSA_SIG *DSA_SIG_new(void); -void DSA_SIG_free(DSA_SIG *); -int i2d_DSA_SIG(const DSA_SIG *, unsigned char **); -DSA_SIG *d2i_DSA_SIG(DSA_SIG **, const unsigned char **, long); -int DSA_size(const DSA *); -int DSA_sign(int, const unsigned char *, int, unsigned char *, unsigned int *, - DSA *); -int DSA_verify(int, const unsigned char *, int, const unsigned char *, int, - DSA *); -""" - -MACROS = """ -int DSA_generate_parameters_ex(DSA *, int, unsigned char *, int, - int *, unsigned long *, BN_GENCB *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ec.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ec.py deleted file mode 100644 index 26fc8ff..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ec.py +++ /dev/null @@ -1,490 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#ifndef OPENSSL_NO_EC -#include -#endif - -#include -""" - -TYPES = """ -static const int Cryptography_HAS_EC; -static const int Cryptography_HAS_EC_1_0_1; -static const int Cryptography_HAS_EC_NISTP_64_GCC_128; -static const int Cryptography_HAS_EC2M; - -static const int OPENSSL_EC_NAMED_CURVE; - -typedef ... EC_KEY; -typedef ... EC_GROUP; -typedef ... EC_POINT; -typedef ... EC_METHOD; -typedef struct { - int nid; - const char *comment; -} EC_builtin_curve; -typedef enum { ... 
} point_conversion_form_t; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -EC_GROUP *EC_GROUP_new(const EC_METHOD *); -void EC_GROUP_free(EC_GROUP *); -void EC_GROUP_clear_free(EC_GROUP *); - -EC_GROUP *EC_GROUP_new_curve_GFp( - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -EC_GROUP *EC_GROUP_new_curve_GF2m( - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -EC_GROUP *EC_GROUP_new_by_curve_name(int); - -int EC_GROUP_set_curve_GFp( - EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int EC_GROUP_get_curve_GFp( - const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); -int EC_GROUP_set_curve_GF2m( - EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -int EC_GROUP_get_curve_GF2m( - const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); - -int EC_GROUP_get_degree(const EC_GROUP *); - -const EC_METHOD *EC_GROUP_method_of(const EC_GROUP *); -const EC_POINT *EC_GROUP_get0_generator(const EC_GROUP *); -int EC_GROUP_get_curve_name(const EC_GROUP *); - -size_t EC_get_builtin_curves(EC_builtin_curve *, size_t); - -void EC_KEY_free(EC_KEY *); - -int EC_KEY_get_flags(const EC_KEY *); -void EC_KEY_set_flags(EC_KEY *, int); -void EC_KEY_clear_flags(EC_KEY *, int); -EC_KEY *EC_KEY_new_by_curve_name(int); -EC_KEY *EC_KEY_copy(EC_KEY *, const EC_KEY *); -EC_KEY *EC_KEY_dup(const EC_KEY *); -int EC_KEY_up_ref(EC_KEY *); -const EC_GROUP *EC_KEY_get0_group(const EC_KEY *); -int EC_GROUP_get_order(const EC_GROUP *, BIGNUM *, BN_CTX *); -int EC_KEY_set_group(EC_KEY *, const EC_GROUP *); -const BIGNUM *EC_KEY_get0_private_key(const EC_KEY *); -int EC_KEY_set_private_key(EC_KEY *, const BIGNUM *); -const EC_POINT *EC_KEY_get0_public_key(const EC_KEY *); -int EC_KEY_set_public_key(EC_KEY *, const EC_POINT *); -unsigned int EC_KEY_get_enc_flags(const EC_KEY *); -void EC_KEY_set_enc_flags(EC_KEY *eckey, unsigned int); -point_conversion_form_t EC_KEY_get_conv_form(const EC_KEY *); -void EC_KEY_set_conv_form(EC_KEY *, point_conversion_form_t); -void *EC_KEY_get_key_method_data( - EC_KEY *, - void *(*)(void *), - void (*)(void *), - void (*)(void *) -); -void EC_KEY_insert_key_method_data( - EC_KEY *, - void *, - void *(*)(void *), - void (*)(void *), - void (*)(void *) -); -void EC_KEY_set_asn1_flag(EC_KEY *, int); -int EC_KEY_precompute_mult(EC_KEY *, BN_CTX *); -int EC_KEY_generate_key(EC_KEY *); -int EC_KEY_check_key(const EC_KEY *); -int EC_KEY_set_public_key_affine_coordinates(EC_KEY *, BIGNUM *, BIGNUM *); - -EC_POINT *EC_POINT_new(const EC_GROUP *); -void EC_POINT_free(EC_POINT *); -void EC_POINT_clear_free(EC_POINT *); -int EC_POINT_copy(EC_POINT *, const EC_POINT *); -EC_POINT *EC_POINT_dup(const EC_POINT *, const EC_GROUP *); -const EC_METHOD *EC_POINT_method_of(const EC_POINT *); - -int EC_POINT_set_to_infinity(const EC_GROUP *, EC_POINT *); - -int EC_POINT_set_Jprojective_coordinates_GFp(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -int EC_POINT_get_Jprojective_coordinates_GFp(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); - -int EC_POINT_set_affine_coordinates_GFp(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const BIGNUM *, BN_CTX *); - -int EC_POINT_get_affine_coordinates_GFp(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *); - -int EC_POINT_set_compressed_coordinates_GFp(const EC_GROUP *, EC_POINT *, - const BIGNUM *, int, BN_CTX *); - -int EC_POINT_set_affine_coordinates_GF2m(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const 
BIGNUM *, BN_CTX *); - -int EC_POINT_get_affine_coordinates_GF2m(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *); - -int EC_POINT_set_compressed_coordinates_GF2m(const EC_GROUP *, EC_POINT *, - const BIGNUM *, int, BN_CTX *); - -size_t EC_POINT_point2oct(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t, - unsigned char *, size_t, BN_CTX *); - -int EC_POINT_oct2point(const EC_GROUP *, EC_POINT *, - const unsigned char *, size_t, BN_CTX *); - -BIGNUM *EC_POINT_point2bn(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t form, BIGNUM *, BN_CTX *); - -EC_POINT *EC_POINT_bn2point(const EC_GROUP *, const BIGNUM *, - EC_POINT *, BN_CTX *); - -char *EC_POINT_point2hex(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t form, BN_CTX *); - -EC_POINT *EC_POINT_hex2point(const EC_GROUP *, const char *, - EC_POINT *, BN_CTX *); - -int EC_POINT_add(const EC_GROUP *, EC_POINT *, const EC_POINT *, - const EC_POINT *, BN_CTX *); - -int EC_POINT_dbl(const EC_GROUP *, EC_POINT *, const EC_POINT *, BN_CTX *); -int EC_POINT_invert(const EC_GROUP *, EC_POINT *, BN_CTX *); -int EC_POINT_is_at_infinity(const EC_GROUP *, const EC_POINT *); -int EC_POINT_is_on_curve(const EC_GROUP *, const EC_POINT *, BN_CTX *); - -int EC_POINT_cmp( - const EC_GROUP *, const EC_POINT *, const EC_POINT *, BN_CTX *); - -int EC_POINT_make_affine(const EC_GROUP *, EC_POINT *, BN_CTX *); -int EC_POINTs_make_affine(const EC_GROUP *, size_t, EC_POINT *[], BN_CTX *); - -int EC_POINTs_mul( - const EC_GROUP *, EC_POINT *, const BIGNUM *, - size_t, const EC_POINT *[], const BIGNUM *[], BN_CTX *); - -int EC_POINT_mul(const EC_GROUP *, EC_POINT *, const BIGNUM *, - const EC_POINT *, const BIGNUM *, BN_CTX *); - -int EC_GROUP_precompute_mult(EC_GROUP *, BN_CTX *); -int EC_GROUP_have_precompute_mult(const EC_GROUP *); - -const EC_METHOD *EC_GFp_simple_method(); -const EC_METHOD *EC_GFp_mont_method(); -const EC_METHOD *EC_GFp_nist_method(); - -const EC_METHOD *EC_GFp_nistp224_method(); -const EC_METHOD *EC_GFp_nistp256_method(); -const EC_METHOD *EC_GFp_nistp521_method(); - -const EC_METHOD *EC_GF2m_simple_method(); - -int EC_METHOD_get_field_type(const EC_METHOD *); -""" - -CUSTOMIZATIONS = """ -#ifdef OPENSSL_NO_EC -static const long Cryptography_HAS_EC = 0; - -typedef void EC_KEY; -typedef void EC_GROUP; -typedef void EC_POINT; -typedef void EC_METHOD; -typedef struct { - int nid; - const char *comment; -} EC_builtin_curve; -typedef long point_conversion_form_t; - -static const int OPENSSL_EC_NAMED_CURVE = 0; - -void (*EC_KEY_free)(EC_KEY *) = NULL; -size_t (*EC_get_builtin_curves)(EC_builtin_curve *, size_t) = NULL; -EC_KEY *(*EC_KEY_new_by_curve_name)(int) = NULL; -EC_KEY *(*EC_KEY_copy)(EC_KEY *, const EC_KEY *) = NULL; -EC_KEY *(*EC_KEY_dup)(const EC_KEY *) = NULL; -int (*EC_KEY_up_ref)(EC_KEY *) = NULL; -const EC_GROUP *(*EC_KEY_get0_group)(const EC_KEY *) = NULL; -int (*EC_GROUP_get_order)(const EC_GROUP *, BIGNUM *, BN_CTX *) = NULL; -int (*EC_KEY_set_group)(EC_KEY *, const EC_GROUP *) = NULL; -const BIGNUM *(*EC_KEY_get0_private_key)(const EC_KEY *) = NULL; -int (*EC_KEY_set_private_key)(EC_KEY *, const BIGNUM *) = NULL; -const EC_POINT *(*EC_KEY_get0_public_key)(const EC_KEY *) = NULL; -int (*EC_KEY_set_public_key)(EC_KEY *, const EC_POINT *) = NULL; -unsigned int (*EC_KEY_get_enc_flags)(const EC_KEY *) = NULL; -void (*EC_KEY_set_enc_flags)(EC_KEY *eckey, unsigned int) = NULL; -point_conversion_form_t (*EC_KEY_get_conv_form)(const EC_KEY *) = NULL; -void 
(*EC_KEY_set_conv_form)(EC_KEY *, point_conversion_form_t) = NULL; -void *(*EC_KEY_get_key_method_data)( - EC_KEY *, void *(*)(void *), void (*)(void *), void (*)(void *)) = NULL; -void (*EC_KEY_insert_key_method_data)( - EC_KEY *, void *, - void *(*)(void *), void (*)(void *), void (*)(void *)) = NULL; -void (*EC_KEY_set_asn1_flag)(EC_KEY *, int) = NULL; -int (*EC_KEY_precompute_mult)(EC_KEY *, BN_CTX *) = NULL; -int (*EC_KEY_generate_key)(EC_KEY *) = NULL; -int (*EC_KEY_check_key)(const EC_KEY *) = NULL; - -EC_GROUP *(*EC_GROUP_new)(const EC_METHOD *); -void (*EC_GROUP_free)(EC_GROUP *); -void (*EC_GROUP_clear_free)(EC_GROUP *); - -EC_GROUP *(*EC_GROUP_new_curve_GFp)( - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -EC_GROUP *(*EC_GROUP_new_by_curve_name)(int); - -int (*EC_GROUP_set_curve_GFp)( - EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -int (*EC_GROUP_get_curve_GFp)( - const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); - -int (*EC_GROUP_get_degree)(const EC_GROUP *) = NULL; - -const EC_METHOD *(*EC_GROUP_method_of)(const EC_GROUP *) = NULL; -const EC_POINT *(*EC_GROUP_get0_generator)(const EC_GROUP *) = NULL; -int (*EC_GROUP_get_curve_name)(const EC_GROUP *) = NULL; - -EC_POINT *(*EC_POINT_new)(const EC_GROUP *) = NULL; -void (*EC_POINT_free)(EC_POINT *) = NULL; -void (*EC_POINT_clear_free)(EC_POINT *) = NULL; -int (*EC_POINT_copy)(EC_POINT *, const EC_POINT *) = NULL; -EC_POINT *(*EC_POINT_dup)(const EC_POINT *, const EC_GROUP *) = NULL; -const EC_METHOD *(*EC_POINT_method_of)(const EC_POINT *) = NULL; -int (*EC_POINT_set_to_infinity)(const EC_GROUP *, EC_POINT *) = NULL; -int (*EC_POINT_set_Jprojective_coordinates_GFp)(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_get_Jprojective_coordinates_GFp)(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_set_affine_coordinates_GFp)(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_get_affine_coordinates_GFp)(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_set_compressed_coordinates_GFp)(const EC_GROUP *, EC_POINT *, - const BIGNUM *, int, BN_CTX *) = NULL; - -size_t (*EC_POINT_point2oct)(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t, - unsigned char *, size_t, BN_CTX *) = NULL; - -int (*EC_POINT_oct2point)(const EC_GROUP *, EC_POINT *, - const unsigned char *, size_t, BN_CTX *) = NULL; - -BIGNUM *(*EC_POINT_point2bn)(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t form, BIGNUM *, BN_CTX *) = NULL; - -EC_POINT *(*EC_POINT_bn2point)(const EC_GROUP *, const BIGNUM *, - EC_POINT *, BN_CTX *) = NULL; - -char *(*EC_POINT_point2hex)(const EC_GROUP *, const EC_POINT *, - point_conversion_form_t form, BN_CTX *) = NULL; - -EC_POINT *(*EC_POINT_hex2point)(const EC_GROUP *, const char *, - EC_POINT *, BN_CTX *) = NULL; - -int (*EC_POINT_add)(const EC_GROUP *, EC_POINT *, const EC_POINT *, - const EC_POINT *, BN_CTX *) = NULL; - -int (*EC_POINT_dbl)(const EC_GROUP *, EC_POINT *, const EC_POINT *, - BN_CTX *) = NULL; - -int (*EC_POINT_invert)(const EC_GROUP *, EC_POINT *, BN_CTX *) = NULL; -int (*EC_POINT_is_at_infinity)(const EC_GROUP *, const EC_POINT *) = NULL; - -int (*EC_POINT_is_on_curve)(const EC_GROUP *, const EC_POINT *, - BN_CTX *) = NULL; - -int (*EC_POINT_cmp)( - const EC_GROUP *, const EC_POINT *, const EC_POINT *, BN_CTX *) = NULL; - -int 
(*EC_POINT_make_affine)(const EC_GROUP *, EC_POINT *, BN_CTX *) = NULL; - -int (*EC_POINTs_make_affine)(const EC_GROUP *, size_t, EC_POINT *[], - BN_CTX *) = NULL; - -int (*EC_POINTs_mul)( - const EC_GROUP *, EC_POINT *, const BIGNUM *, - size_t, const EC_POINT *[], const BIGNUM *[], BN_CTX *) = NULL; - -int (*EC_POINT_mul)(const EC_GROUP *, EC_POINT *, const BIGNUM *, - const EC_POINT *, const BIGNUM *, BN_CTX *) = NULL; - -int (*EC_GROUP_precompute_mult)(EC_GROUP *, BN_CTX *) = NULL; -int (*EC_GROUP_have_precompute_mult)(const EC_GROUP *) = NULL; - -const EC_METHOD *(*EC_GFp_simple_method)() = NULL; -const EC_METHOD *(*EC_GFp_mont_method)() = NULL; -const EC_METHOD *(*EC_GFp_nist_method)() = NULL; - -int (*EC_METHOD_get_field_type)(const EC_METHOD *) = NULL; - -#else -static const long Cryptography_HAS_EC = 1; -#endif - -#if defined(OPENSSL_NO_EC) || OPENSSL_VERSION_NUMBER < 0x1000100f -static const long Cryptography_HAS_EC_1_0_1 = 0; - -int (*EC_KEY_get_flags)(const EC_KEY *) = NULL; -void (*EC_KEY_set_flags)(EC_KEY *, int) = NULL; -void (*EC_KEY_clear_flags)(EC_KEY *, int) = NULL; - -int (*EC_KEY_set_public_key_affine_coordinates)( - EC_KEY *, BIGNUM *, BIGNUM *) = NULL; -#else -static const long Cryptography_HAS_EC_1_0_1 = 1; -#endif - - -#if defined(OPENSSL_NO_EC) || OPENSSL_VERSION_NUMBER < 0x1000100f || \ - defined(OPENSSL_NO_EC_NISTP_64_GCC_128) -static const long Cryptography_HAS_EC_NISTP_64_GCC_128 = 0; - -const EC_METHOD *(*EC_GFp_nistp224_method)(void) = NULL; -const EC_METHOD *(*EC_GFp_nistp256_method)(void) = NULL; -const EC_METHOD *(*EC_GFp_nistp521_method)(void) = NULL; -#else -static const long Cryptography_HAS_EC_NISTP_64_GCC_128 = 1; -#endif - -#if defined(OPENSSL_NO_EC) || defined(OPENSSL_NO_EC2M) -static const long Cryptography_HAS_EC2M = 0; - -const EC_METHOD *(*EC_GF2m_simple_method)() = NULL; - -int (*EC_POINT_set_affine_coordinates_GF2m)(const EC_GROUP *, EC_POINT *, - const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_get_affine_coordinates_GF2m)(const EC_GROUP *, - const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; - -int (*EC_POINT_set_compressed_coordinates_GF2m)(const EC_GROUP *, EC_POINT *, - const BIGNUM *, int, BN_CTX *) = NULL; - -int (*EC_GROUP_set_curve_GF2m)( - EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); - -int (*EC_GROUP_get_curve_GF2m)( - const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); - -EC_GROUP *(*EC_GROUP_new_curve_GF2m)( - const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); -#else -static const long Cryptography_HAS_EC2M = 1; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_EC": [ - "OPENSSL_EC_NAMED_CURVE", - "EC_GROUP_new", - "EC_GROUP_free", - "EC_GROUP_clear_free", - "EC_GROUP_new_curve_GFp", - "EC_GROUP_new_by_curve_name", - "EC_GROUP_set_curve_GFp", - "EC_GROUP_get_curve_GFp", - "EC_GROUP_method_of", - "EC_GROUP_get0_generator", - "EC_GROUP_get_curve_name", - "EC_GROUP_get_degree", - "EC_KEY_free", - "EC_get_builtin_curves", - "EC_KEY_new_by_curve_name", - "EC_KEY_copy", - "EC_KEY_dup", - "EC_KEY_up_ref", - "EC_KEY_set_group", - "EC_KEY_get0_private_key", - "EC_KEY_set_private_key", - "EC_KEY_set_public_key", - "EC_KEY_get_enc_flags", - "EC_KEY_set_enc_flags", - "EC_KEY_set_conv_form", - "EC_KEY_get_key_method_data", - "EC_KEY_insert_key_method_data", - "EC_KEY_set_asn1_flag", - "EC_KEY_precompute_mult", - "EC_KEY_generate_key", - "EC_KEY_check_key", - "EC_POINT_new", - "EC_POINT_free", - "EC_POINT_clear_free", - "EC_POINT_copy", - "EC_POINT_dup", - 
"EC_POINT_method_of", - "EC_POINT_set_to_infinity", - "EC_POINT_set_Jprojective_coordinates_GFp", - "EC_POINT_get_Jprojective_coordinates_GFp", - "EC_POINT_set_affine_coordinates_GFp", - "EC_POINT_get_affine_coordinates_GFp", - "EC_POINT_set_compressed_coordinates_GFp", - "EC_POINT_point2oct", - "EC_POINT_oct2point", - "EC_POINT_point2bn", - "EC_POINT_bn2point", - "EC_POINT_point2hex", - "EC_POINT_hex2point", - "EC_POINT_add", - "EC_POINT_dbl", - "EC_POINT_invert", - "EC_POINT_is_at_infinity", - "EC_POINT_is_on_curve", - "EC_POINT_cmp", - "EC_POINT_make_affine", - "EC_POINTs_make_affine", - "EC_POINTs_mul", - "EC_POINT_mul", - "EC_GROUP_precompute_mult", - "EC_GROUP_have_precompute_mult", - "EC_GFp_simple_method", - "EC_GFp_mont_method", - "EC_GFp_nist_method", - "EC_METHOD_get_field_type", - ], - - "Cryptography_HAS_EC_1_0_1": [ - "EC_KEY_get_flags", - "EC_KEY_set_flags", - "EC_KEY_clear_flags", - "EC_KEY_set_public_key_affine_coordinates", - ], - - "Cryptography_HAS_EC_NISTP_64_GCC_128": [ - "EC_GFp_nistp224_method", - "EC_GFp_nistp256_method", - "EC_GFp_nistp521_method", - ], - - "Cryptography_HAS_EC2M": [ - "EC_GF2m_simple_method", - "EC_POINT_set_affine_coordinates_GF2m", - "EC_POINT_get_affine_coordinates_GF2m", - "EC_POINT_set_compressed_coordinates_GF2m", - "EC_GROUP_set_curve_GF2m", - "EC_GROUP_get_curve_GF2m", - "EC_GROUP_new_curve_GF2m", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py deleted file mode 100644 index 960d46f..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py +++ /dev/null @@ -1,68 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#ifndef OPENSSL_NO_ECDH -#include -#endif -""" - -TYPES = """ -static const int Cryptography_HAS_ECDH; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -int ECDH_compute_key(void *, size_t, const EC_POINT *, EC_KEY *, - void *(*)(const void *, size_t, void *, size_t *)); - -int ECDH_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *, - CRYPTO_EX_free *); - -int ECDH_set_ex_data(EC_KEY *, int, void *); - -void *ECDH_get_ex_data(EC_KEY *, int); -""" - -CUSTOMIZATIONS = """ -#ifdef OPENSSL_NO_ECDH -static const long Cryptography_HAS_ECDH = 0; - -int (*ECDH_compute_key)(void *, size_t, const EC_POINT *, EC_KEY *, - void *(*)(const void *, size_t, void *, - size_t *)) = NULL; - -int (*ECDH_get_ex_new_index)(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *, - CRYPTO_EX_free *) = NULL; - -int (*ECDH_set_ex_data)(EC_KEY *, int, void *) = NULL; - -void *(*ECDH_get_ex_data)(EC_KEY *, int) = NULL; - -#else -static const long Cryptography_HAS_ECDH = 1; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_ECDH": [ - "ECDH_compute_key", - "ECDH_get_ex_new_index", - "ECDH_set_ex_data", - "ECDH_get_ex_data", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py deleted file mode 100644 index bfa6720..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py +++ /dev/null @@ -1,130 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#ifndef OPENSSL_NO_ECDSA -#include -#endif -""" - -TYPES = """ -static const int Cryptography_HAS_ECDSA; - -typedef struct { - BIGNUM *r; - BIGNUM *s; -} ECDSA_SIG; - -typedef ... CRYPTO_EX_new; -typedef ... CRYPTO_EX_dup; -typedef ... 
CRYPTO_EX_free; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -ECDSA_SIG *ECDSA_SIG_new(); -void ECDSA_SIG_free(ECDSA_SIG *); -int i2d_ECDSA_SIG(const ECDSA_SIG *, unsigned char **); -ECDSA_SIG *d2i_ECDSA_SIG(ECDSA_SIG **s, const unsigned char **, long); -ECDSA_SIG *ECDSA_do_sign(const unsigned char *, int, EC_KEY *); -ECDSA_SIG *ECDSA_do_sign_ex(const unsigned char *, int, const BIGNUM *, - const BIGNUM *, EC_KEY *); -int ECDSA_do_verify(const unsigned char *, int, const ECDSA_SIG *, EC_KEY*); -int ECDSA_sign_setup(EC_KEY *, BN_CTX *, BIGNUM **, BIGNUM **); -int ECDSA_sign(int, const unsigned char *, int, unsigned char *, - unsigned int *, EC_KEY *); -int ECDSA_sign_ex(int, const unsigned char *, int dgstlen, unsigned char *, - unsigned int *, const BIGNUM *, const BIGNUM *, EC_KEY *); -int ECDSA_verify(int, const unsigned char *, int, const unsigned char *, int, - EC_KEY *); -int ECDSA_size(const EC_KEY *); - -const ECDSA_METHOD* ECDSA_OpenSSL(); -void ECDSA_set_default_method(const ECDSA_METHOD *); -const ECDSA_METHOD* ECDSA_get_default_method(); -int ECDSA_get_ex_new_index(long, void *, CRYPTO_EX_new *, - CRYPTO_EX_dup *, CRYPTO_EX_free *); -int ECDSA_set_method(EC_KEY *, const ECDSA_METHOD *); -int ECDSA_set_ex_data(EC_KEY *, int, void *); -void *ECDSA_get_ex_data(EC_KEY *, int); -""" - -CUSTOMIZATIONS = """ -#ifdef OPENSSL_NO_ECDSA -static const long Cryptography_HAS_ECDSA = 0; - -typedef struct { - BIGNUM *r; - BIGNUM *s; -} ECDSA_SIG; - -ECDSA_SIG* (*ECDSA_SIG_new)() = NULL; -void (*ECDSA_SIG_free)(ECDSA_SIG *) = NULL; -int (*i2d_ECDSA_SIG)(const ECDSA_SIG *, unsigned char **) = NULL; -ECDSA_SIG* (*d2i_ECDSA_SIG)(ECDSA_SIG **s, const unsigned char **, - long) = NULL; -ECDSA_SIG* (*ECDSA_do_sign)(const unsigned char *, int, EC_KEY *eckey) = NULL; -ECDSA_SIG* (*ECDSA_do_sign_ex)(const unsigned char *, int, const BIGNUM *, - const BIGNUM *, EC_KEY *) = NULL; -int (*ECDSA_do_verify)(const unsigned char *, int, const ECDSA_SIG *, - EC_KEY*) = NULL; -int (*ECDSA_sign_setup)(EC_KEY *, BN_CTX *, BIGNUM **, BIGNUM **) = NULL; -int (*ECDSA_sign)(int, const unsigned char *, int, unsigned char *, - unsigned int *, EC_KEY *) = NULL; -int (*ECDSA_sign_ex)(int, const unsigned char *, int dgstlen, unsigned char *, - unsigned int *, const BIGNUM *, const BIGNUM *, - EC_KEY *) = NULL; -int (*ECDSA_verify)(int, const unsigned char *, int, const unsigned char *, - int, EC_KEY *) = NULL; -int (*ECDSA_size)(const EC_KEY *) = NULL; - -const ECDSA_METHOD* (*ECDSA_OpenSSL)() = NULL; -void (*ECDSA_set_default_method)(const ECDSA_METHOD *) = NULL; -const ECDSA_METHOD* (*ECDSA_get_default_method)() = NULL; -int (*ECDSA_set_method)(EC_KEY *, const ECDSA_METHOD *) = NULL; -int (*ECDSA_get_ex_new_index)(long, void *, CRYPTO_EX_new *, - CRYPTO_EX_dup *, CRYPTO_EX_free *) = NULL; -int (*ECDSA_set_ex_data)(EC_KEY *, int, void *) = NULL; -void* (*ECDSA_get_ex_data)(EC_KEY *, int) = NULL; -#else -static const long Cryptography_HAS_ECDSA = 1; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_ECDSA": [ - "ECDSA_SIG_new", - "ECDSA_SIG_free", - "i2d_ECDSA_SIG", - "d2i_ECDSA_SIG", - "ECDSA_do_sign", - "ECDSA_do_sign_ex", - "ECDSA_do_verify", - "ECDSA_sign_setup", - "ECDSA_sign", - "ECDSA_sign_ex", - "ECDSA_verify", - "ECDSA_size", - "ECDSA_OpenSSL", - "ECDSA_set_default_method", - "ECDSA_get_default_method", - "ECDSA_set_method", - "ECDSA_get_ex_new_index", - "ECDSA_set_ex_data", - "ECDSA_get_ex_data", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/engine.py 
b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/engine.py deleted file mode 100644 index 364232e..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/engine.py +++ /dev/null @@ -1,165 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... ENGINE; -typedef ... RSA_METHOD; -typedef ... DSA_METHOD; -typedef ... ECDH_METHOD; -typedef ... ECDSA_METHOD; -typedef ... DH_METHOD; -typedef ... RAND_METHOD; -typedef ... STORE_METHOD; -typedef ... *ENGINE_GEN_INT_FUNC_PTR; -typedef ... *ENGINE_CTRL_FUNC_PTR; -typedef ... *ENGINE_LOAD_KEY_PTR; -typedef ... *ENGINE_CIPHERS_PTR; -typedef ... *ENGINE_DIGESTS_PTR; -typedef ... ENGINE_CMD_DEFN; -typedef ... UI_METHOD; - -static const unsigned int ENGINE_METHOD_RSA; -static const unsigned int ENGINE_METHOD_DSA; -static const unsigned int ENGINE_METHOD_RAND; -static const unsigned int ENGINE_METHOD_ECDH; -static const unsigned int ENGINE_METHOD_ECDSA; -static const unsigned int ENGINE_METHOD_CIPHERS; -static const unsigned int ENGINE_METHOD_DIGESTS; -static const unsigned int ENGINE_METHOD_STORE; -static const unsigned int ENGINE_METHOD_ALL; -static const unsigned int ENGINE_METHOD_NONE; -""" - -FUNCTIONS = """ -ENGINE *ENGINE_get_first(void); -ENGINE *ENGINE_get_last(void); -ENGINE *ENGINE_get_next(ENGINE *); -ENGINE *ENGINE_get_prev(ENGINE *); -int ENGINE_add(ENGINE *); -int ENGINE_remove(ENGINE *); -ENGINE *ENGINE_by_id(const char *); -int ENGINE_init(ENGINE *); -int ENGINE_finish(ENGINE *); -void ENGINE_load_openssl(void); -void ENGINE_load_dynamic(void); -void ENGINE_load_cryptodev(void); -void ENGINE_load_builtin_engines(void); -void ENGINE_cleanup(void); -ENGINE *ENGINE_get_default_RSA(void); -ENGINE *ENGINE_get_default_DSA(void); -ENGINE *ENGINE_get_default_ECDH(void); -ENGINE *ENGINE_get_default_ECDSA(void); -ENGINE *ENGINE_get_default_DH(void); -ENGINE *ENGINE_get_default_RAND(void); -ENGINE *ENGINE_get_cipher_engine(int); -ENGINE *ENGINE_get_digest_engine(int); -int ENGINE_set_default_RSA(ENGINE *); -int ENGINE_set_default_DSA(ENGINE *); -int ENGINE_set_default_ECDH(ENGINE *); -int ENGINE_set_default_ECDSA(ENGINE *); -int ENGINE_set_default_DH(ENGINE *); -int ENGINE_set_default_RAND(ENGINE *); -int ENGINE_set_default_ciphers(ENGINE *); -int ENGINE_set_default_digests(ENGINE *); -int ENGINE_set_default_string(ENGINE *, const char *); -int ENGINE_set_default(ENGINE *, unsigned int); -unsigned int ENGINE_get_table_flags(void); -void ENGINE_set_table_flags(unsigned int); -int ENGINE_register_RSA(ENGINE *); -void ENGINE_unregister_RSA(ENGINE *); -void ENGINE_register_all_RSA(void); -int ENGINE_register_DSA(ENGINE *); -void ENGINE_unregister_DSA(ENGINE *); -void ENGINE_register_all_DSA(void); -int ENGINE_register_ECDH(ENGINE *); -void ENGINE_unregister_ECDH(ENGINE *); -void ENGINE_register_all_ECDH(void); -int ENGINE_register_ECDSA(ENGINE *); -void 
ENGINE_unregister_ECDSA(ENGINE *); -void ENGINE_register_all_ECDSA(void); -int ENGINE_register_DH(ENGINE *); -void ENGINE_unregister_DH(ENGINE *); -void ENGINE_register_all_DH(void); -int ENGINE_register_RAND(ENGINE *); -void ENGINE_unregister_RAND(ENGINE *); -void ENGINE_register_all_RAND(void); -int ENGINE_register_STORE(ENGINE *); -void ENGINE_unregister_STORE(ENGINE *); -void ENGINE_register_all_STORE(void); -int ENGINE_register_ciphers(ENGINE *); -void ENGINE_unregister_ciphers(ENGINE *); -void ENGINE_register_all_ciphers(void); -int ENGINE_register_digests(ENGINE *); -void ENGINE_unregister_digests(ENGINE *); -void ENGINE_register_all_digests(void); -int ENGINE_register_complete(ENGINE *); -int ENGINE_register_all_complete(void); -int ENGINE_ctrl(ENGINE *, int, long, void *, void (*)(void)); -int ENGINE_cmd_is_executable(ENGINE *, int); -int ENGINE_ctrl_cmd(ENGINE *, const char *, long, void *, void (*)(void), int); -int ENGINE_ctrl_cmd_string(ENGINE *, const char *, const char *, int); - -ENGINE *ENGINE_new(void); -int ENGINE_free(ENGINE *); -int ENGINE_up_ref(ENGINE *); -int ENGINE_set_id(ENGINE *, const char *); -int ENGINE_set_name(ENGINE *, const char *); -int ENGINE_set_RSA(ENGINE *, const RSA_METHOD *); -int ENGINE_set_DSA(ENGINE *, const DSA_METHOD *); -int ENGINE_set_ECDH(ENGINE *, const ECDH_METHOD *); -int ENGINE_set_ECDSA(ENGINE *, const ECDSA_METHOD *); -int ENGINE_set_DH(ENGINE *, const DH_METHOD *); -int ENGINE_set_RAND(ENGINE *, const RAND_METHOD *); -int ENGINE_set_STORE(ENGINE *, const STORE_METHOD *); -int ENGINE_set_destroy_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); -int ENGINE_set_init_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); -int ENGINE_set_finish_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); -int ENGINE_set_ctrl_function(ENGINE *, ENGINE_CTRL_FUNC_PTR); -int ENGINE_set_load_privkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR); -int ENGINE_set_load_pubkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR); -int ENGINE_set_ciphers(ENGINE *, ENGINE_CIPHERS_PTR); -int ENGINE_set_digests(ENGINE *, ENGINE_DIGESTS_PTR); -int ENGINE_set_flags(ENGINE *, int); -int ENGINE_set_cmd_defns(ENGINE *, const ENGINE_CMD_DEFN *); -const char *ENGINE_get_id(const ENGINE *); -const char *ENGINE_get_name(const ENGINE *); -const RSA_METHOD *ENGINE_get_RSA(const ENGINE *); -const DSA_METHOD *ENGINE_get_DSA(const ENGINE *); -const ECDH_METHOD *ENGINE_get_ECDH(const ENGINE *); -const ECDSA_METHOD *ENGINE_get_ECDSA(const ENGINE *); -const DH_METHOD *ENGINE_get_DH(const ENGINE *); -const RAND_METHOD *ENGINE_get_RAND(const ENGINE *); -const STORE_METHOD *ENGINE_get_STORE(const ENGINE *); - -const EVP_CIPHER *ENGINE_get_cipher(ENGINE *, int); -const EVP_MD *ENGINE_get_digest(ENGINE *, int); -int ENGINE_get_flags(const ENGINE *); -const ENGINE_CMD_DEFN *ENGINE_get_cmd_defns(const ENGINE *); -EVP_PKEY *ENGINE_load_private_key(ENGINE *, const char *, UI_METHOD *, void *); -EVP_PKEY *ENGINE_load_public_key(ENGINE *, const char *, UI_METHOD *, void *); -void ENGINE_add_conf_module(void); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/err.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/err.py deleted file mode 100644 index 232060a..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/err.py +++ /dev/null @@ -1,347 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in 
compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -static const int Cryptography_HAS_REMOVE_THREAD_STATE; -static const int Cryptography_HAS_098H_ERROR_CODES; -static const int Cryptography_HAS_098C_CAMELLIA_CODES; -static const int Cryptography_HAS_EC_CODES; - -struct ERR_string_data_st { - unsigned long error; - const char *string; -}; -typedef struct ERR_string_data_st ERR_STRING_DATA; - -static const int ERR_LIB_EVP; -static const int ERR_LIB_EC; -static const int ERR_LIB_PEM; -static const int ERR_LIB_ASN1; -static const int ERR_LIB_RSA; - -static const int ASN1_F_ASN1_ENUMERATED_TO_BN; -static const int ASN1_F_ASN1_EX_C2I; -static const int ASN1_F_ASN1_FIND_END; -static const int ASN1_F_ASN1_GENERALIZEDTIME_SET; -static const int ASN1_F_ASN1_GENERATE_V3; -static const int ASN1_F_ASN1_GET_OBJECT; -static const int ASN1_F_ASN1_ITEM_I2D_FP; -static const int ASN1_F_ASN1_ITEM_PACK; -static const int ASN1_F_ASN1_ITEM_SIGN; -static const int ASN1_F_ASN1_ITEM_UNPACK; -static const int ASN1_F_ASN1_ITEM_VERIFY; -static const int ASN1_F_ASN1_MBSTRING_NCOPY; -static const int ASN1_F_ASN1_TEMPLATE_EX_D2I; -static const int ASN1_F_ASN1_TEMPLATE_NEW; -static const int ASN1_F_ASN1_TEMPLATE_NOEXP_D2I; -static const int ASN1_F_ASN1_TIME_SET; -static const int ASN1_F_ASN1_TYPE_GET_INT_OCTETSTRING; -static const int ASN1_F_ASN1_TYPE_GET_OCTETSTRING; -static const int ASN1_F_ASN1_UNPACK_STRING; -static const int ASN1_F_ASN1_UTCTIME_SET; -static const int ASN1_F_ASN1_VERIFY; -static const int ASN1_F_BITSTR_CB; -static const int ASN1_F_BN_TO_ASN1_ENUMERATED; -static const int ASN1_F_BN_TO_ASN1_INTEGER; -static const int ASN1_F_D2I_ASN1_TYPE_BYTES; -static const int ASN1_F_D2I_ASN1_UINTEGER; -static const int ASN1_F_D2I_ASN1_UTCTIME; -static const int ASN1_F_D2I_NETSCAPE_RSA; -static const int ASN1_F_D2I_NETSCAPE_RSA_2; -static const int ASN1_F_D2I_PRIVATEKEY; -static const int ASN1_F_D2I_X509; -static const int ASN1_F_D2I_X509_CINF; -static const int ASN1_F_D2I_X509_PKEY; -static const int ASN1_F_I2D_ASN1_SET; -static const int ASN1_F_I2D_ASN1_TIME; -static const int ASN1_F_I2D_DSA_PUBKEY; -static const int ASN1_F_LONG_C2I; -static const int ASN1_F_OID_MODULE_INIT; -static const int ASN1_F_PARSE_TAGGING; -static const int ASN1_F_PKCS5_PBE_SET; -static const int ASN1_F_X509_CINF_NEW; -static const int ASN1_R_BOOLEAN_IS_WRONG_LENGTH; -static const int ASN1_R_BUFFER_TOO_SMALL; -static const int ASN1_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER; -static const int ASN1_R_DATA_IS_WRONG; -static const int ASN1_R_DECODE_ERROR; -static const int ASN1_R_DECODING_ERROR; -static const int ASN1_R_DEPTH_EXCEEDED; -static const int ASN1_R_ENCODE_ERROR; -static const int ASN1_R_ERROR_GETTING_TIME; -static const int ASN1_R_ERROR_LOADING_SECTION; -static const int ASN1_R_MSTRING_WRONG_TAG; -static const int ASN1_R_NESTED_ASN1_STRING; -static const int ASN1_R_NO_MATCHING_CHOICE_TYPE; -static const int ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM; -static const int ASN1_R_UNKNOWN_OBJECT_TYPE; -static const int 
ASN1_R_UNKNOWN_PUBLIC_KEY_TYPE; -static const int ASN1_R_UNKNOWN_TAG; -static const int ASN1_R_UNKOWN_FORMAT; -static const int ASN1_R_UNSUPPORTED_ANY_DEFINED_BY_TYPE; -static const int ASN1_R_UNSUPPORTED_ENCRYPTION_ALGORITHM; -static const int ASN1_R_UNSUPPORTED_PUBLIC_KEY_TYPE; -static const int ASN1_R_UNSUPPORTED_TYPE; -static const int ASN1_R_WRONG_TAG; -static const int ASN1_R_WRONG_TYPE; - -static const int EVP_F_AES_INIT_KEY; -static const int EVP_F_D2I_PKEY; -static const int EVP_F_DSA_PKEY2PKCS8; -static const int EVP_F_DSAPKEY2PKCS8; -static const int EVP_F_ECDSA_PKEY2PKCS8; -static const int EVP_F_ECKEY_PKEY2PKCS8; -static const int EVP_F_EVP_CIPHER_CTX_CTRL; -static const int EVP_F_EVP_CIPHER_CTX_SET_KEY_LENGTH; -static const int EVP_F_EVP_CIPHERINIT_EX; -static const int EVP_F_EVP_DECRYPTFINAL_EX; -static const int EVP_F_EVP_DIGESTINIT_EX; -static const int EVP_F_EVP_ENCRYPTFINAL_EX; -static const int EVP_F_EVP_MD_CTX_COPY_EX; -static const int EVP_F_EVP_OPENINIT; -static const int EVP_F_EVP_PBE_ALG_ADD; -static const int EVP_F_EVP_PBE_CIPHERINIT; -static const int EVP_F_EVP_PKCS82PKEY; -static const int EVP_F_EVP_PKEY2PKCS8_BROKEN; -static const int EVP_F_EVP_PKEY_COPY_PARAMETERS; -static const int EVP_F_EVP_PKEY_DECRYPT; -static const int EVP_F_EVP_PKEY_ENCRYPT; -static const int EVP_F_EVP_PKEY_GET1_DH; -static const int EVP_F_EVP_PKEY_GET1_DSA; -static const int EVP_F_EVP_PKEY_GET1_ECDSA; -static const int EVP_F_EVP_PKEY_GET1_EC_KEY; -static const int EVP_F_EVP_PKEY_GET1_RSA; -static const int EVP_F_EVP_PKEY_NEW; -static const int EVP_F_EVP_RIJNDAEL; -static const int EVP_F_EVP_SIGNFINAL; -static const int EVP_F_EVP_VERIFYFINAL; -static const int EVP_F_PKCS5_PBE_KEYIVGEN; -static const int EVP_F_PKCS5_V2_PBE_KEYIVGEN; -static const int EVP_F_PKCS8_SET_BROKEN; -static const int EVP_F_RC2_MAGIC_TO_METH; -static const int EVP_F_RC5_CTRL; - -static const int EVP_R_AES_KEY_SETUP_FAILED; -static const int EVP_R_ASN1_LIB; -static const int EVP_R_BAD_BLOCK_LENGTH; -static const int EVP_R_BAD_DECRYPT; -static const int EVP_R_BAD_KEY_LENGTH; -static const int EVP_R_BN_DECODE_ERROR; -static const int EVP_R_BN_PUBKEY_ERROR; -static const int EVP_R_CIPHER_PARAMETER_ERROR; -static const int EVP_R_CTRL_NOT_IMPLEMENTED; -static const int EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED; -static const int EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH; -static const int EVP_R_DECODE_ERROR; -static const int EVP_R_DIFFERENT_KEY_TYPES; -static const int EVP_R_ENCODE_ERROR; -static const int EVP_R_INITIALIZATION_ERROR; -static const int EVP_R_INPUT_NOT_INITIALIZED; -static const int EVP_R_INVALID_KEY_LENGTH; -static const int EVP_R_IV_TOO_LARGE; -static const int EVP_R_KEYGEN_FAILURE; -static const int EVP_R_MISSING_PARAMETERS; -static const int EVP_R_NO_CIPHER_SET; -static const int EVP_R_NO_DIGEST_SET; -static const int EVP_R_NO_DSA_PARAMETERS; -static const int EVP_R_NO_SIGN_FUNCTION_CONFIGURED; -static const int EVP_R_NO_VERIFY_FUNCTION_CONFIGURED; -static const int EVP_R_PKCS8_UNKNOWN_BROKEN_TYPE; -static const int EVP_R_PUBLIC_KEY_NOT_RSA; -static const int EVP_R_UNKNOWN_PBE_ALGORITHM; -static const int EVP_R_UNSUPORTED_NUMBER_OF_ROUNDS; -static const int EVP_R_UNSUPPORTED_CIPHER; -static const int EVP_R_UNSUPPORTED_KEY_DERIVATION_FUNCTION; -static const int EVP_R_UNSUPPORTED_KEYLENGTH; -static const int EVP_R_UNSUPPORTED_SALT_TYPE; -static const int EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM; -static const int EVP_R_WRONG_FINAL_BLOCK_LENGTH; -static const int EVP_R_WRONG_PUBLIC_KEY_TYPE; - -static const int 
EC_F_EC_GROUP_NEW_BY_CURVE_NAME; - -static const int EC_R_UNKNOWN_GROUP; - -static const int PEM_F_D2I_PKCS8PRIVATEKEY_BIO; -static const int PEM_F_D2I_PKCS8PRIVATEKEY_FP; -static const int PEM_F_DO_PK8PKEY; -static const int PEM_F_DO_PK8PKEY_FP; -static const int PEM_F_LOAD_IV; -static const int PEM_F_PEM_ASN1_READ; -static const int PEM_F_PEM_ASN1_READ_BIO; -static const int PEM_F_PEM_ASN1_WRITE; -static const int PEM_F_PEM_ASN1_WRITE_BIO; -static const int PEM_F_PEM_DEF_CALLBACK; -static const int PEM_F_PEM_DO_HEADER; -static const int PEM_F_PEM_F_PEM_WRITE_PKCS8PRIVATEKEY; -static const int PEM_F_PEM_GET_EVP_CIPHER_INFO; -static const int PEM_F_PEM_PK8PKEY; -static const int PEM_F_PEM_READ; -static const int PEM_F_PEM_READ_BIO; -static const int PEM_F_PEM_READ_BIO_PRIVATEKEY; -static const int PEM_F_PEM_READ_PRIVATEKEY; -static const int PEM_F_PEM_SEALFINAL; -static const int PEM_F_PEM_SEALINIT; -static const int PEM_F_PEM_SIGNFINAL; -static const int PEM_F_PEM_WRITE; -static const int PEM_F_PEM_WRITE_BIO; -static const int PEM_F_PEM_X509_INFO_READ; -static const int PEM_F_PEM_X509_INFO_READ_BIO; -static const int PEM_F_PEM_X509_INFO_WRITE_BIO; - -static const int PEM_R_BAD_BASE64_DECODE; -static const int PEM_R_BAD_DECRYPT; -static const int PEM_R_BAD_END_LINE; -static const int PEM_R_BAD_IV_CHARS; -static const int PEM_R_BAD_PASSWORD_READ; -static const int PEM_R_ERROR_CONVERTING_PRIVATE_KEY; -static const int PEM_R_NO_START_LINE; -static const int PEM_R_NOT_DEK_INFO; -static const int PEM_R_NOT_ENCRYPTED; -static const int PEM_R_NOT_PROC_TYPE; -static const int PEM_R_PROBLEMS_GETTING_PASSWORD; -static const int PEM_R_PUBLIC_KEY_NO_RSA; -static const int PEM_R_READ_KEY; -static const int PEM_R_SHORT_HEADER; -static const int PEM_R_UNSUPPORTED_CIPHER; -static const int PEM_R_UNSUPPORTED_ENCRYPTION; - -static const int RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE; -static const int RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY; -static const int RSA_R_BLOCK_TYPE_IS_NOT_01; -static const int RSA_R_BLOCK_TYPE_IS_NOT_02; -""" - -FUNCTIONS = """ -void ERR_load_crypto_strings(void); -void ERR_load_SSL_strings(void); -void ERR_free_strings(void); -char* ERR_error_string(unsigned long, char *); -void ERR_error_string_n(unsigned long, char *, size_t); -const char* ERR_lib_error_string(unsigned long); -const char* ERR_func_error_string(unsigned long); -const char* ERR_reason_error_string(unsigned long); -void ERR_print_errors(BIO *); -void ERR_print_errors_fp(FILE *); -unsigned long ERR_get_error(void); -unsigned long ERR_peek_error(void); -unsigned long ERR_peek_last_error(void); -unsigned long ERR_get_error_line(const char **, int *); -unsigned long ERR_peek_error_line(const char **, int *); -unsigned long ERR_peek_last_error_line(const char **, int *); -unsigned long ERR_get_error_line_data(const char **, int *, - const char **, int *); -unsigned long ERR_peek_error_line_data(const char **, - int *, const char **, int *); -unsigned long ERR_peek_last_error_line_data(const char **, - int *, const char **, int *); -void ERR_put_error(int, int, int, const char *, int); -void ERR_add_error_data(int, ...); -int ERR_get_next_error_library(void); -""" - -MACROS = """ -unsigned long ERR_PACK(int, int, int); -int ERR_GET_LIB(unsigned long); -int ERR_GET_FUNC(unsigned long); -int ERR_GET_REASON(unsigned long); -int ERR_FATAL_ERROR(unsigned long); -/* introduced in 1.0.0 so we have to handle this specially to continue - * supporting 0.9.8 - */ -void ERR_remove_thread_state(const CRYPTO_THREADID *); - -/* These were added in 
OpenSSL 0.9.8h. When we drop support for RHEL/CentOS 5 - we should be able to move these back to TYPES. */ -static const int ASN1_F_B64_READ_ASN1; -static const int ASN1_F_B64_WRITE_ASN1; -static const int ASN1_F_SMIME_READ_ASN1; -static const int ASN1_F_SMIME_TEXT; -static const int ASN1_R_NO_CONTENT_TYPE; -static const int ASN1_R_NO_MULTIPART_BODY_FAILURE; -static const int ASN1_R_NO_MULTIPART_BOUNDARY; -/* These were added in OpenSSL 0.9.8c. */ -static const int EVP_F_CAMELLIA_INIT_KEY; -static const int EVP_R_CAMELLIA_KEY_SETUP_FAILED; -""" - -CUSTOMIZATIONS = """ -#if OPENSSL_VERSION_NUMBER >= 0x10000000L -static const long Cryptography_HAS_REMOVE_THREAD_STATE = 1; -#else -static const long Cryptography_HAS_REMOVE_THREAD_STATE = 0; -typedef uint32_t CRYPTO_THREADID; -void (*ERR_remove_thread_state)(const CRYPTO_THREADID *) = NULL; -#endif - -/* OpenSSL 0.9.8h+ */ -#if OPENSSL_VERSION_NUMBER >= 0x0090808fL -static const long Cryptography_HAS_098H_ERROR_CODES = 1; -#else -static const long Cryptography_HAS_098H_ERROR_CODES = 0; -static const int ASN1_F_B64_READ_ASN1 = 0; -static const int ASN1_F_B64_WRITE_ASN1 = 0; -static const int ASN1_F_SMIME_READ_ASN1 = 0; -static const int ASN1_F_SMIME_TEXT = 0; -static const int ASN1_R_NO_CONTENT_TYPE = 0; -static const int ASN1_R_NO_MULTIPART_BODY_FAILURE = 0; -static const int ASN1_R_NO_MULTIPART_BOUNDARY = 0; -#endif - -/* OpenSSL 0.9.8c+ */ -#ifdef EVP_F_CAMELLIA_INIT_KEY -static const long Cryptography_HAS_098C_CAMELLIA_CODES = 1; -#else -static const long Cryptography_HAS_098C_CAMELLIA_CODES = 0; -static const int EVP_F_CAMELLIA_INIT_KEY = 0; -static const int EVP_R_CAMELLIA_KEY_SETUP_FAILED = 0; -#endif - -// OpenSSL without EC. e.g. RHEL -#ifndef OPENSSL_NO_EC -static const long Cryptography_HAS_EC_CODES = 1; -#else -static const long Cryptography_HAS_EC_CODES = 0; -static const int EC_R_UNKNOWN_GROUP = 0; -static const int EC_F_EC_GROUP_NEW_BY_CURVE_NAME = 0; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_REMOVE_THREAD_STATE": [ - "ERR_remove_thread_state" - ], - "Cryptography_HAS_098H_ERROR_CODES": [ - "ASN1_F_B64_READ_ASN1", - "ASN1_F_B64_WRITE_ASN1", - "ASN1_F_SMIME_READ_ASN1", - "ASN1_F_SMIME_TEXT", - "ASN1_R_NO_CONTENT_TYPE", - "ASN1_R_NO_MULTIPART_BODY_FAILURE", - "ASN1_R_NO_MULTIPART_BOUNDARY", - ], - "Cryptography_HAS_098C_CAMELLIA_CODES": [ - "EVP_F_CAMELLIA_INIT_KEY", - "EVP_R_CAMELLIA_KEY_SETUP_FAILED" - ], - "Cryptography_HAS_EC_CODES": [ - "EC_R_UNKNOWN_GROUP", - "EC_F_EC_GROUP_NEW_BY_CURVE_NAME" - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/evp.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/evp.py deleted file mode 100644 index 1183450..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/evp.py +++ /dev/null @@ -1,261 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
EVP_CIPHER; -typedef struct { - const EVP_CIPHER *cipher; - ENGINE *engine; - int encrypt; - ...; -} EVP_CIPHER_CTX; -typedef ... EVP_MD; -typedef struct env_md_ctx_st { - ...; -} EVP_MD_CTX; - -typedef struct evp_pkey_st { - int type; - ...; -} EVP_PKEY; -typedef ... EVP_PKEY_CTX; -static const int EVP_PKEY_RSA; -static const int EVP_PKEY_DSA; -static const int EVP_PKEY_EC; -static const int EVP_MAX_MD_SIZE; -static const int EVP_CTRL_GCM_SET_IVLEN; -static const int EVP_CTRL_GCM_GET_TAG; -static const int EVP_CTRL_GCM_SET_TAG; - -static const int Cryptography_HAS_GCM; -static const int Cryptography_HAS_PBKDF2_HMAC; -static const int Cryptography_HAS_PKEY_CTX; -""" - -FUNCTIONS = """ -const EVP_CIPHER *EVP_get_cipherbyname(const char *); -int EVP_EncryptInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, - const unsigned char *, const unsigned char *); -int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *, int); -int EVP_EncryptUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, - const unsigned char *, int); -int EVP_EncryptFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); -int EVP_DecryptInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, - const unsigned char *, const unsigned char *); -int EVP_DecryptUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, - const unsigned char *, int); -int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); -int EVP_CipherInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, - const unsigned char *, const unsigned char *, int); -int EVP_CipherUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, - const unsigned char *, int); -int EVP_CipherFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); -int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *); -void EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *); -EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void); -void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *); -int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *, int); - -EVP_MD_CTX *EVP_MD_CTX_create(void); -int EVP_MD_CTX_copy_ex(EVP_MD_CTX *, const EVP_MD_CTX *); -int EVP_DigestInit_ex(EVP_MD_CTX *, const EVP_MD *, ENGINE *); -int EVP_DigestUpdate(EVP_MD_CTX *, const void *, size_t); -int EVP_DigestFinal_ex(EVP_MD_CTX *, unsigned char *, unsigned int *); -int EVP_MD_CTX_cleanup(EVP_MD_CTX *); -void EVP_MD_CTX_destroy(EVP_MD_CTX *); -const EVP_MD *EVP_get_digestbyname(const char *); - -EVP_PKEY *EVP_PKEY_new(void); -void EVP_PKEY_free(EVP_PKEY *); -int EVP_PKEY_type(int); -int EVP_PKEY_bits(EVP_PKEY *); -int EVP_PKEY_size(EVP_PKEY *); -RSA *EVP_PKEY_get1_RSA(EVP_PKEY *); -DSA *EVP_PKEY_get1_DSA(EVP_PKEY *); -DH *EVP_PKEY_get1_DH(EVP_PKEY *); - -int EVP_SignInit(EVP_MD_CTX *, const EVP_MD *); -int EVP_SignUpdate(EVP_MD_CTX *, const void *, size_t); -int EVP_SignFinal(EVP_MD_CTX *, unsigned char *, unsigned int *, EVP_PKEY *); - -int EVP_VerifyInit(EVP_MD_CTX *, const EVP_MD *); -int EVP_VerifyUpdate(EVP_MD_CTX *, const void *, size_t); -int EVP_VerifyFinal(EVP_MD_CTX *, const unsigned char *, unsigned int, - EVP_PKEY *); - -const EVP_MD *EVP_md5(void); - -int PKCS5_PBKDF2_HMAC_SHA1(const char *, int, const unsigned char *, int, int, - int, unsigned char *); - -int EVP_PKEY_set1_RSA(EVP_PKEY *, struct rsa_st *); -int EVP_PKEY_set1_DSA(EVP_PKEY *, struct dsa_st *); -int EVP_PKEY_set1_DH(EVP_PKEY *, DH *); - -int EVP_PKEY_get_attr_count(const EVP_PKEY *); -int EVP_PKEY_get_attr_by_NID(const EVP_PKEY *, int, int); -int EVP_PKEY_get_attr_by_OBJ(const EVP_PKEY *, ASN1_OBJECT *, int); -X509_ATTRIBUTE *EVP_PKEY_get_attr(const EVP_PKEY *, int); -X509_ATTRIBUTE *EVP_PKEY_delete_attr(EVP_PKEY *, 
int); -int EVP_PKEY_add1_attr(EVP_PKEY *, X509_ATTRIBUTE *); -int EVP_PKEY_add1_attr_by_OBJ(EVP_PKEY *, const ASN1_OBJECT *, int, - const unsigned char *, int); -int EVP_PKEY_add1_attr_by_NID(EVP_PKEY *, int, int, - const unsigned char *, int); -int EVP_PKEY_add1_attr_by_txt(EVP_PKEY *, const char *, int, - const unsigned char *, int); -""" - -MACROS = """ -void OpenSSL_add_all_algorithms(void); -int EVP_PKEY_assign_RSA(EVP_PKEY *, RSA *); -int EVP_PKEY_assign_DSA(EVP_PKEY *, DSA *); - -int EVP_PKEY_assign_EC_KEY(EVP_PKEY *, EC_KEY *); -EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *); -int EVP_PKEY_set1_EC_KEY(EVP_PKEY *, EC_KEY *); - -int EVP_CIPHER_CTX_block_size(const EVP_CIPHER_CTX *); -int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *, int, int, void *); - -int PKCS5_PBKDF2_HMAC(const char *, int, const unsigned char *, int, int, - const EVP_MD *, int, unsigned char *); - -int EVP_PKEY_CTX_set_signature_md(EVP_PKEY_CTX *, const EVP_MD *); - -/* These aren't macros, but must be in this section because they're not - available in 0.9.8. */ -EVP_PKEY_CTX *EVP_PKEY_CTX_new(EVP_PKEY *, ENGINE *); -EVP_PKEY_CTX *EVP_PKEY_CTX_new_id(int, ENGINE *); -EVP_PKEY_CTX *EVP_PKEY_CTX_dup(EVP_PKEY_CTX *); -void EVP_PKEY_CTX_free(EVP_PKEY_CTX *); -int EVP_PKEY_sign_init(EVP_PKEY_CTX *); -int EVP_PKEY_sign(EVP_PKEY_CTX *, unsigned char *, size_t *, - const unsigned char *, size_t); -int EVP_PKEY_verify_init(EVP_PKEY_CTX *); -int EVP_PKEY_verify(EVP_PKEY_CTX *, const unsigned char *, size_t, - const unsigned char *, size_t); -int EVP_PKEY_encrypt_init(EVP_PKEY_CTX *); -int EVP_PKEY_decrypt_init(EVP_PKEY_CTX *); - -/* The following were macros in 0.9.8e. Once we drop support for RHEL/CentOS 5 - we should move these back to FUNCTIONS. */ -const EVP_CIPHER *EVP_CIPHER_CTX_cipher(const EVP_CIPHER_CTX *); -int EVP_CIPHER_block_size(const EVP_CIPHER *); -const EVP_MD *EVP_MD_CTX_md(const EVP_MD_CTX *); -int EVP_MD_size(const EVP_MD *); - -/* Must be in macros because EVP_PKEY_CTX is undefined in 0.9.8 */ -int Cryptography_EVP_PKEY_encrypt(EVP_PKEY_CTX *ctx, unsigned char *out, - size_t *outlen, const unsigned char *in, - size_t inlen); -int Cryptography_EVP_PKEY_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out, - size_t *outlen, const unsigned char *in, - size_t inlen); -""" - -CUSTOMIZATIONS = """ -#ifdef EVP_CTRL_GCM_SET_TAG -const long Cryptography_HAS_GCM = 1; -#else -const long Cryptography_HAS_GCM = 0; -const long EVP_CTRL_GCM_GET_TAG = -1; -const long EVP_CTRL_GCM_SET_TAG = -1; -const long EVP_CTRL_GCM_SET_IVLEN = -1; -#endif -#if OPENSSL_VERSION_NUMBER >= 0x10000000L -const long Cryptography_HAS_PBKDF2_HMAC = 1; -const long Cryptography_HAS_PKEY_CTX = 1; - -/* OpenSSL 0.9.8 defines EVP_PKEY_encrypt and EVP_PKEY_decrypt functions, - but they are a completely different signature from the ones in 1.0.0+. - These wrapper functions allows us to safely declare them on any version and - conditionally remove them on 0.9.8. 
*/ -int Cryptography_EVP_PKEY_encrypt(EVP_PKEY_CTX *ctx, unsigned char *out, - size_t *outlen, const unsigned char *in, - size_t inlen) { - return EVP_PKEY_encrypt(ctx, out, outlen, in, inlen); -} -int Cryptography_EVP_PKEY_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out, - size_t *outlen, const unsigned char *in, - size_t inlen) { - return EVP_PKEY_decrypt(ctx, out, outlen, in, inlen); -} -#else -const long Cryptography_HAS_PBKDF2_HMAC = 0; -int (*PKCS5_PBKDF2_HMAC)(const char *, int, const unsigned char *, int, int, - const EVP_MD *, int, unsigned char *) = NULL; -const long Cryptography_HAS_PKEY_CTX = 0; -typedef void EVP_PKEY_CTX; -int (*EVP_PKEY_CTX_set_signature_md)(EVP_PKEY_CTX *, const EVP_MD *) = NULL; -int (*EVP_PKEY_sign_init)(EVP_PKEY_CTX *) = NULL; -int (*EVP_PKEY_sign)(EVP_PKEY_CTX *, unsigned char *, size_t *, - const unsigned char *, size_t) = NULL; -int (*EVP_PKEY_verify_init)(EVP_PKEY_CTX *) = NULL; -int (*EVP_PKEY_verify)(EVP_PKEY_CTX *, const unsigned char *, size_t, - const unsigned char *, size_t) = NULL; -EVP_PKEY_CTX *(*EVP_PKEY_CTX_new)(EVP_PKEY *, ENGINE *) = NULL; -EVP_PKEY_CTX *(*EVP_PKEY_CTX_new_id)(int, ENGINE *) = NULL; -EVP_PKEY_CTX *(*EVP_PKEY_CTX_dup)(EVP_PKEY_CTX *) = NULL; -void (*EVP_PKEY_CTX_free)(EVP_PKEY_CTX *) = NULL; -int (*EVP_PKEY_encrypt_init)(EVP_PKEY_CTX *) = NULL; -int (*EVP_PKEY_decrypt_init)(EVP_PKEY_CTX *) = NULL; -int (*Cryptography_EVP_PKEY_encrypt)(EVP_PKEY_CTX *, unsigned char *, size_t *, - const unsigned char *, size_t) = NULL; -int (*Cryptography_EVP_PKEY_decrypt)(EVP_PKEY_CTX *, unsigned char *, size_t *, - const unsigned char *, size_t) = NULL; -#endif -#ifdef OPENSSL_NO_EC -int (*EVP_PKEY_assign_EC_KEY)(EVP_PKEY *, EC_KEY *) = NULL; -EC_KEY *(*EVP_PKEY_get1_EC_KEY)(EVP_PKEY *) = NULL; -int (*EVP_PKEY_set1_EC_KEY)(EVP_PKEY *, EC_KEY *) = NULL; -#endif - -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_GCM": [ - "EVP_CTRL_GCM_GET_TAG", - "EVP_CTRL_GCM_SET_TAG", - "EVP_CTRL_GCM_SET_IVLEN", - ], - "Cryptography_HAS_PBKDF2_HMAC": [ - "PKCS5_PBKDF2_HMAC" - ], - "Cryptography_HAS_PKEY_CTX": [ - "EVP_PKEY_CTX_new", - "EVP_PKEY_CTX_new_id", - "EVP_PKEY_CTX_dup", - "EVP_PKEY_CTX_free", - "EVP_PKEY_sign", - "EVP_PKEY_sign_init", - "EVP_PKEY_verify", - "EVP_PKEY_verify_init", - "Cryptography_EVP_PKEY_encrypt", - "EVP_PKEY_encrypt_init", - "Cryptography_EVP_PKEY_decrypt", - "EVP_PKEY_decrypt_init", - "EVP_PKEY_CTX_set_signature_md", - ], - "Cryptography_HAS_EC": [ - "EVP_PKEY_assign_EC_KEY", - "EVP_PKEY_get1_EC_KEY", - "EVP_PKEY_set1_EC_KEY", - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/hmac.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/hmac.py deleted file mode 100644 index 6a64b92..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/hmac.py +++ /dev/null @@ -1,94 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
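A minimal usage sketch for the EVP declarations above: deriving a key with PKCS5_PBKDF2_HMAC, which evp.py gates behind Cryptography_HAS_PBKDF2_HMAC (OpenSSL 1.0.0+). It assumes the package's Binding class exposes the compiled ffi/lib pair for these modules; that import, the iteration count, and the salt are illustrative, not taken from this diff.

from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()          # assumed entry point; not part of this diff
ffi, lib = binding.ffi, binding.lib

# Guarded the same way the CONDITIONAL_NAMES machinery expects: the symbol is
# only usable when the feature flag is non-zero.
assert lib.Cryptography_HAS_PBKDF2_HMAC, "PBKDF2-HMAC needs OpenSSL 1.0.0+"

password = b"correct horse battery staple"
salt = b"\x00" * 16          # illustrative only; use a random salt in practice
key_len = 32
out = ffi.new("unsigned char[]", key_len)
md = lib.EVP_get_digestbyname(b"sha256")
assert md != ffi.NULL

res = lib.PKCS5_PBKDF2_HMAC(password, len(password), salt, len(salt),
                            100000, md, key_len, out)
assert res == 1
derived_key = ffi.buffer(out, key_len)[:]   # the 32 derived bytes as a bytes object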
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { ...; } HMAC_CTX; -""" - -FUNCTIONS = """ -void HMAC_CTX_init(HMAC_CTX *); -void HMAC_CTX_cleanup(HMAC_CTX *); - -int Cryptography_HMAC_Init_ex(HMAC_CTX *, const void *, int, const EVP_MD *, - ENGINE *); -int Cryptography_HMAC_Update(HMAC_CTX *, const unsigned char *, size_t); -int Cryptography_HMAC_Final(HMAC_CTX *, unsigned char *, unsigned int *); -int Cryptography_HMAC_CTX_copy(HMAC_CTX *, HMAC_CTX *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -int Cryptography_HMAC_Init_ex(HMAC_CTX *ctx, const void *key, int key_len, - const EVP_MD *md, ENGINE *impl) { -#if OPENSSL_VERSION_NUMBER >= 0x010000000 - return HMAC_Init_ex(ctx, key, key_len, md, impl); -#else - HMAC_Init_ex(ctx, key, key_len, md, impl); - return 1; -#endif -} - -int Cryptography_HMAC_Update(HMAC_CTX *ctx, const unsigned char *data, - size_t data_len) { -#if OPENSSL_VERSION_NUMBER >= 0x010000000 - return HMAC_Update(ctx, data, data_len); -#else - HMAC_Update(ctx, data, data_len); - return 1; -#endif -} - -int Cryptography_HMAC_Final(HMAC_CTX *ctx, unsigned char *digest, - unsigned int *outlen) { -#if OPENSSL_VERSION_NUMBER >= 0x010000000 - return HMAC_Final(ctx, digest, outlen); -#else - HMAC_Final(ctx, digest, outlen); - return 1; -#endif -} - -int Cryptography_HMAC_CTX_copy(HMAC_CTX *dst_ctx, HMAC_CTX *src_ctx) { -#if OPENSSL_VERSION_NUMBER >= 0x010000000 - return HMAC_CTX_copy(dst_ctx, src_ctx); -#else - HMAC_CTX_init(dst_ctx); - if (!EVP_MD_CTX_copy_ex(&dst_ctx->i_ctx, &src_ctx->i_ctx)) { - goto err; - } - if (!EVP_MD_CTX_copy_ex(&dst_ctx->o_ctx, &src_ctx->o_ctx)) { - goto err; - } - if (!EVP_MD_CTX_copy_ex(&dst_ctx->md_ctx, &src_ctx->md_ctx)) { - goto err; - } - memcpy(dst_ctx->key, src_ctx->key, HMAC_MAX_MD_CBLOCK); - dst_ctx->key_length = src_ctx->key_length; - dst_ctx->md = src_ctx->md; - return 1; - - err: - return 0; -#endif -} -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/nid.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/nid.py deleted file mode 100644 index 133d2ca..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/nid.py +++ /dev/null @@ -1,216 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
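The Cryptography_HMAC_* shims declared in hmac.py above exist to give the void-returning 0.9.8 HMAC functions the same int-returning shape as 1.0.0+. A hedged sketch of a one-shot HMAC-SHA256 through those shims, under the same Binding assumption as the previous sketch:

from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()
ffi, lib = binding.ffi, binding.lib

ctx = ffi.new("HMAC_CTX *")              # sized via the `...;` struct in TYPES
lib.HMAC_CTX_init(ctx)
try:
    key = b"secret-key"
    msg = b"message to authenticate"
    md = lib.EVP_get_digestbyname(b"sha256")
    assert lib.Cryptography_HMAC_Init_ex(ctx, key, len(key), md, ffi.NULL) == 1
    assert lib.Cryptography_HMAC_Update(ctx, msg, len(msg)) == 1
    buf = ffi.new("unsigned char[]", lib.EVP_MAX_MD_SIZE)
    outlen = ffi.new("unsigned int *")
    assert lib.Cryptography_HMAC_Final(ctx, buf, outlen) == 1
    tag = ffi.buffer(buf, outlen[0])[:]  # 32-byte HMAC-SHA256 tag
finally:
    lib.HMAC_CTX_cleanup(ctx)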
- -from __future__ import absolute_import, division, print_function - -INCLUDES = "" - -TYPES = """ -static const int Cryptography_HAS_ECDSA_SHA2_NIDS; - -static const int NID_undef; -static const int NID_dsa; -static const int NID_dsaWithSHA; -static const int NID_dsaWithSHA1; -static const int NID_md2; -static const int NID_md4; -static const int NID_md5; -static const int NID_mdc2; -static const int NID_ripemd160; -static const int NID_sha; -static const int NID_sha1; -static const int NID_sha256; -static const int NID_sha384; -static const int NID_sha512; -static const int NID_sha224; -static const int NID_sha; -static const int NID_ecdsa_with_SHA1; -static const int NID_ecdsa_with_SHA224; -static const int NID_ecdsa_with_SHA256; -static const int NID_ecdsa_with_SHA384; -static const int NID_ecdsa_with_SHA512; -static const int NID_crl_reason; -static const int NID_pbe_WithSHA1And3_Key_TripleDES_CBC; -static const int NID_subject_alt_name; -static const int NID_issuer_alt_name; -static const int NID_X9_62_c2pnb163v1; -static const int NID_X9_62_c2pnb163v2; -static const int NID_X9_62_c2pnb163v3; -static const int NID_X9_62_c2pnb176v1; -static const int NID_X9_62_c2tnb191v1; -static const int NID_X9_62_c2tnb191v2; -static const int NID_X9_62_c2tnb191v3; -static const int NID_X9_62_c2onb191v4; -static const int NID_X9_62_c2onb191v5; -static const int NID_X9_62_c2pnb208w1; -static const int NID_X9_62_c2tnb239v1; -static const int NID_X9_62_c2tnb239v2; -static const int NID_X9_62_c2tnb239v3; -static const int NID_X9_62_c2onb239v4; -static const int NID_X9_62_c2onb239v5; -static const int NID_X9_62_c2pnb272w1; -static const int NID_X9_62_c2pnb304w1; -static const int NID_X9_62_c2tnb359v1; -static const int NID_X9_62_c2pnb368w1; -static const int NID_X9_62_c2tnb431r1; -static const int NID_X9_62_prime192v1; -static const int NID_X9_62_prime192v2; -static const int NID_X9_62_prime192v3; -static const int NID_X9_62_prime239v1; -static const int NID_X9_62_prime239v2; -static const int NID_X9_62_prime239v3; -static const int NID_X9_62_prime256v1; -static const int NID_secp112r1; -static const int NID_secp112r2; -static const int NID_secp128r1; -static const int NID_secp128r2; -static const int NID_secp160k1; -static const int NID_secp160r1; -static const int NID_secp160r2; -static const int NID_sect163k1; -static const int NID_sect163r1; -static const int NID_sect163r2; -static const int NID_secp192k1; -static const int NID_secp224k1; -static const int NID_secp224r1; -static const int NID_secp256k1; -static const int NID_secp384r1; -static const int NID_secp521r1; -static const int NID_sect113r1; -static const int NID_sect113r2; -static const int NID_sect131r1; -static const int NID_sect131r2; -static const int NID_sect193r1; -static const int NID_sect193r2; -static const int NID_sect233k1; -static const int NID_sect233r1; -static const int NID_sect239k1; -static const int NID_sect283k1; -static const int NID_sect283r1; -static const int NID_sect409k1; -static const int NID_sect409r1; -static const int NID_sect571k1; -static const int NID_sect571r1; -static const int NID_wap_wsg_idm_ecid_wtls1; -static const int NID_wap_wsg_idm_ecid_wtls3; -static const int NID_wap_wsg_idm_ecid_wtls4; -static const int NID_wap_wsg_idm_ecid_wtls5; -static const int NID_wap_wsg_idm_ecid_wtls6; -static const int NID_wap_wsg_idm_ecid_wtls7; -static const int NID_wap_wsg_idm_ecid_wtls8; -static const int NID_wap_wsg_idm_ecid_wtls9; -static const int NID_wap_wsg_idm_ecid_wtls10; -static const int 
NID_wap_wsg_idm_ecid_wtls11; -static const int NID_wap_wsg_idm_ecid_wtls12; -static const int NID_ipsec3; -static const int NID_ipsec4; -static const char *const SN_X9_62_c2pnb163v1; -static const char *const SN_X9_62_c2pnb163v2; -static const char *const SN_X9_62_c2pnb163v3; -static const char *const SN_X9_62_c2pnb176v1; -static const char *const SN_X9_62_c2tnb191v1; -static const char *const SN_X9_62_c2tnb191v2; -static const char *const SN_X9_62_c2tnb191v3; -static const char *const SN_X9_62_c2onb191v4; -static const char *const SN_X9_62_c2onb191v5; -static const char *const SN_X9_62_c2pnb208w1; -static const char *const SN_X9_62_c2tnb239v1; -static const char *const SN_X9_62_c2tnb239v2; -static const char *const SN_X9_62_c2tnb239v3; -static const char *const SN_X9_62_c2onb239v4; -static const char *const SN_X9_62_c2onb239v5; -static const char *const SN_X9_62_c2pnb272w1; -static const char *const SN_X9_62_c2pnb304w1; -static const char *const SN_X9_62_c2tnb359v1; -static const char *const SN_X9_62_c2pnb368w1; -static const char *const SN_X9_62_c2tnb431r1; -static const char *const SN_X9_62_prime192v1; -static const char *const SN_X9_62_prime192v2; -static const char *const SN_X9_62_prime192v3; -static const char *const SN_X9_62_prime239v1; -static const char *const SN_X9_62_prime239v2; -static const char *const SN_X9_62_prime239v3; -static const char *const SN_X9_62_prime256v1; -static const char *const SN_secp112r1; -static const char *const SN_secp112r2; -static const char *const SN_secp128r1; -static const char *const SN_secp128r2; -static const char *const SN_secp160k1; -static const char *const SN_secp160r1; -static const char *const SN_secp160r2; -static const char *const SN_sect163k1; -static const char *const SN_sect163r1; -static const char *const SN_sect163r2; -static const char *const SN_secp192k1; -static const char *const SN_secp224k1; -static const char *const SN_secp224r1; -static const char *const SN_secp256k1; -static const char *const SN_secp384r1; -static const char *const SN_secp521r1; -static const char *const SN_sect113r1; -static const char *const SN_sect113r2; -static const char *const SN_sect131r1; -static const char *const SN_sect131r2; -static const char *const SN_sect193r1; -static const char *const SN_sect193r2; -static const char *const SN_sect233k1; -static const char *const SN_sect233r1; -static const char *const SN_sect239k1; -static const char *const SN_sect283k1; -static const char *const SN_sect283r1; -static const char *const SN_sect409k1; -static const char *const SN_sect409r1; -static const char *const SN_sect571k1; -static const char *const SN_sect571r1; -static const char *const SN_wap_wsg_idm_ecid_wtls1; -static const char *const SN_wap_wsg_idm_ecid_wtls3; -static const char *const SN_wap_wsg_idm_ecid_wtls4; -static const char *const SN_wap_wsg_idm_ecid_wtls5; -static const char *const SN_wap_wsg_idm_ecid_wtls6; -static const char *const SN_wap_wsg_idm_ecid_wtls7; -static const char *const SN_wap_wsg_idm_ecid_wtls8; -static const char *const SN_wap_wsg_idm_ecid_wtls9; -static const char *const SN_wap_wsg_idm_ecid_wtls10; -static const char *const SN_wap_wsg_idm_ecid_wtls11; -static const char *const SN_wap_wsg_idm_ecid_wtls12; -static const char *const SN_ipsec3; -static const char *const SN_ipsec4; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -/* OpenSSL 0.9.8g+ */ -#if OPENSSL_VERSION_NUMBER >= 0x0090807fL -static const long Cryptography_HAS_ECDSA_SHA2_NIDS = 1; -#else -static const long 
Cryptography_HAS_ECDSA_SHA2_NIDS = 0; -static const int NID_ecdsa_with_SHA224 = 0; -static const int NID_ecdsa_with_SHA256 = 0; -static const int NID_ecdsa_with_SHA384 = 0; -static const int NID_ecdsa_with_SHA512 = 0; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_ECDSA_SHA2_NIDS": [ - "NID_ecdsa_with_SHA224", - "NID_ecdsa_with_SHA256", - "NID_ecdsa_with_SHA384", - "NID_ecdsa_with_SHA512", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/objects.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/objects.py deleted file mode 100644 index 557c015..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/objects.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -""" - -FUNCTIONS = """ -ASN1_OBJECT *OBJ_nid2obj(int); -const char *OBJ_nid2ln(int); -const char *OBJ_nid2sn(int); -int OBJ_obj2nid(const ASN1_OBJECT *); -int OBJ_ln2nid(const char *); -int OBJ_sn2nid(const char *); -int OBJ_txt2nid(const char *); -ASN1_OBJECT *OBJ_txt2obj(const char *, int); -int OBJ_obj2txt(char *, int, const ASN1_OBJECT *, int); -int OBJ_cmp(const ASN1_OBJECT *, const ASN1_OBJECT *); -ASN1_OBJECT *OBJ_dup(const ASN1_OBJECT *); -int OBJ_create(const char *, const char *, const char *); -void OBJ_cleanup(void); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py deleted file mode 100644 index ef6e057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -/* Note that these will be resolved when cryptography is compiled and are NOT - guaranteed to be the version that it actually loads. 
*/ -static const int OPENSSL_VERSION_NUMBER; -static const char *const OPENSSL_VERSION_TEXT; -""" - -FUNCTIONS = """ -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py deleted file mode 100644 index 462997c..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py +++ /dev/null @@ -1,218 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#ifdef _WIN32 -#include -#else -#include -#include -#endif -""" - -TYPES = """ -static const char *const Cryptography_osrandom_engine_name; -static const char *const Cryptography_osrandom_engine_id; -""" - -FUNCTIONS = """ -int Cryptography_add_osrandom_engine(void); -""" - -MACROS = """ -""" - -WIN32_CUSTOMIZATIONS = """ -static HCRYPTPROV hCryptProv = 0; - -static int osrandom_init(ENGINE *e) { - if (hCryptProv > 0) { - return 1; - } - if (CryptAcquireContext(&hCryptProv, NULL, NULL, - PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) { - return 1; - } else { - return 0; - } -} - -static int osrandom_rand_bytes(unsigned char *buffer, int size) { - if (hCryptProv == 0) { - return 0; - } - - if (!CryptGenRandom(hCryptProv, (DWORD)size, buffer)) { - ERR_put_error( - ERR_LIB_RAND, 0, ERR_R_RAND_LIB, "osrandom_engine.py", 0 - ); - return 0; - } - return 1; -} - -static int osrandom_finish(ENGINE *e) { - if (CryptReleaseContext(hCryptProv, 0)) { - hCryptProv = 0; - return 1; - } else { - return 0; - } -} - -static int osrandom_rand_status(void) { - if (hCryptProv == 0) { - return 0; - } else { - return 1; - } -} -""" - -POSIX_CUSTOMIZATIONS = """ -static int urandom_fd = -1; - -static int osrandom_finish(ENGINE *e); - -static int osrandom_init(ENGINE *e) { - if (urandom_fd > -1) { - return 1; - } - urandom_fd = open("/dev/urandom", O_RDONLY); - if (urandom_fd > -1) { - int flags = fcntl(urandom_fd, F_GETFD); - if (flags == -1) { - osrandom_finish(e); - return 0; - } else if (fcntl(urandom_fd, F_SETFD, flags | FD_CLOEXEC) == -1) { - osrandom_finish(e); - return 0; - } - return 1; - } else { - return 0; - } -} - -static int osrandom_rand_bytes(unsigned char *buffer, int size) { - ssize_t n; - while (size > 0) { - do { - n = read(urandom_fd, buffer, (size_t)size); - } while (n < 0 && errno == EINTR); - if (n <= 0) { - ERR_put_error( - ERR_LIB_RAND, 0, ERR_R_RAND_LIB, "osrandom_engine.py", 0 - ); - return 0; - } - buffer += n; - size -= n; - } - return 1; -} - -static int osrandom_finish(ENGINE *e) { - int n; - do { - n = close(urandom_fd); - } while (n < 0 && errno == EINTR); - urandom_fd = -1; - if (n < 0) { - return 0; - } else { - return 1; - } -} - -static int osrandom_rand_status(void) { - if (urandom_fd == -1) { - return 0; - } else { - return 1; - } -} -""" - -CUSTOMIZATIONS = """ -static const char 
*Cryptography_osrandom_engine_id = "osrandom"; -static const char *Cryptography_osrandom_engine_name = "osrandom_engine"; - -#if defined(_WIN32) -%(WIN32_CUSTOMIZATIONS)s -#else -%(POSIX_CUSTOMIZATIONS)s -#endif - -/* This replicates the behavior of the OpenSSL FIPS RNG, which returns a - -1 in the event that there is an error when calling RAND_pseudo_bytes. */ -static int osrandom_pseudo_rand_bytes(unsigned char *buffer, int size) { - int res = osrandom_rand_bytes(buffer, size); - if (res == 0) { - return -1; - } else { - return res; - } -} - -static RAND_METHOD osrandom_rand = { - NULL, - osrandom_rand_bytes, - NULL, - NULL, - osrandom_pseudo_rand_bytes, - osrandom_rand_status, -}; - -/* Returns 1 if successfully added, 2 if engine has previously been added, - and 0 for error. */ -int Cryptography_add_osrandom_engine(void) { - ENGINE *e; - e = ENGINE_by_id(Cryptography_osrandom_engine_id); - if (e != NULL) { - ENGINE_free(e); - return 2; - } else { - ERR_clear_error(); - } - - e = ENGINE_new(); - if (e == NULL) { - return 0; - } - if(!ENGINE_set_id(e, Cryptography_osrandom_engine_id) || - !ENGINE_set_name(e, Cryptography_osrandom_engine_name) || - !ENGINE_set_RAND(e, &osrandom_rand) || - !ENGINE_set_init_function(e, osrandom_init) || - !ENGINE_set_finish_function(e, osrandom_finish)) { - ENGINE_free(e); - return 0; - } - if (!ENGINE_add(e)) { - ENGINE_free(e); - return 0; - } - if (!ENGINE_free(e)) { - return 0; - } - - return 1; -} -""" % { - "WIN32_CUSTOMIZATIONS": WIN32_CUSTOMIZATIONS, - "POSIX_CUSTOMIZATIONS": POSIX_CUSTOMIZATIONS, -} - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pem.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pem.py deleted file mode 100644 index 752f198..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pem.py +++ /dev/null @@ -1,89 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
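[Editor's note] The osrandom engine code removed above registers a custom OpenSSL RAND ENGINE backed by CryptGenRandom on Windows and /dev/urandom elsewhere. A minimal sketch (not part of the diff) of how that entry point is typically driven from Python follows; the lib object is assumed to be a cffi library compiled from these binding modules, and the return codes follow the comment in the C source above (1 = newly added, 2 = already registered, 0 = error).

def register_osrandom_engine(lib):
    # Cryptography_add_osrandom_engine() is declared in the FUNCTIONS string above.
    result = lib.Cryptography_add_osrandom_engine()
    if result == 1:
        return "registered"
    if result == 2:
        return "already registered"
    raise RuntimeError("osrandom engine could not be registered")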
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef int pem_password_cb(char *buf, int size, int rwflag, void *userdata); -""" - -FUNCTIONS = """ -X509 *PEM_read_bio_X509(BIO *, X509 **, pem_password_cb *, void *); -int PEM_write_bio_X509(BIO *, X509 *); - -int PEM_write_bio_PrivateKey(BIO *, EVP_PKEY *, const EVP_CIPHER *, - unsigned char *, int, pem_password_cb *, void *); - -EVP_PKEY *PEM_read_bio_PrivateKey(BIO *, EVP_PKEY **, pem_password_cb *, - void *); - -int PEM_write_bio_PKCS8PrivateKey(BIO *, EVP_PKEY *, const EVP_CIPHER *, - char *, int, pem_password_cb *, void *); -int PEM_write_bio_PKCS8PrivateKey_nid(BIO *, EVP_PKEY *, int, char *, int, - pem_password_cb *, void *); - -int i2d_PKCS8PrivateKey_bio(BIO *, EVP_PKEY *, const EVP_CIPHER *, - char *, int, pem_password_cb *, void *); -int i2d_PKCS8PrivateKey_nid_bio(BIO *, EVP_PKEY *, int, - char *, int, pem_password_cb *, void *); - -PKCS7 *d2i_PKCS7_bio(BIO *, PKCS7 **); -EVP_PKEY *d2i_PKCS8PrivateKey_bio(BIO *, EVP_PKEY **, pem_password_cb *, - void *); - -int PEM_write_bio_X509_REQ(BIO *, X509_REQ *); - -X509_REQ *PEM_read_bio_X509_REQ(BIO *, X509_REQ **, pem_password_cb *, void *); - -X509_CRL *PEM_read_bio_X509_CRL(BIO *, X509_CRL **, pem_password_cb *, void *); - -int PEM_write_bio_X509_CRL(BIO *, X509_CRL *); - -PKCS7 *PEM_read_bio_PKCS7(BIO *, PKCS7 **, pem_password_cb *, void *); -DH *PEM_read_bio_DHparams(BIO *, DH **, pem_password_cb *, void *); - -DSA *PEM_read_bio_DSAPrivateKey(BIO *, DSA **, pem_password_cb *, void *); - -RSA *PEM_read_bio_RSAPrivateKey(BIO *, RSA **, pem_password_cb *, void *); - -int PEM_write_bio_DSAPrivateKey(BIO *, DSA *, const EVP_CIPHER *, - unsigned char *, int, - pem_password_cb *, void *); - -int PEM_write_bio_RSAPrivateKey(BIO *, RSA *, const EVP_CIPHER *, - unsigned char *, int, - pem_password_cb *, void *); - -DSA *PEM_read_bio_DSA_PUBKEY(BIO *, DSA **, pem_password_cb *, void *); - -RSA *PEM_read_bio_RSAPublicKey(BIO *, RSA **, pem_password_cb *, void *); - -int PEM_write_bio_DSA_PUBKEY(BIO *, DSA *); - -int PEM_write_bio_RSAPublicKey(BIO *, const RSA *); - -EVP_PKEY *PEM_read_bio_PUBKEY(BIO *, EVP_PKEY **, pem_password_cb *, void *); -int PEM_write_bio_PUBKEY(BIO *, EVP_PKEY *); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py deleted file mode 100644 index a8f106f..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py +++ /dev/null @@ -1,41 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef ... 
PKCS12; -""" - -FUNCTIONS = """ -void PKCS12_free(PKCS12 *); - -PKCS12 *d2i_PKCS12_bio(BIO *, PKCS12 **); -int i2d_PKCS12_bio(BIO *, PKCS12 *); -""" - -MACROS = """ -int PKCS12_parse(PKCS12 *, const char *, EVP_PKEY **, X509 **, - Cryptography_STACK_OF_X509 **); -PKCS12 *PKCS12_create(char *, char *, EVP_PKEY *, X509 *, - Cryptography_STACK_OF_X509 *, int, int, int, int, int); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py deleted file mode 100644 index 1343e56..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py +++ /dev/null @@ -1,41 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { - ASN1_OBJECT *type; - ...; -} PKCS7; -""" - -FUNCTIONS = """ -void PKCS7_free(PKCS7 *); -""" - -MACROS = """ -int PKCS7_type_is_signed(PKCS7 *); -int PKCS7_type_is_enveloped(PKCS7 *); -int PKCS7_type_is_signedAndEnveloped(PKCS7 *); -int PKCS7_type_is_data(PKCS7 *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rand.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rand.py deleted file mode 100644 index 7b1be9d..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rand.py +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
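[Editor's note] A small illustrative helper, not part of the diff, showing how the PKCS7 type macros declared above would be used through the compiled bindings. The lib object is the assumed cffi library, and p7 is a PKCS7 pointer obtained elsewhere, for example from d2i_PKCS7_bio declared in pem.py earlier in this diff.

def pkcs7_kind(lib, p7):
    # Each macro returns non-zero when the PKCS7 content type matches.
    if lib.PKCS7_type_is_signed(p7):
        return "signed"
    if lib.PKCS7_type_is_enveloped(p7):
        return "enveloped"
    if lib.PKCS7_type_is_signedAndEnveloped(p7):
        return "signed-and-enveloped"
    if lib.PKCS7_type_is_data(p7):
        return "data"
    return "other"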
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -""" - -FUNCTIONS = """ -void ERR_load_RAND_strings(void); -void RAND_seed(const void *, int); -void RAND_add(const void *, int, double); -int RAND_status(void); -int RAND_egd(const char *); -int RAND_egd_bytes(const char *, int); -int RAND_query_egd_bytes(const char *, unsigned char *, int); -const char *RAND_file_name(char *, size_t); -int RAND_load_file(const char *, long); -int RAND_write_file(const char *); -void RAND_cleanup(void); -int RAND_bytes(unsigned char *, int); -int RAND_pseudo_bytes(unsigned char *, int); -""" - -MACROS = """ -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rsa.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rsa.py deleted file mode 100644 index cb8e701..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/rsa.py +++ /dev/null @@ -1,108 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct rsa_st { - BIGNUM *n; - BIGNUM *e; - BIGNUM *d; - BIGNUM *p; - BIGNUM *q; - BIGNUM *dmp1; - BIGNUM *dmq1; - BIGNUM *iqmp; - ...; -} RSA; -typedef ... 
BN_GENCB; -static const int RSA_PKCS1_PADDING; -static const int RSA_SSLV23_PADDING; -static const int RSA_NO_PADDING; -static const int RSA_PKCS1_OAEP_PADDING; -static const int RSA_X931_PADDING; -static const int RSA_PKCS1_PSS_PADDING; -static const int RSA_F4; - -static const int Cryptography_HAS_PSS_PADDING; -static const int Cryptography_HAS_MGF1_MD; -""" - -FUNCTIONS = """ -RSA *RSA_new(void); -void RSA_free(RSA *); -int RSA_size(const RSA *); -int RSA_generate_key_ex(RSA *, int, BIGNUM *, BN_GENCB *); -int RSA_check_key(const RSA *); -RSA *RSAPublicKey_dup(RSA *); -int RSA_blinding_on(RSA *, BN_CTX *); -void RSA_blinding_off(RSA *); -int RSA_public_encrypt(int, const unsigned char *, unsigned char *, - RSA *, int); -int RSA_private_encrypt(int, const unsigned char *, unsigned char *, - RSA *, int); -int RSA_public_decrypt(int, const unsigned char *, unsigned char *, - RSA *, int); -int RSA_private_decrypt(int, const unsigned char *, unsigned char *, - RSA *, int); -int RSA_print(BIO *, const RSA *, int); -int RSA_verify_PKCS1_PSS(RSA *, const unsigned char *, const EVP_MD *, - const unsigned char *, int); -int RSA_padding_add_PKCS1_PSS(RSA *, unsigned char *, const unsigned char *, - const EVP_MD *, int); -int RSA_padding_add_PKCS1_OAEP(unsigned char *, int, const unsigned char *, - int, const unsigned char *, int); -int RSA_padding_check_PKCS1_OAEP(unsigned char *, int, const unsigned char *, - int, int, const unsigned char *, int); -""" - -MACROS = """ -int EVP_PKEY_CTX_set_rsa_padding(EVP_PKEY_CTX *, int); -int EVP_PKEY_CTX_set_rsa_pss_saltlen(EVP_PKEY_CTX *, int); -int EVP_PKEY_CTX_set_rsa_mgf1_md(EVP_PKEY_CTX *, EVP_MD *); -""" - -CUSTOMIZATIONS = """ -#if OPENSSL_VERSION_NUMBER >= 0x10000000 -static const long Cryptography_HAS_PSS_PADDING = 1; -#else -/* see evp.py for the definition of Cryptography_HAS_PKEY_CTX */ -static const long Cryptography_HAS_PSS_PADDING = 0; -int (*EVP_PKEY_CTX_set_rsa_padding)(EVP_PKEY_CTX *, int) = NULL; -int (*EVP_PKEY_CTX_set_rsa_pss_saltlen)(EVP_PKEY_CTX *, int) = NULL; -static const long RSA_PKCS1_PSS_PADDING = 0; -#endif -#if OPENSSL_VERSION_NUMBER >= 0x1000100f -static const long Cryptography_HAS_MGF1_MD = 1; -#else -static const long Cryptography_HAS_MGF1_MD = 0; -int (*EVP_PKEY_CTX_set_rsa_mgf1_md)(EVP_PKEY_CTX *, EVP_MD *) = NULL; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_PKEY_CTX": [ - "EVP_PKEY_CTX_set_rsa_padding", - "EVP_PKEY_CTX_set_rsa_pss_saltlen", - ], - "Cryptography_HAS_PSS_PADDING": [ - "RSA_PKCS1_PSS_PADDING", - ], - "Cryptography_HAS_MGF1_MD": [ - "EVP_PKEY_CTX_set_rsa_mgf1_md", - ], -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ssl.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ssl.py deleted file mode 100644 index 7d805e7..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/ssl.py +++ /dev/null @@ -1,620 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
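[Editor's note] rsa.py above follows the pattern used throughout these modules: CUSTOMIZATIONS compiles a Cryptography_HAS_* flag and NULL function-pointer placeholders when a feature is missing from the linked OpenSSL, and CONDITIONAL_NAMES maps each flag to the names that must then be ignored at runtime. A sketch of that consumption step under the assumption of a compiled cffi lib object; the package's actual removal logic is not shown in this diff.

def unavailable_names(lib, conditional_names):
    # Collect every name whose compile-time Cryptography_HAS_* guard evaluated to 0.
    missing = []
    for flag, names in conditional_names.items():
        if not getattr(lib, flag):
            missing.extend(names)
    return missing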
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include - -typedef STACK_OF(SSL_CIPHER) Cryptography_STACK_OF_SSL_CIPHER; -""" - -TYPES = """ -/* - * Internally invented symbols to tell which versions of SSL/TLS are supported. -*/ -static const long Cryptography_HAS_SSL2; -static const long Cryptography_HAS_TLSv1_1; -static const long Cryptography_HAS_TLSv1_2; -static const long Cryptography_HAS_SECURE_RENEGOTIATION; - -/* Internally invented symbol to tell us if SNI is supported */ -static const long Cryptography_HAS_TLSEXT_HOSTNAME; - -/* Internally invented symbol to tell us if SSL_MODE_RELEASE_BUFFERS is - * supported - */ -static const long Cryptography_HAS_RELEASE_BUFFERS; - -/* Internally invented symbol to tell us if SSL_OP_NO_COMPRESSION is - * supported - */ -static const long Cryptography_HAS_OP_NO_COMPRESSION; - -static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING; -static const long Cryptography_HAS_SSL_SET_SSL_CTX; -static const long Cryptography_HAS_SSL_OP_NO_TICKET; -static const long Cryptography_HAS_NETBSD_D1_METH; -static const long Cryptography_HAS_NEXTPROTONEG; -static const long Cryptography_HAS_ALPN; - -static const long SSL_FILETYPE_PEM; -static const long SSL_FILETYPE_ASN1; -static const long SSL_ERROR_NONE; -static const long SSL_ERROR_ZERO_RETURN; -static const long SSL_ERROR_WANT_READ; -static const long SSL_ERROR_WANT_WRITE; -static const long SSL_ERROR_WANT_X509_LOOKUP; -static const long SSL_ERROR_SYSCALL; -static const long SSL_ERROR_SSL; -static const long SSL_SENT_SHUTDOWN; -static const long SSL_RECEIVED_SHUTDOWN; -static const long SSL_OP_NO_SSLv2; -static const long SSL_OP_NO_SSLv3; -static const long SSL_OP_NO_TLSv1; -static const long SSL_OP_NO_TLSv1_1; -static const long SSL_OP_NO_TLSv1_2; -static const long SSL_OP_NO_COMPRESSION; -static const long SSL_OP_SINGLE_DH_USE; -static const long SSL_OP_EPHEMERAL_RSA; -static const long SSL_OP_MICROSOFT_SESS_ID_BUG; -static const long SSL_OP_NETSCAPE_CHALLENGE_BUG; -static const long SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG; -static const long SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG; -static const long SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER; -static const long SSL_OP_MSIE_SSLV2_RSA_PADDING; -static const long SSL_OP_SSLEAY_080_CLIENT_DH_BUG; -static const long SSL_OP_TLS_D5_BUG; -static const long SSL_OP_TLS_BLOCK_PADDING_BUG; -static const long SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS; -static const long SSL_OP_CIPHER_SERVER_PREFERENCE; -static const long SSL_OP_TLS_ROLLBACK_BUG; -static const long SSL_OP_PKCS1_CHECK_1; -static const long SSL_OP_PKCS1_CHECK_2; -static const long SSL_OP_NETSCAPE_CA_DN_BUG; -static const long SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG; -static const long SSL_OP_NO_QUERY_MTU; -static const long SSL_OP_COOKIE_EXCHANGE; -static const long SSL_OP_NO_TICKET; -static const long SSL_OP_ALL; -static const long SSL_OP_SINGLE_ECDH_USE; -static const long SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION; -static const long SSL_OP_LEGACY_SERVER_CONNECT; -static const long SSL_VERIFY_PEER; -static const long SSL_VERIFY_FAIL_IF_NO_PEER_CERT; -static const long SSL_VERIFY_CLIENT_ONCE; -static const long SSL_VERIFY_NONE; -static const long SSL_SESS_CACHE_OFF; -static const long SSL_SESS_CACHE_CLIENT; -static const long SSL_SESS_CACHE_SERVER; -static const long SSL_SESS_CACHE_BOTH; -static const long SSL_SESS_CACHE_NO_AUTO_CLEAR; -static const long SSL_SESS_CACHE_NO_INTERNAL_LOOKUP; -static const long SSL_SESS_CACHE_NO_INTERNAL_STORE; -static const long 
SSL_SESS_CACHE_NO_INTERNAL; -static const long SSL_ST_CONNECT; -static const long SSL_ST_ACCEPT; -static const long SSL_ST_MASK; -static const long SSL_ST_INIT; -static const long SSL_ST_BEFORE; -static const long SSL_ST_OK; -static const long SSL_ST_RENEGOTIATE; -static const long SSL_CB_LOOP; -static const long SSL_CB_EXIT; -static const long SSL_CB_READ; -static const long SSL_CB_WRITE; -static const long SSL_CB_ALERT; -static const long SSL_CB_READ_ALERT; -static const long SSL_CB_WRITE_ALERT; -static const long SSL_CB_ACCEPT_LOOP; -static const long SSL_CB_ACCEPT_EXIT; -static const long SSL_CB_CONNECT_LOOP; -static const long SSL_CB_CONNECT_EXIT; -static const long SSL_CB_HANDSHAKE_START; -static const long SSL_CB_HANDSHAKE_DONE; -static const long SSL_MODE_RELEASE_BUFFERS; -static const long SSL_MODE_ENABLE_PARTIAL_WRITE; -static const long SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER; -static const long SSL_MODE_AUTO_RETRY; -static const long SSL3_RANDOM_SIZE; -typedef ... SSL_METHOD; -typedef struct ssl_st { - int version; - int type; - ...; -} SSL_CTX; - -typedef struct { - int master_key_length; - unsigned char master_key[...]; - ...; -} SSL_SESSION; - -typedef struct { - unsigned char server_random[...]; - unsigned char client_random[...]; - ...; -} SSL3_STATE; - -typedef struct { - SSL3_STATE *s3; - SSL_SESSION *session; - int type; - ...; -} SSL; - -static const long TLSEXT_NAMETYPE_host_name; - -typedef ... SSL_CIPHER; -typedef ... Cryptography_STACK_OF_SSL_CIPHER; -typedef ... COMP_METHOD; -""" - -FUNCTIONS = """ -void SSL_load_error_strings(void); -int SSL_library_init(void); - -/* SSL */ -const char *SSL_state_string_long(const SSL *); -SSL_SESSION *SSL_get1_session(SSL *); -int SSL_set_session(SSL *, SSL_SESSION *); -int SSL_get_verify_mode(const SSL *); -void SSL_set_verify_depth(SSL *, int); -int SSL_get_verify_depth(const SSL *); -int (*SSL_get_verify_callback(const SSL *))(int, X509_STORE_CTX *); -void SSL_set_info_callback(SSL *ssl, void (*)(const SSL *, int, int)); -void (*SSL_get_info_callback(const SSL *))(const SSL *, int, int); -SSL *SSL_new(SSL_CTX *); -void SSL_free(SSL *); -int SSL_set_fd(SSL *, int); -void SSL_set_bio(SSL *, BIO *, BIO *); -void SSL_set_connect_state(SSL *); -void SSL_set_accept_state(SSL *); -void SSL_set_shutdown(SSL *, int); -int SSL_get_shutdown(const SSL *); -int SSL_pending(const SSL *); -int SSL_write(SSL *, const void *, int); -int SSL_read(SSL *, void *, int); -X509 *SSL_get_peer_certificate(const SSL *); -int SSL_get_ex_data_X509_STORE_CTX_idx(void); - -Cryptography_STACK_OF_X509 *SSL_get_peer_cert_chain(const SSL *); -Cryptography_STACK_OF_X509_NAME *SSL_get_client_CA_list(const SSL *); - -int SSL_get_error(const SSL *, int); -int SSL_do_handshake(SSL *); -int SSL_shutdown(SSL *); -const char *SSL_get_cipher_list(const SSL *, int); -Cryptography_STACK_OF_SSL_CIPHER *SSL_get_ciphers(const SSL *); - -const COMP_METHOD *SSL_get_current_compression(SSL *); -const COMP_METHOD *SSL_get_current_expansion(SSL *); -const char *SSL_COMP_get_name(const COMP_METHOD *); - -/* context */ -void SSL_CTX_free(SSL_CTX *); -long SSL_CTX_set_timeout(SSL_CTX *, long); -int SSL_CTX_set_default_verify_paths(SSL_CTX *); -void SSL_CTX_set_verify(SSL_CTX *, int, int (*)(int, X509_STORE_CTX *)); -void SSL_CTX_set_verify_depth(SSL_CTX *, int); -int (*SSL_CTX_get_verify_callback(const SSL_CTX *))(int, X509_STORE_CTX *); -int SSL_CTX_get_verify_mode(const SSL_CTX *); -int SSL_CTX_get_verify_depth(const SSL_CTX *); -int SSL_CTX_set_cipher_list(SSL_CTX *, const char *); 
-int SSL_CTX_load_verify_locations(SSL_CTX *, const char *, const char *); -void SSL_CTX_set_default_passwd_cb(SSL_CTX *, pem_password_cb *); -void SSL_CTX_set_default_passwd_cb_userdata(SSL_CTX *, void *); -int SSL_CTX_use_certificate(SSL_CTX *, X509 *); -int SSL_CTX_use_certificate_file(SSL_CTX *, const char *, int); -int SSL_CTX_use_certificate_chain_file(SSL_CTX *, const char *); -int SSL_CTX_use_PrivateKey(SSL_CTX *, EVP_PKEY *); -int SSL_CTX_use_PrivateKey_file(SSL_CTX *, const char *, int); -void SSL_CTX_set_cert_store(SSL_CTX *, X509_STORE *); -X509_STORE *SSL_CTX_get_cert_store(const SSL_CTX *); -int SSL_CTX_add_client_CA(SSL_CTX *, X509 *); - -void SSL_CTX_set_client_CA_list(SSL_CTX *, Cryptography_STACK_OF_X509_NAME *); - -/* SSL_SESSION */ -void SSL_SESSION_free(SSL_SESSION *); - -/* Information about actually used cipher */ -const char *SSL_CIPHER_get_name(const SSL_CIPHER *); -int SSL_CIPHER_get_bits(const SSL_CIPHER *, int *); -char *SSL_CIPHER_get_version(const SSL_CIPHER *); - -size_t SSL_get_finished(const SSL *, void *, size_t); -size_t SSL_get_peer_finished(const SSL *, void *, size_t); -""" - -MACROS = """ -unsigned long SSL_set_mode(SSL *, unsigned long); -unsigned long SSL_get_mode(SSL *); - -unsigned long SSL_set_options(SSL *, unsigned long); -unsigned long SSL_get_options(SSL *); - -int SSL_want_read(const SSL *); -int SSL_want_write(const SSL *); - -long SSL_total_renegotiations(SSL *); -long SSL_get_secure_renegotiation_support(SSL *); - -/* Defined as unsigned long because SSL_OP_ALL is greater than signed 32-bit - and Windows defines long as 32-bit. */ -unsigned long SSL_CTX_set_options(SSL_CTX *, unsigned long); -unsigned long SSL_CTX_get_options(SSL_CTX *); -unsigned long SSL_CTX_set_mode(SSL_CTX *, unsigned long); -unsigned long SSL_CTX_get_mode(SSL_CTX *); -unsigned long SSL_CTX_set_session_cache_mode(SSL_CTX *, unsigned long); -unsigned long SSL_CTX_get_session_cache_mode(SSL_CTX *); -unsigned long SSL_CTX_set_tmp_dh(SSL_CTX *, DH *); -unsigned long SSL_CTX_set_tmp_ecdh(SSL_CTX *, EC_KEY *); -unsigned long SSL_CTX_add_extra_chain_cert(SSL_CTX *, X509 *); - -/*- These aren't macros these functions are all const X on openssl > 1.0.x -*/ - -/* methods */ - -/* SSLv2 support is compiled out of some versions of OpenSSL. These will - * get special support when we generate the bindings so that if they are - * available they will be wrapped, but if they are not they won't cause - * problems (like link errors). - */ -const SSL_METHOD *SSLv2_method(void); -const SSL_METHOD *SSLv2_server_method(void); -const SSL_METHOD *SSLv2_client_method(void); - -/* - * TLSv1_1 and TLSv1_2 are recent additions. Only sufficiently new versions of - * OpenSSL support them. 
- */ -const SSL_METHOD *TLSv1_1_method(void); -const SSL_METHOD *TLSv1_1_server_method(void); -const SSL_METHOD *TLSv1_1_client_method(void); - -const SSL_METHOD *TLSv1_2_method(void); -const SSL_METHOD *TLSv1_2_server_method(void); -const SSL_METHOD *TLSv1_2_client_method(void); - -const SSL_METHOD *SSLv3_method(void); -const SSL_METHOD *SSLv3_server_method(void); -const SSL_METHOD *SSLv3_client_method(void); - -const SSL_METHOD *TLSv1_method(void); -const SSL_METHOD *TLSv1_server_method(void); -const SSL_METHOD *TLSv1_client_method(void); - -const SSL_METHOD *DTLSv1_method(void); -const SSL_METHOD *DTLSv1_server_method(void); -const SSL_METHOD *DTLSv1_client_method(void); - -const SSL_METHOD *SSLv23_method(void); -const SSL_METHOD *SSLv23_server_method(void); -const SSL_METHOD *SSLv23_client_method(void); - -/*- These aren't macros these arguments are all const X on openssl > 1.0.x -*/ -SSL_CTX *SSL_CTX_new(SSL_METHOD *); -long SSL_CTX_get_timeout(const SSL_CTX *); - -const SSL_CIPHER *SSL_get_current_cipher(const SSL *); - -/* SNI APIs were introduced in OpenSSL 1.0.0. To continue to support - * earlier versions some special handling of these is necessary. - */ -const char *SSL_get_servername(const SSL *, const int); -void SSL_set_tlsext_host_name(SSL *, char *); -void SSL_CTX_set_tlsext_servername_callback( - SSL_CTX *, - int (*)(const SSL *, int *, void *)); - -long SSL_session_reused(SSL *); - -/* The following were macros in 0.9.8e. Once we drop support for RHEL/CentOS 5 - we should move these back to FUNCTIONS. */ -void SSL_CTX_set_info_callback(SSL_CTX *, void (*)(const SSL *, int, int)); -void (*SSL_CTX_get_info_callback(SSL_CTX *))(const SSL *, int, int); -/* This function does not exist in 0.9.8e. Once we drop support for - RHEL/CentOS 5 this can be moved back to FUNCTIONS. */ -SSL_CTX *SSL_set_SSL_CTX(SSL *, SSL_CTX *); - -const SSL_METHOD* Cryptography_SSL_CTX_get_method(const SSL_CTX*); - -/* NPN APIs were introduced in OpenSSL 1.0.1. To continue to support earlier - * versions some special handling of these is necessary. - */ -void SSL_CTX_set_next_protos_advertised_cb(SSL_CTX *, - int (*)(SSL *, - const unsigned char **, - unsigned int *, - void *), - void *); -void SSL_CTX_set_next_proto_select_cb(SSL_CTX *, - int (*)(SSL *, - unsigned char **, - unsigned char *, - const unsigned char *, - unsigned int, - void *), - void *); -int SSL_select_next_proto(unsigned char **, unsigned char *, - const unsigned char *, unsigned int, - const unsigned char *, unsigned int); -void SSL_get0_next_proto_negotiated(const SSL *, - const unsigned char **, unsigned *); - -int sk_SSL_CIPHER_num(Cryptography_STACK_OF_SSL_CIPHER *); -SSL_CIPHER *sk_SSL_CIPHER_value(Cryptography_STACK_OF_SSL_CIPHER *, int); - -/* ALPN APIs were introduced in OpenSSL 1.0.2. To continue to support earlier - * versions some special handling of these is necessary. - */ -int SSL_CTX_set_alpn_protos(SSL_CTX *, const unsigned char*, unsigned); -int SSL_set_alpn_protos(SSL *, const unsigned char*, unsigned); -void SSL_CTX_set_alpn_select_cb(SSL_CTX *, - int (*) (SSL *, - const unsigned char **, - unsigned char *, - const unsigned char *, - unsigned int, - void *), - void *); -void SSL_get0_alpn_selected(const SSL *, const unsigned char **, unsigned *); -""" - -CUSTOMIZATIONS = """ -/** Secure renegotiation is supported in OpenSSL >= 0.9.8m - * But some Linux distributions have back ported some features. 
- */ -#ifndef SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION -static const long Cryptography_HAS_SECURE_RENEGOTIATION = 0; -long (*SSL_get_secure_renegotiation_support)(SSL *) = NULL; -const long SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION = 0; -const long SSL_OP_LEGACY_SERVER_CONNECT = 0; -#else -static const long Cryptography_HAS_SECURE_RENEGOTIATION = 1; -#endif -#ifdef OPENSSL_NO_SSL2 -static const long Cryptography_HAS_SSL2 = 0; -SSL_METHOD* (*SSLv2_method)(void) = NULL; -SSL_METHOD* (*SSLv2_client_method)(void) = NULL; -SSL_METHOD* (*SSLv2_server_method)(void) = NULL; -#else -static const long Cryptography_HAS_SSL2 = 1; -#endif - -#ifdef SSL_CTRL_SET_TLSEXT_HOSTNAME -static const long Cryptography_HAS_TLSEXT_HOSTNAME = 1; -#else -static const long Cryptography_HAS_TLSEXT_HOSTNAME = 0; -void (*SSL_set_tlsext_host_name)(SSL *, char *) = NULL; -const char* (*SSL_get_servername)(const SSL *, const int) = NULL; -void (*SSL_CTX_set_tlsext_servername_callback)( - SSL_CTX *, - int (*)(const SSL *, int *, void *)) = NULL; -#endif - -#ifdef SSL_MODE_RELEASE_BUFFERS -static const long Cryptography_HAS_RELEASE_BUFFERS = 1; -#else -static const long Cryptography_HAS_RELEASE_BUFFERS = 0; -const long SSL_MODE_RELEASE_BUFFERS = 0; -#endif - -#ifdef SSL_OP_NO_COMPRESSION -static const long Cryptography_HAS_OP_NO_COMPRESSION = 1; -#else -static const long Cryptography_HAS_OP_NO_COMPRESSION = 0; -const long SSL_OP_NO_COMPRESSION = 0; -#endif - -#ifdef SSL_OP_NO_TLSv1_1 -static const long Cryptography_HAS_TLSv1_1 = 1; -#else -static const long Cryptography_HAS_TLSv1_1 = 0; -static const long SSL_OP_NO_TLSv1_1 = 0; -SSL_METHOD* (*TLSv1_1_method)(void) = NULL; -SSL_METHOD* (*TLSv1_1_client_method)(void) = NULL; -SSL_METHOD* (*TLSv1_1_server_method)(void) = NULL; -#endif - -#ifdef SSL_OP_NO_TLSv1_2 -static const long Cryptography_HAS_TLSv1_2 = 1; -#else -static const long Cryptography_HAS_TLSv1_2 = 0; -static const long SSL_OP_NO_TLSv1_2 = 0; -SSL_METHOD* (*TLSv1_2_method)(void) = NULL; -SSL_METHOD* (*TLSv1_2_client_method)(void) = NULL; -SSL_METHOD* (*TLSv1_2_server_method)(void) = NULL; -#endif - -#ifdef SSL_OP_MSIE_SSLV2_RSA_PADDING -static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING = 1; -#else -static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING = 0; -const long SSL_OP_MSIE_SSLV2_RSA_PADDING = 0; -#endif - -#ifdef OPENSSL_NO_EC -long (*SSL_CTX_set_tmp_ecdh)(SSL_CTX *, EC_KEY *) = NULL; -#endif - -#ifdef SSL_OP_NO_TICKET -static const long Cryptography_HAS_SSL_OP_NO_TICKET = 1; -#else -static const long Cryptography_HAS_SSL_OP_NO_TICKET = 0; -const long SSL_OP_NO_TICKET = 0; -#endif - -/* OpenSSL 0.9.8f+ */ -#if OPENSSL_VERSION_NUMBER >= 0x00908070L -static const long Cryptography_HAS_SSL_SET_SSL_CTX = 1; -#else -static const long Cryptography_HAS_SSL_SET_SSL_CTX = 0; -static const long TLSEXT_NAMETYPE_host_name = 0; -SSL_CTX *(*SSL_set_SSL_CTX)(SSL *, SSL_CTX *) = NULL; -#endif - -/* NetBSD shipped without including d1_meth.c. This workaround checks to see - if the version of NetBSD we're currently running on is old enough to - have the bug and provides an empty implementation so we can link and - then remove the function from the ffi object. 
*/ -#ifdef __NetBSD__ -# include -# if (__NetBSD_Version__ < 699003800) -static const long Cryptography_HAS_NETBSD_D1_METH = 0; -const SSL_METHOD *DTLSv1_method(void) { - return NULL; -} -# else -static const long Cryptography_HAS_NETBSD_D1_METH = 1; -# endif -#else -static const long Cryptography_HAS_NETBSD_D1_METH = 1; -#endif - -/* Workaround for #794 caused by cffi const** bug. */ -const SSL_METHOD* Cryptography_SSL_CTX_get_method(const SSL_CTX* ctx) { - return ctx->method; -} - -/* Because OPENSSL defines macros that claim lack of support for things, rather - * than macros that claim support for things, we need to do a version check in - * addition to a definition check. NPN was added in 1.0.1: for any version - * before that, there is no compatibility. - */ -#if defined(OPENSSL_NO_NEXTPROTONEG) || OPENSSL_VERSION_NUMBER < 0x1000100fL -static const long Cryptography_HAS_NEXTPROTONEG = 0; -void (*SSL_CTX_set_next_protos_advertised_cb)(SSL_CTX *, - int (*)(SSL *, - const unsigned char **, - unsigned int *, - void *), - void *) = NULL; -void (*SSL_CTX_set_next_proto_select_cb)(SSL_CTX *, - int (*)(SSL *, - unsigned char **, - unsigned char *, - const unsigned char *, - unsigned int, - void *), - void *) = NULL; -int (*SSL_select_next_proto)(unsigned char **, unsigned char *, - const unsigned char *, unsigned int, - const unsigned char *, unsigned int) = NULL; -void (*SSL_get0_next_proto_negotiated)(const SSL *, - const unsigned char **, - unsigned *) = NULL; -#else -static const long Cryptography_HAS_NEXTPROTONEG = 1; -#endif - -/* ALPN was added in OpenSSL 1.0.2. */ -#if OPENSSL_VERSION_NUMBER < 0x10002001L -int (*SSL_CTX_set_alpn_protos)(SSL_CTX *, - const unsigned char*, - unsigned) = NULL; -int (*SSL_set_alpn_protos)(SSL *, const unsigned char*, unsigned) = NULL; -void (*SSL_CTX_set_alpn_select_cb)(SSL_CTX *, - int (*) (SSL *, - const unsigned char **, - unsigned char *, - const unsigned char *, - unsigned int, - void *), - void *) = NULL; -void (*SSL_get0_alpn_selected)(const SSL *, - const unsigned char **, - unsigned *) = NULL; -static const long Cryptography_HAS_ALPN = 0; -#else -static const long Cryptography_HAS_ALPN = 1; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_TLSv1_1": [ - "SSL_OP_NO_TLSv1_1", - "TLSv1_1_method", - "TLSv1_1_server_method", - "TLSv1_1_client_method", - ], - - "Cryptography_HAS_TLSv1_2": [ - "SSL_OP_NO_TLSv1_2", - "TLSv1_2_method", - "TLSv1_2_server_method", - "TLSv1_2_client_method", - ], - - "Cryptography_HAS_SSL2": [ - "SSLv2_method", - "SSLv2_client_method", - "SSLv2_server_method", - ], - - "Cryptography_HAS_TLSEXT_HOSTNAME": [ - "SSL_set_tlsext_host_name", - "SSL_get_servername", - "SSL_CTX_set_tlsext_servername_callback", - ], - - "Cryptography_HAS_RELEASE_BUFFERS": [ - "SSL_MODE_RELEASE_BUFFERS", - ], - - "Cryptography_HAS_OP_NO_COMPRESSION": [ - "SSL_OP_NO_COMPRESSION", - ], - - "Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [ - "SSL_OP_MSIE_SSLV2_RSA_PADDING", - ], - - "Cryptography_HAS_EC": [ - "SSL_CTX_set_tmp_ecdh", - ], - - "Cryptography_HAS_SSL_OP_NO_TICKET": [ - "SSL_OP_NO_TICKET", - ], - - "Cryptography_HAS_SSL_SET_SSL_CTX": [ - "SSL_set_SSL_CTX", - "TLSEXT_NAMETYPE_host_name", - ], - - "Cryptography_HAS_NETBSD_D1_METH": [ - "DTLSv1_method", - ], - - "Cryptography_HAS_NEXTPROTONEG": [ - "SSL_CTX_set_next_protos_advertised_cb", - "SSL_CTX_set_next_proto_select_cb", - "SSL_select_next_proto", - "SSL_get0_next_proto_negotiated", - ], - - "Cryptography_HAS_SECURE_RENEGOTIATION": [ - "SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION", - 
"SSL_OP_LEGACY_SERVER_CONNECT", - "SSL_get_secure_renegotiation_support", - ], - - "Cryptography_HAS_ALPN": [ - "SSL_CTX_set_alpn_protos", - "SSL_set_alpn_protos", - "SSL_CTX_set_alpn_select_cb", - "SSL_get0_alpn_selected", - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509.py deleted file mode 100644 index b74c118..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509.py +++ /dev/null @@ -1,271 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include - -/* - * This is part of a work-around for the difficulty cffi has in dealing with - * `STACK_OF(foo)` as the name of a type. We invent a new, simpler name that - * will be an alias for this type and use the alias throughout. This works - * together with another opaque typedef for the same name in the TYPES section. - * Note that the result is an opaque type. - */ -typedef STACK_OF(X509) Cryptography_STACK_OF_X509; -typedef STACK_OF(X509_CRL) Cryptography_STACK_OF_X509_CRL; -typedef STACK_OF(X509_REVOKED) Cryptography_STACK_OF_X509_REVOKED; -""" - -TYPES = """ -typedef ... Cryptography_STACK_OF_X509; -typedef ... Cryptography_STACK_OF_X509_CRL; -typedef ... Cryptography_STACK_OF_X509_REVOKED; - -typedef struct { - ASN1_OBJECT *algorithm; - ...; -} X509_ALGOR; - -typedef ... X509_ATTRIBUTE; - -typedef struct { - X509_ALGOR *signature; - ...; -} X509_CINF; - -typedef struct { - ASN1_OBJECT *object; - ASN1_BOOLEAN critical; - ASN1_OCTET_STRING *value; -} X509_EXTENSION; - -typedef ... X509_EXTENSIONS; - -typedef ... X509_REQ; - -typedef struct { - ASN1_INTEGER *serialNumber; - ASN1_TIME *revocationDate; - X509_EXTENSIONS *extensions; - int sequence; - ...; -} X509_REVOKED; - -typedef struct { - Cryptography_STACK_OF_X509_REVOKED *revoked; - ...; -} X509_CRL_INFO; - -typedef struct { - X509_CRL_INFO *crl; - ...; -} X509_CRL; - -typedef struct { - X509_CINF *cert_info; - ...; -} X509; - -typedef ... 
NETSCAPE_SPKI; -""" - -FUNCTIONS = """ -X509 *X509_new(void); -void X509_free(X509 *); -X509 *X509_dup(X509 *); - -int X509_print_ex(BIO *, X509 *, unsigned long, unsigned long); - -int X509_set_version(X509 *, long); - -EVP_PKEY *X509_get_pubkey(X509 *); -int X509_set_pubkey(X509 *, EVP_PKEY *); - -unsigned char *X509_alias_get0(X509 *, int *); -int X509_sign(X509 *, EVP_PKEY *, const EVP_MD *); - -int X509_digest(const X509 *, const EVP_MD *, unsigned char *, unsigned int *); - -ASN1_TIME *X509_gmtime_adj(ASN1_TIME *, long); - -unsigned long X509_subject_name_hash(X509 *); - -X509_NAME *X509_get_subject_name(X509 *); -int X509_set_subject_name(X509 *, X509_NAME *); - -X509_NAME *X509_get_issuer_name(X509 *); -int X509_set_issuer_name(X509 *, X509_NAME *); - -int X509_get_ext_count(X509 *); -int X509_add_ext(X509 *, X509_EXTENSION *, int); -X509_EXTENSION *X509_EXTENSION_dup(X509_EXTENSION *); -X509_EXTENSION *X509_get_ext(X509 *, int); -int X509_EXTENSION_get_critical(X509_EXTENSION *); -ASN1_OBJECT *X509_EXTENSION_get_object(X509_EXTENSION *); -void X509_EXTENSION_free(X509_EXTENSION *); - -int X509_REQ_set_version(X509_REQ *, long); -X509_REQ *X509_REQ_new(void); -void X509_REQ_free(X509_REQ *); -int X509_REQ_set_pubkey(X509_REQ *, EVP_PKEY *); -int X509_REQ_sign(X509_REQ *, EVP_PKEY *, const EVP_MD *); -int X509_REQ_verify(X509_REQ *, EVP_PKEY *); -EVP_PKEY *X509_REQ_get_pubkey(X509_REQ *); -int X509_REQ_print_ex(BIO *, X509_REQ *, unsigned long, unsigned long); - -int X509V3_EXT_print(BIO *, X509_EXTENSION *, unsigned long, int); -ASN1_OCTET_STRING *X509_EXTENSION_get_data(X509_EXTENSION *); - -X509_REVOKED *X509_REVOKED_new(void); -void X509_REVOKED_free(X509_REVOKED *); - -int X509_REVOKED_set_serialNumber(X509_REVOKED *, ASN1_INTEGER *); - -int X509_REVOKED_add1_ext_i2d(X509_REVOKED *, int, void *, int, unsigned long); - -X509_CRL *d2i_X509_CRL_bio(BIO *, X509_CRL **); -X509_CRL *X509_CRL_new(void); -void X509_CRL_free(X509_CRL *); -int X509_CRL_add0_revoked(X509_CRL *, X509_REVOKED *); -int i2d_X509_CRL_bio(BIO *, X509_CRL *); -int X509_CRL_print(BIO *, X509_CRL *); -int X509_CRL_set_issuer_name(X509_CRL *, X509_NAME *); -int X509_CRL_sign(X509_CRL *, EVP_PKEY *, const EVP_MD *); - -int NETSCAPE_SPKI_verify(NETSCAPE_SPKI *, EVP_PKEY *); -int NETSCAPE_SPKI_sign(NETSCAPE_SPKI *, EVP_PKEY *, const EVP_MD *); -char *NETSCAPE_SPKI_b64_encode(NETSCAPE_SPKI *); -EVP_PKEY *NETSCAPE_SPKI_get_pubkey(NETSCAPE_SPKI *); -int NETSCAPE_SPKI_set_pubkey(NETSCAPE_SPKI *, EVP_PKEY *); -NETSCAPE_SPKI *NETSCAPE_SPKI_new(void); -void NETSCAPE_SPKI_free(NETSCAPE_SPKI *); - -/* ASN1 serialization */ -int i2d_X509_bio(BIO *, X509 *); -X509 *d2i_X509_bio(BIO *, X509 **); - -int i2d_X509_REQ_bio(BIO *, X509_REQ *); -X509_REQ *d2i_X509_REQ_bio(BIO *, X509_REQ **); - -int i2d_PrivateKey_bio(BIO *, EVP_PKEY *); -EVP_PKEY *d2i_PrivateKey_bio(BIO *, EVP_PKEY **); -int i2d_PUBKEY_bio(BIO *, EVP_PKEY *); -EVP_PKEY *d2i_PUBKEY_bio(BIO *, EVP_PKEY **); - -ASN1_INTEGER *X509_get_serialNumber(X509 *); -int X509_set_serialNumber(X509 *, ASN1_INTEGER *); - -const char *X509_verify_cert_error_string(long); - -const char *X509_get_default_cert_area(void); -const char *X509_get_default_cert_dir(void); -const char *X509_get_default_cert_file(void); -const char *X509_get_default_cert_dir_env(void); -const char *X509_get_default_cert_file_env(void); -const char *X509_get_default_private_dir(void); - -int i2d_RSA_PUBKEY(RSA *, unsigned char **); -RSA *d2i_RSA_PUBKEY(RSA **, const unsigned char **, long); -RSA 
*d2i_RSAPublicKey(RSA **, const unsigned char **, long); -RSA *d2i_RSAPrivateKey(RSA **, const unsigned char **, long); -int i2d_DSA_PUBKEY(DSA *, unsigned char **); -DSA *d2i_DSA_PUBKEY(DSA **, const unsigned char **, long); -DSA *d2i_DSAPublicKey(DSA **, const unsigned char **, long); -DSA *d2i_DSAPrivateKey(DSA **, const unsigned char **, long); - -RSA *d2i_RSAPrivateKey_bio(BIO *, RSA **); -int i2d_RSAPrivateKey_bio(BIO *, RSA *); -RSA *d2i_RSAPublicKey_bio(BIO *, RSA **); -int i2d_RSAPublicKey_bio(BIO *, RSA *); -RSA *d2i_RSA_PUBKEY_bio(BIO *, RSA **); -int i2d_RSA_PUBKEY_bio(BIO *, RSA *); -DSA *d2i_DSA_PUBKEY_bio(BIO *, DSA **); -int i2d_DSA_PUBKEY_bio(BIO *, DSA *); -DSA *d2i_DSAPrivateKey_bio(BIO *, DSA **); -int i2d_DSAPrivateKey_bio(BIO *, DSA *); -""" - -MACROS = """ -long X509_get_version(X509 *); - -ASN1_TIME *X509_get_notBefore(X509 *); -ASN1_TIME *X509_get_notAfter(X509 *); - -long X509_REQ_get_version(X509_REQ *); -X509_NAME *X509_REQ_get_subject_name(X509_REQ *); - -Cryptography_STACK_OF_X509 *sk_X509_new_null(void); -void sk_X509_free(Cryptography_STACK_OF_X509 *); -int sk_X509_num(Cryptography_STACK_OF_X509 *); -int sk_X509_push(Cryptography_STACK_OF_X509 *, X509 *); -X509 *sk_X509_value(Cryptography_STACK_OF_X509 *, int); - -X509_EXTENSIONS *sk_X509_EXTENSION_new_null(void); -int sk_X509_EXTENSION_num(X509_EXTENSIONS *); -X509_EXTENSION *sk_X509_EXTENSION_value(X509_EXTENSIONS *, int); -int sk_X509_EXTENSION_push(X509_EXTENSIONS *, X509_EXTENSION *); -X509_EXTENSION *sk_X509_EXTENSION_delete(X509_EXTENSIONS *, int); -void sk_X509_EXTENSION_free(X509_EXTENSIONS *); - -int sk_X509_REVOKED_num(Cryptography_STACK_OF_X509_REVOKED *); -X509_REVOKED *sk_X509_REVOKED_value(Cryptography_STACK_OF_X509_REVOKED *, int); - -int i2d_RSAPublicKey(RSA *, unsigned char **); -int i2d_RSAPrivateKey(RSA *, unsigned char **); -int i2d_DSAPublicKey(DSA *, unsigned char **); -int i2d_DSAPrivateKey(DSA *, unsigned char **); - -/* These aren't macros these arguments are all const X on openssl > 1.0.x */ -int X509_CRL_set_lastUpdate(X509_CRL *, ASN1_TIME *); -int X509_CRL_set_nextUpdate(X509_CRL *, ASN1_TIME *); - -/* These use STACK_OF(X509_EXTENSION) in 0.9.8e. Once we drop support for - RHEL/CentOS 5 we should move these back to FUNCTIONS. */ -int X509_REQ_add_extensions(X509_REQ *, X509_EXTENSIONS *); -X509_EXTENSIONS *X509_REQ_get_extensions(X509_REQ *); - -int i2d_EC_PUBKEY(EC_KEY *, unsigned char **); -EC_KEY *d2i_EC_PUBKEY(EC_KEY **, const unsigned char **, long); -EC_KEY *d2i_EC_PUBKEY_bio(BIO *, EC_KEY **); -int i2d_EC_PUBKEY_bio(BIO *, EC_KEY *); -EC_KEY *d2i_ECPrivateKey_bio(BIO *, EC_KEY **); -int i2d_ECPrivateKey_bio(BIO *, EC_KEY *); -""" - -CUSTOMIZATIONS = """ -/* OpenSSL 0.9.8e does not have this definition. 
*/ -#if OPENSSL_VERSION_NUMBER <= 0x0090805fL -typedef STACK_OF(X509_EXTENSION) X509_EXTENSIONS; -#endif -#ifdef OPENSSL_NO_EC -int (*i2d_EC_PUBKEY)(EC_KEY *, unsigned char **) = NULL; -EC_KEY *(*d2i_EC_PUBKEY)(EC_KEY **, const unsigned char **, long) = NULL; -EC_KEY *(*d2i_EC_PUBKEY_bio)(BIO *, EC_KEY **) = NULL; -int (*i2d_EC_PUBKEY_bio)(BIO *, EC_KEY *) = NULL; -EC_KEY *(*d2i_ECPrivateKey_bio)(BIO *, EC_KEY **) = NULL; -int (*i2d_ECPrivateKey_bio)(BIO *, EC_KEY *) = NULL; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_EC": [ - "i2d_EC_PUBKEY", - "d2i_EC_PUBKEY", - "d2i_EC_PUBKEY_bio", - "i2d_EC_PUBKEY_bio", - "d2i_ECPrivateKey_bio", - "i2d_ECPrivateKey_bio", - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py deleted file mode 100644 index 601926c..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py +++ /dev/null @@ -1,336 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include - -/* - * This is part of a work-around for the difficulty cffi has in dealing with - * `STACK_OF(foo)` as the name of a type. We invent a new, simpler name that - * will be an alias for this type and use the alias throughout. This works - * together with another opaque typedef for the same name in the TYPES section. - * Note that the result is an opaque type. - */ -typedef STACK_OF(ASN1_OBJECT) Cryptography_STACK_OF_ASN1_OBJECT; -""" - -TYPES = """ -static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES; -static const long Cryptography_HAS_102_VERIFICATION_PARAMS; -static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST; -static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN; -static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES; -static const long Cryptography_HAS_100_VERIFICATION_PARAMS; -static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE; - -typedef ... Cryptography_STACK_OF_ASN1_OBJECT; - -typedef ... X509_STORE; -typedef ... X509_STORE_CTX; -typedef ... X509_VERIFY_PARAM; - -/* While these are defined in the source as ints, they're tagged here - as longs, just in case they ever grow to large, such as what we saw - with OP_ALL. 
*/ - -/* Verification error codes */ -static const int X509_V_OK; -static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT; -static const int X509_V_ERR_UNABLE_TO_GET_CRL; -static const int X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE; -static const int X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE; -static const int X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY; -static const int X509_V_ERR_CERT_SIGNATURE_FAILURE; -static const int X509_V_ERR_CRL_SIGNATURE_FAILURE; -static const int X509_V_ERR_CERT_NOT_YET_VALID; -static const int X509_V_ERR_CERT_HAS_EXPIRED; -static const int X509_V_ERR_CRL_NOT_YET_VALID; -static const int X509_V_ERR_CRL_HAS_EXPIRED; -static const int X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD; -static const int X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD; -static const int X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD; -static const int X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD; -static const int X509_V_ERR_OUT_OF_MEM; -static const int X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT; -static const int X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN; -static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY; -static const int X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE; -static const int X509_V_ERR_CERT_CHAIN_TOO_LONG; -static const int X509_V_ERR_CERT_REVOKED; -static const int X509_V_ERR_INVALID_CA; -static const int X509_V_ERR_PATH_LENGTH_EXCEEDED; -static const int X509_V_ERR_INVALID_PURPOSE; -static const int X509_V_ERR_CERT_UNTRUSTED; -static const int X509_V_ERR_CERT_REJECTED; -static const int X509_V_ERR_SUBJECT_ISSUER_MISMATCH; -static const int X509_V_ERR_AKID_SKID_MISMATCH; -static const int X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH; -static const int X509_V_ERR_KEYUSAGE_NO_CERTSIGN; -static const int X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER; -static const int X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION; -static const int X509_V_ERR_KEYUSAGE_NO_CRL_SIGN; -static const int X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION; -static const int X509_V_ERR_INVALID_NON_CA; -static const int X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED; -static const int X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE; -static const int X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED; -static const int X509_V_ERR_INVALID_EXTENSION; -static const int X509_V_ERR_INVALID_POLICY_EXTENSION; -static const int X509_V_ERR_NO_EXPLICIT_POLICY; -static const int X509_V_ERR_DIFFERENT_CRL_SCOPE; -static const int X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE; -static const int X509_V_ERR_UNNESTED_RESOURCE; -static const int X509_V_ERR_PERMITTED_VIOLATION; -static const int X509_V_ERR_EXCLUDED_VIOLATION; -static const int X509_V_ERR_SUBTREE_MINMAX; -static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE; -static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX; -static const int X509_V_ERR_UNSUPPORTED_NAME_SYNTAX; -static const int X509_V_ERR_CRL_PATH_VALIDATION_ERROR; -static const int X509_V_ERR_SUITE_B_INVALID_VERSION; -static const int X509_V_ERR_SUITE_B_INVALID_ALGORITHM; -static const int X509_V_ERR_SUITE_B_INVALID_CURVE; -static const int X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM; -static const int X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED; -static const int X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256; -static const int X509_V_ERR_HOSTNAME_MISMATCH; -static const int X509_V_ERR_EMAIL_MISMATCH; -static const int X509_V_ERR_IP_ADDRESS_MISMATCH; -static const int X509_V_ERR_APPLICATION_VERIFICATION; - -/* Verification parameters */ -static const long X509_V_FLAG_CB_ISSUER_CHECK; -static const long X509_V_FLAG_USE_CHECK_TIME; -static const long 
X509_V_FLAG_CRL_CHECK; -static const long X509_V_FLAG_CRL_CHECK_ALL; -static const long X509_V_FLAG_IGNORE_CRITICAL; -static const long X509_V_FLAG_X509_STRICT; -static const long X509_V_FLAG_ALLOW_PROXY_CERTS; -static const long X509_V_FLAG_POLICY_CHECK; -static const long X509_V_FLAG_EXPLICIT_POLICY; -static const long X509_V_FLAG_INHIBIT_ANY; -static const long X509_V_FLAG_INHIBIT_MAP; -static const long X509_V_FLAG_NOTIFY_POLICY; -static const long X509_V_FLAG_EXTENDED_CRL_SUPPORT; -static const long X509_V_FLAG_USE_DELTAS; -static const long X509_V_FLAG_CHECK_SS_SIGNATURE; -static const long X509_V_FLAG_TRUSTED_FIRST; -static const long X509_V_FLAG_SUITEB_128_LOS_ONLY; -static const long X509_V_FLAG_SUITEB_192_LOS; -static const long X509_V_FLAG_SUITEB_128_LOS; -static const long X509_V_FLAG_PARTIAL_CHAIN; -""" - -FUNCTIONS = """ -int X509_verify_cert(X509_STORE_CTX *); - -/* X509_STORE */ -X509_STORE *X509_STORE_new(void); -void X509_STORE_free(X509_STORE *); -int X509_STORE_add_cert(X509_STORE *, X509 *); - -/* X509_STORE_CTX */ -X509_STORE_CTX *X509_STORE_CTX_new(void); -void X509_STORE_CTX_cleanup(X509_STORE_CTX *); -void X509_STORE_CTX_free(X509_STORE_CTX *); -int X509_STORE_CTX_init(X509_STORE_CTX *, X509_STORE *, X509 *, - Cryptography_STACK_OF_X509 *); -void X509_STORE_CTX_trusted_stack(X509_STORE_CTX *, - Cryptography_STACK_OF_X509 *); -void X509_STORE_CTX_set_cert(X509_STORE_CTX *, X509 *); -void X509_STORE_CTX_set_chain(X509_STORE_CTX *,Cryptography_STACK_OF_X509 *); -X509_VERIFY_PARAM *X509_STORE_CTX_get0_param(X509_STORE_CTX *); -void X509_STORE_CTX_set0_param(X509_STORE_CTX *, X509_VERIFY_PARAM *); -int X509_STORE_CTX_set_default(X509_STORE_CTX *, const char *); -void X509_STORE_CTX_set_verify_cb(X509_STORE_CTX *, - int (*)(int, X509_STORE_CTX *)); -Cryptography_STACK_OF_X509 *X509_STORE_CTX_get_chain(X509_STORE_CTX *); -Cryptography_STACK_OF_X509 *X509_STORE_CTX_get1_chain(X509_STORE_CTX *); -int X509_STORE_CTX_get_error(X509_STORE_CTX *); -void X509_STORE_CTX_set_error(X509_STORE_CTX *, int); -int X509_STORE_CTX_get_error_depth(X509_STORE_CTX *); -X509 *X509_STORE_CTX_get_current_cert(X509_STORE_CTX *); -int X509_STORE_CTX_set_ex_data(X509_STORE_CTX *, int, void *); -void *X509_STORE_CTX_get_ex_data(X509_STORE_CTX *, int); - -/* X509_VERIFY_PARAM */ -X509_VERIFY_PARAM *X509_VERIFY_PARAM_new(void); -int X509_VERIFY_PARAM_set_flags(X509_VERIFY_PARAM *, unsigned long); -int X509_VERIFY_PARAM_clear_flags(X509_VERIFY_PARAM *, unsigned long); -unsigned long X509_VERIFY_PARAM_get_flags(X509_VERIFY_PARAM *); -int X509_VERIFY_PARAM_set_purpose(X509_VERIFY_PARAM *, int); -int X509_VERIFY_PARAM_set_trust(X509_VERIFY_PARAM *, int); -void X509_VERIFY_PARAM_set_time(X509_VERIFY_PARAM *, time_t); -int X509_VERIFY_PARAM_add0_policy(X509_VERIFY_PARAM *, ASN1_OBJECT *); -int X509_VERIFY_PARAM_set1_policies(X509_VERIFY_PARAM *, - Cryptography_STACK_OF_ASN1_OBJECT *); -void X509_VERIFY_PARAM_set_depth(X509_VERIFY_PARAM *, int); -int X509_VERIFY_PARAM_get_depth(const X509_VERIFY_PARAM *); -""" - -MACROS = """ -/* X509_STORE_CTX */ -void X509_STORE_CTX_set0_crls(X509_STORE_CTX *, - Cryptography_STACK_OF_X509_CRL *); - -/* X509_VERIFY_PARAM */ -int X509_VERIFY_PARAM_set1_host(X509_VERIFY_PARAM *, const char *, - size_t); -void X509_VERIFY_PARAM_set_hostflags(X509_VERIFY_PARAM *, unsigned int); -int X509_VERIFY_PARAM_set1_email(X509_VERIFY_PARAM *, const char *, - size_t); -int X509_VERIFY_PARAM_set1_ip(X509_VERIFY_PARAM *, const unsigned char *, - size_t); -int 
X509_VERIFY_PARAM_set1_ip_asc(X509_VERIFY_PARAM *, const char *); -""" - -CUSTOMIZATIONS = """ -/* OpenSSL 1.0.2+ verification error codes */ -#if OPENSSL_VERSION_NUMBER >= 0x10002000L -static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 1; -#else -static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 0; -static const long X509_V_ERR_SUITE_B_INVALID_VERSION = 0; -static const long X509_V_ERR_SUITE_B_INVALID_ALGORITHM = 0; -static const long X509_V_ERR_SUITE_B_INVALID_CURVE = 0; -static const long X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM = 0; -static const long X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED = 0; -static const long X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256 = 0; -static const long X509_V_ERR_HOSTNAME_MISMATCH = 0; -static const long X509_V_ERR_EMAIL_MISMATCH = 0; -static const long X509_V_ERR_IP_ADDRESS_MISMATCH = 0; -#endif - -/* OpenSSL 1.0.2+ verification parameters */ -#if OPENSSL_VERSION_NUMBER >= 0x10002000L -static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 1; -#else -static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 0; -/* X509_V_FLAG_TRUSTED_FIRST is also new in 1.0.2+, but it is added separately - below because it shows up in some earlier 3rd party OpenSSL packages. */ -static const long X509_V_FLAG_SUITEB_128_LOS_ONLY = 0; -static const long X509_V_FLAG_SUITEB_192_LOS = 0; -static const long X509_V_FLAG_SUITEB_128_LOS = 0; - -int (*X509_VERIFY_PARAM_set1_host)(X509_VERIFY_PARAM *, const char *, - size_t) = NULL; -int (*X509_VERIFY_PARAM_set1_email)(X509_VERIFY_PARAM *, const char *, - size_t) = NULL; -int (*X509_VERIFY_PARAM_set1_ip)(X509_VERIFY_PARAM *, const unsigned char *, - size_t) = NULL; -int (*X509_VERIFY_PARAM_set1_ip_asc)(X509_VERIFY_PARAM *, const char *) = NULL; -void (*X509_VERIFY_PARAM_set_hostflags)(X509_VERIFY_PARAM *, - unsigned int) = NULL; -#endif - -/* OpenSSL 1.0.2+ or Solaris's backport */ -#ifdef X509_V_FLAG_PARTIAL_CHAIN -static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 1; -#else -static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 0; -static const long X509_V_FLAG_PARTIAL_CHAIN = 0; -#endif - -/* OpenSSL 1.0.2+, *or* Fedora 20's flavor of OpenSSL 1.0.1e... 
*/ -#ifdef X509_V_FLAG_TRUSTED_FIRST -static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 1; -#else -static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 0; -static const long X509_V_FLAG_TRUSTED_FIRST = 0; -#endif - -/* OpenSSL 1.0.0+ verification error codes */ -#if OPENSSL_VERSION_NUMBER >= 0x10000000L -static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES = 1; -#else -static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES = 0; -static const long X509_V_ERR_DIFFERENT_CRL_SCOPE = 0; -static const long X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE = 0; -static const long X509_V_ERR_PERMITTED_VIOLATION = 0; -static const long X509_V_ERR_EXCLUDED_VIOLATION = 0; -static const long X509_V_ERR_SUBTREE_MINMAX = 0; -static const long X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE = 0; -static const long X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX = 0; -static const long X509_V_ERR_UNSUPPORTED_NAME_SYNTAX = 0; -static const long X509_V_ERR_CRL_PATH_VALIDATION_ERROR = 0; -#endif - -/* OpenSSL 1.0.0+ verification parameters */ -#if OPENSSL_VERSION_NUMBER >= 0x10000000L -static const long Cryptography_HAS_100_VERIFICATION_PARAMS = 1; -#else -static const long Cryptography_HAS_100_VERIFICATION_PARAMS = 0; -static const long X509_V_FLAG_EXTENDED_CRL_SUPPORT = 0; -static const long X509_V_FLAG_USE_DELTAS = 0; -#endif - -/* OpenSSL 0.9.8recent+ */ -#ifdef X509_V_FLAG_CHECK_SS_SIGNATURE -static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE = 1; -#else -static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE = 0; -static const long X509_V_FLAG_CHECK_SS_SIGNATURE = 0; -#endif -""" - -CONDITIONAL_NAMES = { - "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [ - 'X509_V_ERR_SUITE_B_INVALID_VERSION', - 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', - 'X509_V_ERR_SUITE_B_INVALID_CURVE', - 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', - 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', - 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', - 'X509_V_ERR_HOSTNAME_MISMATCH', - 'X509_V_ERR_EMAIL_MISMATCH', - 'X509_V_ERR_IP_ADDRESS_MISMATCH' - ], - "Cryptography_HAS_102_VERIFICATION_PARAMS": [ - "X509_V_FLAG_SUITEB_128_LOS_ONLY", - "X509_V_FLAG_SUITEB_192_LOS", - "X509_V_FLAG_SUITEB_128_LOS", - "X509_VERIFY_PARAM_set1_host", - "X509_VERIFY_PARAM_set1_email", - "X509_VERIFY_PARAM_set1_ip", - "X509_VERIFY_PARAM_set1_ip_asc", - "X509_VERIFY_PARAM_set_hostflags", - ], - "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [ - "X509_V_FLAG_TRUSTED_FIRST", - ], - "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [ - "X509_V_FLAG_PARTIAL_CHAIN", - ], - "Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [ - 'X509_V_ERR_DIFFERENT_CRL_SCOPE', - 'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE', - 'X509_V_ERR_UNNESTED_RESOURCE', - 'X509_V_ERR_PERMITTED_VIOLATION', - 'X509_V_ERR_EXCLUDED_VIOLATION', - 'X509_V_ERR_SUBTREE_MINMAX', - 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE', - 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX', - 'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX', - 'X509_V_ERR_CRL_PATH_VALIDATION_ERROR', - ], - "Cryptography_HAS_100_VERIFICATION_PARAMS": [ - "Cryptography_HAS_100_VERIFICATION_PARAMS", - "X509_V_FLAG_EXTENDED_CRL_SUPPORT", - "X509_V_FLAG_USE_DELTAS", - ], - "Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [ - "X509_V_FLAG_CHECK_SS_SIGNATURE", - ] -} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509name.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509name.py deleted file mode 100644 index 50abee2..0000000 --- 
a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509name.py +++ /dev/null @@ -1,61 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include - -/* - * See the comment above Cryptography_STACK_OF_X509 in x509.py - */ -typedef STACK_OF(X509_NAME) Cryptography_STACK_OF_X509_NAME; -""" - -TYPES = """ -typedef ... X509_NAME; -typedef ... X509_NAME_ENTRY; -typedef ... Cryptography_STACK_OF_X509_NAME; -""" - -FUNCTIONS = """ -int X509_NAME_entry_count(X509_NAME *); -X509_NAME_ENTRY *X509_NAME_get_entry(X509_NAME *, int); -ASN1_OBJECT *X509_NAME_ENTRY_get_object(X509_NAME_ENTRY *); -ASN1_STRING *X509_NAME_ENTRY_get_data(X509_NAME_ENTRY *); -unsigned long X509_NAME_hash(X509_NAME *); - -int i2d_X509_NAME(X509_NAME *, unsigned char **); -int X509_NAME_add_entry_by_NID(X509_NAME *, int, int, unsigned char *, - int, int, int); -X509_NAME_ENTRY *X509_NAME_delete_entry(X509_NAME *, int); -void X509_NAME_ENTRY_free(X509_NAME_ENTRY *); -int X509_NAME_get_index_by_NID(X509_NAME *, int, int); -int X509_NAME_cmp(const X509_NAME *, const X509_NAME *); -char *X509_NAME_oneline(X509_NAME *, char *, int); -X509_NAME *X509_NAME_dup(X509_NAME *); -void X509_NAME_free(X509_NAME *); -""" - -MACROS = """ -Cryptography_STACK_OF_X509_NAME *sk_X509_NAME_new_null(void); -int sk_X509_NAME_num(Cryptography_STACK_OF_X509_NAME *); -int sk_X509_NAME_push(Cryptography_STACK_OF_X509_NAME *, X509_NAME *); -X509_NAME *sk_X509_NAME_value(Cryptography_STACK_OF_X509_NAME *, int); -void sk_X509_NAME_free(Cryptography_STACK_OF_X509_NAME *); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py deleted file mode 100644 index cf4be1f..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py +++ /dev/null @@ -1,103 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
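The CUSTOMIZATIONS and CONDITIONAL_NAMES machinery removed above means that on OpenSSL builds older than 1.0.2 the guarded names are stripped from the compiled binding, so callers are expected to test the Cryptography_HAS_* flag before touching them. A minimal sketch of that pattern, assuming `lib` is the cffi library object produced for these bindings and `param` is an X509_VERIFY_PARAM * cdata obtained elsewhere:

def set_expected_hostname(lib, param, hostname):
    # When the condition is 0, build_ffi deletes the guarded names from `lib`
    # entirely, so the capability flag must be consulted first.
    if not lib.Cryptography_HAS_102_VERIFICATION_PARAMS:
        raise RuntimeError("X509_VERIFY_PARAM_set1_host requires OpenSSL 1.0.2+")
    encoded = hostname.encode("ascii")
    if lib.X509_VERIFY_PARAM_set1_host(param, encoded, len(encoded)) != 1:
        raise ValueError("X509_VERIFY_PARAM_set1_host failed")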
- -from __future__ import absolute_import, division, print_function - -INCLUDES = """ -#include -""" - -TYPES = """ -typedef struct { - X509 *issuer_cert; - X509 *subject_cert; - ...; -} X509V3_CTX; - -typedef void * (*X509V3_EXT_D2I)(void *, const unsigned char **, long); - -typedef struct { - ASN1_ITEM_EXP *it; - X509V3_EXT_D2I d2i; - ...; -} X509V3_EXT_METHOD; - -static const int GEN_OTHERNAME; -static const int GEN_EMAIL; -static const int GEN_X400; -static const int GEN_DNS; -static const int GEN_URI; -static const int GEN_DIRNAME; -static const int GEN_EDIPARTY; -static const int GEN_IPADD; -static const int GEN_RID; - -typedef struct { - ...; -} OTHERNAME; - -typedef struct { - ...; -} EDIPARTYNAME; - -typedef struct { - int type; - union { - char *ptr; - OTHERNAME *otherName; /* otherName */ - ASN1_IA5STRING *rfc822Name; - ASN1_IA5STRING *dNSName; - ASN1_TYPE *x400Address; - X509_NAME *directoryName; - EDIPARTYNAME *ediPartyName; - ASN1_IA5STRING *uniformResourceIdentifier; - ASN1_OCTET_STRING *iPAddress; - ASN1_OBJECT *registeredID; - - /* Old names */ - ASN1_OCTET_STRING *ip; /* iPAddress */ - X509_NAME *dirn; /* dirn */ - ASN1_IA5STRING *ia5; /* rfc822Name, dNSName, */ - /* uniformResourceIdentifier */ - ASN1_OBJECT *rid; /* registeredID */ - ASN1_TYPE *other; /* x400Address */ - } d; - ...; -} GENERAL_NAME; - -typedef struct stack_st_GENERAL_NAME GENERAL_NAMES; -""" - -FUNCTIONS = """ -void X509V3_set_ctx(X509V3_CTX *, X509 *, X509 *, X509_REQ *, X509_CRL *, int); -X509_EXTENSION *X509V3_EXT_nconf(CONF *, X509V3_CTX *, char *, char *); -int GENERAL_NAME_print(BIO *, GENERAL_NAME *); -void GENERAL_NAMES_free(GENERAL_NAMES *); -void *X509V3_EXT_d2i(X509_EXTENSION *); -""" - -MACROS = """ -void *X509V3_set_ctx_nodb(X509V3_CTX *); -int sk_GENERAL_NAME_num(struct stack_st_GENERAL_NAME *); -int sk_GENERAL_NAME_push(struct stack_st_GENERAL_NAME *, GENERAL_NAME *); -GENERAL_NAME *sk_GENERAL_NAME_value(struct stack_st_GENERAL_NAME *, int); - -/* These aren't macros these functions are all const X on openssl > 1.0.x */ -const X509V3_EXT_METHOD *X509V3_EXT_get(X509_EXTENSION *); -const X509V3_EXT_METHOD *X509V3_EXT_get_nid(int); -""" - -CUSTOMIZATIONS = """ -""" - -CONDITIONAL_NAMES = {} diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/utils.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/utils.py deleted file mode 100644 index 1c48116..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/bindings/utils.py +++ /dev/null @@ -1,108 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import binascii - -import sys - -import cffi - - -def build_ffi(module_prefix, modules, pre_include="", post_include="", - libraries=[], extra_compile_args=[], extra_link_args=[]): - """ - Modules listed in ``modules`` should have the following attributes: - - * ``INCLUDES``: A string containing C includes. - * ``TYPES``: A string containing C declarations for types. 
- * ``FUNCTIONS``: A string containing C declarations for functions. - * ``MACROS``: A string containing C declarations for any macros. - * ``CUSTOMIZATIONS``: A string containing arbitrary top-level C code, this - can be used to do things like test for a define and provide an - alternate implementation based on that. - * ``CONDITIONAL_NAMES``: A dict mapping strings of condition names from the - library to a list of names which will not be present without the - condition. - """ - ffi = cffi.FFI() - types = [] - includes = [] - functions = [] - macros = [] - customizations = [] - for name in modules: - module_name = module_prefix + name - __import__(module_name) - module = sys.modules[module_name] - - types.append(module.TYPES) - macros.append(module.MACROS) - functions.append(module.FUNCTIONS) - includes.append(module.INCLUDES) - customizations.append(module.CUSTOMIZATIONS) - - cdef_sources = types + functions + macros - ffi.cdef("\n".join(cdef_sources)) - - # We include functions here so that if we got any of their definitions - # wrong, the underlying C compiler will explode. In C you are allowed - # to re-declare a function if it has the same signature. That is: - # int foo(int); - # int foo(int); - # is legal, but the following will fail to compile: - # int foo(int); - # int foo(short); - source = "\n".join( - [pre_include] + - includes + - [post_include] + - functions + - customizations - ) - lib = ffi.verify( - source=source, - modulename=_create_modulename(cdef_sources, source, sys.version), - libraries=libraries, - ext_package="cryptography", - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - ) - - for name in modules: - module_name = module_prefix + name - module = sys.modules[module_name] - for condition, names in module.CONDITIONAL_NAMES.items(): - if not getattr(lib, condition): - for name in names: - delattr(lib, name) - - return ffi, lib - - -def _create_modulename(cdef_sources, source, sys_version): - """ - cffi creates a modulename internally that incorporates the cffi version. - This will cause cryptography's wheels to break when the version of cffi - the user has does not match what was used when building the wheel. To - resolve this we build our own modulename that uses most of the same code - from cffi but elides the version key. - """ - key = '\x00'.join([sys_version[:3], source] + cdef_sources) - key = key.encode('utf-8') - k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) - k1 = k1.lstrip('0x').rstrip('L') - k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) - k2 = k2.lstrip('0').rstrip('L') - return '_Cryptography_cffi_{0}{1}'.format(k1, k2) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
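The build_ffi docstring above spells out the attributes every binding module must expose. A hypothetical caller (the real OpenSSL Binding class is not part of this diff, so the module list and libraries here are illustrative only) would assemble them roughly like this:

from cryptography.hazmat.bindings.utils import build_ffi

# Call shape only: a real build must list every binding module so that types
# referenced across modules (X509, ASN1_IA5STRING, ...) all get declared.
ffi, lib = build_ffi(
    module_prefix="cryptography.hazmat.bindings.openssl.",
    modules=["x509name", "x509v3"],   # plus the rest of the package in practice
    libraries=["crypto", "ssl"],
)

# Names whose Cryptography_HAS_* condition was false have been deleted from
# `lib`, so feature probes reduce to hasattr() checks:
has_set1_host = hasattr(lib, "X509_VERIFY_PARAM_set1_host")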
- -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py deleted file mode 100644 index 04b2272..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py +++ /dev/null @@ -1,337 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
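As the _create_modulename docstring above explains, the verify() module name depends only on the Python version prefix and the C sources, not on the installed cffi version. A small illustration of what the helper yields, with throwaway inputs:

import sys
from cryptography.hazmat.bindings.utils import _create_modulename

cdef_sources = ["int foo(int);"]
source = "int foo(int x) { return x; }"

name = _create_modulename(cdef_sources, source, sys.version)
# Something of the form '_Cryptography_cffi_<hex><hex>': two CRC32s over the
# interleaved key bytes, so the name stays stable across cffi upgrades.
print(name)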
- -from __future__ import absolute_import, division, print_function - -import warnings - -import six - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.backends.interfaces import DSABackend -from cryptography.hazmat.primitives import interfaces - - -def generate_parameters(key_size, backend): - return backend.generate_dsa_parameters(key_size) - - -def generate_private_key(key_size, backend): - return backend.generate_dsa_private_key_and_parameters(key_size) - - -def _check_dsa_parameters(parameters): - if (utils.bit_length(parameters.p), - utils.bit_length(parameters.q)) not in ( - (1024, 160), - (2048, 256), - (3072, 256)): - raise ValueError("p and q lengths must be " - "one of these pairs (1024, 160) or (2048, 256) " - "or (3072, 256).") - - if not (1 < parameters.g < parameters.p): - raise ValueError("g, p don't satisfy 1 < g < p.") - - -def _check_dsa_private_numbers(numbers): - parameters = numbers.public_numbers.parameter_numbers - _check_dsa_parameters(parameters) - if numbers.x <= 0 or numbers.x >= parameters.q: - raise ValueError("x must be > 0 and < q.") - - if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): - raise ValueError("y must be equal to (g ** x % p).") - - -@utils.register_interface(interfaces.DSAParameters) -class DSAParameters(object): - def __init__(self, modulus, subgroup_order, generator): - warnings.warn( - "The DSAParameters class is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - _check_dsa_parameters( - DSAParameterNumbers( - p=modulus, - q=subgroup_order, - g=generator - ) - ) - - self._modulus = modulus - self._subgroup_order = subgroup_order - self._generator = generator - - @classmethod - def generate(cls, key_size, backend): - warnings.warn( - "generate is deprecated and will be removed in a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if not isinstance(backend, DSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - parameters = backend.generate_dsa_parameters(key_size) - numbers = parameters.parameter_numbers() - return cls( - modulus=numbers.p, - subgroup_order=numbers.q, - generator=numbers.g - ) - - @property - def modulus(self): - return self._modulus - - @property - def subgroup_order(self): - return self._subgroup_order - - @property - def generator(self): - return self._generator - - @property - def p(self): - return self.modulus - - @property - def q(self): - return self.subgroup_order - - @property - def g(self): - return self.generator - - -@utils.register_interface(interfaces.DSAPrivateKey) -class DSAPrivateKey(object): - def __init__(self, modulus, subgroup_order, generator, x, y): - warnings.warn( - "The DSAPrivateKey class is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if ( - not isinstance(x, six.integer_types) or - not isinstance(y, six.integer_types) - ): - raise TypeError("DSAPrivateKey arguments must be integers.") - - _check_dsa_private_numbers( - DSAPrivateNumbers( - public_numbers=DSAPublicNumbers( - parameter_numbers=DSAParameterNumbers( - p=modulus, - q=subgroup_order, - g=generator - ), - y=y - ), - x=x - ) - ) - - self._modulus = modulus - self._subgroup_order = subgroup_order - self._generator = generator - self._x = x - self._y = y - - @classmethod - def generate(cls, parameters, backend): - warnings.warn( - 
"generate is deprecated and will be removed in a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if not isinstance(backend, DSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - key = backend.generate_dsa_private_key(parameters) - private_numbers = key.private_numbers() - return cls( - modulus=private_numbers.public_numbers.parameter_numbers.p, - subgroup_order=private_numbers.public_numbers.parameter_numbers.q, - generator=private_numbers.public_numbers.parameter_numbers.g, - x=private_numbers.x, - y=private_numbers.public_numbers.y - ) - - def signer(self, algorithm, backend): - if not isinstance(backend, DSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.create_dsa_signature_ctx(self, algorithm) - - @property - def key_size(self): - return utils.bit_length(self._modulus) - - def public_key(self): - return DSAPublicKey(self._modulus, self._subgroup_order, - self._generator, self.y) - - @property - def x(self): - return self._x - - @property - def y(self): - return self._y - - def parameters(self): - return DSAParameters(self._modulus, self._subgroup_order, - self._generator) - - -@utils.register_interface(interfaces.DSAPublicKey) -class DSAPublicKey(object): - def __init__(self, modulus, subgroup_order, generator, y): - warnings.warn( - "The DSAPublicKey class is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - _check_dsa_parameters( - DSAParameterNumbers( - p=modulus, - q=subgroup_order, - g=generator - ) - ) - if not isinstance(y, six.integer_types): - raise TypeError("y must be an integer.") - - self._modulus = modulus - self._subgroup_order = subgroup_order - self._generator = generator - self._y = y - - def verifier(self, signature, algorithm, backend): - if not isinstance(backend, DSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.create_dsa_verification_ctx(self, signature, - algorithm) - - @property - def key_size(self): - return utils.bit_length(self._modulus) - - @property - def y(self): - return self._y - - def parameters(self): - return DSAParameters(self._modulus, self._subgroup_order, - self._generator) - - -class DSAParameterNumbers(object): - def __init__(self, p, q, g): - if ( - not isinstance(p, six.integer_types) or - not isinstance(q, six.integer_types) or - not isinstance(g, six.integer_types) - ): - raise TypeError( - "DSAParameterNumbers p, q, and g arguments must be integers." - ) - - self._p = p - self._q = q - self._g = g - - @property - def p(self): - return self._p - - @property - def q(self): - return self._q - - @property - def g(self): - return self._g - - def parameters(self, backend): - return backend.load_dsa_parameter_numbers(self) - - -class DSAPublicNumbers(object): - def __init__(self, y, parameter_numbers): - if not isinstance(y, six.integer_types): - raise TypeError("DSAPublicNumbers y argument must be an integer.") - - if not isinstance(parameter_numbers, DSAParameterNumbers): - raise TypeError( - "parameter_numbers must be a DSAParameterNumbers instance." 
- ) - - self._y = y - self._parameter_numbers = parameter_numbers - - @property - def y(self): - return self._y - - @property - def parameter_numbers(self): - return self._parameter_numbers - - def public_key(self, backend): - return backend.load_dsa_public_numbers(self) - - -class DSAPrivateNumbers(object): - def __init__(self, x, public_numbers): - if not isinstance(x, six.integer_types): - raise TypeError("DSAPrivateNumbers x argument must be an integer.") - - if not isinstance(public_numbers, DSAPublicNumbers): - raise TypeError( - "public_numbers must be a DSAPublicNumbers instance." - ) - self._public_numbers = public_numbers - self._x = x - - @property - def x(self): - return self._x - - @property - def public_numbers(self): - return self._public_numbers - - def private_key(self, backend): - return backend.load_dsa_private_numbers(self) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py deleted file mode 100644 index 220a419..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py +++ /dev/null @@ -1,255 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import six - -from cryptography import utils -from cryptography.hazmat.primitives import interfaces - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT571R1(object): - @property - def name(self): - return "sect571r1" - - @property - def key_size(self): - return 571 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT409R1(object): - @property - def name(self): - return "sect409r1" - - @property - def key_size(self): - return 409 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT283R1(object): - @property - def name(self): - return "sect283r1" - - @property - def key_size(self): - return 283 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT233R1(object): - @property - def name(self): - return "sect233r1" - - @property - def key_size(self): - return 233 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT163R2(object): - @property - def name(self): - return "sect163r2" - - @property - def key_size(self): - return 163 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT571K1(object): - @property - def name(self): - return "sect571k1" - - @property - def key_size(self): - return 571 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT409K1(object): - @property - def name(self): - return "sect409k1" - - @property - def key_size(self): - return 409 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT283K1(object): - @property - def name(self): - return "sect283k1" - - @property - def key_size(self): - return 283 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT233K1(object): - @property - def name(self): - return "sect233k1" - - @property - def 
key_size(self): - return 233 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECT163K1(object): - @property - def name(self): - return "sect163k1" - - @property - def key_size(self): - return 163 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECP521R1(object): - @property - def name(self): - return "secp521r1" - - @property - def key_size(self): - return 521 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECP384R1(object): - @property - def name(self): - return "secp384r1" - - @property - def key_size(self): - return 384 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECP256R1(object): - @property - def name(self): - return "secp256r1" - - @property - def key_size(self): - return 256 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECP224R1(object): - @property - def name(self): - return "secp224r1" - - @property - def key_size(self): - return 224 - - -@utils.register_interface(interfaces.EllipticCurve) -class SECP192R1(object): - @property - def name(self): - return "secp192r1" - - @property - def key_size(self): - return 192 - - -@utils.register_interface(interfaces.EllipticCurveSignatureAlgorithm) -class ECDSA(object): - def __init__(self, algorithm): - self._algorithm = algorithm - - @property - def algorithm(self): - return self._algorithm - - -def generate_private_key(curve, backend): - return backend.generate_elliptic_curve_private_key(curve) - - -class EllipticCurvePublicNumbers(object): - def __init__(self, x, y, curve): - if ( - not isinstance(x, six.integer_types) or - not isinstance(y, six.integer_types) - ): - raise TypeError("x and y must be integers.") - - if not isinstance(curve, interfaces.EllipticCurve): - raise TypeError("curve must provide the EllipticCurve interface.") - - self._y = y - self._x = x - self._curve = curve - - def public_key(self, backend): - return backend.elliptic_curve_public_key_from_numbers(self) - - @property - def curve(self): - return self._curve - - @property - def x(self): - return self._x - - @property - def y(self): - return self._y - - -class EllipticCurvePrivateNumbers(object): - def __init__(self, private_value, public_numbers): - if not isinstance(private_value, six.integer_types): - raise TypeError("private_value must be an integer.") - - if not isinstance(public_numbers, EllipticCurvePublicNumbers): - raise TypeError( - "public_numbers must be an EllipticCurvePublicNumbers " - "instance." - ) - - self._private_value = private_value - self._public_numbers = public_numbers - - def private_key(self, backend): - return backend.elliptic_curve_private_key_from_numbers(self) - - @property - def private_value(self): - return self._private_value - - @property - def public_numbers(self): - return self._public_numbers diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py deleted file mode 100644 index d44bbda..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py +++ /dev/null @@ -1,94 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import warnings - -import six - -from cryptography import utils -from cryptography.hazmat.primitives import interfaces - - -@utils.register_interface(interfaces.AsymmetricPadding) -class PKCS1v15(object): - name = "EMSA-PKCS1-v1_5" - - -@utils.register_interface(interfaces.AsymmetricPadding) -class PSS(object): - MAX_LENGTH = object() - name = "EMSA-PSS" - - def __init__(self, mgf, salt_length=None): - self._mgf = mgf - - if salt_length is None: - warnings.warn( - "salt_length is deprecated on MGF1 and should be added via the" - " PSS constructor.", - utils.DeprecatedIn04, - stacklevel=2 - ) - else: - if (not isinstance(salt_length, six.integer_types) and - salt_length is not self.MAX_LENGTH): - raise TypeError("salt_length must be an integer.") - - if salt_length is not self.MAX_LENGTH and salt_length < 0: - raise ValueError("salt_length must be zero or greater.") - - if salt_length is None and self._mgf._salt_length is None: - raise ValueError("You must supply salt_length.") - - self._salt_length = salt_length - - -@utils.register_interface(interfaces.AsymmetricPadding) -class OAEP(object): - name = "EME-OAEP" - - def __init__(self, mgf, algorithm, label): - if not isinstance(algorithm, interfaces.HashAlgorithm): - raise TypeError("Expected instance of interfaces.HashAlgorithm.") - - self._mgf = mgf - self._algorithm = algorithm - self._label = label - - -class MGF1(object): - MAX_LENGTH = object() - - def __init__(self, algorithm, salt_length=None): - if not isinstance(algorithm, interfaces.HashAlgorithm): - raise TypeError("Expected instance of interfaces.HashAlgorithm.") - - self._algorithm = algorithm - - if salt_length is not None: - warnings.warn( - "salt_length is deprecated on MGF1 and should be passed to " - "the PSS constructor instead.", - utils.DeprecatedIn04, - stacklevel=2 - ) - if (not isinstance(salt_length, six.integer_types) and - salt_length is not self.MAX_LENGTH): - raise TypeError("salt_length must be an integer.") - - if salt_length is not self.MAX_LENGTH and salt_length < 0: - raise ValueError("salt_length must be zero or greater.") - - self._salt_length = salt_length diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py deleted file mode 100644 index 15ec52a..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py +++ /dev/null @@ -1,404 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
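The warnings in PSS and MGF1 above move salt_length from the MGF1 constructor to the PSS constructor. A short sketch of the non-deprecated way to build the padding objects removed here (hashes is assumed to come from the sibling primitives.hashes module, which is not part of this hunk):

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding

# salt_length lives on PSS; leaving it off MGF1 avoids the DeprecatedIn04 warning.
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)

# OAEP requires an explicit label argument; None is the usual choice.
oaep = padding.OAEP(
    mgf=padding.MGF1(hashes.SHA1()),
    algorithm=hashes.SHA1(),
    label=None,
)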
- -from __future__ import absolute_import, division, print_function - -import warnings - -import six - -from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.backends.interfaces import RSABackend - - -def generate_private_key(public_exponent, key_size, backend): - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - _verify_rsa_parameters(public_exponent, key_size) - return backend.generate_rsa_private_key(public_exponent, key_size) - - -def _verify_rsa_parameters(public_exponent, key_size): - if public_exponent < 3: - raise ValueError("public_exponent must be >= 3.") - - if public_exponent & 1 == 0: - raise ValueError("public_exponent must be odd.") - - if key_size < 512: - raise ValueError("key_size must be at least 512-bits.") - - -def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, - public_exponent, modulus): - if modulus < 3: - raise ValueError("modulus must be >= 3.") - - if p >= modulus: - raise ValueError("p must be < modulus.") - - if q >= modulus: - raise ValueError("q must be < modulus.") - - if dmp1 >= modulus: - raise ValueError("dmp1 must be < modulus.") - - if dmq1 >= modulus: - raise ValueError("dmq1 must be < modulus.") - - if iqmp >= modulus: - raise ValueError("iqmp must be < modulus.") - - if private_exponent >= modulus: - raise ValueError("private_exponent must be < modulus.") - - if public_exponent < 3 or public_exponent >= modulus: - raise ValueError("public_exponent must be >= 3 and < modulus.") - - if public_exponent & 1 == 0: - raise ValueError("public_exponent must be odd.") - - if dmp1 & 1 == 0: - raise ValueError("dmp1 must be odd.") - - if dmq1 & 1 == 0: - raise ValueError("dmq1 must be odd.") - - if p * q != modulus: - raise ValueError("p*q must equal modulus.") - - -def _check_public_key_components(e, n): - if n < 3: - raise ValueError("n must be >= 3.") - - if e < 3 or e >= n: - raise ValueError("e must be >= 3 and < n.") - - if e & 1 == 0: - raise ValueError("e must be odd.") - - -class RSAPublicKey(object): - def __init__(self, public_exponent, modulus): - warnings.warn( - "The RSAPublicKey class is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if ( - not isinstance(public_exponent, six.integer_types) or - not isinstance(modulus, six.integer_types) - ): - raise TypeError("RSAPublicKey arguments must be integers.") - - _check_public_key_components(public_exponent, modulus) - - self._public_exponent = public_exponent - self._modulus = modulus - - def verifier(self, signature, padding, algorithm, backend): - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.create_rsa_verification_ctx(self, signature, padding, - algorithm) - - def encrypt(self, plaintext, padding, backend): - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.encrypt_rsa(self, plaintext, padding) - - @property - def key_size(self): - return utils.bit_length(self.modulus) - - @property - def public_exponent(self): - return self._public_exponent - - @property - def modulus(self): - return self._modulus - - @property - def e(self): - return self.public_exponent - - @property - 
def n(self): - return self.modulus - - -def _modinv(e, m): - """ - Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 - """ - x1, y1, x2, y2 = 1, 0, 0, 1 - a, b = e, m - while b > 0: - q, r = divmod(a, b) - xn, yn = x1 - q * x2, y1 - q * y2 - a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn - return x1 % m - - -def rsa_crt_iqmp(p, q): - """ - Compute the CRT (q ** -1) % p value from RSA primes p and q. - """ - return _modinv(q, p) - - -def rsa_crt_dmp1(private_exponent, p): - """ - Compute the CRT private_exponent % (p - 1) value from the RSA - private_exponent and p. - """ - return private_exponent % (p - 1) - - -def rsa_crt_dmq1(private_exponent, q): - """ - Compute the CRT private_exponent % (q - 1) value from the RSA - private_exponent and q. - """ - return private_exponent % (q - 1) - - -class RSAPrivateKey(object): - def __init__(self, p, q, private_exponent, dmp1, dmq1, iqmp, - public_exponent, modulus): - warnings.warn( - "The RSAPrivateKey class is deprecated and will be removed in a " - "future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if ( - not isinstance(p, six.integer_types) or - not isinstance(q, six.integer_types) or - not isinstance(dmp1, six.integer_types) or - not isinstance(dmq1, six.integer_types) or - not isinstance(iqmp, six.integer_types) or - not isinstance(private_exponent, six.integer_types) or - not isinstance(public_exponent, six.integer_types) or - not isinstance(modulus, six.integer_types) - ): - raise TypeError("RSAPrivateKey arguments must be integers.") - - _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, - public_exponent, modulus) - - self._p = p - self._q = q - self._dmp1 = dmp1 - self._dmq1 = dmq1 - self._iqmp = iqmp - self._private_exponent = private_exponent - self._public_exponent = public_exponent - self._modulus = modulus - - @classmethod - def generate(cls, public_exponent, key_size, backend): - warnings.warn( - "generate is deprecated and will be removed in a future version.", - utils.DeprecatedIn05, - stacklevel=2 - ) - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - _verify_rsa_parameters(public_exponent, key_size) - key = backend.generate_rsa_private_key(public_exponent, key_size) - private_numbers = key.private_numbers() - return RSAPrivateKey( - p=private_numbers.p, - q=private_numbers.q, - dmp1=private_numbers.dmp1, - dmq1=private_numbers.dmq1, - iqmp=private_numbers.iqmp, - private_exponent=private_numbers.d, - public_exponent=private_numbers.public_numbers.e, - modulus=private_numbers.public_numbers.n - ) - - def signer(self, padding, algorithm, backend): - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.create_rsa_signature_ctx(self, padding, algorithm) - - def decrypt(self, ciphertext, padding, backend): - if not isinstance(backend, RSABackend): - raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend.", - _Reasons.BACKEND_MISSING_INTERFACE - ) - - return backend.decrypt_rsa(self, ciphertext, padding) - - @property - def key_size(self): - return utils.bit_length(self.modulus) - - def public_key(self): - return RSAPublicKey(self.public_exponent, self.modulus) - - @property - def p(self): - return self._p - - @property - def q(self): - return self._q - - @property - def private_exponent(self): - return 
self._private_exponent - - @property - def public_exponent(self): - return self._public_exponent - - @property - def modulus(self): - return self._modulus - - @property - def d(self): - return self.private_exponent - - @property - def dmp1(self): - return self._dmp1 - - @property - def dmq1(self): - return self._dmq1 - - @property - def iqmp(self): - return self._iqmp - - @property - def e(self): - return self.public_exponent - - @property - def n(self): - return self.modulus - - -class RSAPrivateNumbers(object): - def __init__(self, p, q, d, dmp1, dmq1, iqmp, - public_numbers): - if ( - not isinstance(p, six.integer_types) or - not isinstance(q, six.integer_types) or - not isinstance(d, six.integer_types) or - not isinstance(dmp1, six.integer_types) or - not isinstance(dmq1, six.integer_types) or - not isinstance(iqmp, six.integer_types) - ): - raise TypeError( - "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" - " all be an integers." - ) - - if not isinstance(public_numbers, RSAPublicNumbers): - raise TypeError( - "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" - " instance." - ) - - self._p = p - self._q = q - self._d = d - self._dmp1 = dmp1 - self._dmq1 = dmq1 - self._iqmp = iqmp - self._public_numbers = public_numbers - - @property - def p(self): - return self._p - - @property - def q(self): - return self._q - - @property - def d(self): - return self._d - - @property - def dmp1(self): - return self._dmp1 - - @property - def dmq1(self): - return self._dmq1 - - @property - def iqmp(self): - return self._iqmp - - @property - def public_numbers(self): - return self._public_numbers - - def private_key(self, backend): - return backend.load_rsa_private_numbers(self) - - -class RSAPublicNumbers(object): - def __init__(self, e, n): - if ( - not isinstance(e, six.integer_types) or - not isinstance(n, six.integer_types) - ): - raise TypeError("RSAPublicNumbers arguments must be integers.") - - self._e = e - self._n = n - - @property - def e(self): - return self._e - - @property - def n(self): - return self._n - - def public_key(self, backend): - return backend.load_rsa_public_numbers(self) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py deleted file mode 100644 index 509b4de..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/modes.py +++ /dev/null @@ -1,116 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
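The rsa_crt_* helpers above are thin wrappers around modular arithmetic. A tiny worked example with deliberately insecure textbook values shows what each one returns:

from cryptography.hazmat.primitives.asymmetric.rsa import (
    rsa_crt_dmp1,
    rsa_crt_dmq1,
    rsa_crt_iqmp,
)

# Textbook-sized numbers, useless for real keys but easy to check by hand.
p, q, d = 61, 53, 2753

dmp1 = rsa_crt_dmp1(d, p)   # d % (p - 1) == 53
dmq1 = rsa_crt_dmq1(d, q)   # d % (q - 1) == 49
iqmp = rsa_crt_iqmp(p, q)   # modular inverse of q mod p == 38

assert (iqmp * q) % p == 1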
- -from __future__ import absolute_import, division, print_function - -from cryptography import utils -from cryptography.hazmat.primitives import interfaces - - -def _check_iv_length(mode, algorithm): - if len(mode.initialization_vector) * 8 != algorithm.block_size: - raise ValueError("Invalid IV size ({0}) for {1}.".format( - len(mode.initialization_vector), mode.name - )) - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithInitializationVector) -class CBC(object): - name = "CBC" - - def __init__(self, initialization_vector): - self.initialization_vector = initialization_vector - - validate_for_algorithm = _check_iv_length - - -@utils.register_interface(interfaces.Mode) -class ECB(object): - name = "ECB" - - def validate_for_algorithm(self, algorithm): - pass - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithInitializationVector) -class OFB(object): - name = "OFB" - - def __init__(self, initialization_vector): - self.initialization_vector = initialization_vector - - validate_for_algorithm = _check_iv_length - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithInitializationVector) -class CFB(object): - name = "CFB" - - def __init__(self, initialization_vector): - self.initialization_vector = initialization_vector - - validate_for_algorithm = _check_iv_length - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithInitializationVector) -class CFB8(object): - name = "CFB8" - - def __init__(self, initialization_vector): - self.initialization_vector = initialization_vector - - validate_for_algorithm = _check_iv_length - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithNonce) -class CTR(object): - name = "CTR" - - def __init__(self, nonce): - self.nonce = nonce - - def validate_for_algorithm(self, algorithm): - if len(self.nonce) * 8 != algorithm.block_size: - raise ValueError("Invalid nonce size ({0}) for {1}.".format( - len(self.nonce), self.name - )) - - -@utils.register_interface(interfaces.Mode) -@utils.register_interface(interfaces.ModeWithInitializationVector) -@utils.register_interface(interfaces.ModeWithAuthenticationTag) -class GCM(object): - name = "GCM" - - def __init__(self, initialization_vector, tag=None, min_tag_length=16): - # len(initialization_vector) must in [1, 2 ** 64), but it's impossible - # to actually construct a bytes object that large, so we don't check - # for it - if min_tag_length < 4: - raise ValueError("min_tag_length must be >= 4") - if tag is not None and len(tag) < min_tag_length: - raise ValueError( - "Authentication tag must be {0} bytes or longer.".format( - min_tag_length) - ) - - self.initialization_vector = initialization_vector - self.tag = tag - - def validate_for_algorithm(self, algorithm): - pass diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py deleted file mode 100644 index 9789851..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/constant_time.py +++ /dev/null @@ -1,71 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import hmac -import sys - -import cffi - -from cryptography.hazmat.bindings.utils import _create_modulename - -TYPES = """ -uint8_t Cryptography_constant_time_bytes_eq(uint8_t *, size_t, uint8_t *, - size_t); -""" - -FUNCTIONS = """ -uint8_t Cryptography_constant_time_bytes_eq(uint8_t *a, size_t len_a, - uint8_t *b, size_t len_b) { - size_t i = 0; - uint8_t mismatch = 0; - if (len_a != len_b) { - return 0; - } - for (i = 0; i < len_a; i++) { - mismatch |= a[i] ^ b[i]; - } - - /* Make sure any bits set are copied to the lowest bit */ - mismatch |= mismatch >> 4; - mismatch |= mismatch >> 2; - mismatch |= mismatch >> 1; - /* Now check the low bit to see if it's set */ - return (mismatch & 1) == 0; -} -""" - -_ffi = cffi.FFI() -_ffi.cdef(TYPES) -_lib = _ffi.verify( - source=FUNCTIONS, - modulename=_create_modulename([TYPES], FUNCTIONS, sys.version), - ext_package="cryptography", -) - -if hasattr(hmac, "compare_digest"): - def bytes_eq(a, b): - if not isinstance(a, bytes) or not isinstance(b, bytes): - raise TypeError("a and b must be bytes.") - - return hmac.compare_digest(a, b) - -else: - def bytes_eq(a, b): - if not isinstance(a, bytes) or not isinstance(b, bytes): - raise TypeError("a and b must be bytes.") - - return _lib.Cryptography_constant_time_bytes_eq( - a, len(a), b, len(b) - ) == 1 diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces.py deleted file mode 100644 index d60f9e0..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/interfaces.py +++ /dev/null @@ -1,458 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function - -import abc - -import six - - -@six.add_metaclass(abc.ABCMeta) -class CipherAlgorithm(object): - @abc.abstractproperty - def name(self): - """ - A string naming this mode (e.g. "AES", "Camellia"). - """ - - @abc.abstractproperty - def key_size(self): - """ - The size of the key being used as an integer in bits (e.g. 128, 256). - """ - - -@six.add_metaclass(abc.ABCMeta) -class BlockCipherAlgorithm(object): - @abc.abstractproperty - def block_size(self): - """ - The size of a block as an integer in bits (e.g. 64, 128). - """ - - -@six.add_metaclass(abc.ABCMeta) -class Mode(object): - @abc.abstractproperty - def name(self): - """ - A string naming this mode (e.g. "ECB", "CBC"). 
- """ - - @abc.abstractmethod - def validate_for_algorithm(self, algorithm): - """ - Checks that all the necessary invariants of this (mode, algorithm) - combination are met. - """ - - -@six.add_metaclass(abc.ABCMeta) -class ModeWithInitializationVector(object): - @abc.abstractproperty - def initialization_vector(self): - """ - The value of the initialization vector for this mode as bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class ModeWithNonce(object): - @abc.abstractproperty - def nonce(self): - """ - The value of the nonce for this mode as bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class ModeWithAuthenticationTag(object): - @abc.abstractproperty - def tag(self): - """ - The value of the tag supplied to the constructor of this mode. - """ - - -@six.add_metaclass(abc.ABCMeta) -class CipherContext(object): - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes through the cipher and returns the results - as bytes. - """ - - @abc.abstractmethod - def finalize(self): - """ - Returns the results of processing the final block as bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class AEADCipherContext(object): - @abc.abstractmethod - def authenticate_additional_data(self, data): - """ - Authenticates the provided bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class AEADEncryptionContext(object): - @abc.abstractproperty - def tag(self): - """ - Returns tag bytes. This is only available after encryption is - finalized. - """ - - -@six.add_metaclass(abc.ABCMeta) -class PaddingContext(object): - @abc.abstractmethod - def update(self, data): - """ - Pads the provided bytes and returns any available data as bytes. - """ - - @abc.abstractmethod - def finalize(self): - """ - Finalize the padding, returns bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class HashAlgorithm(object): - @abc.abstractproperty - def name(self): - """ - A string naming this algorithm (e.g. "sha256", "md5"). - """ - - @abc.abstractproperty - def digest_size(self): - """ - The size of the resulting digest in bytes. - """ - - @abc.abstractproperty - def block_size(self): - """ - The internal block size of the hash algorithm in bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class HashContext(object): - @abc.abstractproperty - def algorithm(self): - """ - A HashAlgorithm that will be used by this context. - """ - - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes through the hash. - """ - - @abc.abstractmethod - def finalize(self): - """ - Finalizes the hash context and returns the hash digest as bytes. - """ - - @abc.abstractmethod - def copy(self): - """ - Return a HashContext that is a copy of the current context. - """ - - -@six.add_metaclass(abc.ABCMeta) -class RSAPrivateKey(object): - @abc.abstractmethod - def signer(self, padding, algorithm): - """ - Returns an AsymmetricSignatureContext used for signing data. - """ - - @abc.abstractmethod - def decrypt(self, ciphertext, padding): - """ - Decrypts the provided ciphertext. - """ - - @abc.abstractproperty - def key_size(self): - """ - The bit length of the public modulus. - """ - - @abc.abstractmethod - def public_key(self): - """ - The RSAPublicKey associated with this private key. - """ - - -@six.add_metaclass(abc.ABCMeta) -class RSAPrivateKeyWithNumbers(RSAPrivateKey): - @abc.abstractmethod - def private_numbers(self): - """ - Returns an RSAPrivateNumbers. 
- """ - - -@six.add_metaclass(abc.ABCMeta) -class RSAPublicKey(object): - @abc.abstractmethod - def verifier(self, signature, padding, algorithm): - """ - Returns an AsymmetricVerificationContext used for verifying signatures. - """ - - @abc.abstractmethod - def encrypt(self, plaintext, padding): - """ - Encrypts the given plaintext. - """ - - @abc.abstractproperty - def key_size(self): - """ - The bit length of the public modulus. - """ - - -@six.add_metaclass(abc.ABCMeta) -class RSAPublicKeyWithNumbers(RSAPublicKey): - @abc.abstractmethod - def public_numbers(self): - """ - Returns an RSAPublicNumbers - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAParameters(object): - @abc.abstractmethod - def generate_private_key(self): - """ - Generates and returns a DSAPrivateKey. - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAParametersWithNumbers(DSAParameters): - @abc.abstractmethod - def parameter_numbers(self): - """ - Returns a DSAParameterNumbers. - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAPrivateKey(object): - @abc.abstractproperty - def key_size(self): - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def public_key(self): - """ - The DSAPublicKey associated with this private key. - """ - - @abc.abstractmethod - def parameters(self): - """ - The DSAParameters object associated with this private key. - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAPrivateKeyWithNumbers(DSAPrivateKey): - @abc.abstractmethod - def private_numbers(self): - """ - Returns a DSAPrivateNumbers. - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAPublicKey(object): - @abc.abstractproperty - def key_size(self): - """ - The bit length of the prime modulus. - """ - - @abc.abstractmethod - def parameters(self): - """ - The DSAParameters object associated with this public key. - """ - - -@six.add_metaclass(abc.ABCMeta) -class DSAPublicKeyWithNumbers(DSAPublicKey): - @abc.abstractmethod - def public_numbers(self): - """ - Returns a DSAPublicNumbers. - """ - - -@six.add_metaclass(abc.ABCMeta) -class AsymmetricSignatureContext(object): - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes and returns nothing. - """ - - @abc.abstractmethod - def finalize(self): - """ - Returns the signature as bytes. - """ - - -@six.add_metaclass(abc.ABCMeta) -class AsymmetricVerificationContext(object): - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes and returns nothing. - """ - - @abc.abstractmethod - def verify(self): - """ - Raises an exception if the bytes provided to update do not match the - signature or the signature does not match the public key. - """ - - -@six.add_metaclass(abc.ABCMeta) -class AsymmetricPadding(object): - @abc.abstractproperty - def name(self): - """ - A string naming this padding (e.g. "PSS", "PKCS1"). - """ - - -@six.add_metaclass(abc.ABCMeta) -class KeyDerivationFunction(object): - @abc.abstractmethod - def derive(self, key_material): - """ - Deterministically generates and returns a new key based on the existing - key material. - """ - - @abc.abstractmethod - def verify(self, key_material, expected_key): - """ - Checks whether the key generated by the key material matches the - expected derived key. Raises an exception if they do not match. - """ - - -@six.add_metaclass(abc.ABCMeta) -class CMACContext(object): - @abc.abstractmethod - def update(self, data): - """ - Processes the provided bytes. - """ - - def finalize(self): - """ - Returns the message authentication code as bytes. 
- """ - - @abc.abstractmethod - def copy(self): - """ - Return a CMACContext that is a copy of the current context. - """ - - -@six.add_metaclass(abc.ABCMeta) -class EllipticCurve(object): - @abc.abstractproperty - def name(self): - """ - The name of the curve. e.g. secp256r1. - """ - - @abc.abstractproperty - def key_size(self): - """ - The bit length of the base point of the curve. - """ - - -@six.add_metaclass(abc.ABCMeta) -class EllipticCurveSignatureAlgorithm(object): - @abc.abstractproperty - def algorithm(self): - """ - The digest algorithm used with this signature. - """ - - -@six.add_metaclass(abc.ABCMeta) -class EllipticCurvePrivateKey(object): - @abc.abstractmethod - def signer(self, signature_algorithm): - """ - Returns an AsymmetricSignatureContext used for signing data. - """ - - @abc.abstractmethod - def public_key(self): - """ - The EllipticCurvePublicKey for this private key. - """ - - @abc.abstractproperty - def curve(self): - """ - The EllipticCurve that this key is on. - """ - - -@six.add_metaclass(abc.ABCMeta) -class EllipticCurvePublicKey(object): - @abc.abstractmethod - def verifier(self, signature, signature_algorithm): - """ - Returns an AsymmetricVerificationContext used for signing data. - """ - - @abc.abstractproperty - def curve(self): - """ - The EllipticCurve that this key is on. - """ diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py deleted file mode 100644 index ed73c4c..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/serialization.py +++ /dev/null @@ -1,26 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import absolute_import, division, print_function - - -def load_pem_traditional_openssl_private_key(data, password, backend): - return backend.load_traditional_openssl_pem_private_key( - data, password - ) - - -def load_pem_pkcs8_private_key(data, password, backend): - return backend.load_pkcs8_pem_private_key( - data, password - ) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py deleted file mode 100644 index 2f42057..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.4/site-packages/cryptography/utils.py b/Darwin/lib/python3.4/site-packages/cryptography/utils.py deleted file mode 100644 index 1db1615..0000000 --- a/Darwin/lib/python3.4/site-packages/cryptography/utils.py +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
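Both PEM loaders above simply delegate to the backend. A hedged usage sketch, assuming default_backend is available from cryptography.hazmat.backends and with a hypothetical key path:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_pkcs8_private_key

# Hypothetical file name; password=None means the PEM is unencrypted.
with open("key.pem", "rb") as f:
    private_key = load_pem_pkcs8_private_key(
        f.read(), password=None, backend=default_backend()
    )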
- -from __future__ import absolute_import, division, print_function - -import sys - - -DeprecatedIn04 = DeprecationWarning -DeprecatedIn05 = PendingDeprecationWarning - - -def register_interface(iface): - def register_decorator(klass): - iface.register(klass) - return klass - return register_decorator - - -def bit_length(x): - if sys.version_info >= (2, 7): - return x.bit_length() - else: - return len(bin(x)) - (2 + (x <= 0)) diff --git a/Darwin/lib/python3.4/site-packages/easy-install.pth b/Darwin/lib/python3.4/site-packages/easy-install.pth deleted file mode 100644 index bfe2e6d..0000000 --- a/Darwin/lib/python3.4/site-packages/easy-install.pth +++ /dev/null @@ -1,3 +0,0 @@ -import sys; sys.__plen = len(sys.path) -/Users/build/platform_darwin/build/PythonPackages/src/ed25519 -import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new) diff --git a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 46a81ef..0000000 --- a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,38 +0,0 @@ -LICENSE -MANIFEST.in -Makefile -NEWS -README.md -kat-ed25519.txt -kat.py -test_ed25519_kat.py -versioneer.py -bin/edsig -ed25519.egg-info/PKG-INFO -ed25519.egg-info/SOURCES.txt -ed25519.egg-info/dependency_links.txt -ed25519.egg-info/top_level.txt -src/ed25519/__init__.py -src/ed25519/_version.py -src/ed25519/keys.py -src/ed25519/test_ed25519.py -src/ed25519-glue/ed25519module.c -src/ed25519-supercop-ref/Makefile -src/ed25519-supercop-ref/api.h -src/ed25519-supercop-ref/crypto_int32.h -src/ed25519-supercop-ref/crypto_sign.h -src/ed25519-supercop-ref/crypto_uint32.h -src/ed25519-supercop-ref/crypto_verify_32.h -src/ed25519-supercop-ref/ed25519.c -src/ed25519-supercop-ref/fe25519.c -src/ed25519-supercop-ref/fe25519.h -src/ed25519-supercop-ref/ge25519.c -src/ed25519-supercop-ref/ge25519.h -src/ed25519-supercop-ref/ge25519_base.data -src/ed25519-supercop-ref/sc25519.c -src/ed25519-supercop-ref/sc25519.h -src/ed25519-supercop-ref/sha512-blocks.c -src/ed25519-supercop-ref/sha512-hash.c -src/ed25519-supercop-ref/sha512.h -src/ed25519-supercop-ref/test.c -src/ed25519-supercop-ref/verify.c \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 485b837..0000000 --- a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,15 +0,0 @@ -../ed25519/__init__.py -../ed25519/_version.py -../ed25519/keys.py -../ed25519/test_ed25519.py -../ed25519/__init__.pyc -../ed25519/_version.pyc -../ed25519/keys.pyc -../ed25519/test_ed25519.pyc -../ed25519/_ed25519.so -./ -dependency_links.txt -PKG-INFO -SOURCES.txt -top_level.txt -../../../../bin/edsig diff --git a/Darwin/lib/python3.4/site-packages/ed25519/_ed25519.so b/Darwin/lib/python3.4/site-packages/ed25519/_ed25519.so deleted file mode 100755 index 
879352b..0000000 Binary files a/Darwin/lib/python3.4/site-packages/ed25519/_ed25519.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/ed25519/_version.py b/Darwin/lib/python3.4/site-packages/ed25519/_version.py deleted file mode 100644 index f98ff6c..0000000 --- a/Darwin/lib/python3.4/site-packages/ed25519/_version.py +++ /dev/null @@ -1,11 +0,0 @@ - -# This file was generated by 'versioneer.py' (0.11) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -version_version = '1.3' -version_full = '06c43e2d15ba45dbcfda780a81e9b49c199bce16' -def get_versions(default={}, verbose=False): - return {'version': version_version, 'full': version_full} - diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/PKG-INFO deleted file mode 100644 index b98b227..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,75 +0,0 @@ -Metadata-Version: 1.1 -Name: lxml -Version: 3.3.6 -Summary: Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API. -Home-page: http://lxml.de/ -Author: lxml dev team -Author-email: lxml-dev@lxml.de -License: UNKNOWN -Download-URL: http://pypi.python.org/packages/source/l/lxml/lxml-3.3.6.tar.gz -Description: lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It - provides safe and convenient access to these libraries using the ElementTree - API. - - It extends the ElementTree API significantly to offer support for XPath, - RelaxNG, XML Schema, XSLT, C14N and much more. - - To contact the project, go to the `project home page - `_ or see our bug tracker at - https://launchpad.net/lxml - - In case you want to use the current in-development version of lxml, - you can get it from the github repository at - https://github.com/lxml/lxml . Note that this requires Cython to - build the sources, see the build instructions on the project home - page. To the same end, running ``easy_install lxml==dev`` will - install lxml from - https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have - an appropriate version of Cython installed. - - - After an official release of a new stable series, bug fixes may become - available at - https://github.com/lxml/lxml/tree/lxml-3.3 . - Running ``easy_install lxml==3.3bugfix`` will install - the unreleased branch state from - https://github.com/lxml/lxml/tarball/lxml-3.3#egg=lxml-3.3bugfix - as soon as a maintenance branch has been established. Note that this - requires Cython to be installed at an appropriate version for the build. - - 3.3.6 (2014-08-28) - ================== - - Bugs fixed - ---------- - - * Prevent tree cycle creation when adding Elements as siblings. - - * LP#1361948: crash when deallocating Element siblings without parent. - - * LP#1354652: crash when traversing internally loaded documents in XSLT - extension functions. 
- - - -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: BSD License -Classifier: Programming Language :: Cython -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.4 -Classifier: Programming Language :: Python :: 2.5 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: C -Classifier: Operating System :: OS Independent -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Topic :: Text Processing :: Markup :: XML -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 595cdcb..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,755 +0,0 @@ -CHANGES.txt -CREDITS.txt -INSTALL.txt -LICENSES.txt -MANIFEST.in -Makefile -README.rst -TODO.txt -buildlibxml.py -ez_setup.py -requirements.txt -selftest.py -selftest2.py -setup.cfg -setup.py -setupinfo.py -test.py -update-error-constants.py -version.txt -versioninfo.py -benchmark/bench_etree.py -benchmark/bench_objectify.py -benchmark/bench_xpath.py -benchmark/bench_xslt.py -benchmark/benchbase.py -doc/FAQ.txt -doc/api.txt -doc/build.txt -doc/capi.txt -doc/compatibility.txt -doc/cssselect.txt -doc/docstructure.py -doc/element_classes.txt -doc/elementsoup.txt -doc/extensions.txt -doc/html5parser.txt -doc/intro.txt -doc/lxml-source-howto.txt -doc/lxml.mgp -doc/lxml2.txt -doc/lxmlhtml.txt -doc/main.txt -doc/memorymanagement.txt -doc/mkhtml.py -doc/mklatex.py -doc/objectify.txt -doc/parsing.txt -doc/performance.txt -doc/pubkey.asc -doc/resolvers.txt -doc/rest2html.py -doc/rest2latex.py -doc/sax.txt -doc/test.xml -doc/tutorial.txt -doc/valgrind.txt -doc/validation.txt -doc/xpathxslt.txt -doc/html/FAQ.html -doc/html/api.html -doc/html/build.html -doc/html/capi.html -doc/html/changes-3.3.6.html -doc/html/compatibility.html -doc/html/credits.html -doc/html/cssselect.html -doc/html/element_classes.html -doc/html/elementsoup.html -doc/html/extensions.html -doc/html/html5parser.html -doc/html/index.html -doc/html/installation.html -doc/html/intro.html -doc/html/lxml-source-howto.html -doc/html/lxmlhtml.html -doc/html/objectify.html -doc/html/parsing.html -doc/html/performance.html -doc/html/pubkey.asc -doc/html/resolvers.html -doc/html/sax.html -doc/html/sitemap.html -doc/html/style.css -doc/html/tagpython-big.png -doc/html/tutorial.html -doc/html/validation.html -doc/html/xpathxslt.html -doc/html/api/abc.ABCMeta-class.html -doc/html/api/api-objects.txt -doc/html/api/class-tree.html -doc/html/api/cssselect.parser.SelectorError-class.html -doc/html/api/cssselect.parser.SelectorSyntaxError-class.html -doc/html/api/cssselect.xpath.ExpressionError-class.html -doc/html/api/cssselect.xpath.XPathExpr-class.html -doc/html/api/deprecated-index.html -doc/html/api/epydoc.css -doc/html/api/exceptions.AssertionError-class.html -doc/html/api/frames.html 
-doc/html/api/help.html -doc/html/api/identifier-index-A.html -doc/html/api/identifier-index-B.html -doc/html/api/identifier-index-C.html -doc/html/api/identifier-index-D.html -doc/html/api/identifier-index-E.html -doc/html/api/identifier-index-F.html -doc/html/api/identifier-index-G.html -doc/html/api/identifier-index-H.html -doc/html/api/identifier-index-I.html -doc/html/api/identifier-index-J.html -doc/html/api/identifier-index-K.html -doc/html/api/identifier-index-L.html -doc/html/api/identifier-index-M.html -doc/html/api/identifier-index-N.html -doc/html/api/identifier-index-O.html -doc/html/api/identifier-index-P.html -doc/html/api/identifier-index-Q.html -doc/html/api/identifier-index-R.html -doc/html/api/identifier-index-S.html -doc/html/api/identifier-index-T.html -doc/html/api/identifier-index-U.html -doc/html/api/identifier-index-V.html -doc/html/api/identifier-index-W.html -doc/html/api/identifier-index-X.html -doc/html/api/identifier-index-Y.html -doc/html/api/identifier-index-Z.html -doc/html/api/identifier-index-_.html -doc/html/api/identifier-index.html -doc/html/api/index.html -doc/html/api/lxml-module.html -doc/html/api/lxml-pysrc.html -doc/html/api/lxml.ElementInclude-module.html -doc/html/api/lxml.ElementInclude-pysrc.html -doc/html/api/lxml.ElementInclude.FatalIncludeError-class.html -doc/html/api/lxml.builder-module.html -doc/html/api/lxml.builder-pysrc.html -doc/html/api/lxml.builder.ElementMaker-class.html -doc/html/api/lxml.cssselect-module.html -doc/html/api/lxml.cssselect-pysrc.html -doc/html/api/lxml.cssselect.CSSSelector-class.html -doc/html/api/lxml.cssselect.LxmlHTMLTranslator-class.html -doc/html/api/lxml.cssselect.LxmlTranslator-class.html -doc/html/api/lxml.doctestcompare-module.html -doc/html/api/lxml.doctestcompare-pysrc.html -doc/html/api/lxml.doctestcompare.LHTMLOutputChecker-class.html -doc/html/api/lxml.doctestcompare.LXMLOutputChecker-class.html -doc/html/api/lxml.doctestcompare._RestoreChecker-class.html -doc/html/api/lxml.etree-module.html -doc/html/api/lxml.etree.AncestorsIterator-class.html -doc/html/api/lxml.etree.AttributeBasedElementClassLookup-class.html -doc/html/api/lxml.etree.C14NError-class.html -doc/html/api/lxml.etree.CDATA-class.html -doc/html/api/lxml.etree.CommentBase-class.html -doc/html/api/lxml.etree.CustomElementClassLookup-class.html -doc/html/api/lxml.etree.DTD-class.html -doc/html/api/lxml.etree.DTDError-class.html -doc/html/api/lxml.etree.DTDParseError-class.html -doc/html/api/lxml.etree.DTDValidateError-class.html -doc/html/api/lxml.etree.DocInfo-class.html -doc/html/api/lxml.etree.DocumentInvalid-class.html -doc/html/api/lxml.etree.ETCompatXMLParser-class.html -doc/html/api/lxml.etree.ETXPath-class.html -doc/html/api/lxml.etree.ElementBase-class.html -doc/html/api/lxml.etree.ElementChildIterator-class.html -doc/html/api/lxml.etree.ElementClassLookup-class.html -doc/html/api/lxml.etree.ElementDefaultClassLookup-class.html -doc/html/api/lxml.etree.ElementDepthFirstIterator-class.html -doc/html/api/lxml.etree.ElementNamespaceClassLookup-class.html -doc/html/api/lxml.etree.ElementTextIterator-class.html -doc/html/api/lxml.etree.EntityBase-class.html -doc/html/api/lxml.etree.Error-class.html -doc/html/api/lxml.etree.ErrorDomains-class.html -doc/html/api/lxml.etree.ErrorLevels-class.html -doc/html/api/lxml.etree.ErrorTypes-class.html -doc/html/api/lxml.etree.FallbackElementClassLookup-class.html -doc/html/api/lxml.etree.HTMLParser-class.html -doc/html/api/lxml.etree.HTMLPullParser-class.html 
-doc/html/api/lxml.etree.LxmlError-class.html -doc/html/api/lxml.etree.LxmlRegistryError-class.html -doc/html/api/lxml.etree.LxmlSyntaxError-class.html -doc/html/api/lxml.etree.NamespaceRegistryError-class.html -doc/html/api/lxml.etree.PIBase-class.html -doc/html/api/lxml.etree.ParseError-class.html -doc/html/api/lxml.etree.ParserBasedElementClassLookup-class.html -doc/html/api/lxml.etree.ParserError-class.html -doc/html/api/lxml.etree.PyErrorLog-class.html -doc/html/api/lxml.etree.PythonElementClassLookup-class.html -doc/html/api/lxml.etree.QName-class.html -doc/html/api/lxml.etree.RelaxNG-class.html -doc/html/api/lxml.etree.RelaxNGError-class.html -doc/html/api/lxml.etree.RelaxNGErrorTypes-class.html -doc/html/api/lxml.etree.RelaxNGParseError-class.html -doc/html/api/lxml.etree.RelaxNGValidateError-class.html -doc/html/api/lxml.etree.Resolver-class.html -doc/html/api/lxml.etree.Schematron-class.html -doc/html/api/lxml.etree.SchematronError-class.html -doc/html/api/lxml.etree.SchematronParseError-class.html -doc/html/api/lxml.etree.SchematronValidateError-class.html -doc/html/api/lxml.etree.SerialisationError-class.html -doc/html/api/lxml.etree.SiblingsIterator-class.html -doc/html/api/lxml.etree.TreeBuilder-class.html -doc/html/api/lxml.etree.XInclude-class.html -doc/html/api/lxml.etree.XIncludeError-class.html -doc/html/api/lxml.etree.XMLParser-class.html -doc/html/api/lxml.etree.XMLPullParser-class.html -doc/html/api/lxml.etree.XMLSchema-class.html -doc/html/api/lxml.etree.XMLSchemaError-class.html -doc/html/api/lxml.etree.XMLSchemaParseError-class.html -doc/html/api/lxml.etree.XMLSchemaValidateError-class.html -doc/html/api/lxml.etree.XMLSyntaxError-class.html -doc/html/api/lxml.etree.XPath-class.html -doc/html/api/lxml.etree.XPathDocumentEvaluator-class.html -doc/html/api/lxml.etree.XPathElementEvaluator-class.html -doc/html/api/lxml.etree.XPathError-class.html -doc/html/api/lxml.etree.XPathEvalError-class.html -doc/html/api/lxml.etree.XPathFunctionError-class.html -doc/html/api/lxml.etree.XPathResultError-class.html -doc/html/api/lxml.etree.XPathSyntaxError-class.html -doc/html/api/lxml.etree.XSLT-class.html -doc/html/api/lxml.etree.XSLTAccessControl-class.html -doc/html/api/lxml.etree.XSLTApplyError-class.html -doc/html/api/lxml.etree.XSLTError-class.html -doc/html/api/lxml.etree.XSLTExtension-class.html -doc/html/api/lxml.etree.XSLTExtensionError-class.html -doc/html/api/lxml.etree.XSLTParseError-class.html -doc/html/api/lxml.etree.XSLTSaveError-class.html -doc/html/api/lxml.etree._Attrib-class.html -doc/html/api/lxml.etree._BaseErrorLog-class.html -doc/html/api/lxml.etree._Comment-class.html -doc/html/api/lxml.etree._Document-class.html -doc/html/api/lxml.etree._DomainErrorLog-class.html -doc/html/api/lxml.etree._Element-class.html -doc/html/api/lxml.etree._ElementIterator-class.html -doc/html/api/lxml.etree._ElementMatchIterator-class.html -doc/html/api/lxml.etree._ElementStringResult-class.html -doc/html/api/lxml.etree._ElementTagMatcher-class.html -doc/html/api/lxml.etree._ElementTree-class.html -doc/html/api/lxml.etree._ElementUnicodeResult-class.html -doc/html/api/lxml.etree._Entity-class.html -doc/html/api/lxml.etree._ErrorLog-class.html -doc/html/api/lxml.etree._FeedParser-class.html -doc/html/api/lxml.etree._IDDict-class.html -doc/html/api/lxml.etree._ListErrorLog-class.html -doc/html/api/lxml.etree._LogEntry-class.html -doc/html/api/lxml.etree._ProcessingInstruction-class.html -doc/html/api/lxml.etree._RotatingErrorLog-class.html 
-doc/html/api/lxml.etree._SaxParserTarget-class.html -doc/html/api/lxml.etree._TargetParserResult-class.html -doc/html/api/lxml.etree._Validator-class.html -doc/html/api/lxml.etree._XPathEvaluatorBase-class.html -doc/html/api/lxml.etree._XSLTProcessingInstruction-class.html -doc/html/api/lxml.etree._XSLTResultTree-class.html -doc/html/api/lxml.etree.iterparse-class.html -doc/html/api/lxml.etree.iterwalk-class.html -doc/html/api/lxml.etree.xmlfile-class.html -doc/html/api/lxml.html-module.html -doc/html/api/lxml.html-pysrc.html -doc/html/api/lxml.html.CheckboxGroup-class.html -doc/html/api/lxml.html.CheckboxValues-class.html -doc/html/api/lxml.html.ElementSoup-module.html -doc/html/api/lxml.html.ElementSoup-pysrc.html -doc/html/api/lxml.html.FieldsDict-class.html -doc/html/api/lxml.html.FormElement-class.html -doc/html/api/lxml.html.HTMLParser-class.html -doc/html/api/lxml.html.HtmlComment-class.html -doc/html/api/lxml.html.HtmlElement-class.html -doc/html/api/lxml.html.HtmlElementClassLookup-class.html -doc/html/api/lxml.html.HtmlEntity-class.html -doc/html/api/lxml.html.HtmlMixin-class.html -doc/html/api/lxml.html.HtmlProcessingInstruction-class.html -doc/html/api/lxml.html.InputElement-class.html -doc/html/api/lxml.html.InputGetter-class.html -doc/html/api/lxml.html.InputMixin-class.html -doc/html/api/lxml.html.LabelElement-class.html -doc/html/api/lxml.html.MultipleSelectOptions-class.html -doc/html/api/lxml.html.RadioGroup-class.html -doc/html/api/lxml.html.SelectElement-class.html -doc/html/api/lxml.html.TextareaElement-class.html -doc/html/api/lxml.html.XHTMLParser-class.html -doc/html/api/lxml.html._MethodFunc-class.html -doc/html/api/lxml.html.builder-module.html -doc/html/api/lxml.html.builder-pysrc.html -doc/html/api/lxml.html.clean-module.html -doc/html/api/lxml.html.clean-pysrc.html -doc/html/api/lxml.html.clean.Cleaner-class.html -doc/html/api/lxml.html.defs-module.html -doc/html/api/lxml.html.defs-pysrc.html -doc/html/api/lxml.html.diff-module.html -doc/html/api/lxml.html.diff-pysrc.html -doc/html/api/lxml.html.diff.DEL_END-class.html -doc/html/api/lxml.html.diff.DEL_START-class.html -doc/html/api/lxml.html.diff.InsensitiveSequenceMatcher-class.html -doc/html/api/lxml.html.diff.NoDeletes-class.html -doc/html/api/lxml.html.diff.href_token-class.html -doc/html/api/lxml.html.diff.tag_token-class.html -doc/html/api/lxml.html.diff.token-class.html -doc/html/api/lxml.html.formfill-module.html -doc/html/api/lxml.html.formfill-pysrc.html -doc/html/api/lxml.html.formfill.DefaultErrorCreator-class.html -doc/html/api/lxml.html.formfill.FormNotFound-class.html -doc/html/api/lxml.html.html5parser-module.html -doc/html/api/lxml.html.html5parser-pysrc.html -doc/html/api/lxml.html.html5parser.HTMLParser-class.html -doc/html/api/lxml.html.html5parser.XHTMLParser-class.html -doc/html/api/lxml.html.soupparser-module.html -doc/html/api/lxml.html.soupparser-pysrc.html -doc/html/api/lxml.html.usedoctest-module.html -doc/html/api/lxml.html.usedoctest-pysrc.html -doc/html/api/lxml.includes-module.html -doc/html/api/lxml.includes-pysrc.html -doc/html/api/lxml.isoschematron-module.html -doc/html/api/lxml.isoschematron-pysrc.html -doc/html/api/lxml.isoschematron.Schematron-class.html -doc/html/api/lxml.objectify-module.html -doc/html/api/lxml.objectify.BoolElement-class.html -doc/html/api/lxml.objectify.ElementMaker-class.html -doc/html/api/lxml.objectify.FloatElement-class.html -doc/html/api/lxml.objectify.IntElement-class.html -doc/html/api/lxml.objectify.LongElement-class.html 
-doc/html/api/lxml.objectify.NoneElement-class.html -doc/html/api/lxml.objectify.NumberElement-class.html -doc/html/api/lxml.objectify.ObjectPath-class.html -doc/html/api/lxml.objectify.ObjectifiedDataElement-class.html -doc/html/api/lxml.objectify.ObjectifiedElement-class.html -doc/html/api/lxml.objectify.ObjectifyElementClassLookup-class.html -doc/html/api/lxml.objectify.PyType-class.html -doc/html/api/lxml.objectify.StringElement-class.html -doc/html/api/lxml.pyclasslookup-module.html -doc/html/api/lxml.pyclasslookup-pysrc.html -doc/html/api/lxml.sax-module.html -doc/html/api/lxml.sax-pysrc.html -doc/html/api/lxml.sax.ElementTreeContentHandler-class.html -doc/html/api/lxml.sax.ElementTreeProducer-class.html -doc/html/api/lxml.sax.SaxError-class.html -doc/html/api/lxml.tests-module.html -doc/html/api/lxml.tests-pysrc.html -doc/html/api/lxml.tests.common_imports-module.html -doc/html/api/lxml.tests.common_imports-pysrc.html -doc/html/api/lxml.tests.common_imports.HelperTestCase-class.html -doc/html/api/lxml.tests.common_imports.LargeFileLike-class.html -doc/html/api/lxml.tests.common_imports.LargeFileLikeUnicode-class.html -doc/html/api/lxml.tests.common_imports.SillyFileLike-class.html -doc/html/api/lxml.tests.dummy_http_server-module.html -doc/html/api/lxml.tests.dummy_http_server-pysrc.html -doc/html/api/lxml.tests.dummy_http_server.HTTPRequestCollector-class.html -doc/html/api/lxml.tests.dummy_http_server.WebServer-class.html -doc/html/api/lxml.tests.dummy_http_server._RequestHandler-class.html -doc/html/api/lxml.tests.test_builder-module.html -doc/html/api/lxml.tests.test_builder-pysrc.html -doc/html/api/lxml.tests.test_builder.BuilderTestCase-class.html -doc/html/api/lxml.tests.test_classlookup-module.html -doc/html/api/lxml.tests.test_classlookup-pysrc.html -doc/html/api/lxml.tests.test_classlookup.ClassLookupTestCase-class.html -doc/html/api/lxml.tests.test_classlookup.ProxyTestCase-class.html -doc/html/api/lxml.tests.test_css-module.html -doc/html/api/lxml.tests.test_css-pysrc.html -doc/html/api/lxml.tests.test_css.CSSTestCase-class.html -doc/html/api/lxml.tests.test_doctestcompare-module.html -doc/html/api/lxml.tests.test_doctestcompare-pysrc.html -doc/html/api/lxml.tests.test_doctestcompare.DoctestCompareTest-class.html -doc/html/api/lxml.tests.test_doctestcompare.DummyInput-class.html -doc/html/api/lxml.tests.test_dtd-module.html -doc/html/api/lxml.tests.test_dtd-pysrc.html -doc/html/api/lxml.tests.test_dtd.ETreeDtdTestCase-class.html -doc/html/api/lxml.tests.test_elementtree-module.html -doc/html/api/lxml.tests.test_elementtree-pysrc.html -doc/html/api/lxml.tests.test_elementtree.CElementTreeTestCase-class.html -doc/html/api/lxml.tests.test_elementtree.ETreePullTestCase-class.html -doc/html/api/lxml.tests.test_elementtree.ETreeTestCase-class.html -doc/html/api/lxml.tests.test_elementtree.ElementTreeTestCase-class.html -doc/html/api/lxml.tests.test_elementtree._ETreeTestCaseBase-class.html -doc/html/api/lxml.tests.test_elementtree._XMLPullParserTest-class.html -doc/html/api/lxml.tests.test_errors-module.html -doc/html/api/lxml.tests.test_errors-pysrc.html -doc/html/api/lxml.tests.test_errors.ErrorTestCase-class.html -doc/html/api/lxml.tests.test_etree-module.html -doc/html/api/lxml.tests.test_etree-pysrc.html -doc/html/api/lxml.tests.test_etree.ETreeC14NTestCase-class.html -doc/html/api/lxml.tests.test_etree.ETreeErrorLogTest-class.html -doc/html/api/lxml.tests.test_etree.ETreeOnlyTestCase-class.html -doc/html/api/lxml.tests.test_etree.ETreeWriteTestCase-class.html 
-doc/html/api/lxml.tests.test_etree.ETreeXIncludeTestCase-class.html -doc/html/api/lxml.tests.test_etree.ElementIncludeTestCase-class.html -doc/html/api/lxml.tests.test_etree.XMLPullParserTest-class.html -doc/html/api/lxml.tests.test_etree._XIncludeTestCase-class.html -doc/html/api/lxml.tests.test_htmlparser-module.html -doc/html/api/lxml.tests.test_htmlparser-pysrc.html -doc/html/api/lxml.tests.test_htmlparser.HtmlParserTestCase-class.html -doc/html/api/lxml.tests.test_http_io-module.html -doc/html/api/lxml.tests.test_http_io-pysrc.html -doc/html/api/lxml.tests.test_http_io.HttpIOTestCase-class.html -doc/html/api/lxml.tests.test_incremental_xmlfile-module.html -doc/html/api/lxml.tests.test_incremental_xmlfile-pysrc.html -doc/html/api/lxml.tests.test_incremental_xmlfile.BytesIOXmlFileTestCase-class.html -doc/html/api/lxml.tests.test_incremental_xmlfile.SimpleFileLikeXmlFileTestCase-class.html -doc/html/api/lxml.tests.test_incremental_xmlfile.SimpleFileLikeXmlFileTestCase.SimpleFileLike-class.html -doc/html/api/lxml.tests.test_incremental_xmlfile.TempXmlFileTestCase-class.html -doc/html/api/lxml.tests.test_incremental_xmlfile._XmlFileTestCaseBase-class.html -doc/html/api/lxml.tests.test_io-module.html -doc/html/api/lxml.tests.test_io-pysrc.html -doc/html/api/lxml.tests.test_io.ETreeIOTestCase-class.html -doc/html/api/lxml.tests.test_io.ElementTreeIOTestCase-class.html -doc/html/api/lxml.tests.test_io._IOTestCaseBase-class.html -doc/html/api/lxml.tests.test_isoschematron-module.html -doc/html/api/lxml.tests.test_isoschematron-pysrc.html -doc/html/api/lxml.tests.test_isoschematron.ETreeISOSchematronTestCase-class.html -doc/html/api/lxml.tests.test_nsclasses-module.html -doc/html/api/lxml.tests.test_nsclasses-pysrc.html -doc/html/api/lxml.tests.test_nsclasses.ETreeNamespaceClassesTestCase-class.html -doc/html/api/lxml.tests.test_nsclasses.ETreeNamespaceClassesTestCase.bluff_class-class.html -doc/html/api/lxml.tests.test_nsclasses.ETreeNamespaceClassesTestCase.default_class-class.html -doc/html/api/lxml.tests.test_nsclasses.ETreeNamespaceClassesTestCase.maeh_class-class.html -doc/html/api/lxml.tests.test_objectify-module.html -doc/html/api/lxml.tests.test_objectify-pysrc.html -doc/html/api/lxml.tests.test_objectify.ObjectifyTestCase-class.html -doc/html/api/lxml.tests.test_pyclasslookup-module.html -doc/html/api/lxml.tests.test_pyclasslookup-pysrc.html -doc/html/api/lxml.tests.test_pyclasslookup.PyClassLookupTestCase-class.html -doc/html/api/lxml.tests.test_relaxng-module.html -doc/html/api/lxml.tests.test_relaxng-pysrc.html -doc/html/api/lxml.tests.test_relaxng.ETreeRelaxNGTestCase-class.html -doc/html/api/lxml.tests.test_sax-module.html -doc/html/api/lxml.tests.test_sax-pysrc.html -doc/html/api/lxml.tests.test_sax.ETreeSaxTestCase-class.html -doc/html/api/lxml.tests.test_schematron-module.html -doc/html/api/lxml.tests.test_schematron-pysrc.html -doc/html/api/lxml.tests.test_schematron.ETreeSchematronTestCase-class.html -doc/html/api/lxml.tests.test_threading-module.html -doc/html/api/lxml.tests.test_threading-pysrc.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase-class.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.ParseAndExtendWorker-class.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.ParseWorker-class.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.ReverseWorker-class.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.RotateWorker-class.html 
-doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.SerialiseWorker-class.html -doc/html/api/lxml.tests.test_threading.ThreadPipelineTestCase.Worker-class.html -doc/html/api/lxml.tests.test_threading.ThreadingTestCase-class.html -doc/html/api/lxml.tests.test_unicode-module.html -doc/html/api/lxml.tests.test_unicode-pysrc.html -doc/html/api/lxml.tests.test_unicode.UnicodeTestCase-class.html -doc/html/api/lxml.tests.test_xmlschema-module.html -doc/html/api/lxml.tests.test_xmlschema-pysrc.html -doc/html/api/lxml.tests.test_xmlschema.ETreeXMLSchemaResolversTestCase-class.html -doc/html/api/lxml.tests.test_xmlschema.ETreeXMLSchemaResolversTestCase.simple_resolver-class.html -doc/html/api/lxml.tests.test_xmlschema.ETreeXMLSchemaTestCase-class.html -doc/html/api/lxml.tests.test_xpathevaluator-module.html -doc/html/api/lxml.tests.test_xpathevaluator-pysrc.html -doc/html/api/lxml.tests.test_xpathevaluator.ETreeETXPathClassTestCase-class.html -doc/html/api/lxml.tests.test_xpathevaluator.ETreeXPathClassTestCase-class.html -doc/html/api/lxml.tests.test_xpathevaluator.ETreeXPathExsltTestCase-class.html -doc/html/api/lxml.tests.test_xpathevaluator.ETreeXPathTestCase-class.html -doc/html/api/lxml.tests.test_xslt-module.html -doc/html/api/lxml.tests.test_xslt-pysrc.html -doc/html/api/lxml.tests.test_xslt.ETreeEXSLTTestCase-class.html -doc/html/api/lxml.tests.test_xslt.ETreeXSLTExtElementTestCase-class.html -doc/html/api/lxml.tests.test_xslt.ETreeXSLTExtFuncTestCase-class.html -doc/html/api/lxml.tests.test_xslt.ETreeXSLTTestCase-class.html -doc/html/api/lxml.tests.test_xslt.Py3XSLTTestCase-class.html -doc/html/api/lxml.usedoctest-module.html -doc/html/api/lxml.usedoctest-pysrc.html -doc/html/api/mimetools.Message-class.html -doc/html/api/module-tree.html -doc/html/api/redirect.html -doc/html/api/str-class.html -doc/html/api/toc-everything.html -doc/html/api/toc-lxml-module.html -doc/html/api/toc-lxml.ElementInclude-module.html -doc/html/api/toc-lxml.builder-module.html -doc/html/api/toc-lxml.cssselect-module.html -doc/html/api/toc-lxml.doctestcompare-module.html -doc/html/api/toc-lxml.etree-module.html -doc/html/api/toc-lxml.html-module.html -doc/html/api/toc-lxml.html.ElementSoup-module.html -doc/html/api/toc-lxml.html.builder-module.html -doc/html/api/toc-lxml.html.clean-module.html -doc/html/api/toc-lxml.html.defs-module.html -doc/html/api/toc-lxml.html.diff-module.html -doc/html/api/toc-lxml.html.formfill-module.html -doc/html/api/toc-lxml.html.html5parser-module.html -doc/html/api/toc-lxml.html.soupparser-module.html -doc/html/api/toc-lxml.html.usedoctest-module.html -doc/html/api/toc-lxml.includes-module.html -doc/html/api/toc-lxml.isoschematron-module.html -doc/html/api/toc-lxml.objectify-module.html -doc/html/api/toc-lxml.pyclasslookup-module.html -doc/html/api/toc-lxml.sax-module.html -doc/html/api/toc-lxml.tests-module.html -doc/html/api/toc-lxml.tests.common_imports-module.html -doc/html/api/toc-lxml.tests.dummy_http_server-module.html -doc/html/api/toc-lxml.tests.test_builder-module.html -doc/html/api/toc-lxml.tests.test_classlookup-module.html -doc/html/api/toc-lxml.tests.test_css-module.html -doc/html/api/toc-lxml.tests.test_doctestcompare-module.html -doc/html/api/toc-lxml.tests.test_dtd-module.html -doc/html/api/toc-lxml.tests.test_elementtree-module.html -doc/html/api/toc-lxml.tests.test_errors-module.html -doc/html/api/toc-lxml.tests.test_etree-module.html -doc/html/api/toc-lxml.tests.test_htmlparser-module.html -doc/html/api/toc-lxml.tests.test_http_io-module.html 
-doc/html/api/toc-lxml.tests.test_incremental_xmlfile-module.html -doc/html/api/toc-lxml.tests.test_io-module.html -doc/html/api/toc-lxml.tests.test_isoschematron-module.html -doc/html/api/toc-lxml.tests.test_nsclasses-module.html -doc/html/api/toc-lxml.tests.test_objectify-module.html -doc/html/api/toc-lxml.tests.test_pyclasslookup-module.html -doc/html/api/toc-lxml.tests.test_relaxng-module.html -doc/html/api/toc-lxml.tests.test_sax-module.html -doc/html/api/toc-lxml.tests.test_schematron-module.html -doc/html/api/toc-lxml.tests.test_threading-module.html -doc/html/api/toc-lxml.tests.test_unicode-module.html -doc/html/api/toc-lxml.tests.test_xmlschema-module.html -doc/html/api/toc-lxml.tests.test_xpathevaluator-module.html -doc/html/api/toc-lxml.tests.test_xslt-module.html -doc/html/api/toc-lxml.usedoctest-module.html -doc/html/api/toc-xml.etree.ElementTree-module.html -doc/html/api/toc.html -doc/html/api/xml.etree.ElementTree-module.html -doc/html/api/xml.etree.ElementTree-pysrc.html -doc/html/api/xml.etree.ElementTree.Element-class.html -doc/html/api/xml.etree.ElementTree.ElementTree-class.html -doc/html/api/xml.etree.ElementTree.ParseError-class.html -doc/html/api/xml.etree.ElementTree.QName-class.html -doc/html/api/xml.etree.ElementTree.TreeBuilder-class.html -doc/html/api/xml.etree.ElementTree.XMLParser-class.html -doc/html/api/xml.etree.ElementTree._IterParseIterator-class.html -doc/html/api/xml.etree.ElementTree._SimpleElementPath-class.html -doc/licenses/BSD.txt -doc/licenses/GPL.txt -doc/licenses/ZopePublicLicense.txt -doc/licenses/elementtree.txt -doc/pdf/pubkey.asc -doc/s5/Makefile -doc/s5/lxml-ep2008.html -doc/s5/lxml-ep2008.txt -doc/s5/tagpython.png -doc/s5/ep2008/atom-example.xml -doc/s5/ep2008/atom.py -doc/s5/ep2008/atom.rng -doc/s5/ep2008/atomgen.py -doc/s5/ep2008/proxies.png -doc/s5/ui/default/blank.gif -doc/s5/ui/default/bodybg.gif -doc/s5/ui/default/framing.css -doc/s5/ui/default/iepngfix.htc -doc/s5/ui/default/lxml-logo64.png -doc/s5/ui/default/opera.css -doc/s5/ui/default/outline.css -doc/s5/ui/default/pretty.css -doc/s5/ui/default/print.css -doc/s5/ui/default/s5-core.css -doc/s5/ui/default/slides.css -doc/s5/ui/default/slides.js -doc/s5/ui/default/tagpython.png -samples/simple-ns.xml -samples/simple.xml -src/local_doctest.py -src/lxml/ElementInclude.py -src/lxml/__init__.py -src/lxml/_elementpath.py -src/lxml/apihelpers.pxi -src/lxml/builder.py -src/lxml/classlookup.pxi -src/lxml/cleanup.pxi -src/lxml/cssselect.py -src/lxml/cvarargs.pxd -src/lxml/debug.pxi -src/lxml/docloader.pxi -src/lxml/doctestcompare.py -src/lxml/dtd.pxi -src/lxml/extensions.pxi -src/lxml/iterparse.pxi -src/lxml/lxml.etree.c -src/lxml/lxml.etree.h -src/lxml/lxml.etree.pyx -src/lxml/lxml.etree_api.h -src/lxml/lxml.objectify.c -src/lxml/lxml.objectify.pyx -src/lxml/lxml_endian.h -src/lxml/nsclasses.pxi -src/lxml/objectpath.pxi -src/lxml/parser.pxi -src/lxml/parsertarget.pxi -src/lxml/proxy.pxi -src/lxml/public-api.pxi -src/lxml/pyclasslookup.py -src/lxml/python.pxd -src/lxml/readonlytree.pxi -src/lxml/relaxng.pxi -src/lxml/sax.py -src/lxml/saxparser.pxi -src/lxml/schematron.pxi -src/lxml/serializer.pxi -src/lxml/usedoctest.py -src/lxml/xinclude.pxi -src/lxml/xmlerror.pxi -src/lxml/xmlid.pxi -src/lxml/xmlschema.pxi -src/lxml/xpath.pxi -src/lxml/xslt.pxi -src/lxml/xsltext.pxi -src/lxml.egg-info/PKG-INFO -src/lxml.egg-info/SOURCES.txt -src/lxml.egg-info/dependency_links.txt -src/lxml.egg-info/not-zip-safe -src/lxml.egg-info/requires.txt -src/lxml.egg-info/top_level.txt -src/lxml/html/ElementSoup.py 
-src/lxml/html/__init__.py -src/lxml/html/_diffcommand.py -src/lxml/html/_html5builder.py -src/lxml/html/_setmixin.py -src/lxml/html/builder.py -src/lxml/html/clean.py -src/lxml/html/defs.py -src/lxml/html/diff.py -src/lxml/html/formfill.py -src/lxml/html/html5parser.py -src/lxml/html/soupparser.py -src/lxml/html/usedoctest.py -src/lxml/html/tests/__init__.py -src/lxml/html/tests/test_autolink.py -src/lxml/html/tests/test_autolink.txt -src/lxml/html/tests/test_basic.py -src/lxml/html/tests/test_basic.txt -src/lxml/html/tests/test_clean.py -src/lxml/html/tests/test_clean.txt -src/lxml/html/tests/test_clean_embed.txt -src/lxml/html/tests/test_diff.py -src/lxml/html/tests/test_diff.txt -src/lxml/html/tests/test_elementsoup.py -src/lxml/html/tests/test_feedparser_data.py -src/lxml/html/tests/test_formfill.py -src/lxml/html/tests/test_formfill.txt -src/lxml/html/tests/test_forms.py -src/lxml/html/tests/test_forms.txt -src/lxml/html/tests/test_frames.py -src/lxml/html/tests/test_html5parser.py -src/lxml/html/tests/test_rewritelinks.py -src/lxml/html/tests/test_rewritelinks.txt -src/lxml/html/tests/test_xhtml.py -src/lxml/html/tests/test_xhtml.txt -src/lxml/html/tests/transform_feedparser_data.py -src/lxml/html/tests/feedparser-data/entry_content_applet.data -src/lxml/html/tests/feedparser-data/entry_content_blink.data -src/lxml/html/tests/feedparser-data/entry_content_crazy.data -src/lxml/html/tests/feedparser-data/entry_content_embed.data -src/lxml/html/tests/feedparser-data/entry_content_frame.data -src/lxml/html/tests/feedparser-data/entry_content_iframe.data -src/lxml/html/tests/feedparser-data/entry_content_link.data -src/lxml/html/tests/feedparser-data/entry_content_meta.data -src/lxml/html/tests/feedparser-data/entry_content_object.data -src/lxml/html/tests/feedparser-data/entry_content_onabort.data -src/lxml/html/tests/feedparser-data/entry_content_onblur.data -src/lxml/html/tests/feedparser-data/entry_content_onchange.data -src/lxml/html/tests/feedparser-data/entry_content_onclick.data -src/lxml/html/tests/feedparser-data/entry_content_ondblclick.data -src/lxml/html/tests/feedparser-data/entry_content_onerror.data -src/lxml/html/tests/feedparser-data/entry_content_onfocus.data -src/lxml/html/tests/feedparser-data/entry_content_onkeydown.data -src/lxml/html/tests/feedparser-data/entry_content_onkeypress.data -src/lxml/html/tests/feedparser-data/entry_content_onkeyup.data -src/lxml/html/tests/feedparser-data/entry_content_onload.data -src/lxml/html/tests/feedparser-data/entry_content_onmousedown.data -src/lxml/html/tests/feedparser-data/entry_content_onmouseout.data -src/lxml/html/tests/feedparser-data/entry_content_onmouseover.data -src/lxml/html/tests/feedparser-data/entry_content_onmouseup.data -src/lxml/html/tests/feedparser-data/entry_content_onreset.data -src/lxml/html/tests/feedparser-data/entry_content_onresize.data -src/lxml/html/tests/feedparser-data/entry_content_onsubmit.data -src/lxml/html/tests/feedparser-data/entry_content_onunload.data -src/lxml/html/tests/feedparser-data/entry_content_script.data -src/lxml/html/tests/feedparser-data/entry_content_script_cdata.data -src/lxml/html/tests/feedparser-data/entry_content_script_inline.data -src/lxml/html/tests/feedparser-data/entry_content_style.data -src/lxml/html/tests/hackers-org-data/background-image-plus.data -src/lxml/html/tests/hackers-org-data/background-image-with-unicoded.data -src/lxml/html/tests/hackers-org-data/downlevel-hidden.data -src/lxml/html/tests/hackers-org-data/html-plus-time.data 
-src/lxml/html/tests/hackers-org-data/javascript-link.data -src/lxml/html/tests/hackers-org-data/style-comment.data -src/lxml/html/tests/hackers-org-data/style-expression.data -src/lxml/html/tests/hackers-org-data/style-import.data -src/lxml/html/tests/hackers-org-data/style-js-tag.data -src/lxml/html/tests/hackers-org-data/style-url-js.data -src/lxml/html/tests/hackers-org-data/xml-data-island.data -src/lxml/html/tests/hackers-org-data/xml-embedded-js.data -src/lxml/includes/__init__.py -src/lxml/includes/c14n.pxd -src/lxml/includes/config.pxd -src/lxml/includes/dtdvalid.pxd -src/lxml/includes/etree_defs.h -src/lxml/includes/etreepublic.pxd -src/lxml/includes/htmlparser.pxd -src/lxml/includes/lxml-version.h -src/lxml/includes/relaxng.pxd -src/lxml/includes/schematron.pxd -src/lxml/includes/tree.pxd -src/lxml/includes/uri.pxd -src/lxml/includes/xinclude.pxd -src/lxml/includes/xmlerror.pxd -src/lxml/includes/xmlparser.pxd -src/lxml/includes/xmlschema.pxd -src/lxml/includes/xpath.pxd -src/lxml/includes/xslt.pxd -src/lxml/isoschematron/__init__.py -src/lxml/isoschematron/resources/rng/iso-schematron.rng -src/lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl -src/lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl -src/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt -src/lxml/tests/__init__.py -src/lxml/tests/common_imports.py -src/lxml/tests/dummy_http_server.py -src/lxml/tests/shakespeare.html -src/lxml/tests/test-document.xslt -src/lxml/tests/test-string.xml -src/lxml/tests/test.dtd -src/lxml/tests/test.sch -src/lxml/tests/test.xml -src/lxml/tests/test.xsd -src/lxml/tests/test1.rng -src/lxml/tests/test1.xslt -src/lxml/tests/test2.rng -src/lxml/tests/test2.xslt -src/lxml/tests/test_broken.xml -src/lxml/tests/test_builder.py -src/lxml/tests/test_classlookup.py -src/lxml/tests/test_css.py -src/lxml/tests/test_doctestcompare.py -src/lxml/tests/test_dtd.py -src/lxml/tests/test_elementtree.py -src/lxml/tests/test_errors.py -src/lxml/tests/test_etree.py -src/lxml/tests/test_htmlparser.py -src/lxml/tests/test_http_io.py -src/lxml/tests/test_import.xsd -src/lxml/tests/test_inc.xsd -src/lxml/tests/test_incremental_xmlfile.py -src/lxml/tests/test_io.py -src/lxml/tests/test_isoschematron.py -src/lxml/tests/test_nsclasses.py -src/lxml/tests/test_objectify.py -src/lxml/tests/test_pyclasslookup.py -src/lxml/tests/test_relaxng.py -src/lxml/tests/test_sax.py -src/lxml/tests/test_schematron.py -src/lxml/tests/test_threading.py -src/lxml/tests/test_unicode.py -src/lxml/tests/test_xmlschema.py -src/lxml/tests/test_xpathevaluator.py -src/lxml/tests/test_xslt.py -src/lxml/tests/include/test_xinclude.xml \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/installed-files.txt 
b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 0966d5d..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,85 +0,0 @@ -../lxml/__init__.py -../lxml/_elementpath.py -../lxml/builder.py -../lxml/cssselect.py -../lxml/doctestcompare.py -../lxml/ElementInclude.py -../lxml/pyclasslookup.py -../lxml/sax.py -../lxml/usedoctest.py -../lxml/includes/__init__.py -../lxml/html/__init__.py -../lxml/html/_diffcommand.py -../lxml/html/_html5builder.py -../lxml/html/_setmixin.py -../lxml/html/builder.py -../lxml/html/clean.py -../lxml/html/defs.py -../lxml/html/diff.py -../lxml/html/ElementSoup.py -../lxml/html/formfill.py -../lxml/html/html5parser.py -../lxml/html/soupparser.py -../lxml/html/usedoctest.py -../lxml/isoschematron/__init__.py -../lxml/lxml.etree.h -../lxml/lxml.etree_api.h -../lxml/includes/c14n.pxd -../lxml/includes/config.pxd -../lxml/includes/dtdvalid.pxd -../lxml/includes/etreepublic.pxd -../lxml/includes/htmlparser.pxd -../lxml/includes/relaxng.pxd -../lxml/includes/schematron.pxd -../lxml/includes/tree.pxd -../lxml/includes/uri.pxd -../lxml/includes/xinclude.pxd -../lxml/includes/xmlerror.pxd -../lxml/includes/xmlparser.pxd -../lxml/includes/xmlschema.pxd -../lxml/includes/xpath.pxd -../lxml/includes/xslt.pxd -../lxml/includes/etree_defs.h -../lxml/includes/lxml-version.h -../lxml/isoschematron/resources/rng/iso-schematron.rng -../lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl -../lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl -../lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt -../lxml/__init__.pyc -../lxml/_elementpath.pyc -../lxml/builder.pyc -../lxml/cssselect.pyc -../lxml/doctestcompare.pyc -../lxml/ElementInclude.pyc -../lxml/pyclasslookup.pyc -../lxml/sax.pyc -../lxml/usedoctest.pyc -../lxml/includes/__init__.pyc -../lxml/html/__init__.pyc -../lxml/html/_diffcommand.pyc -../lxml/html/_html5builder.pyc -../lxml/html/_setmixin.pyc -../lxml/html/builder.pyc -../lxml/html/clean.pyc -../lxml/html/defs.pyc -../lxml/html/diff.pyc -../lxml/html/ElementSoup.pyc -../lxml/html/formfill.pyc -../lxml/html/html5parser.pyc -../lxml/html/soupparser.pyc -../lxml/html/usedoctest.pyc -../lxml/isoschematron/__init__.pyc -../lxml/etree.so -../lxml/objectify.so -./ -dependency_links.txt -not-zip-safe -PKG-INFO -requires.txt -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/not-zip-safe b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/requires.txt b/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/requires.txt deleted file mode 100644 index 0c3dd59..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/requires.txt +++ /dev/null @@ -1,13 +0,0 @@ - - -[source] -Cython>=0.20 - -[cssselect] 
-cssselect>=0.7 - -[html5] -html5lib - -[htmlsoup] -BeautifulSoup4 \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/lxml/etree.so b/Darwin/lib/python3.4/site-packages/lxml/etree.so deleted file mode 100755 index 19fd427..0000000 Binary files a/Darwin/lib/python3.4/site-packages/lxml/etree.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/lxml/html/_setmixin.py b/Darwin/lib/python3.4/site-packages/lxml/html/_setmixin.py deleted file mode 100644 index 81310ff..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml/html/_setmixin.py +++ /dev/null @@ -1,115 +0,0 @@ -class SetMixin(object): - - """ - Mix-in for sets. You must define __iter__, add, remove - """ - - def __len__(self): - length = 0 - for item in self: - length += 1 - return length - - def __contains__(self, item): - for has_item in self: - if item == has_item: - return True - return False - - def issubset(self, other): - for item in other: - if item not in self: - return False - return True - - __le__ = issubset - - def issuperset(self, other): - for item in self: - if item not in other: - return False - return True - - __ge__ = issuperset - - def union(self, other): - return self | other - - def __or__(self, other): - new = self.copy() - new |= other - return new - - def intersection(self, other): - return self & other - - def __and__(self, other): - new = self.copy() - new &= other - return new - - def difference(self, other): - return self - other - - def __sub__(self, other): - new = self.copy() - new -= other - return new - - def symmetric_difference(self, other): - return self ^ other - - def __xor__(self, other): - new = self.copy() - new ^= other - return new - - def copy(self): - return set(self) - - def update(self, other): - for item in other: - self.add(item) - - def __ior__(self, other): - self.update(other) - return self - - def intersection_update(self, other): - for item in self: - if item not in other: - self.remove(item) - - def __iand__(self, other): - self.intersection_update(other) - return self - - def difference_update(self, other): - for item in other: - if item in self: - self.remove(item) - - def __isub__(self, other): - self.difference_update(other) - return self - - def symmetric_difference_update(self, other): - for item in other: - if item in self: - self.remove(item) - else: - self.add(item) - - def __ixor__(self, other): - self.symmetric_difference_update(other) - return self - - def discard(self, item): - try: - self.remove(item) - except KeyError: - pass - - def clear(self): - for item in list(self): - self.remove(item) diff --git a/Darwin/lib/python3.4/site-packages/lxml/html/soupparser.py b/Darwin/lib/python3.4/site-packages/lxml/html/soupparser.py deleted file mode 100644 index bfb9fdf..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml/html/soupparser.py +++ /dev/null @@ -1,125 +0,0 @@ -__doc__ = """External interface to the BeautifulSoup HTML parser. -""" - -__all__ = ["fromstring", "parse", "convert_tree"] - -from lxml import etree, html -from BeautifulSoup import \ - BeautifulSoup, Tag, Comment, ProcessingInstruction, NavigableString - - -def fromstring(data, beautifulsoup=None, makeelement=None, **bsargs): - """Parse a string of HTML data into an Element tree using the - BeautifulSoup parser. - - Returns the root ```` Element of the tree. - - You can pass a different BeautifulSoup parser through the - `beautifulsoup` keyword, and a diffent Element factory function - through the `makeelement` keyword. 
By default, the standard - ``BeautifulSoup`` class and the default factory of `lxml.html` are - used. - """ - return _parse(data, beautifulsoup, makeelement, **bsargs) - -def parse(file, beautifulsoup=None, makeelement=None, **bsargs): - """Parse a file into an ElemenTree using the BeautifulSoup parser. - - You can pass a different BeautifulSoup parser through the - `beautifulsoup` keyword, and a diffent Element factory function - through the `makeelement` keyword. By default, the standard - ``BeautifulSoup`` class and the default factory of `lxml.html` are - used. - """ - if not hasattr(file, 'read'): - file = open(file) - root = _parse(file, beautifulsoup, makeelement, **bsargs) - return etree.ElementTree(root) - -def convert_tree(beautiful_soup_tree, makeelement=None): - """Convert a BeautifulSoup tree to a list of Element trees. - - Returns a list instead of a single root Element to support - HTML-like soup with more than one root element. - - You can pass a different Element factory through the `makeelement` - keyword. - """ - if makeelement is None: - makeelement = html.html_parser.makeelement - root = _convert_tree(beautiful_soup_tree, makeelement) - children = root.getchildren() - for child in children: - root.remove(child) - return children - - -# helpers - -def _parse(source, beautifulsoup, makeelement, **bsargs): - if beautifulsoup is None: - beautifulsoup = BeautifulSoup - if makeelement is None: - makeelement = html.html_parser.makeelement - if 'convertEntities' not in bsargs: - bsargs['convertEntities'] = 'html' - tree = beautifulsoup(source, **bsargs) - root = _convert_tree(tree, makeelement) - # from ET: wrap the document in a html root element, if necessary - if len(root) == 1 and root[0].tag == "html": - return root[0] - root.tag = "html" - return root - -def _convert_tree(beautiful_soup_tree, makeelement): - root = makeelement(beautiful_soup_tree.name, - attrib=dict(beautiful_soup_tree.attrs)) - _convert_children(root, beautiful_soup_tree, makeelement) - return root - -def _convert_children(parent, beautiful_soup_tree, makeelement): - SubElement = etree.SubElement - et_child = None - for child in beautiful_soup_tree: - if isinstance(child, Tag): - et_child = SubElement(parent, child.name, attrib=dict( - [(k, unescape(v)) for (k,v) in child.attrs])) - _convert_children(et_child, child, makeelement) - elif type(child) is NavigableString: - _append_text(parent, et_child, unescape(child)) - else: - if isinstance(child, Comment): - parent.append(etree.Comment(child)) - elif isinstance(child, ProcessingInstruction): - parent.append(etree.ProcessingInstruction( - *child.split(' ', 1))) - else: # CData - _append_text(parent, et_child, unescape(child)) - -def _append_text(parent, element, text): - if element is None: - parent.text = (parent.text or '') + text - else: - element.tail = (element.tail or '') + text - - -# copied from ET's ElementSoup - -try: - from html.entities import name2codepoint # Python 3 -except ImportError: - from htmlentitydefs import name2codepoint -import re - -handle_entities = re.compile("&(\w+);").sub - -def unescape(string): - if not string: - return '' - # work around oddities in BeautifulSoup's entity handling - def unescape_entity(m): - try: - return unichr(name2codepoint[m.group(1)]) - except KeyError: - return m.group(0) # use as is - return handle_entities(unescape_entity, string) diff --git a/Darwin/lib/python3.4/site-packages/lxml/includes/lxml-version.h b/Darwin/lib/python3.4/site-packages/lxml/includes/lxml-version.h deleted file mode 100644 
index 62dece8..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml/includes/lxml-version.h +++ /dev/null @@ -1,3 +0,0 @@ -#ifndef LXML_VERSION_STRING -#define LXML_VERSION_STRING "3.3.6" -#endif diff --git a/Darwin/lib/python3.4/site-packages/lxml/lxml.etree_api.h b/Darwin/lib/python3.4/site-packages/lxml/lxml.etree_api.h deleted file mode 100644 index e36608e..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml/lxml.etree_api.h +++ /dev/null @@ -1,228 +0,0 @@ -#ifndef __PYX_HAVE_API__lxml__etree -#define __PYX_HAVE_API__lxml__etree -#include "Python.h" -#include "lxml.etree.h" - -static struct LxmlElement *(*__pyx_f_4lxml_5etree_deepcopyNodeToDocument)(struct LxmlDocument *, xmlNode *) = 0; -#define deepcopyNodeToDocument __pyx_f_4lxml_5etree_deepcopyNodeToDocument -static struct LxmlElementTree *(*__pyx_f_4lxml_5etree_elementTreeFactory)(struct LxmlElement *) = 0; -#define elementTreeFactory __pyx_f_4lxml_5etree_elementTreeFactory -static struct LxmlElementTree *(*__pyx_f_4lxml_5etree_newElementTree)(struct LxmlElement *, PyObject *) = 0; -#define newElementTree __pyx_f_4lxml_5etree_newElementTree -static struct LxmlElement *(*__pyx_f_4lxml_5etree_elementFactory)(struct LxmlDocument *, xmlNode *) = 0; -#define elementFactory __pyx_f_4lxml_5etree_elementFactory -static struct LxmlElement *(*__pyx_f_4lxml_5etree_makeElement)(PyObject *, struct LxmlDocument *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *) = 0; -#define makeElement __pyx_f_4lxml_5etree_makeElement -static struct LxmlElement *(*__pyx_f_4lxml_5etree_makeSubElement)(struct LxmlElement *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *) = 0; -#define makeSubElement __pyx_f_4lxml_5etree_makeSubElement -static void (*__pyx_f_4lxml_5etree_setElementClassLookupFunction)(_element_class_lookup_function, PyObject *) = 0; -#define setElementClassLookupFunction __pyx_f_4lxml_5etree_setElementClassLookupFunction -static PyObject *(*__pyx_f_4lxml_5etree_lookupDefaultElementClass)(PyObject *, PyObject *, xmlNode *) = 0; -#define lookupDefaultElementClass __pyx_f_4lxml_5etree_lookupDefaultElementClass -static PyObject *(*__pyx_f_4lxml_5etree_lookupNamespaceElementClass)(PyObject *, PyObject *, xmlNode *) = 0; -#define lookupNamespaceElementClass __pyx_f_4lxml_5etree_lookupNamespaceElementClass -static PyObject *(*__pyx_f_4lxml_5etree_callLookupFallback)(struct LxmlFallbackElementClassLookup *, struct LxmlDocument *, xmlNode *) = 0; -#define callLookupFallback __pyx_f_4lxml_5etree_callLookupFallback -static int (*__pyx_f_4lxml_5etree_tagMatches)(xmlNode *, const xmlChar *, const xmlChar *) = 0; -#define tagMatches __pyx_f_4lxml_5etree_tagMatches -static struct LxmlDocument *(*__pyx_f_4lxml_5etree_documentOrRaise)(PyObject *) = 0; -#define documentOrRaise __pyx_f_4lxml_5etree_documentOrRaise -static struct LxmlElement *(*__pyx_f_4lxml_5etree_rootNodeOrRaise)(PyObject *) = 0; -#define rootNodeOrRaise __pyx_f_4lxml_5etree_rootNodeOrRaise -static int (*__pyx_f_4lxml_5etree_hasText)(xmlNode *) = 0; -#define hasText __pyx_f_4lxml_5etree_hasText -static int (*__pyx_f_4lxml_5etree_hasTail)(xmlNode *) = 0; -#define hasTail __pyx_f_4lxml_5etree_hasTail -static PyObject *(*__pyx_f_4lxml_5etree_textOf)(xmlNode *) = 0; -#define textOf __pyx_f_4lxml_5etree_textOf -static PyObject *(*__pyx_f_4lxml_5etree_tailOf)(xmlNode *) = 0; -#define tailOf __pyx_f_4lxml_5etree_tailOf -static int (*__pyx_f_4lxml_5etree_setNodeText)(xmlNode *, PyObject *) = 0; -#define setNodeText __pyx_f_4lxml_5etree_setNodeText -static int 
(*__pyx_f_4lxml_5etree_setTailText)(xmlNode *, PyObject *) = 0; -#define setTailText __pyx_f_4lxml_5etree_setTailText -static PyObject *(*__pyx_f_4lxml_5etree_attributeValue)(xmlNode *, xmlAttr *) = 0; -#define attributeValue __pyx_f_4lxml_5etree_attributeValue -static PyObject *(*__pyx_f_4lxml_5etree_attributeValueFromNsName)(xmlNode *, const xmlChar *, const xmlChar *) = 0; -#define attributeValueFromNsName __pyx_f_4lxml_5etree_attributeValueFromNsName -static PyObject *(*__pyx_f_4lxml_5etree_getAttributeValue)(struct LxmlElement *, PyObject *, PyObject *) = 0; -#define getAttributeValue __pyx_f_4lxml_5etree_getAttributeValue -static PyObject *(*__pyx_f_4lxml_5etree_iterattributes)(struct LxmlElement *, int) = 0; -#define iterattributes __pyx_f_4lxml_5etree_iterattributes -static PyObject *(*__pyx_f_4lxml_5etree_collectAttributes)(xmlNode *, int) = 0; -#define collectAttributes __pyx_f_4lxml_5etree_collectAttributes -static int (*__pyx_f_4lxml_5etree_setAttributeValue)(struct LxmlElement *, PyObject *, PyObject *) = 0; -#define setAttributeValue __pyx_f_4lxml_5etree_setAttributeValue -static int (*__pyx_f_4lxml_5etree_delAttribute)(struct LxmlElement *, PyObject *) = 0; -#define delAttribute __pyx_f_4lxml_5etree_delAttribute -static int (*__pyx_f_4lxml_5etree_delAttributeFromNsName)(xmlNode *, const xmlChar *, const xmlChar *) = 0; -#define delAttributeFromNsName __pyx_f_4lxml_5etree_delAttributeFromNsName -static int (*__pyx_f_4lxml_5etree_hasChild)(xmlNode *) = 0; -#define hasChild __pyx_f_4lxml_5etree_hasChild -static xmlNode *(*__pyx_f_4lxml_5etree_findChild)(xmlNode *, Py_ssize_t) = 0; -#define findChild __pyx_f_4lxml_5etree_findChild -static xmlNode *(*__pyx_f_4lxml_5etree_findChildForwards)(xmlNode *, Py_ssize_t) = 0; -#define findChildForwards __pyx_f_4lxml_5etree_findChildForwards -static xmlNode *(*__pyx_f_4lxml_5etree_findChildBackwards)(xmlNode *, Py_ssize_t) = 0; -#define findChildBackwards __pyx_f_4lxml_5etree_findChildBackwards -static xmlNode *(*__pyx_f_4lxml_5etree_nextElement)(xmlNode *) = 0; -#define nextElement __pyx_f_4lxml_5etree_nextElement -static xmlNode *(*__pyx_f_4lxml_5etree_previousElement)(xmlNode *) = 0; -#define previousElement __pyx_f_4lxml_5etree_previousElement -static void (*__pyx_f_4lxml_5etree_appendChild)(struct LxmlElement *, struct LxmlElement *) = 0; -#define appendChild __pyx_f_4lxml_5etree_appendChild -static int (*__pyx_f_4lxml_5etree_appendChildToElement)(struct LxmlElement *, struct LxmlElement *) = 0; -#define appendChildToElement __pyx_f_4lxml_5etree_appendChildToElement -static PyObject *(*__pyx_f_4lxml_5etree_pyunicode)(const xmlChar *) = 0; -#define pyunicode __pyx_f_4lxml_5etree_pyunicode -static PyObject *(*__pyx_f_4lxml_5etree_utf8)(PyObject *) = 0; -#define utf8 __pyx_f_4lxml_5etree_utf8 -static PyObject *(*__pyx_f_4lxml_5etree_getNsTag)(PyObject *) = 0; -#define getNsTag __pyx_f_4lxml_5etree_getNsTag -static PyObject *(*__pyx_f_4lxml_5etree_getNsTagWithEmptyNs)(PyObject *) = 0; -#define getNsTagWithEmptyNs __pyx_f_4lxml_5etree_getNsTagWithEmptyNs -static PyObject *(*__pyx_f_4lxml_5etree_namespacedName)(xmlNode *) = 0; -#define namespacedName __pyx_f_4lxml_5etree_namespacedName -static PyObject *(*__pyx_f_4lxml_5etree_namespacedNameFromNsName)(const xmlChar *, const xmlChar *) = 0; -#define namespacedNameFromNsName __pyx_f_4lxml_5etree_namespacedNameFromNsName -static void (*__pyx_f_4lxml_5etree_iteratorStoreNext)(struct LxmlElementIterator *, struct LxmlElement *) = 0; -#define iteratorStoreNext 
__pyx_f_4lxml_5etree_iteratorStoreNext -static void (*__pyx_f_4lxml_5etree_initTagMatch)(struct LxmlElementTagMatcher *, PyObject *) = 0; -#define initTagMatch __pyx_f_4lxml_5etree_initTagMatch -static xmlNs *(*__pyx_f_4lxml_5etree_findOrBuildNodeNsPrefix)(struct LxmlDocument *, xmlNode *, const xmlChar *, const xmlChar *) = 0; -#define findOrBuildNodeNsPrefix __pyx_f_4lxml_5etree_findOrBuildNodeNsPrefix -#if !defined(__Pyx_PyIdentifier_FromString) -#if PY_MAJOR_VERSION < 3 - #define __Pyx_PyIdentifier_FromString(s) PyString_FromString(s) -#else - #define __Pyx_PyIdentifier_FromString(s) PyUnicode_FromString(s) -#endif -#endif - -#ifndef __PYX_HAVE_RT_ImportModule -#define __PYX_HAVE_RT_ImportModule -static PyObject *__Pyx_ImportModule(const char *name) { - PyObject *py_name = 0; - PyObject *py_module = 0; - py_name = __Pyx_PyIdentifier_FromString(name); - if (!py_name) - goto bad; - py_module = PyImport_Import(py_name); - Py_DECREF(py_name); - return py_module; -bad: - Py_XDECREF(py_name); - return 0; -} -#endif - -#ifndef __PYX_HAVE_RT_ImportFunction -#define __PYX_HAVE_RT_ImportFunction -static int __Pyx_ImportFunction(PyObject *module, const char *funcname, void (**f)(void), const char *sig) { - PyObject *d = 0; - PyObject *cobj = 0; - union { - void (*fp)(void); - void *p; - } tmp; - d = PyObject_GetAttrString(module, (char *)"__pyx_capi__"); - if (!d) - goto bad; - cobj = PyDict_GetItemString(d, funcname); - if (!cobj) { - PyErr_Format(PyExc_ImportError, - "%.200s does not export expected C function %.200s", - PyModule_GetName(module), funcname); - goto bad; - } -#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3 && PY_MINOR_VERSION==0) - if (!PyCapsule_IsValid(cobj, sig)) { - PyErr_Format(PyExc_TypeError, - "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", - PyModule_GetName(module), funcname, sig, PyCapsule_GetName(cobj)); - goto bad; - } - tmp.p = PyCapsule_GetPointer(cobj, sig); -#else - {const char *desc, *s1, *s2; - desc = (const char *)PyCObject_GetDesc(cobj); - if (!desc) - goto bad; - s1 = desc; s2 = sig; - while (*s1 != '\0' && *s1 == *s2) { s1++; s2++; } - if (*s1 != *s2) { - PyErr_Format(PyExc_TypeError, - "C function %.200s.%.200s has wrong signature (expected %.500s, got %.500s)", - PyModule_GetName(module), funcname, sig, desc); - goto bad; - } - tmp.p = PyCObject_AsVoidPtr(cobj);} -#endif - *f = tmp.fp; - if (!(*f)) - goto bad; - Py_DECREF(d); - return 0; -bad: - Py_XDECREF(d); - return -1; -} -#endif - - -static int import_lxml__etree(void) { - PyObject *module = 0; - module = __Pyx_ImportModule("lxml.etree"); - if (!module) goto bad; - if (__Pyx_ImportFunction(module, "deepcopyNodeToDocument", (void (**)(void))&__pyx_f_4lxml_5etree_deepcopyNodeToDocument, "struct LxmlElement *(struct LxmlDocument *, xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "elementTreeFactory", (void (**)(void))&__pyx_f_4lxml_5etree_elementTreeFactory, "struct LxmlElementTree *(struct LxmlElement *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "newElementTree", (void (**)(void))&__pyx_f_4lxml_5etree_newElementTree, "struct LxmlElementTree *(struct LxmlElement *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "elementFactory", (void (**)(void))&__pyx_f_4lxml_5etree_elementFactory, "struct LxmlElement *(struct LxmlDocument *, xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "makeElement", (void (**)(void))&__pyx_f_4lxml_5etree_makeElement, "struct LxmlElement *(PyObject *, struct LxmlDocument *, PyObject *, 
PyObject *, PyObject *, PyObject *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "makeSubElement", (void (**)(void))&__pyx_f_4lxml_5etree_makeSubElement, "struct LxmlElement *(struct LxmlElement *, PyObject *, PyObject *, PyObject *, PyObject *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "setElementClassLookupFunction", (void (**)(void))&__pyx_f_4lxml_5etree_setElementClassLookupFunction, "void (_element_class_lookup_function, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "lookupDefaultElementClass", (void (**)(void))&__pyx_f_4lxml_5etree_lookupDefaultElementClass, "PyObject *(PyObject *, PyObject *, xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "lookupNamespaceElementClass", (void (**)(void))&__pyx_f_4lxml_5etree_lookupNamespaceElementClass, "PyObject *(PyObject *, PyObject *, xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "callLookupFallback", (void (**)(void))&__pyx_f_4lxml_5etree_callLookupFallback, "PyObject *(struct LxmlFallbackElementClassLookup *, struct LxmlDocument *, xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "tagMatches", (void (**)(void))&__pyx_f_4lxml_5etree_tagMatches, "int (xmlNode *, const xmlChar *, const xmlChar *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "documentOrRaise", (void (**)(void))&__pyx_f_4lxml_5etree_documentOrRaise, "struct LxmlDocument *(PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "rootNodeOrRaise", (void (**)(void))&__pyx_f_4lxml_5etree_rootNodeOrRaise, "struct LxmlElement *(PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "hasText", (void (**)(void))&__pyx_f_4lxml_5etree_hasText, "int (xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "hasTail", (void (**)(void))&__pyx_f_4lxml_5etree_hasTail, "int (xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "textOf", (void (**)(void))&__pyx_f_4lxml_5etree_textOf, "PyObject *(xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "tailOf", (void (**)(void))&__pyx_f_4lxml_5etree_tailOf, "PyObject *(xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "setNodeText", (void (**)(void))&__pyx_f_4lxml_5etree_setNodeText, "int (xmlNode *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "setTailText", (void (**)(void))&__pyx_f_4lxml_5etree_setTailText, "int (xmlNode *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "attributeValue", (void (**)(void))&__pyx_f_4lxml_5etree_attributeValue, "PyObject *(xmlNode *, xmlAttr *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "attributeValueFromNsName", (void (**)(void))&__pyx_f_4lxml_5etree_attributeValueFromNsName, "PyObject *(xmlNode *, const xmlChar *, const xmlChar *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "getAttributeValue", (void (**)(void))&__pyx_f_4lxml_5etree_getAttributeValue, "PyObject *(struct LxmlElement *, PyObject *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "iterattributes", (void (**)(void))&__pyx_f_4lxml_5etree_iterattributes, "PyObject *(struct LxmlElement *, int)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "collectAttributes", (void (**)(void))&__pyx_f_4lxml_5etree_collectAttributes, "PyObject *(xmlNode *, int)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "setAttributeValue", (void (**)(void))&__pyx_f_4lxml_5etree_setAttributeValue, "int (struct LxmlElement *, PyObject *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "delAttribute", (void 
(**)(void))&__pyx_f_4lxml_5etree_delAttribute, "int (struct LxmlElement *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "delAttributeFromNsName", (void (**)(void))&__pyx_f_4lxml_5etree_delAttributeFromNsName, "int (xmlNode *, const xmlChar *, const xmlChar *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "hasChild", (void (**)(void))&__pyx_f_4lxml_5etree_hasChild, "int (xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "findChild", (void (**)(void))&__pyx_f_4lxml_5etree_findChild, "xmlNode *(xmlNode *, Py_ssize_t)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "findChildForwards", (void (**)(void))&__pyx_f_4lxml_5etree_findChildForwards, "xmlNode *(xmlNode *, Py_ssize_t)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "findChildBackwards", (void (**)(void))&__pyx_f_4lxml_5etree_findChildBackwards, "xmlNode *(xmlNode *, Py_ssize_t)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "nextElement", (void (**)(void))&__pyx_f_4lxml_5etree_nextElement, "xmlNode *(xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "previousElement", (void (**)(void))&__pyx_f_4lxml_5etree_previousElement, "xmlNode *(xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "appendChild", (void (**)(void))&__pyx_f_4lxml_5etree_appendChild, "void (struct LxmlElement *, struct LxmlElement *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "appendChildToElement", (void (**)(void))&__pyx_f_4lxml_5etree_appendChildToElement, "int (struct LxmlElement *, struct LxmlElement *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "pyunicode", (void (**)(void))&__pyx_f_4lxml_5etree_pyunicode, "PyObject *(const xmlChar *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "utf8", (void (**)(void))&__pyx_f_4lxml_5etree_utf8, "PyObject *(PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "getNsTag", (void (**)(void))&__pyx_f_4lxml_5etree_getNsTag, "PyObject *(PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "getNsTagWithEmptyNs", (void (**)(void))&__pyx_f_4lxml_5etree_getNsTagWithEmptyNs, "PyObject *(PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "namespacedName", (void (**)(void))&__pyx_f_4lxml_5etree_namespacedName, "PyObject *(xmlNode *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "namespacedNameFromNsName", (void (**)(void))&__pyx_f_4lxml_5etree_namespacedNameFromNsName, "PyObject *(const xmlChar *, const xmlChar *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "iteratorStoreNext", (void (**)(void))&__pyx_f_4lxml_5etree_iteratorStoreNext, "void (struct LxmlElementIterator *, struct LxmlElement *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "initTagMatch", (void (**)(void))&__pyx_f_4lxml_5etree_initTagMatch, "void (struct LxmlElementTagMatcher *, PyObject *)") < 0) goto bad; - if (__Pyx_ImportFunction(module, "findOrBuildNodeNsPrefix", (void (**)(void))&__pyx_f_4lxml_5etree_findOrBuildNodeNsPrefix, "xmlNs *(struct LxmlDocument *, xmlNode *, const xmlChar *, const xmlChar *)") < 0) goto bad; - Py_DECREF(module); module = 0; - return 0; - bad: - Py_XDECREF(module); - return -1; -} - -#endif /* !__PYX_HAVE_API__lxml__etree */ diff --git a/Darwin/lib/python3.4/site-packages/lxml/objectify.so b/Darwin/lib/python3.4/site-packages/lxml/objectify.so deleted file mode 100755 index 5305d15..0000000 Binary files a/Darwin/lib/python3.4/site-packages/lxml/objectify.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/lxml/pyclasslookup.py 
b/Darwin/lib/python3.4/site-packages/lxml/pyclasslookup.py deleted file mode 100644 index 32c010c..0000000 --- a/Darwin/lib/python3.4/site-packages/lxml/pyclasslookup.py +++ /dev/null @@ -1,3 +0,0 @@ -# dummy module for backwards compatibility - -from etree import PythonElementClassLookup diff --git a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst deleted file mode 100644 index 2e2d679..0000000 --- a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,71 +0,0 @@ - -Project Info -============ - -* Project Page: https://github.com/pypa/pip -* Install howto: https://pip.pypa.io/en/latest/installing.html -* Changelog: https://pip.pypa.io/en/latest/news.html -* Bug Tracking: https://github.com/pypa/pip/issues -* Mailing list: http://groups.google.com/group/python-virtualenv -* Docs: https://pip.pypa.io/ -* User IRC: #pypa on Freenode. -* Dev IRC: #pypa-dev on Freenode. - -Quickstart -========== - -First, :doc:`Install pip `. - -Install a package from `PyPI`_: - -:: - - $ pip install SomePackage - [...] - Successfully installed SomePackage - -Show what files were installed: - -:: - - $ pip show --files SomePackage - Name: SomePackage - Version: 1.0 - Location: /my/env/lib/pythonx.x/site-packages - Files: - ../somepackage/__init__.py - [...] - -List what packages are outdated: - -:: - - $ pip list --outdated - SomePackage (Current: 1.0 Latest: 2.0) - -Upgrade a package: - -:: - - $ pip install --upgrade SomePackage - [...] - Found existing installation: SomePackage 1.0 - Uninstalling SomePackage: - Successfully uninstalled SomePackage - Running setup.py install for SomePackage - Successfully installed SomePackage - -Uninstall a package: - -:: - - $ pip uninstall SomePackage - Uninstalling SomePackage: - /my/env/lib/pythonx.x/site-packages/somepackage - Proceed (y/n)? y - Successfully uninstalled SomePackage - - -.. _PyPI: http://pypi.python.org/pypi/ - - diff --git a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA b/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA deleted file mode 100644 index 12ecc51..0000000 --- a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/METADATA +++ /dev/null @@ -1,98 +0,0 @@ -Metadata-Version: 2.0 -Name: pip -Version: 1.5.6 -Summary: A tool for installing and managing Python packages. 
-Home-page: https://pip.pypa.io/ -Author: The pip developers -Author-email: python-virtualenv@groups.google.com -License: MIT -Keywords: easy_install distutils setuptools egg virtualenv -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Topic :: Software Development :: Build Tools -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Provides-Extra: testing -Requires-Dist: pytest; extra == 'testing' -Requires-Dist: virtualenv (>=1.10); extra == 'testing' -Requires-Dist: scripttest (>=1.3); extra == 'testing' -Requires-Dist: mock; extra == 'testing' - - -Project Info -============ - -* Project Page: https://github.com/pypa/pip -* Install howto: https://pip.pypa.io/en/latest/installing.html -* Changelog: https://pip.pypa.io/en/latest/news.html -* Bug Tracking: https://github.com/pypa/pip/issues -* Mailing list: http://groups.google.com/group/python-virtualenv -* Docs: https://pip.pypa.io/ -* User IRC: #pypa on Freenode. -* Dev IRC: #pypa-dev on Freenode. - -Quickstart -========== - -First, :doc:`Install pip `. - -Install a package from `PyPI`_: - -:: - - $ pip install SomePackage - [...] - Successfully installed SomePackage - -Show what files were installed: - -:: - - $ pip show --files SomePackage - Name: SomePackage - Version: 1.0 - Location: /my/env/lib/pythonx.x/site-packages - Files: - ../somepackage/__init__.py - [...] - -List what packages are outdated: - -:: - - $ pip list --outdated - SomePackage (Current: 1.0 Latest: 2.0) - -Upgrade a package: - -:: - - $ pip install --upgrade SomePackage - [...] - Found existing installation: SomePackage 1.0 - Uninstalling SomePackage: - Successfully uninstalled SomePackage - Running setup.py install for SomePackage - Successfully installed SomePackage - -Uninstall a package: - -:: - - $ pip uninstall SomePackage - Uninstalling SomePackage: - /my/env/lib/pythonx.x/site-packages/somepackage - Proceed (y/n)? y - Successfully uninstalled SomePackage - - -.. 
_PyPI: http://pypi.python.org/pypi/ - - diff --git a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD b/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD deleted file mode 100644 index e8bb8b3..0000000 --- a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/RECORD +++ /dev/null @@ -1,373 +0,0 @@ -pip/__init__.py,sha256=j6Zp28eSA6gvpf2Vm7gb4ybz5Y_OKfXxY3a146aRH6g,9450 -pip/__main__.py,sha256=9JBJhprGRLUy1fEvAdufs0tsjKFAvFAY_nTde6GDkHk,116 -pip/basecommand.py,sha256=N_nE7BCcoMA7t2nRNTiJB8T__1XqI74SJI2G72VaM2E,6578 -pip/baseparser.py,sha256=DZKWTOA1OeD5mLyBUx183Jx-M16cqWOXPZZuJN4-4j8,8162 -pip/cmdoptions.py,sha256=C0JuSfhGIgrp2hMoVDYVPekPlPiG0wIFcIIFDbrsatg,9507 -pip/download.py,sha256=jnZvTGYutxPtgJvF0URMnsBGkTABNrfgFevu5QmscfE,22580 -pip/exceptions.py,sha256=wAoboA4PdhGN7xH-ayf_dcDFPYZe9XAivAlZJbOgCN4,1086 -pip/index.py,sha256=CLPb0crVhOQ3aZpl4feUKpf1pVR6qLhBiJTa71PoIkM,40403 -pip/locations.py,sha256=YyFyCLYADKgT5x-Ctj_LeZl5bEzkbBXuR2Iv8IbVqDA,6202 -pip/log.py,sha256=1fW7cVRIRBhfqWz4JH2HhJRHzVQ4PJTRbolRj3S33f8,9455 -pip/pep425tags.py,sha256=jb5Rq395Gz_Uv8kn3L9Im1HX7EhEj8nqyYX0nXulzWo,2969 -pip/req.py,sha256=DMGDl2N30fmLzh4VzhqQyix-bifSRKNhp2c_OS_gza8,83557 -pip/runner.py,sha256=VkcZKNikprpDOSl2Y3m0FaQbdGuYsoHkxdhjtL0N3oA,431 -pip/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116 -pip/util.py,sha256=GTnXa80tWauVlIvSQiYeNw12ly5X4hMPmDbRVQ79hwk,24172 -pip/wheel.py,sha256=PwTueHq1c30KvZF3-0wlTTUiR8hxBpE3THxN1bc0eS8,20618 -pip/_vendor/__init__.py,sha256=f-xO4dF7jRP89yrL4h26_nULYgYTzgnTgUFlkDasJrc,266 -pip/_vendor/pkg_resources.py,sha256=0y2CkvxQcHCBVOmTCNvdMN8hTPPUogThjhkCxegggII,100025 -pip/_vendor/re-vendor.py,sha256=PcdZ40d0ohMsdJmA4t0AeAWbPXi1tFsvAwA5KE5FGeY,773 -pip/_vendor/six.py,sha256=whAS1kvmixxh-pKqb5qQ05-fHMnGjuKyU6m7_wa09O4,23462 -pip/_vendor/_markerlib/__init__.py,sha256=2hgtRuYDOrimZF9-ENCkrP8gnJ59HZEtlk-zoTEvn1Y,564 -pip/_vendor/_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 -pip/_vendor/colorama/__init__.py,sha256=eABG0aR8L-8JfIiftmvixrDZvqHawB7KIIxiRdKsi1k,217 -pip/_vendor/colorama/ansi.py,sha256=spKO9jqXAB9POAj6M3bZLrdCf-W9wUVeDCqF664WSGs,1039 -pip/_vendor/colorama/ansitowin32.py,sha256=C0mA80tFRvkdHVTHzvyrCzsI0CNQ1gY9ng6deCxkbGY,6664 -pip/_vendor/colorama/initialise.py,sha256=sL44vQFKG5BAoFgoIxfhH0wG2NnLBw9mtnWFVtXkHGs,1297 -pip/_vendor/colorama/win32.py,sha256=tMAHgaTSySAt5BI5hBoID6oshHIYnJBlUqDsOoHbV0w,4911 -pip/_vendor/colorama/winterm.py,sha256=RhWJPcGA_T1knfS-84AXpQ0C7cn8XWE6iQkqy1GPsmk,4206 -pip/_vendor/distlib/__init__.py,sha256=kTBiZ-2Ndb1k7wb8yCvl7iRBCDTnvaOR15qf32RgvsY,581 -pip/_vendor/distlib/compat.py,sha256=pVILHBDu1P72lO1lEgPFpwRWGs4Cj2-eo_xOPLKjprk,38875 -pip/_vendor/distlib/database.py,sha256=reYrKl6tqHs3eOrMrdXX4W0OGlBqathuqSzCy-696MI,49138 -pip/_vendor/distlib/index.py,sha256=D-Zz8pQ6Gtr4t7HlFebU36P9inMPSfExmit061S8DUk,19476 -pip/_vendor/distlib/locators.py,sha256=Vi88LbYqF981rmCc8dbD9qnmt5MeJzv7qiAlvgrK3vs,46946 -pip/_vendor/distlib/manifest.py,sha256=I-JG2bVBN3Zmf4gpt9MFjgsukcIt0rMrz95jQ1kwbgE,13497 -pip/_vendor/distlib/markers.py,sha256=iRrVWwpyVwjkKJSX8NEQ92_MRMwpROcfNGKCD-Ch1QM,6282 -pip/_vendor/distlib/metadata.py,sha256=IS0Q8hNbQWEhaLtItUlLxYC3j9zVtFD76DgXbHxcRZE,36815 -pip/_vendor/distlib/resources.py,sha256=H9QmD7R_8t-iLxZne9EnjAHAdSEbJ22PYotEppOGxts,9432 -pip/_vendor/distlib/scripts.py,sha256=0bIHpJFwyvymJ4CEnm1-x3AcJZbYTEkhBAKuas295Ic,12307 -pip/_vendor/distlib/t32.exe,sha256=snr0x6iR5Yd7ZndiuvGMuuQGg8VklUc5538uQcOeVYQ,91136 
-pip/_vendor/distlib/t64.exe,sha256=71TYxgPfviERRKhyYaXPx2pl4o4ugs5zNWuIMLlm7ss,94720 -pip/_vendor/distlib/util.py,sha256=UBy5ki-nyb0nJyM_-TlXSfni_cEOI4r6LzglMMB_1zc,51230 -pip/_vendor/distlib/version.py,sha256=FgTBNWH7dDY12fqTFy6nATw21wV8kKJw5G19aFouwDE,22996 -pip/_vendor/distlib/w32.exe,sha256=QoyveFPxLH-db4j2YDzb-VmP7DmGT1vHwPOLGpzc3uw,87040 -pip/_vendor/distlib/w64.exe,sha256=haGpLW73-UaPHkjhKaQVPTBJCZ9Lg1rMBZdhpi7f_V8,91648 -pip/_vendor/distlib/wheel.py,sha256=TUMl4Pzri_jglkvr2jZ4GJfWAumG3VNzT2CaW7oBXQQ,38259 -pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 -pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 -pip/_vendor/distlib/_backport/shutil.py,sha256=AUi8718iRoJ9K26mRi-rywtt8Gx7ykvrvbUbZszjfYE,25650 -pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 -pip/_vendor/distlib/_backport/sysconfig.py,sha256=7WdYP0wbw8izH1eAEGNA-HXUyJrhzIAGK_LniUs4UNI,26958 -pip/_vendor/distlib/_backport/tarfile.py,sha256=bjyTNONZb-YEXrHFLExOSuagtSOoPaONP2UUoxwkAqE,92627 -pip/_vendor/html5lib/__init__.py,sha256=6fwIe3NEcpx7aLb1bBXUpsDgJFE9PnbpRADB7i2QhBw,714 -pip/_vendor/html5lib/constants.py,sha256=w_Lrxu8h6qE4KATYy0SL5hiJ5ebuB28SlCcdXUHf6to,87346 -pip/_vendor/html5lib/html5parser.py,sha256=qMHEOEahKSZzLHHkqLRVbuIJYgAteVR-nmkjMp59Tvw,117029 -pip/_vendor/html5lib/ihatexml.py,sha256=MT12cVXAKaW-ALUkUeN175HpUP73xK8wAIpPzQ8cgfI,16581 -pip/_vendor/html5lib/inputstream.py,sha256=qa-xwqbm-w250UR-uVzooXPSHFI4Ho6drLhPl7VWvHI,30636 -pip/_vendor/html5lib/sanitizer.py,sha256=sg7g5CXF9tfvykIoSVAvA8647MgScy3ncZC7IYH-8SA,16428 -pip/_vendor/html5lib/tokenizer.py,sha256=6Uf8sDUkvNn661bcBSBYUCTfXzSs9EyCTiPcj5PAjYI,76929 -pip/_vendor/html5lib/utils.py,sha256=T-BFeUVGJDjVCRbNoqar2qxn8jEoCOOJXE1nH0nDHEQ,2545 -pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/html5lib/filters/_base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 -pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=fpRLbz6TCe5yXEkGmyMlJ80FekWsTR-sHk3Ano0U9LQ,624 -pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=xllv1I7unxhcyZTf3LTsv30wh2mAkT7wmTZx7zIhpuY,2746 -pip/_vendor/html5lib/filters/lint.py,sha256=6rlGRUTxD5KWwEVoXVHI_PeyUHN6Vw2v_ovg0YiHsDA,4306 -pip/_vendor/html5lib/filters/optionaltags.py,sha256=4ozLwBgMRaxe7iqxefLQpDhp3irK7YHo9LgSGsvZYMw,10500 -pip/_vendor/html5lib/filters/sanitizer.py,sha256=MvGUs_v2taWPgGhjxswRSUiHfxrqMUhsNPz-eSeUYUQ,352 -pip/_vendor/html5lib/filters/whitespace.py,sha256=LbOUcC0zQ9z703KNZrArOr0kVBO7OMXjKjucDW32LU4,1142 -pip/_vendor/html5lib/serializer/__init__.py,sha256=xFXFP-inaTNlbnau5c5DGrH_O8yPm-C6HWbJxpiSqFE,490 -pip/_vendor/html5lib/serializer/htmlserializer.py,sha256=bSXUuFJB6s-ODOl0nzFN0UA6xlQRU-BwYamPeJvsNSE,12909 -pip/_vendor/html5lib/treeadapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/html5lib/treeadapters/sax.py,sha256=3of4vvaUYIAic7pngebwJV24hpOS7Zg9ggJa_WQegy4,1661 -pip/_vendor/html5lib/treebuilders/__init__.py,sha256=Xz4X6B5DA1R-5GyRa44j0sJwfl6dUNyb0NBu9-7sK3U,3405 -pip/_vendor/html5lib/treebuilders/_base.py,sha256=Xf0FZVcVwIQS6tEseJdj5wKbYucbNCnbAsnsG4lONis,13711 -pip/_vendor/html5lib/treebuilders/dom.py,sha256=ylkIlwEV2NsIWBpwEtfqF0LVoCGg4oXazEWs4-486jk,8469 -pip/_vendor/html5lib/treebuilders/etree.py,sha256=etbO6yQlyV46rWlj9mSyVqQOWrgoHgyJ01Tut4lWZkk,12621 
-pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=z3Bnfm2MstEEb_lbaAeicl5l-ab6MSQa5Q1ZZreK7Pc,14031 -pip/_vendor/html5lib/treewalkers/__init__.py,sha256=44g-xYZEoYxzkMu6CepBTLm4m-g9iy7Vm_IG8PWAbhY,2323 -pip/_vendor/html5lib/treewalkers/_base.py,sha256=hnL6zMgGJoGqEJYKVKveDmfpz1d2xriyuuau6479xq4,6919 -pip/_vendor/html5lib/treewalkers/dom.py,sha256=mAg05wBWN2k-CGPoo0KNxa55QAlHciNccp8AezCa8j8,1457 -pip/_vendor/html5lib/treewalkers/etree.py,sha256=waFU6dxcV5y4SEMyxZpQ9M4I5pKpMmCtUSN1GbuCVcE,4625 -pip/_vendor/html5lib/treewalkers/genshistream.py,sha256=IbBFrlgi-59-K7P1zm0d7ZFIknBN4c5E57PHJDkx39s,2278 -pip/_vendor/html5lib/treewalkers/lxmletree.py,sha256=vWfXWK3GOSrq2trQm2aPmIRWPhVuYDZ3g9Fu8hUeBQg,6215 -pip/_vendor/html5lib/treewalkers/pulldom.py,sha256=9W6i8yWtUzayV6EwX-okVacttHaqpQZwdBCc2S3XeQ4,2302 -pip/_vendor/html5lib/trie/__init__.py,sha256=mec5zyJ5wIKRM8819gIcIsYQwncg91rEmPwGH1dG3Ho,212 -pip/_vendor/html5lib/trie/_base.py,sha256=WGY8SGptFmx4O0aKLJ54zrIQOoyuvhS0ngA36vAcIcc,927 -pip/_vendor/html5lib/trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 -pip/_vendor/html5lib/trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 -pip/_vendor/requests/__init__.py,sha256=Rl423kV-MBTFDiTtEOJiP9CuaubFSi8rHaqzq1KBN0w,1856 -pip/_vendor/requests/adapters.py,sha256=fp4t_woMNJPv8vikcrLbkkDxhJHJMZ3h8MlgtKvpuE8,14608 -pip/_vendor/requests/api.py,sha256=4xrabBN80yaqHxsomHVQD09v2VndgCz1cSsEnRvGGL0,4344 -pip/_vendor/requests/auth.py,sha256=x2bFqHK3Lkbm7qPUyh_dAqYLTDTotBi-1za9EpCdA0U,6123 -pip/_vendor/requests/cacert.pem,sha256=ak7q_q8ozHdQ9ff27U-E1vCNrLisFRQSMy9zJkdpQlM,308434 -pip/_vendor/requests/certs.py,sha256=wSaqhSNoB0igp6Da-hWw0jtXICKXBbL8aS9swthlt50,544 -pip/_vendor/requests/compat.py,sha256=JGrJPV2YGatzwrexl9kSt8Z8QtmFboRZH9ywsLK_MMA,2556 -pip/_vendor/requests/cookies.py,sha256=BjMKtrI8TXQD5oQVeToYtkRBF149eI85UhWsBGwsJac,16686 -pip/_vendor/requests/exceptions.py,sha256=z-3QpicafKtIh85bMEaClL2OpNPdsc6TP_83KcsVh8Y,1877 -pip/_vendor/requests/hooks.py,sha256=9vNiuiRHRd5Qy6BX_0p1H3NsUzDo1M_HaFR2AFL41Tg,820 -pip/_vendor/requests/models.py,sha256=OYZOkemxZPpeTp5cvhdx-gAnl9bW82wdnGIr4uoVH20,26436 -pip/_vendor/requests/sessions.py,sha256=kwPPNj1M3D6yfHPcygUERvfqWaSx8odBijhqKTqRX_Q,22290 -pip/_vendor/requests/status_codes.py,sha256=LYpqLv4AEKuTPby-QSvgl_gI7fcVlUDqSBcndIwX-Qg,3136 -pip/_vendor/requests/structures.py,sha256=d7f7ZXZZzgZtvrBQBZA1boJYX_QlP1YqL-_xtpzImGw,3541 -pip/_vendor/requests/utils.py,sha256=MxSUha_2szwhfLKmGg5bolxt6lA6OghSvVZ4xMZwQhM,19973 -pip/_vendor/requests/packages/__init__.py,sha256=aXkbNCjM_WhryRBocE4AaA_p7-CTxL5LOutY7XzKm4s,62 -pip/_vendor/requests/packages/chardet/__init__.py,sha256=8-39Dg2qEuod5DNN7RMdn2ZYOO9zFU3fFfaE80iDWGc,1295 -pip/_vendor/requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594 -pip/_vendor/requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684 -pip/_vendor/requests/packages/chardet/chardetect.py,sha256=8g-dRSA97bSE6M25Tqe1roKKtl3XHSMnqi6vTzpHNV0,1141 -pip/_vendor/requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226 -pip/_vendor/requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791 -pip/_vendor/requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902 -pip/_vendor/requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318 
-pip/_vendor/requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157 -pip/_vendor/requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335 -pip/_vendor/requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782 -pip/_vendor/requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187 -pip/_vendor/requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839 -pip/_vendor/requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678 -pip/_vendor/requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978 -pip/_vendor/requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675 -pip/_vendor/requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872 -pip/_vendor/requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676 -pip/_vendor/requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011 -pip/_vendor/requests/packages/chardet/gb2312prober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681 -pip/_vendor/requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359 -pip/_vendor/requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315 -pip/_vendor/requests/packages/chardet/jpcntx.py,sha256=9fJ9oS0BUarcdZNySwmzVRuT03sYdClSmFwXDj3yVNg,19104 -pip/_vendor/requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784 -pip/_vendor/requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725 -pip/_vendor/requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628 -pip/_vendor/requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318 -pip/_vendor/requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536 -pip/_vendor/requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275 -pip/_vendor/requests/packages/chardet/latin1prober.py,sha256=g67gqZ2z89LUOlR7BZEAh4-p5a1yGWss9nWy8FCNm8Q,5241 -pip/_vendor/requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268 -pip/_vendor/requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967 -pip/_vendor/requests/packages/chardet/mbcssm.py,sha256=UuiA4Ic8vEc0XpTKDneqZyiH2TwGuFVZxOxWJep3X_4,19608 -pip/_vendor/requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793 -pip/_vendor/requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291 -pip/_vendor/requests/packages/chardet/sjisprober.py,sha256=1RjpQ2LU2gvoEB_4O839xDQVchWx2fG_C7_vXh52P5I,3734 -pip/_vendor/requests/packages/chardet/universaldetector.py,sha256=GkZdwNyNfbFWC8I1uqnzyhOUF7favWCqCOKqdQlx6gQ,6831 -pip/_vendor/requests/packages/chardet/utf8prober.py,sha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652 -pip/_vendor/requests/packages/urllib3/__init__.py,sha256=sLIKv9dGJjDloiVXUBBjXDWWq8bM66kcvTH2SU_WZKg,1701 -pip/_vendor/requests/packages/urllib3/_collections.py,sha256=Oh1gxPZRqtOSy3pTV0pWQ949t9sjRAFMautsHiHP_pY,6557 
-pip/_vendor/requests/packages/urllib3/connection.py,sha256=eug-y4_dOa-x9bxDlURwpqlZuQDhl9Tjre5D1S5bijE,6533 -pip/_vendor/requests/packages/urllib3/connectionpool.py,sha256=8eDsWYJzKYOyXMiP4CJqtLeychOw3iD_P20Ov-dbGUs,26904 -pip/_vendor/requests/packages/urllib3/exceptions.py,sha256=T-ILeqVPpEvrOYAq8XEyTF0X8XRWcFVGT2gMrF00km0,3364 -pip/_vendor/requests/packages/urllib3/fields.py,sha256=UuTJzGxUc9H1LPYZnD3f8GW308Vx_znb5pt4yimORsI,5976 -pip/_vendor/requests/packages/urllib3/filepost.py,sha256=tWPY33HnFM_RPpEU9PHv9D34n67w8ZRt80ZSsWIv0Kk,2512 -pip/_vendor/requests/packages/urllib3/poolmanager.py,sha256=bMYHdNaVI5O4YrJHr1T6tE2RYHEMzM2_K0cTy7uzX2M,8977 -pip/_vendor/requests/packages/urllib3/request.py,sha256=cXTcrr9d50Rt213ZXLgCf53KNWxe4LQ8lxSV1HBYa9E,5808 -pip/_vendor/requests/packages/urllib3/response.py,sha256=ff9-9sZkghNWCyeoECvImHspITTum7KOM20J2ia4SAw,10347 -pip/_vendor/requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py,sha256=VJ-GjxpYITxSj4UDKX0iqvHwaatyg2RA3PaTym5Wp6w,4741 -pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py,sha256=D1cbFXSoWj4ahM0azQdvIDFkvNnzMLxOvo1wnMrPo8M,15086 -pip/_vendor/requests/packages/urllib3/packages/__init__.py,sha256=EKCTAOjZtPR_HC50e7X8hS5j4bkFkN87XZOT-Wdpfus,74 -pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py,sha256=HtHphtStJlorzQqoIat8zUH0lqLns416gfSO9y_aSAQ,8936 -pip/_vendor/requests/packages/urllib3/packages/six.py,sha256=U-rO-WBrFS8PxHeamSl6okKCjqPF18NhiZb0qPZ67XM,11628 -pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460 -pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778 -pip/_vendor/requests/packages/urllib3/util/__init__.py,sha256=nI42Lb9ShOOOl_uRDyJfZP_nxHCR4wTFJXmDa_GEe7c,622 -pip/_vendor/requests/packages/urllib3/util/connection.py,sha256=Df5MeJaIpPigbIxEa87ojZ7qxXm1-V9dhnc2m7S6lNA,1348 -pip/_vendor/requests/packages/urllib3/util/request.py,sha256=-mIHA_M2aZwEmW5PpNfxEi9B47YDmFcoTW5FvQ7prf4,1924 -pip/_vendor/requests/packages/urllib3/util/response.py,sha256=EVO-5Q1Wc9K61i3RIhPB83CXfnvZpphL_NNp0SLznzI,354 -pip/_vendor/requests/packages/urllib3/util/ssl_.py,sha256=0SJbyE9KURi8IjmOwo8sqmmit4sRkXf9eg_ODWOhSD0,4235 -pip/_vendor/requests/packages/urllib3/util/timeout.py,sha256=WGx3s4593QcpwyHLY1FpFZgGPiTdL26A2nJhsZa9Rj8,9236 -pip/_vendor/requests/packages/urllib3/util/url.py,sha256=ftfW-i1XtyFZEOEKfXLkKNmM7APmgNHbtcGWYr-6xdI,4273 -pip/backwardcompat/__init__.py,sha256=AcP5dr3nL-4AGxSwsFIEUcf9ki0ROUFwfc0IrIeHaJI,3756 -pip/commands/__init__.py,sha256=N_4io-oGcWF9-raDN5TYXbGlJFsx5po36HZmwgLso6I,2236 -pip/commands/bundle.py,sha256=tK8LU3Khjkrz65y3brNP71QOBkQCb9mlv9x8s1W02T4,1787 -pip/commands/completion.py,sha256=LnJgUrpGGO4x2Y8VdwhKda4kGZWMFO28P4jYzYT5Q8k,1838 -pip/commands/freeze.py,sha256=Hyx1gzMaTFwTMcP98fwNCRVvvrWenX9j1RBziLCIo0A,4664 -pip/commands/help.py,sha256=ETLg8xfv8uFwS3KvxmsCE-I56S15jUTvfkwaPAA18pE,927 -pip/commands/install.py,sha256=PPFxd9RyUpVxfkumnztooBdrUkMguDI6eRHS0IrePUE,12694 -pip/commands/list.py,sha256=FHf7H35AajbCuymiG2z8xAGNSx8W5CNZKj6Hh2QGo38,6814 -pip/commands/search.py,sha256=_4Mza0qEb6P1aDA2OROYd-KuOJg0NrITOtQoiCDJF5Q,4736 -pip/commands/show.py,sha256=ipjEcTrk-hgvFysSKJ5E9PSPXZGTuE3NIXLYvXnsdNk,2767 -pip/commands/uninstall.py,sha256=MF4zSLfMxnH3E8T673ORNWz0Bsc4C6LEI5KImpAQrck,2203 
-pip/commands/unzip.py,sha256=_PeTWKOd_iRxPt_7njQ8jGFpjX006vobn593tcIyeUc,185 -pip/commands/wheel.py,sha256=gyzZ4dQ0Ua8cP2H3ihvuNbv2z87ov-1Irxpu3m2MQqo,7320 -pip/commands/zip.py,sha256=KECCb3oCHxJqDT3kUEnlf0udp31Ckoe8oyEKdS7EKNQ,14821 -pip/vcs/__init__.py,sha256=kS31hLmJ6BgKnBu8kvXKQlJEwoj1MxYE7wfRuFL-daM,8748 -pip/vcs/bazaar.py,sha256=qUIuIqDJqwZ_nP6WR52YwvYVy1lvIUmvaT-IdxDYUHo,4943 -pip/vcs/git.py,sha256=ib3TqDwJyfjBnSRFKVe_HhNdwkmfcOZfJHbqt2RUOVg,7898 -pip/vcs/mercurial.py,sha256=71ESfgxotPPPZjiH6sMTBWcj5TS8kjgJxVnWrRb3bwo,5820 -pip/vcs/subversion.py,sha256=P31K7o83JdcipIyuEVlnpSp5KZqakb4OJ1PKT-FB7C8,10640 -pip-1.5.6.dist-info/DESCRIPTION.rst,sha256=n5sT7bxCOnG9ej7TtEjjARQZ_n2ECqWFDiJK88BM0u0,1422 -pip-1.5.6.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68 -pip-1.5.6.dist-info/METADATA,sha256=lUBJx4V5mJY0jPlxYlu1x4YUNML-AN4dn4Dv-EFX8-Y,2499 -pip-1.5.6.dist-info/metadata.json,sha256=QZKMcKbHx-PWAvCvO0LdJX7JZjzx_yoKl2TTi1yKEKE,1361 -pip-1.5.6.dist-info/RECORD,, -pip-1.5.6.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip-1.5.6.dist-info/WHEEL,sha256=6lxp_S3wZGmTBtGMVmNNLyvKFcp7HqQw2Wn4YYk-Suo,110 -/Users/build/platform_darwin/bin/pip3,sha256=8ypTNHdXLeXuApkAcLaVvwtUUmISNDBfVkTfn2qA33I,231 -/Users/build/platform_darwin/bin/pip3.4,sha256=8ypTNHdXLeXuApkAcLaVvwtUUmISNDBfVkTfn2qA33I,231 -pip/_vendor/requests/packages/chardet/__pycache__/langthaimodel.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/pulldom.cpython-34.pyc,, -pip/__pycache__/wheel.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/lxmletree.cpython-34.pyc,, -pip/__pycache__/locations.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/exceptions.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/scripts.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/euckrprober.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/hooks.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-34.pyc,, -pip/_vendor/html5lib/serializer/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/compat.cpython-34.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-34.pyc,, -pip/commands/__pycache__/unzip.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/big5freq.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/contrib/__pycache__/pyopenssl.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/initialise.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/poolmanager.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/escsm.cpython-34.pyc,, -pip/__pycache__/pep425tags.cpython-34.pyc,, -pip/commands/__pycache__/list.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/lint.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/fields.cpython-34.pyc,, -pip/_vendor/_markerlib/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/charsetgroupprober.cpython-34.pyc,, -pip/__pycache__/runner.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/connection.cpython-34.pyc,, -pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-34.pyc,, -pip/commands/__pycache__/completion.cpython-34.pyc,, -pip/_vendor/_markerlib/__pycache__/markers.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/codingstatemachine.cpython-34.pyc,, 
-pip/commands/__pycache__/zip.cpython-34.pyc,, -pip/__pycache__/__main__.cpython-34.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/adapters.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/sbcharsetprober.cpython-34.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-34.pyc,, -pip/_vendor/distlib/_backport/__pycache__/misc.cpython-34.pyc,, -pip/__pycache__/baseparser.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/sessions.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/jpcntx.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/exceptions.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/trie/__pycache__/_base.cpython-34.pyc,, -pip/backwardcompat/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/charsetprober.cpython-34.pyc,, -pip/_vendor/html5lib/trie/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/serializer/__pycache__/htmlserializer.cpython-34.pyc,, -pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/response.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/sanitizer.cpython-34.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/constants.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/html5parser.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/win32.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/constants.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/_base.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/__pycache__/six.cpython-34.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/_base.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/markers.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/mbcsgroupprober.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/latin1prober.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/_collections.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/inputstream.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/_base.cpython-34.pyc,, -pip/commands/__pycache__/install.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/certs.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/langgreekmodel.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/compat.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/ansitowin32.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-34.pyc,, -pip/commands/__pycache__/help.cpython-34.pyc,, -pip/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/sjisprober.cpython-34.pyc,, -pip/vcs/__pycache__/subversion.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/chardetect.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/packages/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/api.cpython-34.pyc,, 
-pip/__pycache__/req.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/manifest.cpython-34.pyc,, -pip/__pycache__/status_codes.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/gb2312prober.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/ssl_.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/exceptions.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/mbcssm.cpython-34.pyc,, -pip/_vendor/__pycache__/pkg_resources.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/connectionpool.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/euctwfreq.cpython-34.pyc,, -pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/utf8prober.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/metadata.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/utils.cpython-34.pyc,, -pip/vcs/__pycache__/mercurial.cpython-34.pyc,, -pip/_vendor/html5lib/trie/__pycache__/datrie.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/ihatexml.cpython-34.pyc,, -pip/_vendor/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/tokenizer.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/big5prober.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/genshistream.cpython-34.pyc,, -pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/database.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/sbcsgroupprober.cpython-34.pyc,, -pip/__pycache__/download.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/hebrewprober.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/locators.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/cookies.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/mbcharsetprober.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/filepost.cpython-34.pyc,, -pip/_vendor/html5lib/trie/__pycache__/py.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/timeout.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/chardistribution.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/connection.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-34.pyc,, -pip/commands/__pycache__/show.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/contrib/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/eucjpprober.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/jisfreq.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/winterm.cpython-34.pyc,, -pip/commands/__pycache__/uninstall.cpython-34.pyc,, -pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/util.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/request.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-34.pyc,, 
-pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-34.pyc,, -pip/vcs/__pycache__/git.cpython-34.pyc,, -pip/__pycache__/util.cpython-34.pyc,, -pip/_vendor/colorama/__pycache__/ansi.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/cp949prober.cpython-34.pyc,, -pip/commands/__pycache__/bundle.cpython-34.pyc,, -pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-34.pyc,, -pip/vcs/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/__pycache__/re-vendor.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/contrib/__pycache__/ntlmpool.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/__init__.cpython-34.pyc,, -pip/_vendor/html5lib/__pycache__/utils.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/index.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/gb2312freq.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/structures.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/auth.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/universaldetector.cpython-34.pyc,, -pip/commands/__pycache__/wheel.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/euctwprober.cpython-34.pyc,, -pip/_vendor/requests/packages/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/basecommand.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/util/__pycache__/url.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/response.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/version.cpython-34.pyc,, -pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/models.cpython-34.pyc,, -pip/vcs/__pycache__/bazaar.cpython-34.pyc,, -pip/commands/__pycache__/search.cpython-34.pyc,, -pip/commands/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/cmdoptions.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/wheel.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/index.cpython-34.pyc,, -pip/__pycache__/log.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/packages/__pycache__/ordered_dict.cpython-34.pyc,, -pip/_vendor/requests/__pycache__/status_codes.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/escprober.cpython-34.pyc,, -pip/commands/__pycache__/freeze.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/resources.cpython-34.pyc,, -pip/_vendor/requests/packages/chardet/__pycache__/euckrfreq.cpython-34.pyc,, -pip/_vendor/distlib/__pycache__/compat.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/packages/__pycache__/six.cpython-34.pyc,, -pip/_vendor/requests/packages/urllib3/__pycache__/request.cpython-34.pyc,, diff --git a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json b/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json deleted file mode 100644 index 2a13d37..0000000 --- a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"version": "1.5.6", "license": "MIT", "summary": "A tool for installing and managing Python packages.", "test_requires": [{"requires": ["pytest", "virtualenv (>=1.10)", "scripttest (>=1.3)", "mock"]}], "generator": "bdist_wheel (0.23.0)", "document_names": {"description": "DESCRIPTION.rst"}, "run_requires": [{"extra": "testing", "requires": ["pytest", "virtualenv (>=1.10)", "scripttest (>=1.3)", "mock"]}], "exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}, "name": "pip", "classifiers": ["Development Status :: 5 - 
Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3"], "metadata_version": "2.0", "keywords": "easy_install distutils setuptools egg virtualenv", "extras": ["testing"], "commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}, "contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "project_urls": {"Home": "https://pip.pypa.io/"}} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/pip/__main__.py b/Darwin/lib/python3.4/site-packages/pip/__main__.py deleted file mode 100644 index 5ca3746..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/__main__.py +++ /dev/null @@ -1,7 +0,0 @@ -import sys -from .runner import run - -if __name__ == '__main__': - exit = run() - if exit: - sys.exit(exit) diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/__init__.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/__init__.py deleted file mode 100644 index f233ca0..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -""" -pip._vendor is for vendoring dependencies of pip to prevent needing pip to -depend on something external. - -Files inside of pip._vendor should be considered immutable and should only be -updated to versions from upstream. -""" -from __future__ import absolute_import diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py deleted file mode 100644 index 5dfe374..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. 
-See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' - -def code_to_chars(code): - return CSI + str(code) + 'm' - -class AnsiCodes(object): - def __init__(self, codes): - for name in dir(codes): - if not name.startswith('_'): - value = getattr(codes, name) - setattr(self, name, code_to_chars(value)) - -class AnsiFore: - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - -class AnsiBack: - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - -class AnsiStyle: - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiCodes( AnsiFore ) -Back = AnsiCodes( AnsiBack ) -Style = AnsiCodes( AnsiStyle ) - diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe b/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe deleted file mode 100644 index 43f39f3..0000000 Binary files a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t64.exe b/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t64.exe deleted file mode 100644 index 73e2f40..0000000 Binary files a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/t64.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe b/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe deleted file mode 100644 index 09e7635..0000000 Binary files a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe b/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe deleted file mode 100644 index 29e44e1..0000000 Binary files a/Darwin/lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py deleted file mode 100644 index 18124e7..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -"""A collection of modules for iterating through different kinds of -tree, generating tokens identical to those produced by the tokenizer -module. - -To create a tree walker for a new type of tree, you need to do -implement a tree walker object (called TreeWalker by convention) that -implements a 'serialize' method taking a tree as sole argument and -returning an iterator generating tokens. -""" - -from __future__ import absolute_import, division, unicode_literals - -import sys - -from ..utils import default_etree - -treeWalkerCache = {} - - -def getTreeWalker(treeType, implementation=None, **kwargs): - """Get a TreeWalker class for various types of tree with built-in support - - treeType - the name of the tree type required (case-insensitive). Supported - values are: - - "dom" - The xml.dom.minidom DOM implementation - "pulldom" - The xml.dom.pulldom event stream - "etree" - A generic walker for tree implementations exposing an - elementtree-like interface (known to work with - ElementTree, cElementTree and lxml.etree). - "lxml" - Optimized walker for lxml.etree - "genshi" - a Genshi stream - - implementation - (Currently applies to the "etree" tree type only). A module - implementing the tree type e.g. 
xml.etree.ElementTree or - cElementTree.""" - - treeType = treeType.lower() - if treeType not in treeWalkerCache: - if treeType in ("dom", "pulldom"): - name = "%s.%s" % (__name__, treeType) - __import__(name) - mod = sys.modules[name] - treeWalkerCache[treeType] = mod.TreeWalker - elif treeType == "genshi": - from . import genshistream - treeWalkerCache[treeType] = genshistream.TreeWalker - elif treeType == "lxml": - from . import lxmletree - treeWalkerCache[treeType] = lxmletree.TreeWalker - elif treeType == "etree": - from . import etree - if implementation is None: - implementation = default_etree - # XXX: NEVER cache here, caching is done in the etree submodule - return etree.getETreeModule(implementation, **kwargs).TreeWalker - return treeWalkerCache.get(treeType) diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py deleted file mode 100644 index ecd0163..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -""" -Script which takes one or more file paths and reports on their detected -encodings - -Example:: - - % chardetect somefile someotherfile - somefile: windows-1252 with confidence 0.5 - someotherfile: ascii with confidence 1.0 - -If no paths are provided, it takes its input from stdin. - -""" -from io import open -from sys import argv, stdin - -from chardet.universaldetector import UniversalDetector - - -def description_of(file, name='stdin'): - """Return a string describing the probable encoding of a file.""" - u = UniversalDetector() - for line in file: - u.feed(line) - u.close() - result = u.result - if result['encoding']: - return '%s: %s with confidence %s' % (name, - result['encoding'], - result['confidence']) - else: - return '%s: no result' % name - - -def main(): - if len(argv) <= 1: - print(description_of(stdin)) - else: - for path in argv[1:]: - with open(path, 'rb') as f: - print(description_of(f, path)) - - -if __name__ == '__main__': - main() diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py deleted file mode 100644 index 9cea3a4..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py +++ /dev/null @@ -1,205 +0,0 @@ -# urllib3/_collections.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - -from collections import Mapping, MutableMapping -try: - from threading import RLock -except ImportError: # Platform-specific: No threads available - class RLock: - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - pass - - -try: # Python 2.7+ - from collections import OrderedDict -except ImportError: - from .packages.ordered_dict import OrderedDict -from .packages.six import itervalues - - -__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] - - -_Null = object() - - -class RecentlyUsedContainer(MutableMapping): - """ - Provides a thread-safe dict-like container which maintains up to - ``maxsize`` keys while throwing away the least-recently-used keys beyond - ``maxsize``. - - :param maxsize: - Maximum number of recent elements to retain. 
- - :param dispose_func: - Every time an item is evicted from the container, - ``dispose_func(value)`` is called. Callback which will get called - """ - - ContainerCls = OrderedDict - - def __init__(self, maxsize=10, dispose_func=None): - self._maxsize = maxsize - self.dispose_func = dispose_func - - self._container = self.ContainerCls() - self.lock = RLock() - - def __getitem__(self, key): - # Re-insert the item, moving it to the end of the eviction line. - with self.lock: - item = self._container.pop(key) - self._container[key] = item - return item - - def __setitem__(self, key, value): - evicted_value = _Null - with self.lock: - # Possibly evict the existing value of 'key' - evicted_value = self._container.get(key, _Null) - self._container[key] = value - - # If we didn't evict an existing value, we might have to evict the - # least recently used item from the beginning of the container. - if len(self._container) > self._maxsize: - _key, evicted_value = self._container.popitem(last=False) - - if self.dispose_func and evicted_value is not _Null: - self.dispose_func(evicted_value) - - def __delitem__(self, key): - with self.lock: - value = self._container.pop(key) - - if self.dispose_func: - self.dispose_func(value) - - def __len__(self): - with self.lock: - return len(self._container) - - def __iter__(self): - raise NotImplementedError('Iteration over this class is unlikely to be threadsafe.') - - def clear(self): - with self.lock: - # Copy pointers to all values, then wipe the mapping - # under Python 2, this copies the list of values twice :-| - values = list(self._container.values()) - self._container.clear() - - if self.dispose_func: - for value in values: - self.dispose_func(value) - - def keys(self): - with self.lock: - return self._container.keys() - - -class HTTPHeaderDict(MutableMapping): - """ - :param headers: - An iterable of field-value pairs. Must not contain multiple field names - when compared case-insensitively. - - :param kwargs: - Additional field-value pairs to pass in to ``dict.update``. - - A ``dict`` like container for storing HTTP Headers. - - Field names are stored and compared case-insensitively in compliance with - RFC 2616. Iteration provides the first case-sensitive key seen for each - case-insensitive pair. - - Using ``__setitem__`` syntax overwrites fields that compare equal - case-insensitively in order to maintain ``dict``'s api. For fields that - compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` - in a loop. - - If multiple fields that are equal case-insensitively are passed to the - constructor or ``.update``, the behavior is undefined and some will be - lost. - - >>> headers = HTTPHeaderDict() - >>> headers.add('Set-Cookie', 'foo=bar') - >>> headers.add('set-cookie', 'baz=quxx') - >>> headers['content-length'] = '7' - >>> headers['SET-cookie'] - 'foo=bar, baz=quxx' - >>> headers['Content-Length'] - '7' - - If you want to access the raw headers with their original casing - for debugging purposes you can access the private ``._data`` attribute - which is a normal python ``dict`` that maps the case-insensitive key to a - list of tuples stored as (case-sensitive-original-name, value). 
Using the - structure from above as our example: - - >>> headers._data - {'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')], - 'content-length': [('content-length', '7')]} - """ - - def __init__(self, headers=None, **kwargs): - self._data = {} - if headers is None: - headers = {} - self.update(headers, **kwargs) - - def add(self, key, value): - """Adds a (name, value) pair, doesn't overwrite the value if it already - exists. - - >>> headers = HTTPHeaderDict(foo='bar') - >>> headers.add('Foo', 'baz') - >>> headers['foo'] - 'bar, baz' - """ - self._data.setdefault(key.lower(), []).append((key, value)) - - def getlist(self, key): - """Returns a list of all the values for the named field. Returns an - empty list if the key doesn't exist.""" - return self[key].split(', ') if key in self else [] - - def copy(self): - h = HTTPHeaderDict() - for key in self._data: - for rawkey, value in self._data[key]: - h.add(rawkey, value) - return h - - def __eq__(self, other): - if not isinstance(other, Mapping): - return False - other = HTTPHeaderDict(other) - return dict((k1, self[k1]) for k1 in self._data) == \ - dict((k2, other[k2]) for k2 in other._data) - - def __getitem__(self, key): - values = self._data[key.lower()] - return ', '.join(value[1] for value in values) - - def __setitem__(self, key, value): - self._data[key.lower()] = [(key, value)] - - def __delitem__(self, key): - del self._data[key.lower()] - - def __len__(self): - return len(self._data) - - def __iter__(self): - for headers in itervalues(self._data): - yield headers[0][0] - - def __repr__(self): - return '%s(%r)' % (self.__class__.__name__, dict(self.items())) diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py deleted file mode 100644 index 21a12c6..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py +++ /dev/null @@ -1,422 +0,0 @@ -'''SSL with SNI_-support for Python 2. Follow these instructions if you would -like to verify SSL certificates in Python 2. Note, the default libraries do -*not* do certificate checking; you need to do additional work to validate -certificates yourself. - -This needs the following packages installed: - -* pyOpenSSL (tested with 0.13) -* ndg-httpsclient (tested with 0.3.2) -* pyasn1 (tested with 0.1.6) - -You can install them with the following command: - - pip install pyopenssl ndg-httpsclient pyasn1 - -To activate certificate checking, call -:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code -before you begin making HTTP requests. This can be done in a ``sitecustomize`` -module, or at any other time before your application begins using ``urllib3``, -like this:: - - try: - import urllib3.contrib.pyopenssl - urllib3.contrib.pyopenssl.inject_into_urllib3() - except ImportError: - pass - -Now you can use :mod:`urllib3` as you normally would, and it will support SNI -when the required modules are installed. - -Activating this module also has the positive side effect of disabling SSL/TLS -encryption in Python 2 (see `CRIME attack`_). - -If you want to configure the default list of supported cipher suites, you can -set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable. - -Module Variables ----------------- - -:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. 
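# A small sketch exercising the HTTPHeaderDict defined above: field names are
# matched case-insensitively, add() accumulates values, and getlist() splits
# them back out. Import path as vendored in the tree shown in this diff.
from pip._vendor.requests.packages.urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict()
headers.add('Set-Cookie', 'foo=bar')
headers.add('set-cookie', 'baz=quxx')
headers['Accept'] = 'text/html'

assert headers['SET-COOKIE'] == 'foo=bar, baz=quxx'          # values joined on ', '
assert headers.getlist('set-cookie') == ['foo=bar', 'baz=quxx']
assert headers == {'set-cookie': 'foo=bar, baz=quxx', 'Accept': 'text/html'}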
- Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES: - ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS`` - -.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication -.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) - -''' - -from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT -from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName -import OpenSSL.SSL -from pyasn1.codec.der import decoder as der_decoder -from pyasn1.type import univ, constraint -from socket import _fileobject, timeout -import ssl -import select -from cStringIO import StringIO - -from .. import connection -from .. import util - -__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] - -# SNI only *really* works if we can read the subjectAltName of certificates. -HAS_SNI = SUBJ_ALT_NAME_SUPPORT - -# Map from urllib3 to PyOpenSSL compatible parameter-values. -_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, - ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, -} -_openssl_verify = { - ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, - ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER - + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, -} - -# A secure default. -# Sources for more information on TLS ciphers: -# -# - https://wiki.mozilla.org/Security/Server_Side_TLS -# - https://www.ssllabs.com/projects/best-practices/index.html -# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ -# -# The general intent is: -# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), -# - prefer ECDHE over DHE for better performance, -# - prefer any AES-GCM over any AES-CBC for better performance and security, -# - use 3DES as fallback which is secure but slow, -# - disable NULL authentication, MD5 MACs and DSS for security reasons. -DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \ - "ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \ - "!aNULL:!MD5:!DSS" - - -orig_util_HAS_SNI = util.HAS_SNI -orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket - - -def inject_into_urllib3(): - 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' - - connection.ssl_wrap_socket = ssl_wrap_socket - util.HAS_SNI = HAS_SNI - - -def extract_from_urllib3(): - 'Undo monkey-patching by :func:`inject_into_urllib3`.' - - connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket - util.HAS_SNI = orig_util_HAS_SNI - - -### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. -class SubjectAltName(BaseSubjectAltName): - '''ASN.1 implementation for subjectAltNames support''' - - # There is no limit to how many SAN certificates a certificate may have, - # however this needs to have some limit so we'll set an arbitrarily high - # limit. - sizeSpec = univ.SequenceOf.sizeSpec + \ - constraint.ValueSizeConstraint(1, 1024) - - -### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. 
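# inject_into_urllib3/extract_from_urllib3 above are a plain save, patch,
# restore pattern; a self-contained sketch of the same idea (patching
# json.dumps here is purely illustrative, not what the vendored module does):
import json

_orig_dumps = json.dumps                     # keep a reference to the original

def _patched_dumps(obj, **kw):
    kw.setdefault('sort_keys', True)         # the replacement behaviour
    return _orig_dumps(obj, **kw)

def inject():
    json.dumps = _patched_dumps              # swap the implementation in place

def extract():
    json.dumps = _orig_dumps                 # undo the patch

inject()
assert json.dumps({'b': 1, 'a': 2}) == '{"a": 2, "b": 1}'
extract()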
-def get_subj_alt_name(peer_cert): - # Search through extensions - dns_name = [] - if not SUBJ_ALT_NAME_SUPPORT: - return dns_name - - general_names = SubjectAltName() - for i in range(peer_cert.get_extension_count()): - ext = peer_cert.get_extension(i) - ext_name = ext.get_short_name() - if ext_name != 'subjectAltName': - continue - - # PyOpenSSL returns extension data in ASN.1 encoded form - ext_dat = ext.get_data() - decoded_dat = der_decoder.decode(ext_dat, - asn1Spec=general_names) - - for name in decoded_dat: - if not isinstance(name, SubjectAltName): - continue - for entry in range(len(name)): - component = name.getComponentByPosition(entry) - if component.getName() != 'dNSName': - continue - dns_name.append(str(component.getComponent())) - - return dns_name - - -class fileobject(_fileobject): - - def _wait_for_sock(self): - rd, wd, ed = select.select([self._sock], [], [], - self._sock.gettimeout()) - if not rd: - raise timeout() - - - def read(self, size=-1): - # Use max, disallow tiny reads in a loop as they are very inefficient. - # We never leave read() with any leftover data from a new recv() call - # in our internal buffer. - rbufsize = max(self._rbufsize, self.default_bufsize) - # Our use of StringIO rather than lists of string objects returned by - # recv() minimizes memory usage and fragmentation that occurs when - # rbufsize is large compared to the typical return value of recv(). - buf = self._rbuf - buf.seek(0, 2) # seek end - if size < 0: - # Read until EOF - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or EOF seen, whichever comes first - buf_len = buf.tell() - if buf_len >= size: - # Already have size bytes in our buffer? Extract and return. - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - left = size - buf_len - # recv() will malloc the amount of memory given as its - # parameter even though it often returns much less data - # than that. The returned data string is short lived - # as we copy it into a StringIO and free it. This avoids - # fragmentation issues on many platforms. - try: - data = self._sock.recv(left) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid buffer data copies when: - # - We have no data in our buffer. - # AND - # - Our call to recv returned exactly the - # number of bytes we were asked to read. - return data - if n == left: - buf.write(data) - del data # explicit free - break - assert n <= left, "recv(%d) returned %d bytes" % (left, n) - buf.write(data) - buf_len += n - del data # explicit free - #assert buf_len == buf.tell() - return buf.getvalue() - - def readline(self, size=-1): - buf = self._rbuf - buf.seek(0, 2) # seek end - if buf.tell() > 0: - # check if we already have it in our buffer - buf.seek(0) - bline = buf.readline(size) - if bline.endswith('\n') or len(bline) == size: - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return bline - del bline - if size < 0: - # Read until \n or EOF, whichever comes first - if self._rbufsize <= 1: - # Speed up unbuffered case - buf.seek(0) - buffers = [buf.read()] - self._rbuf = StringIO() # reset _rbuf. 
we consume it via buf. - data = None - recv = self._sock.recv - while True: - try: - while data != "\n": - data = recv(1) - if not data: - break - buffers.append(data) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - break - return "".join(buffers) - - buf.seek(0, 2) # seek end - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - nl = data.find('\n') - if nl >= 0: - nl += 1 - buf.write(data[:nl]) - self._rbuf.write(data[nl:]) - del data - break - buf.write(data) - return buf.getvalue() - else: - # Read until size bytes or \n or EOF seen, whichever comes first - buf.seek(0, 2) # seek end - buf_len = buf.tell() - if buf_len >= size: - buf.seek(0) - rv = buf.read(size) - self._rbuf = StringIO() - self._rbuf.write(buf.read()) - return rv - self._rbuf = StringIO() # reset _rbuf. we consume it via buf. - while True: - try: - data = self._sock.recv(self._rbufsize) - except OpenSSL.SSL.WantReadError: - self._wait_for_sock() - continue - if not data: - break - left = size - buf_len - # did we just receive a newline? - nl = data.find('\n', 0, left) - if nl >= 0: - nl += 1 - # save the excess data to _rbuf - self._rbuf.write(data[nl:]) - if buf_len: - buf.write(data[:nl]) - break - else: - # Shortcut. Avoid data copy through buf when returning - # a substring of our first recv(). - return data[:nl] - n = len(data) - if n == size and not buf_len: - # Shortcut. Avoid data copy through buf when - # returning exactly all of our first recv(). - return data - if n >= left: - buf.write(data[:left]) - self._rbuf.write(data[left:]) - break - buf.write(data) - buf_len += n - #assert buf_len == buf.tell() - return buf.getvalue() - - -class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' - - def __init__(self, connection, socket): - self.connection = connection - self.socket = socket - - def fileno(self): - return self.socket.fileno() - - def makefile(self, mode, bufsize=-1): - return fileobject(self.connection, mode, bufsize) - - def settimeout(self, timeout): - return self.socket.settimeout(timeout) - - def sendall(self, data): - return self.connection.sendall(data) - - def close(self): - return self.connection.shutdown() - - def getpeercert(self, binary_form=False): - x509 = self.connection.get_peer_certificate() - - if not x509: - return x509 - - if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) - - return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': [ - ('DNS', value) - for value in get_subj_alt_name(x509) - ] - } - - -def _verify_callback(cnx, x509, err_no, err_depth, return_code): - return err_no == 0 - - -def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) - if certfile: - ctx.use_certificate_file(certfile) - if keyfile: - ctx.use_privatekey_file(keyfile) - if cert_reqs != ssl.CERT_NONE: - ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback) - if ca_certs: - try: - ctx.load_verify_locations(ca_certs, None) - except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) - else: - ctx.set_default_verify_paths() - - # Disable TLS compression to migitate CRIME attack (issue #309) - OP_NO_COMPRESSION = 0x20000 - 
ctx.set_options(OP_NO_COMPRESSION) - - # Set list of supported ciphersuites. - ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST) - - cnx = OpenSSL.SSL.Connection(ctx, sock) - cnx.set_tlsext_host_name(server_hostname) - cnx.set_connect_state() - while True: - try: - cnx.do_handshake() - except OpenSSL.SSL.WantReadError: - select.select([sock], [], []) - continue - except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake', e) - break - - return WrappedSocket(cnx, sock) diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/response.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/response.py deleted file mode 100644 index db44182..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/response.py +++ /dev/null @@ -1,308 +0,0 @@ -# urllib3/response.py -# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) -# -# This module is part of urllib3 and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -import logging -import zlib -import io - -from ._collections import HTTPHeaderDict -from .exceptions import DecodeError -from .packages.six import string_types as basestring, binary_type -from .util import is_fp_closed - - -log = logging.getLogger(__name__) - - -class DeflateDecoder(object): - - def __init__(self): - self._first_try = True - self._data = binary_type() - self._obj = zlib.decompressobj() - - def __getattr__(self, name): - return getattr(self._obj, name) - - def decompress(self, data): - if not self._first_try: - return self._obj.decompress(data) - - self._data += data - try: - return self._obj.decompress(data) - except zlib.error: - self._first_try = False - self._obj = zlib.decompressobj(-zlib.MAX_WBITS) - try: - return self.decompress(self._data) - finally: - self._data = None - - -def _get_decoder(mode): - if mode == 'gzip': - return zlib.decompressobj(16 + zlib.MAX_WBITS) - - return DeflateDecoder() - - -class HTTPResponse(io.IOBase): - """ - HTTP Response container. - - Backwards-compatible to httplib's HTTPResponse but the response ``body`` is - loaded and decoded on-demand when the ``data`` property is accessed. - - Extra parameters for behaviour not present in httplib.HTTPResponse: - - :param preload_content: - If True, the response's body will be preloaded during construction. - - :param decode_content: - If True, attempts to decode specific content-encoding's based on headers - (like 'gzip' and 'deflate') will be skipped and raw data will be used - instead. - - :param original_response: - When this HTTPResponse wrapper is generated from an httplib.HTTPResponse - object, it's convenient to include the original for debug purposes. It's - otherwise unused. 
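# The decoder selection above hinges on zlib window-size flags: gzip bodies
# use wbits of 16 + MAX_WBITS, while DeflateDecoder falls back to a negative
# wbits for servers that send raw deflate. A self-contained check with the
# standard zlib module:
import zlib

payload = b'hello deflate'

# gzip framing, the case _get_decoder('gzip') handles
gz = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
gzip_body = gz.compress(payload) + gz.flush()
assert zlib.decompressobj(16 + zlib.MAX_WBITS).decompress(gzip_body) == payload

# raw deflate, the case DeflateDecoder retries with -MAX_WBITS
raw = zlib.compressobj(9, zlib.DEFLATED, -zlib.MAX_WBITS)
raw_body = raw.compress(payload) + raw.flush()
assert zlib.decompressobj(-zlib.MAX_WBITS).decompress(raw_body) == payload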
- """ - - CONTENT_DECODERS = ['gzip', 'deflate'] - REDIRECT_STATUSES = [301, 302, 303, 307, 308] - - def __init__(self, body='', headers=None, status=0, version=0, reason=None, - strict=0, preload_content=True, decode_content=True, - original_response=None, pool=None, connection=None): - - self.headers = HTTPHeaderDict() - if headers: - self.headers.update(headers) - self.status = status - self.version = version - self.reason = reason - self.strict = strict - self.decode_content = decode_content - - self._decoder = None - self._body = body if body and isinstance(body, basestring) else None - self._fp = None - self._original_response = original_response - self._fp_bytes_read = 0 - - self._pool = pool - self._connection = connection - - if hasattr(body, 'read'): - self._fp = body - - if preload_content and not self._body: - self._body = self.read(decode_content=decode_content) - - def get_redirect_location(self): - """ - Should we redirect and where to? - - :returns: Truthy redirect location string if we got a redirect status - code and valid location. ``None`` if redirect status and no - location. ``False`` if not a redirect status code. - """ - if self.status in self.REDIRECT_STATUSES: - return self.headers.get('location') - - return False - - def release_conn(self): - if not self._pool or not self._connection: - return - - self._pool._put_conn(self._connection) - self._connection = None - - @property - def data(self): - # For backwords-compat with earlier urllib3 0.4 and earlier. - if self._body: - return self._body - - if self._fp: - return self.read(cache_content=True) - - def tell(self): - """ - Obtain the number of bytes pulled over the wire so far. May differ from - the amount of content returned by :meth:``HTTPResponse.read`` if bytes - are encoded on the wire (e.g, compressed). - """ - return self._fp_bytes_read - - def read(self, amt=None, decode_content=None, cache_content=False): - """ - Similar to :meth:`httplib.HTTPResponse.read`, but with two additional - parameters: ``decode_content`` and ``cache_content``. - - :param amt: - How much of the content to read. If specified, caching is skipped - because it doesn't make sense to cache partial content as the full - response. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - - :param cache_content: - If True, will save the returned data such that the same result is - returned despite of the state of the underlying file object. This - is useful if you want the ``.data`` property to continue working - after having ``.read()`` the file object. (Overridden if ``amt`` is - set.) - """ - # Note: content-encoding value should be case-insensitive, per RFC 2616 - # Section 3.5 - content_encoding = self.headers.get('content-encoding', '').lower() - if self._decoder is None: - if content_encoding in self.CONTENT_DECODERS: - self._decoder = _get_decoder(content_encoding) - if decode_content is None: - decode_content = self.decode_content - - if self._fp is None: - return - - flush_decoder = False - - try: - if amt is None: - # cStringIO doesn't like amt=None - data = self._fp.read() - flush_decoder = True - else: - cache_content = False - data = self._fp.read(amt) - if amt != 0 and not data: # Platform-specific: Buggy versions of Python. - # Close the connection when no data is returned - # - # This is redundant to what httplib/http.client _should_ - # already do. 
However, versions of python released before - # December 15, 2012 (http://bugs.python.org/issue16298) do not - # properly close the connection in all cases. There is no harm - # in redundantly calling close. - self._fp.close() - flush_decoder = True - - self._fp_bytes_read += len(data) - - try: - if decode_content and self._decoder: - data = self._decoder.decompress(data) - except (IOError, zlib.error) as e: - raise DecodeError( - "Received response with content-encoding: %s, but " - "failed to decode it." % content_encoding, - e) - - if flush_decoder and decode_content and self._decoder: - buf = self._decoder.decompress(binary_type()) - data += buf + self._decoder.flush() - - if cache_content: - self._body = data - - return data - - finally: - if self._original_response and self._original_response.isclosed(): - self.release_conn() - - def stream(self, amt=2**16, decode_content=None): - """ - A generator wrapper for the read() method. A call will block until - ``amt`` bytes have been read from the connection or until the - connection is closed. - - :param amt: - How much of the content to read. The generator will return up to - much data per iteration, but may return less. This is particularly - likely when using compressed data. However, the empty string will - never be returned. - - :param decode_content: - If True, will attempt to decode the body based on the - 'content-encoding' header. - """ - while not is_fp_closed(self._fp): - data = self.read(amt=amt, decode_content=decode_content) - - if data: - yield data - - - @classmethod - def from_httplib(ResponseCls, r, **response_kw): - """ - Given an :class:`httplib.HTTPResponse` instance ``r``, return a - corresponding :class:`urllib3.response.HTTPResponse` object. - - Remaining parameters are passed to the HTTPResponse constructor, along - with ``original_response=r``. 
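# A short sketch of the stream()/read()/tell() interface defined above,
# driving HTTPResponse from an in-memory file object instead of a live
# socket; import path as vendored in the tree shown in this diff.
import io
from pip._vendor.requests.packages.urllib3.response import HTTPResponse

resp = HTTPResponse(body=io.BytesIO(b'0123456789abcdef'), preload_content=False)
chunks = list(resp.stream(amt=6))
assert chunks == [b'012345', b'6789ab', b'cdef']   # the final chunk may be short
assert resp.tell() == 16                           # bytes pulled so far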
- """ - - headers = HTTPHeaderDict() - for k, v in r.getheaders(): - headers.add(k, v) - - # HTTPResponse objects in Python 3 don't have a .strict attribute - strict = getattr(r, 'strict', 0) - return ResponseCls(body=r, - headers=headers, - status=r.status, - version=r.version, - reason=r.reason, - strict=strict, - original_response=r, - **response_kw) - - # Backwards-compatibility methods for httplib.HTTPResponse - def getheaders(self): - return self.headers - - def getheader(self, name, default=None): - return self.headers.get(name, default) - - # Overrides from io.IOBase - def close(self): - if not self.closed: - self._fp.close() - - @property - def closed(self): - if self._fp is None: - return True - elif hasattr(self._fp, 'closed'): - return self._fp.closed - elif hasattr(self._fp, 'isclosed'): # Python 2 - return self._fp.isclosed() - else: - return True - - def fileno(self): - if self._fp is None: - raise IOError("HTTPResponse has no file to get a fileno from") - elif hasattr(self._fp, "fileno"): - return self._fp.fileno() - else: - raise IOError("The file-like object this HTTPResponse is wrapped " - "around has no file descriptor") - - def flush(self): - if self._fp is not None and hasattr(self._fp, 'flush'): - return self._fp.flush() - - def readable(self): - return True diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py deleted file mode 100644 index 8deeab5..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py +++ /dev/null @@ -1,45 +0,0 @@ -from socket import error as SocketError -try: - from select import poll, POLLIN -except ImportError: # `poll` doesn't exist on OSX and other platforms - poll = False - try: - from select import select - except ImportError: # `select` doesn't exist on AppEngine. - select = False - -def is_connection_dropped(conn): # Platform-specific - """ - Returns True if the connection is dropped and should be closed. - - :param conn: - :class:`httplib.HTTPConnection` object. - - Note: For platforms like AppEngine, this will always return ``False`` to - let the platform handle connection recycling transparently for us. - """ - sock = getattr(conn, 'sock', False) - if sock is False: # Platform-specific: AppEngine - return False - if sock is None: # Connection already closed (such as by httplib). - return False - - if not poll: - if not select: # Platform-specific: AppEngine - return False - - try: - return select([sock], [], [], 0.0)[0] - except SocketError: - return True - - # This version is better on platforms that support it. - p = poll() - p.register(sock, POLLIN) - for (fno, ev) in p.poll(0.0): - if fno == sock.fileno(): - # Either data is buffered (bad), or the connection is dropped. - return True - - - diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py deleted file mode 100644 index d0325bc..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py +++ /dev/null @@ -1,13 +0,0 @@ -def is_fp_closed(obj): - """ - Checks whether a given file-like object is closed. - - :param obj: - The file-like object to check. - """ - if hasattr(obj, 'fp'): - # Object is a container for another file-like object that gets released - # on exhaustion (e.g. 
HTTPResponse) - return obj.fp is None - - return obj.closed diff --git a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py b/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py deleted file mode 100644 index dee4b87..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py +++ /dev/null @@ -1,133 +0,0 @@ -from binascii import hexlify, unhexlify -from hashlib import md5, sha1 - -from ..exceptions import SSLError - - -try: # Test for SSL features - SSLContext = None - HAS_SNI = False - - import ssl - from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 - from ssl import SSLContext # Modern SSL? - from ssl import HAS_SNI # Has SNI? -except ImportError: - pass - - -def assert_fingerprint(cert, fingerprint): - """ - Checks if given fingerprint matches the supplied certificate. - - :param cert: - Certificate as bytes object. - :param fingerprint: - Fingerprint as string of hexdigits, can be interspersed by colons. - """ - - # Maps the length of a digest to a possible hash function producing - # this digest. - hashfunc_map = { - 16: md5, - 20: sha1 - } - - fingerprint = fingerprint.replace(':', '').lower() - - digest_length, rest = divmod(len(fingerprint), 2) - - if rest or digest_length not in hashfunc_map: - raise SSLError('Fingerprint is of invalid length.') - - # We need encode() here for py32; works on py2 and p33. - fingerprint_bytes = unhexlify(fingerprint.encode()) - - hashfunc = hashfunc_map[digest_length] - - cert_digest = hashfunc(cert).digest() - - if not cert_digest == fingerprint_bytes: - raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' - .format(hexlify(fingerprint_bytes), - hexlify(cert_digest))) - - -def resolve_cert_reqs(candidate): - """ - Resolves the argument to a numeric constant, which can be passed to - the wrap_socket function/method from the ssl module. - Defaults to :data:`ssl.CERT_NONE`. - If given a string it is assumed to be the name of the constant in the - :mod:`ssl` module or its abbrevation. - (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. - If it's neither `None` nor a string we assume it is already the numeric - constant which can directly be passed to wrap_socket. 
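# assert_fingerprint above compares a pinned MD5 or SHA1 hex digest (colons
# optional) against the digest of the DER certificate bytes. A quick
# self-check using stand-in bytes rather than a real certificate; import
# paths as vendored in the tree shown in this diff.
from hashlib import sha1
from pip._vendor.requests.packages.urllib3.exceptions import SSLError
from pip._vendor.requests.packages.urllib3.util.ssl_ import assert_fingerprint

cert = b'not-really-a-DER-certificate'
pin = ':'.join('{0:02x}'.format(byte) for byte in sha1(cert).digest())

assert_fingerprint(cert, pin)              # matching pin: no exception

try:
    assert_fingerprint(cert, '00' * 20)    # wrong SHA1-length pin
except SSLError:
    pass                                   # a mismatch is reported as SSLError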
- """ - if candidate is None: - return CERT_NONE - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'CERT_' + candidate) - return res - - return candidate - - -def resolve_ssl_version(candidate): - """ - like resolve_cert_reqs - """ - if candidate is None: - return PROTOCOL_SSLv23 - - if isinstance(candidate, str): - res = getattr(ssl, candidate, None) - if res is None: - res = getattr(ssl, 'PROTOCOL_' + candidate) - return res - - return candidate - - -if SSLContext is not None: # Python 3.2+ - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - """ - All arguments except `server_hostname` have the same meaning as for - :func:`ssl.wrap_socket` - - :param server_hostname: - Hostname of the expected certificate - """ - context = SSLContext(ssl_version) - context.verify_mode = cert_reqs - - # Disable TLS compression to migitate CRIME attack (issue #309) - OP_NO_COMPRESSION = 0x20000 - context.options |= OP_NO_COMPRESSION - - if ca_certs: - try: - context.load_verify_locations(ca_certs) - # Py32 raises IOError - # Py33 raises FileNotFoundError - except Exception as e: # Reraise as SSLError - raise SSLError(e) - if certfile: - # FIXME: This block needs a test. - context.load_cert_chain(certfile, keyfile) - if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI - return context.wrap_socket(sock, server_hostname=server_hostname) - return context.wrap_socket(sock) - -else: # Python 3.1 and earlier - def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, - ssl_version=None): - return wrap_socket(sock, keyfile=keyfile, certfile=certfile, - ca_certs=ca_certs, cert_reqs=cert_reqs, - ssl_version=ssl_version) diff --git a/Darwin/lib/python3.4/site-packages/pip/backwardcompat/__init__.py b/Darwin/lib/python3.4/site-packages/pip/backwardcompat/__init__.py deleted file mode 100644 index c327bbe..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/backwardcompat/__init__.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Stuff that differs in different Python versions and platform -distributions.""" - -import os -import imp -import sys -import site - -__all__ = ['WindowsError'] - -uses_pycache = hasattr(imp, 'cache_from_source') - -class NeverUsedException(Exception): - """this exception should never be raised""" - -try: - WindowsError = WindowsError -except NameError: - WindowsError = NeverUsedException - -try: - #new in Python 3.3 - PermissionError = PermissionError -except NameError: - PermissionError = NeverUsedException - -console_encoding = sys.__stdout__.encoding - -if sys.version_info >= (3,): - from io import StringIO, BytesIO - from functools import reduce - from urllib.error import URLError, HTTPError - from queue import Queue, Empty - from urllib.request import url2pathname, urlretrieve, pathname2url - from email import message as emailmessage - import urllib.parse as urllib - import urllib.request as urllib2 - import configparser as ConfigParser - import xmlrpc.client as xmlrpclib - import urllib.parse as urlparse - import http.client as httplib - - def cmp(a, b): - return (a > b) - (a < b) - - def b(s): - return s.encode('utf-8') - - def u(s): - return s.decode('utf-8') - - def console_to_str(s): - try: - return s.decode(console_encoding) - except UnicodeDecodeError: - return s.decode('utf_8') - - def get_http_message_param(http_message, param, default_value): - return http_message.get_param(param, 
default_value) - - bytes = bytes - string_types = (str,) - raw_input = input -else: - from cStringIO import StringIO - from urllib2 import URLError, HTTPError - from Queue import Queue, Empty - from urllib import url2pathname, urlretrieve, pathname2url - from email import Message as emailmessage - import urllib - import urllib2 - import urlparse - import ConfigParser - import xmlrpclib - import httplib - - def b(s): - return s - - def u(s): - return s - - def console_to_str(s): - return s - - def get_http_message_param(http_message, param, default_value): - result = http_message.getparam(param) - return result or default_value - - bytes = str - string_types = (basestring,) - reduce = reduce - cmp = cmp - raw_input = raw_input - BytesIO = StringIO - - -from distutils.sysconfig import get_python_lib, get_python_version - -#site.USER_SITE was created in py2.6 -user_site = getattr(site, 'USER_SITE', None) - - -def product(*args, **kwds): - # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy - # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111 - pools = list(map(tuple, args)) * kwds.get('repeat', 1) - result = [[]] - for pool in pools: - result = [x + [y] for x in result for y in pool] - for prod in result: - yield tuple(prod) - - -def get_path_uid(path): - """ - Return path's uid. - - Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003 - - Placed this function in backwardcompat due to differences on AIX and Jython, - that should eventually go away. - - :raises OSError: When path is a symlink or can't be read. - """ - if hasattr(os, 'O_NOFOLLOW'): - fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) - file_uid = os.fstat(fd).st_uid - os.close(fd) - else: # AIX and Jython - # WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW - if not os.path.islink(path): - # older versions of Jython don't have `os.fstat` - file_uid = os.stat(path).st_uid - else: - # raise OSError for parity with os.O_NOFOLLOW above - raise OSError("%s is a symlink; Will not return uid for symlinks" % path) - return file_uid diff --git a/Darwin/lib/python3.4/site-packages/pip/basecommand.py b/Darwin/lib/python3.4/site-packages/pip/basecommand.py deleted file mode 100644 index e467019..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/basecommand.py +++ /dev/null @@ -1,201 +0,0 @@ -"""Base Command class, and related routines""" - -import os -import sys -import tempfile -import traceback -import time -import optparse - -from pip import cmdoptions -from pip.locations import running_under_virtualenv -from pip.log import logger -from pip.download import PipSession -from pip.exceptions import (BadCommand, InstallationError, UninstallationError, - CommandError, PreviousBuildDirError) -from pip.backwardcompat import StringIO -from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter -from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, - PREVIOUS_BUILD_DIR_ERROR) -from pip.util import get_prog - - -__all__ = ['Command'] - - -class Command(object): - name = None - usage = None - hidden = False - - def __init__(self): - parser_kw = { - 'usage': self.usage, - 'prog': '%s %s' % (get_prog(), self.name), - 'formatter': UpdatingDefaultsHelpFormatter(), - 'add_help_option': False, - 'name': self.name, - 'description': self.__doc__, - } - - self.parser = ConfigOptionParser(**parser_kw) - - # Commands should add options to this option group - optgroup_name = '%s Options' % self.name.capitalize() - self.cmd_opts = 
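# The product() helper above mirrors itertools.product (it exists only for
# very old interpreters); a quick equivalence check, importing it from the
# package layout shown in this tree:
import itertools
from pip.backwardcompat import product

assert list(product('ABCD', 'xy')) == list(itertools.product('ABCD', 'xy'))
assert list(product(range(2), repeat=3)) == list(itertools.product(range(2), repeat=3))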
optparse.OptionGroup(self.parser, optgroup_name) - - # Add the general options - gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser) - self.parser.add_option_group(gen_opts) - - def _build_session(self, options): - session = PipSession() - - # Handle custom ca-bundles from the user - if options.cert: - session.verify = options.cert - - # Handle timeouts - if options.timeout: - session.timeout = options.timeout - - # Handle configured proxies - if options.proxy: - session.proxies = { - "http": options.proxy, - "https": options.proxy, - } - - # Determine if we can prompt the user for authentication or not - session.auth.prompting = not options.no_input - - return session - - def setup_logging(self): - pass - - def parse_args(self, args): - # factored out for testability - return self.parser.parse_args(args) - - def main(self, args): - options, args = self.parse_args(args) - - level = 1 # Notify - level += options.verbose - level -= options.quiet - level = logger.level_for_integer(4 - level) - complete_log = [] - logger.add_consumers( - (level, sys.stdout), - (logger.DEBUG, complete_log.append), - ) - if options.log_explicit_levels: - logger.explicit_levels = True - - self.setup_logging() - - #TODO: try to get these passing down from the command? - # without resorting to os.environ to hold these. - - if options.no_input: - os.environ['PIP_NO_INPUT'] = '1' - - if options.exists_action: - os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action) - - if options.require_venv: - # If a venv is required check if it can really be found - if not running_under_virtualenv(): - logger.fatal('Could not find an activated virtualenv (required).') - sys.exit(VIRTUALENV_NOT_FOUND) - - if options.log: - log_fp = open_logfile(options.log, 'a') - logger.add_consumers((logger.DEBUG, log_fp)) - else: - log_fp = None - - exit = SUCCESS - store_log = False - try: - status = self.run(options, args) - # FIXME: all commands should return an exit status - # and when it is done, isinstance is not needed anymore - if isinstance(status, int): - exit = status - except PreviousBuildDirError: - e = sys.exc_info()[1] - logger.fatal(str(e)) - logger.info('Exception information:\n%s' % format_exc()) - store_log = True - exit = PREVIOUS_BUILD_DIR_ERROR - except (InstallationError, UninstallationError): - e = sys.exc_info()[1] - logger.fatal(str(e)) - logger.info('Exception information:\n%s' % format_exc()) - store_log = True - exit = ERROR - except BadCommand: - e = sys.exc_info()[1] - logger.fatal(str(e)) - logger.info('Exception information:\n%s' % format_exc()) - store_log = True - exit = ERROR - except CommandError: - e = sys.exc_info()[1] - logger.fatal('ERROR: %s' % e) - logger.info('Exception information:\n%s' % format_exc()) - exit = ERROR - except KeyboardInterrupt: - logger.fatal('Operation cancelled by user') - logger.info('Exception information:\n%s' % format_exc()) - store_log = True - exit = ERROR - except: - logger.fatal('Exception:\n%s' % format_exc()) - store_log = True - exit = UNKNOWN_ERROR - if store_log: - log_file_fn = options.log_file - text = '\n'.join(complete_log) - try: - log_file_fp = open_logfile(log_file_fn, 'w') - except IOError: - temp = tempfile.NamedTemporaryFile(delete=False) - log_file_fn = temp.name - log_file_fp = open_logfile(log_file_fn, 'w') - logger.fatal('Storing debug log for failure in %s' % log_file_fn) - log_file_fp.write(text) - log_file_fp.close() - if log_fp is not None: - log_fp.close() - return exit - - -def format_exc(exc_info=None): - if 
exc_info is None: - exc_info = sys.exc_info() - out = StringIO() - traceback.print_exception(*exc_info, **dict(file=out)) - return out.getvalue() - - -def open_logfile(filename, mode='a'): - """Open the named log file in append mode. - - If the file already exists, a separator will also be printed to - the file to separate past activity from current activity. - """ - filename = os.path.expanduser(filename) - filename = os.path.abspath(filename) - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - exists = os.path.exists(filename) - - log_fp = open(filename, mode) - if exists: - log_fp.write('%s\n' % ('-' * 60)) - log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c'))) - return log_fp diff --git a/Darwin/lib/python3.4/site-packages/pip/cmdoptions.py b/Darwin/lib/python3.4/site-packages/pip/cmdoptions.py deleted file mode 100644 index 8ed3d91..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/cmdoptions.py +++ /dev/null @@ -1,371 +0,0 @@ -""" -shared options and groups - -The principle here is to define options once, but *not* instantiate them globally. -One reason being that options with action='append' can carry state between parses. -pip parse's general options twice internally, and shouldn't pass on state. -To be consistent, all options will follow this design. - -""" -import copy -from optparse import OptionGroup, SUPPRESS_HELP, Option -from pip.locations import build_prefix, default_log_file - - -def make_option_group(group, parser): - """ - Return an OptionGroup object - group -- assumed to be dict with 'name' and 'options' keys - parser -- an optparse Parser - """ - option_group = OptionGroup(parser, group['name']) - for option in group['options']: - option_group.add_option(option.make()) - return option_group - -class OptionMaker(object): - """Class that stores the args/kwargs that would be used to make an Option, - for making them later, and uses deepcopy's to reset state.""" - def __init__(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - def make(self): - args_copy = copy.deepcopy(self.args) - kwargs_copy = copy.deepcopy(self.kwargs) - return Option(*args_copy, **kwargs_copy) - -########### -# options # -########### - -help_ = OptionMaker( - '-h', '--help', - dest='help', - action='help', - help='Show help.') - -require_virtualenv = OptionMaker( - # Run only if inside a virtualenv, bail if not. - '--require-virtualenv', '--require-venv', - dest='require_venv', - action='store_true', - default=False, - help=SUPPRESS_HELP) - -verbose = OptionMaker( - '-v', '--verbose', - dest='verbose', - action='count', - default=0, - help='Give more output. Option is additive, and can be used up to 3 times.') - -version = OptionMaker( - '-V', '--version', - dest='version', - action='store_true', - help='Show version and exit.') - -quiet = OptionMaker( - '-q', '--quiet', - dest='quiet', - action='count', - default=0, - help='Give less output.') - -log = OptionMaker( - '--log', - dest='log', - metavar='path', - help='Path to a verbose appending log. This log is inactive by default.') - -log_explicit_levels = OptionMaker( - # Writes the log levels explicitely to the log' - '--log-explicit-levels', - dest='log_explicit_levels', - action='store_true', - default=False, - help=SUPPRESS_HELP) - -log_file = OptionMaker( - # The default log file - '--log-file', '--local-log', - dest='log_file', - metavar='path', - default=default_log_file, - help='Path to a verbose non-appending log, that only logs failures. 
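# The OptionMaker indirection above exists because a reused optparse Option
# with action='append' shares one default list across parsers, so appended
# values leak between parses; make() rebuilds the Option from deep-copied
# arguments each time. A short sketch using the class as defined above (the
# URL value is only an illustration):
from optparse import OptionParser
from pip.cmdoptions import OptionMaker

find_links = OptionMaker('-f', '--find-links', dest='find_links',
                         action='append', default=[], metavar='url')

first = OptionParser(add_help_option=False)
first.add_option(find_links.make())
opts, _ = first.parse_args(['-f', 'http://example.com/wheels'])
assert opts.find_links == ['http://example.com/wheels']

second = OptionParser(add_help_option=False)
second.add_option(find_links.make())      # fresh Option, fresh default list
opts2, _ = second.parse_args([])
assert opts2.find_links == []             # nothing leaked from the first parse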
This log is active by default at %default.') - -no_input = OptionMaker( - # Don't ask for input - '--no-input', - dest='no_input', - action='store_true', - default=False, - help=SUPPRESS_HELP) - -proxy = OptionMaker( - '--proxy', - dest='proxy', - type='str', - default='', - help="Specify a proxy in the form [user:passwd@]proxy.server:port.") - -timeout = OptionMaker( - '--timeout', '--default-timeout', - metavar='sec', - dest='timeout', - type='float', - default=15, - help='Set the socket timeout (default %default seconds).') - -default_vcs = OptionMaker( - # The default version control system for editables, e.g. 'svn' - '--default-vcs', - dest='default_vcs', - type='str', - default='', - help=SUPPRESS_HELP) - -skip_requirements_regex = OptionMaker( - # A regex to be used to skip requirements - '--skip-requirements-regex', - dest='skip_requirements_regex', - type='str', - default='', - help=SUPPRESS_HELP) - -exists_action = OptionMaker( - # Option when path already exist - '--exists-action', - dest='exists_action', - type='choice', - choices=['s', 'i', 'w', 'b'], - default=[], - action='append', - metavar='action', - help="Default action when a path already exists: " - "(s)witch, (i)gnore, (w)ipe, (b)ackup.") - -cert = OptionMaker( - '--cert', - dest='cert', - type='str', - default='', - metavar='path', - help = "Path to alternate CA bundle.") - -index_url = OptionMaker( - '-i', '--index-url', '--pypi-url', - dest='index_url', - metavar='URL', - default='https://pypi.python.org/simple/', - help='Base URL of Python Package Index (default %default).') - -extra_index_url = OptionMaker( - '--extra-index-url', - dest='extra_index_urls', - metavar='URL', - action='append', - default=[], - help='Extra URLs of package indexes to use in addition to --index-url.') - -no_index = OptionMaker( - '--no-index', - dest='no_index', - action='store_true', - default=False, - help='Ignore package index (only looking at --find-links URLs instead).') - -find_links = OptionMaker( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='url', - help="If a url or path to an html file, then parse for links to archives. 
If a local path or file:// url that's a directory, then look for archives in the directory listing.") - -# TODO: Remove after 1.6 -use_mirrors = OptionMaker( - '-M', '--use-mirrors', - dest='use_mirrors', - action='store_true', - default=False, - help=SUPPRESS_HELP) - -# TODO: Remove after 1.6 -mirrors = OptionMaker( - '--mirrors', - dest='mirrors', - metavar='URL', - action='append', - default=[], - help=SUPPRESS_HELP) - -allow_external = OptionMaker( - "--allow-external", - dest="allow_external", - action="append", - default=[], - metavar="PACKAGE", - help="Allow the installation of externally hosted files", -) - -allow_all_external = OptionMaker( - "--allow-all-external", - dest="allow_all_external", - action="store_true", - default=False, - help="Allow the installation of all externally hosted files", -) - -# Remove after 1.7 -no_allow_external = OptionMaker( - "--no-allow-external", - dest="allow_all_external", - action="store_false", - default=False, - help=SUPPRESS_HELP, -) - -# Remove --allow-insecure after 1.7 -allow_unsafe = OptionMaker( - "--allow-unverified", "--allow-insecure", - dest="allow_unverified", - action="append", - default=[], - metavar="PACKAGE", - help="Allow the installation of insecure and unverifiable files", -) - -# Remove after 1.7 -no_allow_unsafe = OptionMaker( - "--no-allow-insecure", - dest="allow_all_insecure", - action="store_false", - default=False, - help=SUPPRESS_HELP -) - -# Remove after 1.5 -process_dependency_links = OptionMaker( - "--process-dependency-links", - dest="process_dependency_links", - action="store_true", - default=False, - help="Enable the processing of dependency links.", -) - -requirements = OptionMaker( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='file', - help='Install from the given requirements file. ' - 'This option can be used multiple times.') - -use_wheel = OptionMaker( - '--use-wheel', - dest='use_wheel', - action='store_true', - help=SUPPRESS_HELP, -) - -no_use_wheel = OptionMaker( - '--no-use-wheel', - dest='use_wheel', - action='store_false', - default=True, - help=('Do not Find and prefer wheel archives when searching indexes and ' - 'find-links locations.'), -) - -download_cache = OptionMaker( - '--download-cache', - dest='download_cache', - metavar='dir', - default=None, - help='Cache downloaded packages in .') - -no_deps = OptionMaker( - '--no-deps', '--no-dependencies', - dest='ignore_dependencies', - action='store_true', - default=False, - help="Don't install package dependencies.") - -build_dir = OptionMaker( - '-b', '--build', '--build-dir', '--build-directory', - dest='build_dir', - metavar='dir', - default=build_prefix, - help='Directory to unpack packages into and build in. ' - 'The default in a virtualenv is "/build". ' - 'The default for global installs is "/pip_build_".') - -install_options = OptionMaker( - '--install-option', - dest='install_options', - action='append', - metavar='options', - help="Extra arguments to be supplied to the setup.py install " - "command (use like --install-option=\"--install-scripts=/usr/local/bin\"). " - "Use multiple --install-option options to pass multiple options to setup.py install. 
" - "If you are using an option with a directory path, be sure to use absolute path.") - -global_options = OptionMaker( - '--global-option', - dest='global_options', - action='append', - metavar='options', - help="Extra global options to be supplied to the setup.py " - "call before the install command.") - -no_clean = OptionMaker( - '--no-clean', - action='store_true', - default=False, - help="Don't clean up build directories.") - - -########## -# groups # -########## - -general_group = { - 'name': 'General Options', - 'options': [ - help_, - require_virtualenv, - verbose, - version, - quiet, - log_file, - log, - log_explicit_levels, - no_input, - proxy, - timeout, - default_vcs, - skip_requirements_regex, - exists_action, - cert, - ] - } - -index_group = { - 'name': 'Package Index Options', - 'options': [ - index_url, - extra_index_url, - no_index, - find_links, - use_mirrors, - mirrors, - allow_external, - allow_all_external, - no_allow_external, - allow_unsafe, - no_allow_unsafe, - process_dependency_links, - ] - } diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/bundle.py b/Darwin/lib/python3.4/site-packages/pip/commands/bundle.py deleted file mode 100644 index 69967fe..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/bundle.py +++ /dev/null @@ -1,42 +0,0 @@ -import textwrap -from pip.locations import build_prefix, src_prefix -from pip.util import display_path, backup_dir -from pip.log import logger -from pip.exceptions import InstallationError -from pip.commands.install import InstallCommand - - -class BundleCommand(InstallCommand): - """Create pybundles (archives containing multiple packages).""" - name = 'bundle' - usage = """ - %prog [options] .pybundle ...""" - summary = 'DEPRECATED. Create pybundles.' - bundle = True - - def __init__(self, *args, **kw): - super(BundleCommand, self).__init__(*args, **kw) - # bundle uses different default source and build dirs - build_opt = self.parser.get_option("--build") - build_opt.default = backup_dir(build_prefix, '-bundle') - src_opt = self.parser.get_option("--src") - src_opt.default = backup_dir(src_prefix, '-bundle') - self.parser.set_defaults(**{ - src_opt.dest: src_opt.default, - build_opt.dest: build_opt.default, - }) - - def run(self, options, args): - - logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. 
" - "See https://github.com/pypa/pip/pull/1046") - - if not args: - raise InstallationError('You must give a bundle filename') - # We have to get everything when creating a bundle: - options.ignore_installed = True - logger.notify('Putting temporary build files in %s and source/develop files in %s' - % (display_path(options.build_dir), display_path(options.src_dir))) - self.bundle_filename = args.pop(0) - requirement_set = super(BundleCommand, self).run(options, args) - return requirement_set diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/freeze.py b/Darwin/lib/python3.4/site-packages/pip/commands/freeze.py deleted file mode 100644 index 930de62..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/freeze.py +++ /dev/null @@ -1,114 +0,0 @@ -import re -import sys -import pip - -from pip.req import InstallRequirement -from pip.log import logger -from pip.basecommand import Command -from pip.util import get_installed_distributions -from pip._vendor import pkg_resources - - -class FreezeCommand(Command): - """Output installed packages in requirements format.""" - name = 'freeze' - usage = """ - %prog [options]""" - summary = 'Output installed packages in requirements format.' - - def __init__(self, *args, **kw): - super(FreezeCommand, self).__init__(*args, **kw) - - self.cmd_opts.add_option( - '-r', '--requirement', - dest='requirement', - action='store', - default=None, - metavar='file', - help="Use the order in the given requirements file and it's comments when generating output.") - self.cmd_opts.add_option( - '-f', '--find-links', - dest='find_links', - action='append', - default=[], - metavar='URL', - help='URL for finding packages, which will be added to the output.') - self.cmd_opts.add_option( - '-l', '--local', - dest='local', - action='store_true', - default=False, - help='If in a virtualenv that has global access, do not output globally-installed packages.') - - self.parser.insert_option_group(0, self.cmd_opts) - - def setup_logging(self): - logger.move_stdout_to_stderr() - - def run(self, options, args): - requirement = options.requirement - find_links = options.find_links or [] - local_only = options.local - ## FIXME: Obviously this should be settable: - find_tags = False - skip_match = None - - skip_regex = options.skip_requirements_regex - if skip_regex: - skip_match = re.compile(skip_regex) - - dependency_links = [] - - f = sys.stdout - - for dist in pkg_resources.working_set: - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend(dist.get_metadata_lines('dependency_links.txt')) - for link in find_links: - if '#egg=' in link: - dependency_links.append(link) - for link in find_links: - f.write('-f %s\n' % link) - installations = {} - for dist in get_installed_distributions(local_only=local_only): - req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags) - installations[req.name] = req - if requirement: - req_f = open(requirement) - for line in req_f: - if not line.strip() or line.strip().startswith('#'): - f.write(line) - continue - if skip_match and skip_match.search(line): - f.write(line) - continue - elif line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip().lstrip('=') - line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs) - elif (line.startswith('-r') or line.startswith('--requirement') - or line.startswith('-Z') or line.startswith('--always-unzip') - or 
line.startswith('-f') or line.startswith('-i') - or line.startswith('--extra-index-url') - or line.startswith('--find-links') - or line.startswith('--index-url')): - f.write(line) - continue - else: - line_req = InstallRequirement.from_line(line) - if not line_req.name: - logger.notify("Skipping line because it's not clear what it would install: %s" - % line.strip()) - logger.notify(" (add #egg=PackageName to the URL to avoid this warning)") - continue - if line_req.name not in installations: - logger.warn("Requirement file contains %s, but that package is not installed" - % line.strip()) - continue - f.write(str(installations[line_req.name])) - del installations[line_req.name] - f.write('## The following requirements were added by pip --freeze:\n') - for installation in sorted(installations.values(), key=lambda x: x.name): - f.write(str(installation)) diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/install.py b/Darwin/lib/python3.4/site-packages/pip/commands/install.py deleted file mode 100644 index cbf22a0..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/install.py +++ /dev/null @@ -1,314 +0,0 @@ -import os -import sys -import tempfile -import shutil -from pip.req import InstallRequirement, RequirementSet, parse_requirements -from pip.log import logger -from pip.locations import (src_prefix, virtualenv_no_global, distutils_scheme, - build_prefix) -from pip.basecommand import Command -from pip.index import PackageFinder -from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError -from pip import cmdoptions - - -class InstallCommand(Command): - """ - Install packages from: - - - PyPI (and other indexes) using requirement specifiers. - - VCS project urls. - - Local project directories. - - Local or remote source archives. - - pip also supports installing from "requirements files", which provide - an easy way to specify a whole environment to be installed. - """ - name = 'install' - - usage = """ - %prog [options] ... - %prog [options] -r ... - %prog [options] [-e] ... - %prog [options] [-e] ... - %prog [options] ...""" - - summary = 'Install packages.' - bundle = False - - def __init__(self, *args, **kw): - super(InstallCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '-e', '--editable', - dest='editables', - action='append', - default=[], - metavar='path/url', - help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.') - - cmd_opts.add_option(cmdoptions.requirements.make()) - cmd_opts.add_option(cmdoptions.build_dir.make()) - - cmd_opts.add_option( - '-t', '--target', - dest='target_dir', - metavar='dir', - default=None, - help='Install packages into .') - - cmd_opts.add_option( - '-d', '--download', '--download-dir', '--download-directory', - dest='download_dir', - metavar='dir', - default=None, - help="Download packages into instead of installing them, regardless of what's already installed.") - - cmd_opts.add_option(cmdoptions.download_cache.make()) - - cmd_opts.add_option( - '--src', '--source', '--source-dir', '--source-directory', - dest='src_dir', - metavar='dir', - default=src_prefix, - help='Directory to check out editable projects into. ' - 'The default in a virtualenv is "/src". ' - 'The default for global installs is "/src".') - - cmd_opts.add_option( - '-U', '--upgrade', - dest='upgrade', - action='store_true', - help='Upgrade all packages to the newest available version. 
' - 'This process is recursive regardless of whether a dependency is already satisfied.') - - cmd_opts.add_option( - '--force-reinstall', - dest='force_reinstall', - action='store_true', - help='When upgrading, reinstall all packages even if they are ' - 'already up-to-date.') - - cmd_opts.add_option( - '-I', '--ignore-installed', - dest='ignore_installed', - action='store_true', - help='Ignore the installed packages (reinstalling instead).') - - cmd_opts.add_option(cmdoptions.no_deps.make()) - - cmd_opts.add_option( - '--no-install', - dest='no_install', - action='store_true', - help="DEPRECATED. Download and unpack all packages, but don't actually install them.") - - cmd_opts.add_option( - '--no-download', - dest='no_download', - action="store_true", - help="DEPRECATED. Don't download any packages, just install the ones already downloaded " - "(completes an install run with --no-install).") - - cmd_opts.add_option(cmdoptions.install_options.make()) - cmd_opts.add_option(cmdoptions.global_options.make()) - - cmd_opts.add_option( - '--user', - dest='use_user_site', - action='store_true', - help='Install using the user scheme.') - - cmd_opts.add_option( - '--egg', - dest='as_egg', - action='store_true', - help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)") - - cmd_opts.add_option( - '--root', - dest='root_path', - metavar='dir', - default=None, - help="Install everything relative to this alternate root directory.") - - cmd_opts.add_option( - "--compile", - action="store_true", - dest="compile", - default=True, - help="Compile py files to pyc", - ) - - cmd_opts.add_option( - "--no-compile", - action="store_false", - dest="compile", - help="Do not compile py files to pyc", - ) - - cmd_opts.add_option(cmdoptions.use_wheel.make()) - cmd_opts.add_option(cmdoptions.no_use_wheel.make()) - - cmd_opts.add_option( - '--pre', - action='store_true', - default=False, - help="Include pre-release and development versions. By default, pip only finds stable versions.") - - cmd_opts.add_option(cmdoptions.no_clean.make()) - - index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def _build_package_finder(self, options, index_urls, session): - """ - Create a package finder appropriate to this install command. - This method is meant to be overridden by subclasses, not - called directly. - """ - return PackageFinder(find_links=options.find_links, - index_urls=index_urls, - use_wheel=options.use_wheel, - allow_external=options.allow_external, - allow_unverified=options.allow_unverified, - allow_all_external=options.allow_all_external, - allow_all_prereleases=options.pre, - process_dependency_links= - options.process_dependency_links, - session=session, - ) - - def run(self, options, args): - - if ( - options.no_install or - options.no_download or - (options.build_dir != build_prefix) or - options.no_clean - ): - logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, ' - 'and --no-clean are deprecated. 
See https://github.com/pypa/pip/issues/906.') - - if options.download_dir: - options.no_install = True - options.ignore_installed = True - options.build_dir = os.path.abspath(options.build_dir) - options.src_dir = os.path.abspath(options.src_dir) - install_options = options.install_options or [] - if options.use_user_site: - if virtualenv_no_global(): - raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.") - install_options.append('--user') - - temp_target_dir = None - if options.target_dir: - options.ignore_installed = True - temp_target_dir = tempfile.mkdtemp() - options.target_dir = os.path.abspath(options.target_dir) - if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir): - raise CommandError("Target path exists but is not a directory, will not continue.") - install_options.append('--home=' + temp_target_dir) - - global_options = options.global_options or [] - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) - index_urls = [] - - if options.use_mirrors: - logger.deprecated("1.7", - "--use-mirrors has been deprecated and will be removed" - " in the future. Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - - if options.mirrors: - logger.deprecated("1.7", - "--mirrors has been deprecated and will be removed in " - " the future. Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - index_urls += options.mirrors - - session = self._build_session(options) - - finder = self._build_package_finder(options, index_urls, session) - - requirement_set = RequirementSet( - build_dir=options.build_dir, - src_dir=options.src_dir, - download_dir=options.download_dir, - download_cache=options.download_cache, - upgrade=options.upgrade, - as_egg=options.as_egg, - ignore_installed=options.ignore_installed, - ignore_dependencies=options.ignore_dependencies, - force_reinstall=options.force_reinstall, - use_user_site=options.use_user_site, - target_dir=temp_target_dir, - session=session, - pycompile=options.compile, - ) - for name in args: - requirement_set.add_requirement( - InstallRequirement.from_line(name, None)) - for name in options.editables: - requirement_set.add_requirement( - InstallRequirement.from_editable(name, default_vcs=options.default_vcs)) - for filename in options.requirements: - for req in parse_requirements(filename, finder=finder, options=options, session=session): - requirement_set.add_requirement(req) - if not requirement_set.has_requirements: - opts = {'name': self.name} - if options.find_links: - msg = ('You must give at least one requirement to %(name)s ' - '(maybe you meant "pip %(name)s %(links)s"?)' % - dict(opts, links=' '.join(options.find_links))) - else: - msg = ('You must give at least one requirement ' - 'to %(name)s (see "pip help %(name)s")' % opts) - logger.warn(msg) - return - - try: - if not options.no_download: - requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle) - else: - requirement_set.locate_files() - - if not options.no_install and not self.bundle: - requirement_set.install(install_options, global_options, root=options.root_path) - installed = ' '.join([req.name for req in - requirement_set.successfully_installed]) - if installed: - logger.notify('Successfully installed %s' % installed) - elif not self.bundle: - downloaded = ' '.join([req.name for req in - 
requirement_set.successfully_downloaded]) - if downloaded: - logger.notify('Successfully downloaded %s' % downloaded) - elif self.bundle: - requirement_set.create_bundle(self.bundle_filename) - logger.notify('Created bundle in %s' % self.bundle_filename) - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - # Clean up - if (not options.no_clean) and ((not options.no_install) or options.download_dir): - requirement_set.cleanup_files(bundle=self.bundle) - - if options.target_dir: - if not os.path.exists(options.target_dir): - os.makedirs(options.target_dir) - lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] - for item in os.listdir(lib_dir): - shutil.move( - os.path.join(lib_dir, item), - os.path.join(options.target_dir, item) - ) - shutil.rmtree(temp_target_dir) - return requirement_set diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/list.py b/Darwin/lib/python3.4/site-packages/pip/commands/list.py deleted file mode 100644 index 207f068..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/list.py +++ /dev/null @@ -1,162 +0,0 @@ -from pip.basecommand import Command -from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled -from pip.index import PackageFinder -from pip.log import logger -from pip.req import InstallRequirement -from pip.util import get_installed_distributions, dist_is_editable -from pip.cmdoptions import make_option_group, index_group - - -class ListCommand(Command): - """List installed packages, including editables.""" - name = 'list' - usage = """ - %prog [options]""" - summary = 'List installed packages.' - - # distributions to skip (python itself is reported by pkg_resources.working_set) - skip = ['python'] - - def __init__(self, *args, **kw): - super(ListCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '-o', '--outdated', - action='store_true', - default=False, - help='List outdated packages (excluding editables)') - cmd_opts.add_option( - '-u', '--uptodate', - action='store_true', - default=False, - help='List uptodate packages (excluding editables)') - cmd_opts.add_option( - '-e', '--editable', - action='store_true', - default=False, - help='List editable projects.') - cmd_opts.add_option( - '-l', '--local', - action='store_true', - default=False, - help='If in a virtualenv that has global access, do not list globally-installed packages.') - - cmd_opts.add_option( - '--pre', - action='store_true', - default=False, - help="Include pre-release and development versions. By default, pip only finds stable versions.") - - index_opts = make_option_group(index_group, self.parser) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def _build_package_finder(self, options, index_urls, session): - """ - Create a package finder appropriate to this list command. 
- """ - return PackageFinder(find_links=options.find_links, - index_urls=index_urls, - allow_external=options.allow_external, - allow_unverified=options.allow_unverified, - allow_all_external=options.allow_all_external, - allow_all_prereleases=options.pre, - process_dependency_links= - options.process_dependency_links, - session=session, - ) - - def run(self, options, args): - if options.outdated: - self.run_outdated(options) - elif options.uptodate: - self.run_uptodate(options) - elif options.editable: - self.run_editables(options) - else: - self.run_listing(options) - - def run_outdated(self, options): - for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options): - if remote_version_parsed > dist.parsed_version: - logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name, - dist.version, remote_version_raw)) - - def find_packages_latests_versions(self, options): - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) - index_urls = [] - - if options.use_mirrors: - logger.deprecated("1.7", - "--use-mirrors has been deprecated and will be removed" - " in the future. Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - - if options.mirrors: - logger.deprecated("1.7", - "--mirrors has been deprecated and will be removed in " - " the future. Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - index_urls += options.mirrors - - dependency_links = [] - for dist in get_installed_distributions(local_only=options.local, skip=self.skip): - if dist.has_metadata('dependency_links.txt'): - dependency_links.extend( - dist.get_metadata_lines('dependency_links.txt'), - ) - - session = self._build_session(options) - - finder = self._build_package_finder(options, index_urls, session) - finder.add_dependency_links(dependency_links) - - installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip) - for dist in installed_packages: - req = InstallRequirement.from_line(dist.key, None) - try: - link = finder.find_requirement(req, True) - - # If link is None, means installed version is most up-to-date - if link is None: - continue - except DistributionNotFound: - continue - except BestVersionAlreadyInstalled: - remote_version = req.installed_version - else: - # It might be a good idea that link or finder had a public method - # that returned version - remote_version = finder._link_package_versions(link, req.name)[0] - remote_version_raw = remote_version[2] - remote_version_parsed = remote_version[0] - yield dist, remote_version_raw, remote_version_parsed - - def run_listing(self, options): - installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip) - self.output_package_listing(installed_packages) - - def run_editables(self, options): - installed_packages = get_installed_distributions(local_only=options.local, editables_only=True) - self.output_package_listing(installed_packages) - - def output_package_listing(self, installed_packages): - installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower()) - for dist in installed_packages: - if dist_is_editable(dist): - line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location) - else: - line = '%s (%s)' % (dist.project_name, dist.version) - logger.notify(line) - - def run_uptodate(self, options): - uptodate = [] - for dist, remote_version_raw, 
remote_version_parsed in self.find_packages_latests_versions(options): - if dist.parsed_version == remote_version_parsed: - uptodate.append(dist) - self.output_package_listing(uptodate) diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/show.py b/Darwin/lib/python3.4/site-packages/pip/commands/show.py deleted file mode 100644 index 02b473a..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/show.py +++ /dev/null @@ -1,80 +0,0 @@ -import os - -from pip.basecommand import Command -from pip.log import logger -from pip._vendor import pkg_resources - - -class ShowCommand(Command): - """Show information about one or more installed packages.""" - name = 'show' - usage = """ - %prog [options] ...""" - summary = 'Show information about installed packages.' - - def __init__(self, *args, **kw): - super(ShowCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-f', '--files', - dest='files', - action='store_true', - default=False, - help='Show the full list of installed files for each package.') - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - if not args: - logger.warn('ERROR: Please provide a package name or names.') - return - query = args - - results = search_packages_info(query) - print_results(results, options.files) - - -def search_packages_info(query): - """ - Gather details from installed distributions. Print distribution name, - version, location, and installed files. Installed files requires a - pip generated 'installed-files.txt' in the distributions '.egg-info' - directory. - """ - installed_packages = dict( - [(p.project_name.lower(), p) for p in pkg_resources.working_set]) - for name in query: - normalized_name = name.lower() - if normalized_name in installed_packages: - dist = installed_packages[normalized_name] - package = { - 'name': dist.project_name, - 'version': dist.version, - 'location': dist.location, - 'requires': [dep.project_name for dep in dist.requires()], - } - filelist = os.path.join( - dist.location, - dist.egg_name() + '.egg-info', - 'installed-files.txt') - if os.path.isfile(filelist): - package['files'] = filelist - yield package - - -def print_results(distributions, list_all_files): - """ - Print the informations from installed distributions found. - """ - for dist in distributions: - logger.notify("---") - logger.notify("Name: %s" % dist['name']) - logger.notify("Version: %s" % dist['version']) - logger.notify("Location: %s" % dist['location']) - logger.notify("Requires: %s" % ', '.join(dist['requires'])) - if list_all_files: - logger.notify("Files:") - if 'files' in dist: - for line in open(dist['files']): - logger.notify(" %s" % line.strip()) - else: - logger.notify("Cannot locate installed-files.txt") diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/uninstall.py b/Darwin/lib/python3.4/site-packages/pip/commands/uninstall.py deleted file mode 100644 index b7099cf..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/uninstall.py +++ /dev/null @@ -1,59 +0,0 @@ -from pip.req import InstallRequirement, RequirementSet, parse_requirements -from pip.basecommand import Command -from pip.exceptions import InstallationError - - -class UninstallCommand(Command): - """ - Uninstall packages. - - pip is able to uninstall most installed packages. Known exceptions are: - - - Pure distutils packages installed with ``python setup.py install``, which - leave behind no metadata to determine what files were installed. - - Script wrappers installed by ``python setup.py develop``. 
- """ - name = 'uninstall' - usage = """ - %prog [options] ... - %prog [options] -r ...""" - summary = 'Uninstall packages.' - - def __init__(self, *args, **kw): - super(UninstallCommand, self).__init__(*args, **kw) - self.cmd_opts.add_option( - '-r', '--requirement', - dest='requirements', - action='append', - default=[], - metavar='file', - help='Uninstall all the packages listed in the given requirements file. ' - 'This option can be used multiple times.') - self.cmd_opts.add_option( - '-y', '--yes', - dest='yes', - action='store_true', - help="Don't ask for confirmation of uninstall deletions.") - - self.parser.insert_option_group(0, self.cmd_opts) - - def run(self, options, args): - session = self._build_session(options) - - requirement_set = RequirementSet( - build_dir=None, - src_dir=None, - download_dir=None, - session=session, - ) - for name in args: - requirement_set.add_requirement( - InstallRequirement.from_line(name)) - for filename in options.requirements: - for req in parse_requirements(filename, - options=options, session=session): - requirement_set.add_requirement(req) - if not requirement_set.has_requirements: - raise InstallationError('You must give at least one requirement ' - 'to %(name)s (see "pip help %(name)s")' % dict(name=self.name)) - requirement_set.uninstall(auto_confirm=options.yes) diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/unzip.py b/Darwin/lib/python3.4/site-packages/pip/commands/unzip.py deleted file mode 100644 index ed66ab9..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/unzip.py +++ /dev/null @@ -1,7 +0,0 @@ -from pip.commands.zip import ZipCommand - - -class UnzipCommand(ZipCommand): - """Unzip individual packages.""" - name = 'unzip' - summary = 'DEPRECATED. Unzip individual packages.' diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/wheel.py b/Darwin/lib/python3.4/site-packages/pip/commands/wheel.py deleted file mode 100644 index 6527063..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/wheel.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -from __future__ import absolute_import - -import os -import sys -from pip.basecommand import Command -from pip.index import PackageFinder -from pip.log import logger -from pip.exceptions import CommandError, PreviousBuildDirError -from pip.req import InstallRequirement, RequirementSet, parse_requirements -from pip.util import normalize_path -from pip.wheel import WheelBuilder -from pip import cmdoptions - -DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse') - -class WheelCommand(Command): - """ - Build Wheel archives for your requirements and dependencies. - - Wheel is a built-package format, and offers the advantage of not recompiling your software during every install. - For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest. - - Requirements: setuptools>=0.8, and wheel. - - 'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels. - - """ - - name = 'wheel' - usage = """ - %prog [options] ... - %prog [options] -r ... - %prog [options] ... - %prog [options] ... - %prog [options] ...""" - - summary = 'Build wheels from your requirements.' 
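For orientation on the command that this removed wheel.py module implements: the usage string above, together with the -w/--wheel-dir and -r options defined in __init__ below, corresponds to an invocation like the sketch that follows. This is an illustration, not taken from the diff; it assumes pip is on PATH, that a requirements.txt exists in the working directory, and that the wheel and setuptools>=0.8 prerequisites checked in run() are installed.

    # Minimal sketch of driving 'pip wheel' as described above (not taken
    # from the removed module). The requirements file and the ./wheelhouse
    # output directory are placeholders.
    import subprocess

    subprocess.check_call([
        "pip", "wheel",
        "-w", "wheelhouse",        # --wheel-dir: where built wheels are written
        "-r", "requirements.txt",  # build wheels for everything listed here
    ])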
- - def __init__(self, *args, **kw): - super(WheelCommand, self).__init__(*args, **kw) - - cmd_opts = self.cmd_opts - - cmd_opts.add_option( - '-w', '--wheel-dir', - dest='wheel_dir', - metavar='dir', - default=DEFAULT_WHEEL_DIR, - help="Build wheels into , where the default is '/wheelhouse'.") - cmd_opts.add_option(cmdoptions.use_wheel.make()) - cmd_opts.add_option(cmdoptions.no_use_wheel.make()) - cmd_opts.add_option( - '--build-option', - dest='build_options', - metavar='options', - action='append', - help="Extra arguments to be supplied to 'setup.py bdist_wheel'.") - cmd_opts.add_option(cmdoptions.requirements.make()) - cmd_opts.add_option(cmdoptions.download_cache.make()) - cmd_opts.add_option(cmdoptions.no_deps.make()) - cmd_opts.add_option(cmdoptions.build_dir.make()) - - cmd_opts.add_option( - '--global-option', - dest='global_options', - action='append', - metavar='options', - help="Extra global options to be supplied to the setup.py " - "call before the 'bdist_wheel' command.") - - cmd_opts.add_option( - '--pre', - action='store_true', - default=False, - help="Include pre-release and development versions. By default, pip only finds stable versions.") - - cmd_opts.add_option(cmdoptions.no_clean.make()) - - index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser) - - self.parser.insert_option_group(0, index_opts) - self.parser.insert_option_group(0, cmd_opts) - - def run(self, options, args): - - # confirm requirements - try: - import wheel.bdist_wheel - except ImportError: - raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: pip install wheel") - - try: - import pkg_resources - except ImportError: - raise CommandError( - "'pip wheel' requires setuptools >= 0.8 for dist-info support." - " To fix this, run: pip install --upgrade setuptools" - ) - else: - if not hasattr(pkg_resources, 'DistInfoDistribution'): - raise CommandError( - "'pip wheel' requires setuptools >= 0.8 for dist-info " - "support. To fix this, run: pip install --upgrade " - "setuptools" - ) - - index_urls = [options.index_url] + options.extra_index_urls - if options.no_index: - logger.notify('Ignoring indexes: %s' % ','.join(index_urls)) - index_urls = [] - - if options.use_mirrors: - logger.deprecated("1.7", - "--use-mirrors has been deprecated and will be removed" - " in the future. Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - - if options.mirrors: - logger.deprecated("1.7", - "--mirrors has been deprecated and will be removed in " - " the future. 
Explicit uses of --index-url and/or " - "--extra-index-url is suggested.") - index_urls += options.mirrors - - session = self._build_session(options) - - finder = PackageFinder(find_links=options.find_links, - index_urls=index_urls, - use_wheel=options.use_wheel, - allow_external=options.allow_external, - allow_unverified=options.allow_unverified, - allow_all_external=options.allow_all_external, - allow_all_prereleases=options.pre, - process_dependency_links= - options.process_dependency_links, - session=session, - ) - - options.build_dir = os.path.abspath(options.build_dir) - requirement_set = RequirementSet( - build_dir=options.build_dir, - src_dir=None, - download_dir=None, - download_cache=options.download_cache, - ignore_dependencies=options.ignore_dependencies, - ignore_installed=True, - session=session, - wheel_download_dir=options.wheel_dir - ) - - # make the wheelhouse - if not os.path.exists(options.wheel_dir): - os.makedirs(options.wheel_dir) - - #parse args and/or requirements files - for name in args: - requirement_set.add_requirement( - InstallRequirement.from_line(name, None)) - - for filename in options.requirements: - for req in parse_requirements( - filename, - finder=finder, - options=options, - session=session): - if req.editable: - logger.notify("ignoring %s" % req.url) - continue - requirement_set.add_requirement(req) - - #fail if no requirements - if not requirement_set.has_requirements: - opts = {'name': self.name} - msg = ('You must give at least one requirement ' - 'to %(name)s (see "pip help %(name)s")' % opts) - logger.error(msg) - return - - try: - #build wheels - wb = WheelBuilder( - requirement_set, - finder, - options.wheel_dir, - build_options = options.build_options or [], - global_options = options.global_options or [] - ) - wb.build() - except PreviousBuildDirError: - options.no_clean = True - raise - finally: - if not options.no_clean: - requirement_set.cleanup_files() diff --git a/Darwin/lib/python3.4/site-packages/pip/commands/zip.py b/Darwin/lib/python3.4/site-packages/pip/commands/zip.py deleted file mode 100644 index c801359..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/commands/zip.py +++ /dev/null @@ -1,351 +0,0 @@ -import sys -import re -import fnmatch -import os -import shutil -import zipfile -from pip.util import display_path, backup_dir, rmtree -from pip.log import logger -from pip.exceptions import InstallationError -from pip.basecommand import Command - - -class ZipCommand(Command): - """Zip individual packages.""" - name = 'zip' - usage = """ - %prog [options] ...""" - summary = 'DEPRECATED. Zip individual packages.' 
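The zip/unzip commands being removed here are deprecated, but the heart of zip_package() further down is easy to show in isolation. The sketch below is an illustration rather than the removed implementation: it uses only the standard library, mirrors the --no-pyc option by skipping compiled files, and takes placeholder paths.

    import os
    import zipfile

    def zip_package_dir(package_dir, zip_path, no_pyc=True):
        # Walk the package directory and store each file relative to the
        # package's parent, so entries keep their 'pkg/module.py' layout;
        # optionally skip .pyc files, as the --no-pyc option does.
        parent = os.path.dirname(os.path.abspath(package_dir))
        with zipfile.ZipFile(zip_path, "w") as zf:
            for dirpath, dirnames, filenames in os.walk(package_dir):
                if no_pyc:
                    filenames = [f for f in filenames
                                 if not f.lower().endswith(".pyc")]
                for name in filenames:
                    full = os.path.join(dirpath, name)
                    zf.write(full, os.path.relpath(full, parent))

    # Example (placeholder path):
    # zip_package_dir("/path/to/site-packages/somepkg", "somepkg.zip")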
- - def __init__(self, *args, **kw): - super(ZipCommand, self).__init__(*args, **kw) - if self.name == 'zip': - self.cmd_opts.add_option( - '--unzip', - action='store_true', - dest='unzip', - help='Unzip (rather than zip) a package.') - else: - self.cmd_opts.add_option( - '--zip', - action='store_false', - dest='unzip', - default=True, - help='Zip (rather than unzip) a package.') - self.cmd_opts.add_option( - '--no-pyc', - action='store_true', - dest='no_pyc', - help='Do not include .pyc files in zip files (useful on Google App Engine).') - self.cmd_opts.add_option( - '-l', '--list', - action='store_true', - dest='list', - help='List the packages available, and their zip status.') - self.cmd_opts.add_option( - '--sort-files', - action='store_true', - dest='sort_files', - help='With --list, sort packages according to how many files they contain.') - self.cmd_opts.add_option( - '--path', - action='append', - dest='paths', - help='Restrict operations to the given paths (may include wildcards).') - self.cmd_opts.add_option( - '-n', '--simulate', - action='store_true', - help='Do not actually perform the zip/unzip operation.') - - self.parser.insert_option_group(0, self.cmd_opts) - - def paths(self): - """All the entries of sys.path, possibly restricted by --path""" - if not self.select_paths: - return sys.path - result = [] - match_any = set() - for path in sys.path: - path = os.path.normcase(os.path.abspath(path)) - for match in self.select_paths: - match = os.path.normcase(os.path.abspath(match)) - if '*' in match: - if re.search(fnmatch.translate(match + '*'), path): - result.append(path) - match_any.add(match) - break - else: - if path.startswith(match): - result.append(path) - match_any.add(match) - break - else: - logger.debug("Skipping path %s because it doesn't match %s" - % (path, ', '.join(self.select_paths))) - for match in self.select_paths: - if match not in match_any and '*' not in match: - result.append(match) - logger.debug("Adding path %s because it doesn't match " - "anything already on sys.path" % match) - return result - - def run(self, options, args): - - logger.deprecated('1.7', "DEPRECATION: 'pip zip' and 'pip unzip` are deprecated, and will be removed in a future release.") - - self.select_paths = options.paths - self.simulate = options.simulate - if options.list: - return self.list(options, args) - if not args: - raise InstallationError( - 'You must give at least one package to zip or unzip') - packages = [] - for arg in args: - module_name, filename = self.find_package(arg) - if options.unzip and os.path.isdir(filename): - raise InstallationError( - 'The module %s (in %s) is not a zip file; cannot be unzipped' - % (module_name, filename)) - elif not options.unzip and not os.path.isdir(filename): - raise InstallationError( - 'The module %s (in %s) is not a directory; cannot be zipped' - % (module_name, filename)) - packages.append((module_name, filename)) - last_status = None - for module_name, filename in packages: - if options.unzip: - last_status = self.unzip_package(module_name, filename) - else: - last_status = self.zip_package(module_name, filename, options.no_pyc) - return last_status - - def unzip_package(self, module_name, filename): - zip_filename = os.path.dirname(filename) - if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename): - raise InstallationError( - 'Module %s (in %s) isn\'t located in a zip file in %s' - % (module_name, filename, zip_filename)) - package_path = os.path.dirname(zip_filename) - if not package_path in self.paths(): 
- logger.warn( - 'Unpacking %s into %s, but %s is not on sys.path' - % (display_path(zip_filename), display_path(package_path), - display_path(package_path))) - logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename))) - if self.simulate: - logger.notify('Skipping remaining operations because of --simulate') - return - logger.indent += 2 - try: - ## FIXME: this should be undoable: - zip = zipfile.ZipFile(zip_filename) - to_save = [] - for info in zip.infolist(): - name = info.filename - if name.startswith(module_name + os.path.sep): - content = zip.read(name) - dest = os.path.join(package_path, name) - if not os.path.exists(os.path.dirname(dest)): - os.makedirs(os.path.dirname(dest)) - if not content and dest.endswith(os.path.sep): - if not os.path.exists(dest): - os.makedirs(dest) - else: - f = open(dest, 'wb') - f.write(content) - f.close() - else: - to_save.append((name, zip.read(name))) - zip.close() - if not to_save: - logger.info('Removing now-empty zip file %s' % display_path(zip_filename)) - os.unlink(zip_filename) - self.remove_filename_from_pth(zip_filename) - else: - logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename))) - zip = zipfile.ZipFile(zip_filename, 'w') - for name, content in to_save: - zip.writestr(name, content) - zip.close() - finally: - logger.indent -= 2 - - def zip_package(self, module_name, filename, no_pyc): - orig_filename = filename - logger.notify('Zip %s (in %s)' % (module_name, display_path(filename))) - logger.indent += 2 - if filename.endswith('.egg'): - dest_filename = filename - else: - dest_filename = filename + '.zip' - try: - ## FIXME: I think this needs to be undoable: - if filename == dest_filename: - filename = backup_dir(orig_filename) - logger.notify('Moving %s aside to %s' % (orig_filename, filename)) - if not self.simulate: - shutil.move(orig_filename, filename) - try: - logger.info('Creating zip file in %s' % display_path(dest_filename)) - if not self.simulate: - zip = zipfile.ZipFile(dest_filename, 'w') - zip.writestr(module_name + '/', '') - for dirpath, dirnames, filenames in os.walk(filename): - if no_pyc: - filenames = [f for f in filenames - if not f.lower().endswith('.pyc')] - for fns, is_dir in [(dirnames, True), (filenames, False)]: - for fn in fns: - full = os.path.join(dirpath, fn) - dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn) - if is_dir: - zip.writestr(dest + '/', '') - else: - zip.write(full, dest) - zip.close() - logger.info('Removing old directory %s' % display_path(filename)) - if not self.simulate: - rmtree(filename) - except: - ## FIXME: need to do an undo here - raise - ## FIXME: should also be undone: - self.add_filename_to_pth(dest_filename) - finally: - logger.indent -= 2 - - def remove_filename_from_pth(self, filename): - for pth in self.pth_files(): - f = open(pth, 'r') - lines = f.readlines() - f.close() - new_lines = [ - l for l in lines if l.strip() != filename] - if lines != new_lines: - logger.info('Removing reference to %s from .pth file %s' - % (display_path(filename), display_path(pth))) - if not [line for line in new_lines if line]: - logger.info('%s file would be empty: deleting' % display_path(pth)) - if not self.simulate: - os.unlink(pth) - else: - if not self.simulate: - f = open(pth, 'wb') - f.writelines(new_lines) - f.close() - return - logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename)) - - def add_filename_to_pth(self, filename): - path = 
os.path.dirname(filename) - dest = filename + '.pth' - if path not in self.paths(): - logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest)) - if not self.simulate: - if os.path.exists(dest): - f = open(dest) - lines = f.readlines() - f.close() - if lines and not lines[-1].endswith('\n'): - lines[-1] += '\n' - lines.append(filename + '\n') - else: - lines = [filename + '\n'] - f = open(dest, 'wb') - f.writelines(lines) - f.close() - - def pth_files(self): - for path in self.paths(): - if not os.path.exists(path) or not os.path.isdir(path): - continue - for filename in os.listdir(path): - if filename.endswith('.pth'): - yield os.path.join(path, filename) - - def find_package(self, package): - for path in self.paths(): - full = os.path.join(path, package) - if os.path.exists(full): - return package, full - if not os.path.isdir(path) and zipfile.is_zipfile(path): - zip = zipfile.ZipFile(path, 'r') - try: - zip.read(os.path.join(package, '__init__.py')) - except KeyError: - pass - else: - zip.close() - return package, full - zip.close() - ## FIXME: need special error for package.py case: - raise InstallationError( - 'No package with the name %s found' % package) - - def list(self, options, args): - if args: - raise InstallationError( - 'You cannot give an argument with --list') - for path in sorted(self.paths()): - if not os.path.exists(path): - continue - basename = os.path.basename(path.rstrip(os.path.sep)) - if os.path.isfile(path) and zipfile.is_zipfile(path): - if os.path.dirname(path) not in self.paths(): - logger.notify('Zipped egg: %s' % display_path(path)) - continue - if (basename != 'site-packages' and basename != 'dist-packages' - and not path.replace('\\', '/').endswith('lib/python')): - continue - logger.notify('In %s:' % display_path(path)) - logger.indent += 2 - zipped = [] - unzipped = [] - try: - for filename in sorted(os.listdir(path)): - ext = os.path.splitext(filename)[1].lower() - if ext in ('.pth', '.egg-info', '.egg-link'): - continue - if ext == '.py': - logger.info('Not displaying %s: not a package' % display_path(filename)) - continue - full = os.path.join(path, filename) - if os.path.isdir(full): - unzipped.append((filename, self.count_package(full))) - elif zipfile.is_zipfile(full): - zipped.append(filename) - else: - logger.info('Unknown file: %s' % display_path(filename)) - if zipped: - logger.notify('Zipped packages:') - logger.indent += 2 - try: - for filename in zipped: - logger.notify(filename) - finally: - logger.indent -= 2 - else: - logger.notify('No zipped packages.') - if unzipped: - if options.sort_files: - unzipped.sort(key=lambda x: -x[1]) - logger.notify('Unzipped packages:') - logger.indent += 2 - try: - for filename, count in unzipped: - logger.notify('%s (%i files)' % (filename, count)) - finally: - logger.indent -= 2 - else: - logger.notify('No unzipped packages.') - finally: - logger.indent -= 2 - - def count_package(self, path): - total = 0 - for dirpath, dirnames, filenames in os.walk(path): - filenames = [f for f in filenames - if not f.lower().endswith('.pyc')] - total += len(filenames) - return total diff --git a/Darwin/lib/python3.4/site-packages/pip/download.py b/Darwin/lib/python3.4/site-packages/pip/download.py deleted file mode 100644 index b8cfb79..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/download.py +++ /dev/null @@ -1,644 +0,0 @@ -import cgi -import email.utils -import hashlib -import getpass -import mimetypes -import os -import platform -import re -import shutil -import sys -import tempfile - 
-import pip - -from pip.backwardcompat import urllib, urlparse, raw_input -from pip.exceptions import InstallationError, HashMismatch -from pip.util import (splitext, rmtree, format_size, display_path, - backup_dir, ask_path_exists, unpack_file, - create_download_cache_folder, cache_download) -from pip.vcs import vcs -from pip.log import logger -from pip._vendor import requests, six -from pip._vendor.requests.adapters import BaseAdapter -from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth -from pip._vendor.requests.compat import IncompleteRead -from pip._vendor.requests.exceptions import InvalidURL, ChunkedEncodingError -from pip._vendor.requests.models import Response -from pip._vendor.requests.structures import CaseInsensitiveDict - -__all__ = ['get_file_content', - 'is_url', 'url_to_path', 'path_to_url', - 'is_archive_file', 'unpack_vcs_link', - 'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url'] - - -def user_agent(): - """Return a string representing the user agent.""" - _implementation = platform.python_implementation() - - if _implementation == 'CPython': - _implementation_version = platform.python_version() - elif _implementation == 'PyPy': - _implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - _implementation_version = ''.join([ - _implementation_version, - sys.pypy_version_info.releaselevel, - ]) - elif _implementation == 'Jython': - _implementation_version = platform.python_version() # Complete Guess - elif _implementation == 'IronPython': - _implementation_version = platform.python_version() # Complete Guess - else: - _implementation_version = 'Unknown' - - try: - p_system = platform.system() - p_release = platform.release() - except IOError: - p_system = 'Unknown' - p_release = 'Unknown' - - return " ".join(['pip/%s' % pip.__version__, - '%s/%s' % (_implementation, _implementation_version), - '%s/%s' % (p_system, p_release)]) - - -class MultiDomainBasicAuth(AuthBase): - - def __init__(self, prompting=True): - self.prompting = prompting - self.passwords = {} - - def __call__(self, req): - parsed = urlparse.urlparse(req.url) - - # Get the netloc without any embedded credentials - netloc = parsed.netloc.split("@", 1)[-1] - - # Set the url of the request to the url without any credentials - req.url = urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) - - # Use any stored credentials that we have for this netloc - username, password = self.passwords.get(netloc, (None, None)) - - # Extract credentials embedded in the url if we have none stored - if username is None: - username, password = self.parse_credentials(parsed.netloc) - - if username or password: - # Store the username and password - self.passwords[netloc] = (username, password) - - # Send the basic auth with this request - req = HTTPBasicAuth(username or "", password or "")(req) - - # Attach a hook to handle 401 responses - req.register_hook("response", self.handle_401) - - return req - - def handle_401(self, resp, **kwargs): - # We only care about 401 responses, anything else we want to just - # pass through the actual response - if resp.status_code != 401: - return resp - - # We are not able to prompt the user so simple return the response - if not self.prompting: - return resp - - parsed = urlparse.urlparse(resp.url) - - # Prompt the user for a new username and password - username = raw_input("User for %s: " % parsed.netloc) - password = 
getpass.getpass("Password: ") - - # Store the new username and password to use for future requests - if username or password: - self.passwords[parsed.netloc] = (username, password) - - # Consume content and release the original connection to allow our new - # request to reuse the same one. - resp.content - resp.raw.release_conn() - - # Add our new username and password to the request - req = HTTPBasicAuth(username or "", password or "")(resp.request) - - # Send our new request - new_resp = resp.connection.send(req, **kwargs) - new_resp.history.append(resp) - - return new_resp - - def parse_credentials(self, netloc): - if "@" in netloc: - userinfo = netloc.rsplit("@", 1)[0] - if ":" in userinfo: - return userinfo.split(":", 1) - return userinfo, None - return None, None - - -class LocalFSResponse(object): - - def __init__(self, fileobj): - self.fileobj = fileobj - - def __getattr__(self, name): - return getattr(self.fileobj, name) - - def read(self, amt=None, decode_content=None, cache_content=False): - return self.fileobj.read(amt) - - # Insert Hacks to Make Cookie Jar work w/ Requests - @property - def _original_response(self): - class FakeMessage(object): - def getheaders(self, header): - return [] - - def get_all(self, header, default): - return [] - - class FakeResponse(object): - @property - def msg(self): - return FakeMessage() - - return FakeResponse() - - -class LocalFSAdapter(BaseAdapter): - - def send(self, request, stream=None, timeout=None, verify=None, cert=None, - proxies=None): - parsed_url = urlparse.urlparse(request.url) - - # We only work for requests with a host of localhost - if parsed_url.netloc.lower() != "localhost": - raise InvalidURL("Invalid URL %r: Only localhost is allowed" % - request.url) - - real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:]) - pathname = url_to_path(real_url) - - resp = Response() - resp.status_code = 200 - resp.url = real_url - - stats = os.stat(pathname) - modified = email.utils.formatdate(stats.st_mtime, usegmt=True) - resp.headers = CaseInsensitiveDict({ - "Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain", - "Content-Length": stats.st_size, - "Last-Modified": modified, - }) - - resp.raw = LocalFSResponse(open(pathname, "rb")) - resp.close = resp.raw.close - - return resp - - def close(self): - pass - - -class PipSession(requests.Session): - - timeout = None - - def __init__(self, *args, **kwargs): - super(PipSession, self).__init__(*args, **kwargs) - - # Attach our User Agent to the request - self.headers["User-Agent"] = user_agent() - - # Attach our Authentication handler to the session - self.auth = MultiDomainBasicAuth() - - # Enable file:// urls - self.mount("file://", LocalFSAdapter()) - - def request(self, method, url, *args, **kwargs): - # Make file:// urls not fail due to lack of a hostname - parsed = urlparse.urlparse(url) - if parsed.scheme == "file": - url = urlparse.urlunparse(parsed[:1] + ("localhost",) + parsed[2:]) - - # Allow setting a default timeout on a session - kwargs.setdefault("timeout", self.timeout) - - # Dispatch the actual request - return super(PipSession, self).request(method, url, *args, **kwargs) - - -def get_file_content(url, comes_from=None, session=None): - """Gets the content of a file; it may be a filename, file: URL, or - http: URL. Returns (location, content). 
Content is unicode.""" - if session is None: - session = PipSession() - - match = _scheme_re.search(url) - if match: - scheme = match.group(1).lower() - if (scheme == 'file' and comes_from - and comes_from.startswith('http')): - raise InstallationError( - 'Requirements file %s references URL %s, which is local' - % (comes_from, url)) - if scheme == 'file': - path = url.split(':', 1)[1] - path = path.replace('\\', '/') - match = _url_slash_drive_re.match(path) - if match: - path = match.group(1) + ':' + path.split('|', 1)[1] - path = urllib.unquote(path) - if path.startswith('/'): - path = '/' + path.lstrip('/') - url = path - else: - ## FIXME: catch some errors - resp = session.get(url) - resp.raise_for_status() - - if six.PY3: - return resp.url, resp.text - else: - return resp.url, resp.content - try: - f = open(url) - content = f.read() - except IOError: - e = sys.exc_info()[1] - raise InstallationError('Could not open requirements file: %s' % str(e)) - else: - f.close() - return url, content - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) -_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) - - -def is_url(name): - """Returns true if the name looks like a URL""" - if ':' not in name: - return False - scheme = name.split(':', 1)[0].lower() - return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes - - -def url_to_path(url): - """ - Convert a file: URL to a path. - """ - assert url.startswith('file:'), ( - "You can only turn file: urls into filenames (not %r)" % url) - path = url[len('file:'):].lstrip('/') - path = urllib.unquote(path) - if _url_drive_re.match(path): - path = path[0] + ':' + path[2:] - else: - path = '/' + path - return path - - -_drive_re = re.compile('^([a-z]):', re.I) -_url_drive_re = re.compile('^([a-z])[:|]', re.I) - - -def path_to_url(path): - """ - Convert a path to a file: URL. The path will be made absolute and have - quoted path parts. - """ - path = os.path.normpath(os.path.abspath(path)) - drive, path = os.path.splitdrive(path) - filepath = path.split(os.path.sep) - url = '/'.join([urllib.quote(part) for part in filepath]) - if not drive: - url = url.lstrip('/') - return 'file:///' + drive + url - - -def is_archive_file(name): - """Return True if `name` is a considered as an archive file.""" - archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle', - '.whl') - ext = splitext(name)[1].lower() - if ext in archives: - return True - return False - - -def unpack_vcs_link(link, location, only_download=False): - vcs_backend = _get_used_vcs_backend(link) - if only_download: - vcs_backend.export(location) - else: - vcs_backend.unpack(location) - - -def _get_used_vcs_backend(link): - for backend in vcs.backends: - if link.scheme in backend.schemes: - vcs_backend = backend(link.url) - return vcs_backend - - -def is_vcs_url(link): - return bool(_get_used_vcs_backend(link)) - - -def is_file_url(link): - return link.url.lower().startswith('file:') - - -def _check_hash(download_hash, link): - if download_hash.digest_size != hashlib.new(link.hash_name).digest_size: - logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!" - % (download_hash.digest_size, link, link.hash_name)) - raise HashMismatch('Hash name mismatch for package %s' % link) - if download_hash.hexdigest() != link.hash: - logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!" 
- % (link, download_hash.hexdigest(), link.hash)) - raise HashMismatch('Bad %s hash for package %s' % (link.hash_name, link)) - - -def _get_hash_from_file(target_file, link): - try: - download_hash = hashlib.new(link.hash_name) - except (ValueError, TypeError): - logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link)) - return None - - fp = open(target_file, 'rb') - while True: - chunk = fp.read(4096) - if not chunk: - break - download_hash.update(chunk) - fp.close() - return download_hash - - -def _download_url(resp, link, temp_location): - fp = open(temp_location, 'wb') - download_hash = None - if link.hash and link.hash_name: - try: - download_hash = hashlib.new(link.hash_name) - except ValueError: - logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link)) - try: - total_length = int(resp.headers['content-length']) - except (ValueError, KeyError, TypeError): - total_length = 0 - downloaded = 0 - show_progress = total_length > 40 * 1000 or not total_length - show_url = link.show_url - try: - if show_progress: - ## FIXME: the URL can get really long in this message: - if total_length: - logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length))) - else: - logger.start_progress('Downloading %s (unknown size): ' % show_url) - else: - logger.notify('Downloading %s' % show_url) - logger.info('Downloading from URL %s' % link) - - def resp_read(chunk_size): - try: - # Special case for urllib3. - try: - for chunk in resp.raw.stream( - chunk_size, decode_content=False): - yield chunk - except IncompleteRead as e: - raise ChunkedEncodingError(e) - except AttributeError: - # Standard file-like object. - while True: - chunk = resp.raw.read(chunk_size) - if not chunk: - break - yield chunk - - for chunk in resp_read(4096): - downloaded += len(chunk) - if show_progress: - if not total_length: - logger.show_progress('%s' % format_size(downloaded)) - else: - logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded))) - if download_hash is not None: - download_hash.update(chunk) - fp.write(chunk) - fp.close() - finally: - if show_progress: - logger.end_progress('%s downloaded' % format_size(downloaded)) - return download_hash - - -def _copy_file(filename, location, content_type, link): - copy = True - download_location = os.path.join(location, link.filename) - if os.path.exists(download_location): - response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % - display_path(download_location), ('i', 'w', 'b')) - if response == 'i': - copy = False - elif response == 'w': - logger.warn('Deleting %s' % display_path(download_location)) - os.remove(download_location) - elif response == 'b': - dest_file = backup_dir(download_location) - logger.warn('Backing up %s to %s' - % (display_path(download_location), display_path(dest_file))) - shutil.move(download_location, dest_file) - if copy: - shutil.copy(filename, download_location) - logger.notify('Saved %s' % display_path(download_location)) - - -def unpack_http_url(link, location, download_cache, download_dir=None, - session=None): - if session is None: - session = PipSession() - - temp_dir = tempfile.mkdtemp('-unpack', 'pip-') - temp_location = None - target_url = link.url.split('#', 1)[0] - already_cached = False - cache_file = None - cache_content_type_file = None - download_hash = None - - # If a download cache is specified, is the file cached there? 
- if download_cache: - cache_file = os.path.join(download_cache, - urllib.quote(target_url, '')) - cache_content_type_file = cache_file + '.content-type' - already_cached = ( - os.path.exists(cache_file) and - os.path.exists(cache_content_type_file) - ) - if not os.path.isdir(download_cache): - create_download_cache_folder(download_cache) - - # If a download dir is specified, is the file already downloaded there? - already_downloaded = None - if download_dir: - already_downloaded = os.path.join(download_dir, link.filename) - if not os.path.exists(already_downloaded): - already_downloaded = None - - # If already downloaded, does it's hash match? - if already_downloaded: - temp_location = already_downloaded - content_type = mimetypes.guess_type(already_downloaded)[0] - logger.notify('File was already downloaded %s' % already_downloaded) - if link.hash: - download_hash = _get_hash_from_file(temp_location, link) - try: - _check_hash(download_hash, link) - except HashMismatch: - logger.warn( - 'Previously-downloaded file %s has bad hash, ' - 're-downloading.' % temp_location - ) - temp_location = None - os.unlink(already_downloaded) - already_downloaded = None - - # If not a valid download, let's confirm the cached file is valid - if already_cached and not temp_location: - with open(cache_content_type_file) as fp: - content_type = fp.read().strip() - temp_location = cache_file - logger.notify('Using download cache from %s' % cache_file) - if link.hash and link.hash_name: - download_hash = _get_hash_from_file(cache_file, link) - try: - _check_hash(download_hash, link) - except HashMismatch: - logger.warn( - 'Cached file %s has bad hash, ' - 're-downloading.' % temp_location - ) - temp_location = None - os.unlink(cache_file) - os.unlink(cache_content_type_file) - already_cached = False - - # We don't have either a cached or a downloaded copy - # let's download to a tmp dir - if not temp_location: - try: - resp = session.get(target_url, stream=True) - resp.raise_for_status() - except requests.HTTPError as exc: - logger.fatal("HTTP error %s while getting %s" % - (exc.response.status_code, link)) - raise - - content_type = resp.headers.get('content-type', '') - filename = link.filename # fallback - # Have a look at the Content-Disposition header for a better guess - content_disposition = resp.headers.get('content-disposition') - if content_disposition: - type, params = cgi.parse_header(content_disposition) - # We use ``or`` here because we don't want to use an "empty" value - # from the filename param. - filename = params.get('filename') or filename - ext = splitext(filename)[1] - if not ext: - ext = mimetypes.guess_extension(content_type) - if ext: - filename += ext - if not ext and link.url != resp.url: - ext = os.path.splitext(resp.url)[1] - if ext: - filename += ext - temp_location = os.path.join(temp_dir, filename) - download_hash = _download_url(resp, link, temp_location) - if link.hash and link.hash_name: - _check_hash(download_hash, link) - - # a download dir is specified; let's copy the archive there - if download_dir and not already_downloaded: - _copy_file(temp_location, download_dir, content_type, link) - - # unpack the archive to the build dir location. 
even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(temp_location, location, content_type, link) - - # if using a download cache, cache it, if needed - if cache_file and not already_cached: - cache_download(cache_file, temp_location, content_type) - - if not (already_cached or already_downloaded): - os.unlink(temp_location) - - os.rmdir(temp_dir) - - -def unpack_file_url(link, location, download_dir=None): - - link_path = url_to_path(link.url_without_fragment) - already_downloaded = False - - # If it's a url to a local directory - if os.path.isdir(link_path): - if os.path.isdir(location): - rmtree(location) - shutil.copytree(link_path, location, symlinks=True) - return - - # if link has a hash, let's confirm it matches - if link.hash: - link_path_hash = _get_hash_from_file(link_path, link) - _check_hash(link_path_hash, link) - - # If a download dir is specified, is the file already there and valid? - if download_dir: - download_path = os.path.join(download_dir, link.filename) - if os.path.exists(download_path): - content_type = mimetypes.guess_type(download_path)[0] - logger.notify('File was already downloaded %s' % download_path) - if link.hash: - download_hash = _get_hash_from_file(download_path, link) - try: - _check_hash(download_hash, link) - already_downloaded = True - except HashMismatch: - logger.warn( - 'Previously-downloaded file %s has bad hash, ' - 're-downloading.' % link_path - ) - os.unlink(download_path) - else: - already_downloaded = True - - if already_downloaded: - from_path = download_path - else: - from_path = link_path - - content_type = mimetypes.guess_type(from_path)[0] - - # unpack the archive to the build dir location. even when only downloading - # archives, they have to be unpacked to parse dependencies - unpack_file(from_path, location, content_type, link) - - # a download dir is specified and not already downloaded - if download_dir and not already_downloaded: - _copy_file(from_path, download_dir, content_type, link) diff --git a/Darwin/lib/python3.4/site-packages/pip/index.py b/Darwin/lib/python3.4/site-packages/pip/index.py deleted file mode 100644 index 46916c1..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/index.py +++ /dev/null @@ -1,990 +0,0 @@ -"""Routines related to PyPI, indexes""" - -import sys -import os -import re -import mimetypes -import posixpath - -from pip.log import logger -from pip.util import Inf, normalize_name, splitext, is_prerelease -from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled, - InstallationError, InvalidWheelFilename, UnsupportedWheel) -from pip.backwardcompat import urlparse, url2pathname -from pip.download import PipSession, url_to_path, path_to_url -from pip.wheel import Wheel, wheel_ext -from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform -from pip._vendor import html5lib, requests, pkg_resources -from pip._vendor.requests.exceptions import SSLError - - -__all__ = ['PackageFinder'] - - -DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org" - -INSECURE_SCHEMES = { - "http": ["https"], -} - - -class PackageFinder(object): - """This finds packages. 
- - This is meant to match easy_install's technique for looking for - packages, by reading pages and looking for appropriate links - """ - - def __init__(self, find_links, index_urls, - use_wheel=True, allow_external=[], allow_unverified=[], - allow_all_external=False, allow_all_prereleases=False, - process_dependency_links=False, session=None): - self.find_links = find_links - self.index_urls = index_urls - self.dependency_links = [] - self.cache = PageCache() - # These are boring links that have already been logged somehow: - self.logged_links = set() - - self.use_wheel = use_wheel - - # Do we allow (safe and verifiable) externally hosted files? - self.allow_external = set(normalize_name(n) for n in allow_external) - - # Which names are allowed to install insecure and unverifiable files? - self.allow_unverified = set( - normalize_name(n) for n in allow_unverified - ) - - # Anything that is allowed unverified is also allowed external - self.allow_external |= self.allow_unverified - - # Do we allow all (safe and verifiable) externally hosted files? - self.allow_all_external = allow_all_external - - # Stores if we ignored any external links so that we can instruct - # end users how to install them if no distributions are available - self.need_warn_external = False - - # Stores if we ignored any unsafe links so that we can instruct - # end users how to install them if no distributions are available - self.need_warn_unverified = False - - # Do we want to allow _all_ pre-releases? - self.allow_all_prereleases = allow_all_prereleases - - # Do we process dependency links? - self.process_dependency_links = process_dependency_links - self._have_warned_dependency_links = False - - # The Session we'll use to make requests - self.session = session or PipSession() - - def add_dependency_links(self, links): - ## FIXME: this shouldn't be global list this, it should only - ## apply to requirements of the package that specifies the - ## dependency_links value - ## FIXME: also, we should track comes_from (i.e., use Link) - if self.process_dependency_links: - if not self._have_warned_dependency_links: - logger.deprecated( - "1.6", - "Dependency Links processing has been deprecated with an " - "accelerated time schedule and will be removed in pip 1.6", - ) - self._have_warned_dependency_links = True - self.dependency_links.extend(links) - - def _sort_locations(self, locations): - """ - Sort locations into "files" (archives) and "urls", and return - a pair of lists (files,urls) - """ - files = [] - urls = [] - - # puts the url for the given file path into the appropriate list - def sort_path(path): - url = path_to_url(path) - if mimetypes.guess_type(url, strict=False)[0] == 'text/html': - urls.append(url) - else: - files.append(url) - - for url in locations: - - is_local_path = os.path.exists(url) - is_file_url = url.startswith('file:') - is_find_link = url in self.find_links - - if is_local_path or is_file_url: - if is_local_path: - path = url - else: - path = url_to_path(url) - if is_find_link and os.path.isdir(path): - path = os.path.realpath(path) - for item in os.listdir(path): - sort_path(os.path.join(path, item)) - elif is_file_url and os.path.isdir(path): - urls.append(url) - elif os.path.isfile(path): - sort_path(path) - else: - urls.append(url) - - return files, urls - - def _link_sort_key(self, link_tuple): - """ - Function used to generate link sort key for link tuples. - The greater the return value, the more preferred it is. - If not finding wheels, then sorted by version only. 
- If finding wheels, then the sort order is by version, then: - 1. existing installs - 2. wheels ordered via Wheel.support_index_min() - 3. source archives - Note: it was considered to embed this logic into the Link - comparison operators, but then different sdist links - with the same version, would have to be considered equal - """ - parsed_version, link, _ = link_tuple - if self.use_wheel: - support_num = len(supported_tags) - if link == INSTALLED_VERSION: - pri = 1 - elif link.ext == wheel_ext: - wheel = Wheel(link.filename) # can raise InvalidWheelFilename - if not wheel.supported(): - raise UnsupportedWheel("%s is not a supported wheel for this platform. It can't be sorted." % wheel.filename) - pri = -(wheel.support_index_min()) - else: # sdist - pri = -(support_num) - return (parsed_version, pri) - else: - return parsed_version - - def _sort_versions(self, applicable_versions): - """ - Bring the latest version (and wheels) to the front, but maintain the existing ordering as secondary. - See the docstring for `_link_sort_key` for details. - This function is isolated for easier unit testing. - """ - return sorted(applicable_versions, key=self._link_sort_key, reverse=True) - - def find_requirement(self, req, upgrade): - - def mkurl_pypi_url(url): - loc = posixpath.join(url, url_name) - # For maximum compatibility with easy_install, ensure the path - # ends in a trailing slash. Although this isn't in the spec - # (and PyPI can handle it without the slash) some other index - # implementations might break if they relied on easy_install's behavior. - if not loc.endswith('/'): - loc = loc + '/' - return loc - - url_name = req.url_name - # Only check main index if index URL is given: - main_index_url = None - if self.index_urls: - # Check that we have the url_name correctly spelled: - main_index_url = Link(mkurl_pypi_url(self.index_urls[0]), trusted=True) - # This will also cache the page, so it's okay that we get it again later: - page = self._get_page(main_index_url, req) - if page is None: - url_name = self._find_url_name(Link(self.index_urls[0], trusted=True), url_name, req) or req.url_name - - if url_name is not None: - locations = [ - mkurl_pypi_url(url) - for url in self.index_urls] + self.find_links - else: - locations = list(self.find_links) - for version in req.absolute_versions: - if url_name is not None and main_index_url is not None: - locations = [ - posixpath.join(main_index_url.url, version)] + locations - - file_locations, url_locations = self._sort_locations(locations) - _flocations, _ulocations = self._sort_locations(self.dependency_links) - file_locations.extend(_flocations) - - # We trust every url that the user has given us whether it was given - # via --index-url or --find-links - locations = [Link(url, trusted=True) for url in url_locations] - - # We explicitly do not trust links that came from dependency_links - locations.extend([Link(url) for url in _ulocations]) - - logger.debug('URLs to search for versions for %s:' % req) - for location in locations: - logger.debug('* %s' % location) - - # Determine if this url used a secure transport mechanism - parsed = urlparse.urlparse(str(location)) - if parsed.scheme in INSECURE_SCHEMES: - secure_schemes = INSECURE_SCHEMES[parsed.scheme] - - if len(secure_schemes) == 1: - ctx = (location, parsed.scheme, secure_schemes[0], - parsed.netloc) - logger.warn("%s uses an insecure transport scheme (%s). 
" - "Consider using %s if %s has it available" % - ctx) - elif len(secure_schemes) > 1: - ctx = (location, parsed.scheme, ", ".join(secure_schemes), - parsed.netloc) - logger.warn("%s uses an insecure transport scheme (%s). " - "Consider using one of %s if %s has any of " - "them available" % ctx) - else: - ctx = (location, parsed.scheme) - logger.warn("%s uses an insecure transport scheme (%s)." % - ctx) - - found_versions = [] - found_versions.extend( - self._package_versions( - # We trust every directly linked archive in find_links - [Link(url, '-f', trusted=True) for url in self.find_links], req.name.lower())) - page_versions = [] - for page in self._get_pages(locations, req): - logger.debug('Analyzing links from page %s' % page.url) - logger.indent += 2 - try: - page_versions.extend(self._package_versions(page.links, req.name.lower())) - finally: - logger.indent -= 2 - dependency_versions = list(self._package_versions( - [Link(url) for url in self.dependency_links], req.name.lower())) - if dependency_versions: - logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions])) - file_versions = list(self._package_versions( - [Link(url) for url in file_locations], req.name.lower())) - if not found_versions and not page_versions and not dependency_versions and not file_versions: - logger.fatal('Could not find any downloads that satisfy the requirement %s' % req) - - if self.need_warn_external: - logger.warn("Some externally hosted files were ignored (use " - "--allow-external %s to allow)." % req.name) - - if self.need_warn_unverified: - logger.warn("Some insecure and unverifiable files were ignored" - " (use --allow-unverified %s to allow)." % - req.name) - - raise DistributionNotFound('No distributions at all found for %s' % req) - installed_version = [] - if req.satisfied_by is not None: - installed_version = [(req.satisfied_by.parsed_version, INSTALLED_VERSION, req.satisfied_by.version)] - if file_versions: - file_versions.sort(reverse=True) - logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions])) - #this is an intentional priority ordering - all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions - applicable_versions = [] - for (parsed_version, link, version) in all_versions: - if version not in req.req: - logger.info("Ignoring link %s, version %s doesn't match %s" - % (link, version, ','.join([''.join(s) for s in req.req.specs]))) - continue - elif is_prerelease(version) and not (self.allow_all_prereleases or req.prereleases): - # If this version isn't the already installed one, then - # ignore it if it's a pre-release. - if link is not INSTALLED_VERSION: - logger.info("Ignoring link %s, version %s is a pre-release (use --pre to allow)." 
% (link, version)) - continue - applicable_versions.append((parsed_version, link, version)) - applicable_versions = self._sort_versions(applicable_versions) - existing_applicable = bool([link for parsed_version, link, version in applicable_versions if link is INSTALLED_VERSION]) - if not upgrade and existing_applicable: - if applicable_versions[0][1] is INSTALLED_VERSION: - logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement' - % req.satisfied_by.version) - else: - logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)' - % (req.satisfied_by.version, applicable_versions[0][2])) - return None - if not applicable_versions: - logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)' - % (req, ', '.join([version for parsed_version, link, version in all_versions]))) - - if self.need_warn_external: - logger.warn("Some externally hosted files were ignored (use " - "--allow-external to allow).") - - if self.need_warn_unverified: - logger.warn("Some insecure and unverifiable files were ignored" - " (use --allow-unverified %s to allow)." % - req.name) - - raise DistributionNotFound('No distributions matching the version for %s' % req) - if applicable_versions[0][1] is INSTALLED_VERSION: - # We have an existing version, and its the best version - logger.info('Installed version (%s) is most up-to-date (past versions: %s)' - % (req.satisfied_by.version, ', '.join([version for parsed_version, link, version in applicable_versions[1:]]) or 'none')) - raise BestVersionAlreadyInstalled - if len(applicable_versions) > 1: - logger.info('Using version %s (newest of versions: %s)' % - (applicable_versions[0][2], ', '.join([version for parsed_version, link, version in applicable_versions]))) - - selected_version = applicable_versions[0][1] - - if (selected_version.internal is not None - and not selected_version.internal): - logger.warn("%s an externally hosted file and may be " - "unreliable" % req.name) - - if (selected_version.verifiable is not None - and not selected_version.verifiable): - logger.warn("%s is potentially insecure and " - "unverifiable." % req.name) - - if selected_version._deprecated_regex: - logger.deprecated( - "1.7", - "%s discovered using a deprecated method of parsing, " - "in the future it will no longer be discovered" % req.name - ) - - return selected_version - - - def _find_url_name(self, index_url, url_name, req): - """Finds the true URL name of a package, when the given name isn't quite correct. - This is usually used to implement case-insensitivity.""" - if not index_url.url.endswith('/'): - # Vaguely part of the PyPI API... weird but true. - ## FIXME: bad to modify this? 
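A minimal sketch of the preference ordering implemented by the _link_sort_key / _sort_versions logic above, assuming simplified stand-in types; Candidate, the sample data and the numeric priorities are hypothetical, not pip's real objects:

    # Higher sort keys win after reverse sorting: version first, then a priority
    # that prefers the installed version, then better-matching wheels, then sdists.
    from collections import namedtuple

    Candidate = namedtuple("Candidate", "version kind support_index")

    def sort_key(c, num_supported_tags=20):
        if c.kind == "installed":
            pri = 1
        elif c.kind == "wheel":
            pri = -c.support_index          # smaller support index == better platform match
        else:                               # sdist
            pri = -num_supported_tags
        return (c.version, pri)

    candidates = [
        Candidate((1, 0), "sdist", None),
        Candidate((1, 0), "wheel", 3),
        Candidate((0, 9), "installed", None),
    ]
    best_first = sorted(candidates, key=sort_key, reverse=True)
    print([c.kind for c in best_first])     # ['wheel', 'sdist', 'installed']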
- index_url.url += '/' - page = self._get_page(index_url, req) - if page is None: - logger.fatal('Cannot fetch index base URL %s' % index_url) - return - norm_name = normalize_name(req.url_name) - for link in page.links: - base = posixpath.basename(link.path.rstrip('/')) - if norm_name == normalize_name(base): - logger.notify('Real name of requirement %s is %s' % (url_name, base)) - return base - return None - - def _get_pages(self, locations, req): - """ - Yields (page, page_url) from the given locations, skipping - locations that have errors, and adding download/homepage links - """ - all_locations = list(locations) - seen = set() - - while all_locations: - location = all_locations.pop(0) - if location in seen: - continue - seen.add(location) - - page = self._get_page(location, req) - if page is None: - continue - - yield page - - for link in page.rel_links(): - normalized = normalize_name(req.name).lower() - - if (not normalized in self.allow_external - and not self.allow_all_external): - self.need_warn_external = True - logger.debug("Not searching %s for files because external " - "urls are disallowed." % link) - continue - - if (link.trusted is not None - and not link.trusted - and not normalized in self.allow_unverified): - logger.debug("Not searching %s for urls, it is an " - "untrusted link and cannot produce safe or " - "verifiable files." % link) - self.need_warn_unverified = True - continue - - all_locations.append(link) - - _egg_fragment_re = re.compile(r'#egg=([^&]*)') - _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I) - _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') - - def _sort_links(self, links): - "Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates" - eggs, no_eggs = [], [] - seen = set() - for link in links: - if link not in seen: - seen.add(link) - if link.egg_fragment: - eggs.append(link) - else: - no_eggs.append(link) - return no_eggs + eggs - - def _package_versions(self, links, search_name): - for link in self._sort_links(links): - for v in self._link_package_versions(link, search_name): - yield v - - def _known_extensions(self): - extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip') - if self.use_wheel: - return extensions + (wheel_ext,) - return extensions - - def _link_package_versions(self, link, search_name): - """ - Return an iterable of triples (pkg_resources_version_key, - link, python_version) that can be extracted from the given - link. - - Meant to be overridden by subclasses, not called by clients. 
- """ - platform = get_platform() - - version = None - if link.egg_fragment: - egg_info = link.egg_fragment - else: - egg_info, ext = link.splitext() - if not ext: - if link not in self.logged_links: - logger.debug('Skipping link %s; not a file' % link) - self.logged_links.add(link) - return [] - if egg_info.endswith('.tar'): - # Special double-extension case: - egg_info = egg_info[:-4] - ext = '.tar' + ext - if ext not in self._known_extensions(): - if link not in self.logged_links: - logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext)) - self.logged_links.add(link) - return [] - if "macosx10" in link.path and ext == '.zip': - if link not in self.logged_links: - logger.debug('Skipping link %s; macosx10 one' % (link)) - self.logged_links.add(link) - return [] - if ext == wheel_ext: - try: - wheel = Wheel(link.filename) - except InvalidWheelFilename: - logger.debug('Skipping %s because the wheel filename is invalid' % link) - return [] - if wheel.name.lower() != search_name.lower(): - logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) - return [] - if not wheel.supported(): - logger.debug('Skipping %s because it is not compatible with this Python' % link) - return [] - # This is a dirty hack to prevent installing Binary Wheels from - # PyPI unless it is a Windows or Mac Binary Wheel. This is - # paired with a change to PyPI disabling uploads for the - # same. Once we have a mechanism for enabling support for binary - # wheels on linux that deals with the inherent problems of - # binary distribution this can be removed. - comes_from = getattr(link, "comes_from", None) - if (( - not platform.startswith('win') - and not platform.startswith('macosx') - ) - and comes_from is not None - and urlparse.urlparse(comes_from.url).netloc.endswith( - "pypi.python.org")): - if not wheel.supported(tags=supported_tags_noarch): - logger.debug( - "Skipping %s because it is a pypi-hosted binary " - "Wheel on an unsupported platform" % link - ) - return [] - version = wheel.version - - if not version: - version = self._egg_info_matches(egg_info, search_name, link) - if version is None: - logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) - return [] - - if (link.internal is not None - and not link.internal - and not normalize_name(search_name).lower() in self.allow_external - and not self.allow_all_external): - # We have a link that we are sure is external, so we should skip - # it unless we are allowing externals - logger.debug("Skipping %s because it is externally hosted." % link) - self.need_warn_external = True - return [] - - if (link.verifiable is not None - and not link.verifiable - and not (normalize_name(search_name).lower() - in self.allow_unverified)): - # We have a link that we are sure we cannot verify it's integrity, - # so we should skip it unless we are allowing unsafe installs - # for this requirement. - logger.debug("Skipping %s because it is an insecure and " - "unverifiable file." 
% link) - self.need_warn_unverified = True - return [] - - match = self._py_version_re.search(version) - if match: - version = version[:match.start()] - py_version = match.group(1) - if py_version != sys.version[:3]: - logger.debug('Skipping %s because Python version is incorrect' % link) - return [] - logger.debug('Found link %s, version: %s' % (link, version)) - return [(pkg_resources.parse_version(version), - link, - version)] - - def _egg_info_matches(self, egg_info, search_name, link): - match = self._egg_info_re.search(egg_info) - if not match: - logger.debug('Could not parse version from link: %s' % link) - return None - name = match.group(0).lower() - # To match the "safe" name that pkg_resources creates: - name = name.replace('_', '-') - # project name and version must be separated by a dash - look_for = search_name.lower() + "-" - if name.startswith(look_for): - return match.group(0)[len(look_for):] - else: - return None - - def _get_page(self, link, req): - return HTMLPage.get_page(link, req, - cache=self.cache, - session=self.session, - ) - - -class PageCache(object): - """Cache of HTML pages""" - - failure_limit = 3 - - def __init__(self): - self._failures = {} - self._pages = {} - self._archives = {} - - def too_many_failures(self, url): - return self._failures.get(url, 0) >= self.failure_limit - - def get_page(self, url): - return self._pages.get(url) - - def is_archive(self, url): - return self._archives.get(url, False) - - def set_is_archive(self, url, value=True): - self._archives[url] = value - - def add_page_failure(self, url, level): - self._failures[url] = self._failures.get(url, 0)+level - - def add_page(self, urls, page): - for url in urls: - self._pages[url] = page - - -class HTMLPage(object): - """Represents one page, along with its URL""" - - ## FIXME: these regexes are horrible hacks: - _homepage_re = re.compile(r'\s*home\s*page', re.I) - _download_re = re.compile(r'\s*download\s+url', re.I) - _href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S) - - def __init__(self, content, url, headers=None, trusted=None): - self.content = content - self.parsed = html5lib.parse(self.content, namespaceHTMLElements=False) - self.url = url - self.headers = headers - self.trusted = trusted - - def __str__(self): - return self.url - - @classmethod - def get_page(cls, link, req, cache=None, skip_archives=True, session=None): - if session is None: - session = PipSession() - - url = link.url - url = url.split('#', 1)[0] - if cache.too_many_failures(url): - return None - - # Check for VCS schemes that do not support lookup as web pages. 
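An illustrative sketch of the filename-to-version extraction performed by _egg_info_matches above; the helper name and the sample inputs are made up, but the regex and the "safe name" normalisation mirror the deleted code:

    import re

    _egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)

    def version_from_egg_info(egg_info, project_name):
        match = _egg_info_re.search(egg_info)
        if not match:
            return None
        # pkg_resources-style "safe" name: lower-case with underscores as dashes
        name = match.group(0).lower().replace('_', '-')
        prefix = project_name.lower() + '-'
        return match.group(0)[len(prefix):] if name.startswith(prefix) else None

    print(version_from_egg_info('Foo_Bar-1.2.3', 'foo-bar'))   # -> 1.2.3
    print(version_from_egg_info('other-1.0', 'foo-bar'))       # -> None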
- from pip.vcs import VcsSupport - for scheme in VcsSupport.schemes: - if url.lower().startswith(scheme) and url[len(scheme)] in '+:': - logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals()) - return None - - if cache is not None: - inst = cache.get_page(url) - if inst is not None: - return inst - try: - if skip_archives: - if cache is not None: - if cache.is_archive(url): - return None - filename = link.filename - for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']: - if filename.endswith(bad_ext): - content_type = cls._get_content_type(url, - session=session, - ) - if content_type.lower().startswith('text/html'): - break - else: - logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type)) - if cache is not None: - cache.set_is_archive(url) - return None - logger.debug('Getting page %s' % url) - - # Tack index.html onto file:// URLs that point to directories - (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url) - if scheme == 'file' and os.path.isdir(url2pathname(path)): - # add trailing slash if not present so urljoin doesn't trim final segment - if not url.endswith('/'): - url += '/' - url = urlparse.urljoin(url, 'index.html') - logger.debug(' file: URL is directory, getting %s' % url) - - resp = session.get(url, headers={"Accept": "text/html"}) - resp.raise_for_status() - - # The check for archives above only works if the url ends with - # something that looks like an archive. However that is not a - # requirement. For instance http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download - # redirects to http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz - # Unless we issue a HEAD request on every url we cannot know - # ahead of time for sure if something is HTML or not. However we - # can check after we've downloaded it. 
- content_type = resp.headers.get('Content-Type', 'unknown') - if not content_type.lower().startswith("text/html"): - logger.debug('Skipping page %s because of Content-Type: %s' % - (link, content_type)) - if cache is not None: - cache.set_is_archive(url) - return None - - inst = cls(resp.text, resp.url, resp.headers, trusted=link.trusted) - except requests.HTTPError as exc: - level = 2 if exc.response.status_code == 404 else 1 - cls._handle_fail(req, link, exc, url, cache=cache, level=level) - except requests.ConnectionError as exc: - cls._handle_fail( - req, link, "connection error: %s" % exc, url, - cache=cache, - ) - except requests.Timeout: - cls._handle_fail(req, link, "timed out", url, cache=cache) - except SSLError as exc: - reason = ("There was a problem confirming the ssl certificate: " - "%s" % exc) - cls._handle_fail(req, link, reason, url, - cache=cache, - level=2, - meth=logger.notify, - ) - else: - if cache is not None: - cache.add_page([url, resp.url], inst) - return inst - - @staticmethod - def _handle_fail(req, link, reason, url, cache=None, level=1, meth=None): - if meth is None: - meth = logger.info - - meth("Could not fetch URL %s: %s", link, reason) - meth("Will skip URL %s when looking for download links for %s" % - (link.url, req)) - - if cache is not None: - cache.add_page_failure(url, level) - - @staticmethod - def _get_content_type(url, session=None): - """Get the Content-Type of the given url, using a HEAD request""" - if session is None: - session = PipSession() - - scheme, netloc, path, query, fragment = urlparse.urlsplit(url) - if not scheme in ('http', 'https', 'ftp', 'ftps'): - ## FIXME: some warning or something? - ## assertion error? - return '' - - resp = session.head(url, allow_redirects=True) - resp.raise_for_status() - - return resp.headers.get("Content-Type", "") - - @property - def api_version(self): - if not hasattr(self, "_api_version"): - _api_version = None - - metas = [x for x in self.parsed.findall(".//meta") - if x.get("name", "").lower() == "api-version"] - if metas: - try: - _api_version = int(metas[0].get("value", None)) - except (TypeError, ValueError): - _api_version = None - self._api_version = _api_version - return self._api_version - - @property - def base_url(self): - if not hasattr(self, "_base_url"): - base = self.parsed.find(".//base") - if base is not None and base.get("href"): - self._base_url = base.get("href") - else: - self._base_url = self.url - return self._base_url - - @property - def links(self): - """Yields all links in the page""" - for anchor in self.parsed.findall(".//a"): - if anchor.get("href"): - href = anchor.get("href") - url = self.clean_link(urlparse.urljoin(self.base_url, href)) - - # Determine if this link is internal. If that distinction - # doesn't make sense in this context, then we don't make - # any distinction. 
- internal = None - if self.api_version and self.api_version >= 2: - # Only api_versions >= 2 have a distinction between - # external and internal links - internal = bool(anchor.get("rel") - and "internal" in anchor.get("rel").split()) - - yield Link(url, self, internal=internal) - - def rel_links(self): - for url in self.explicit_rel_links(): - yield url - for url in self.scraped_rel_links(): - yield url - - def explicit_rel_links(self, rels=('homepage', 'download')): - """Yields all links with the given relations""" - rels = set(rels) - - for anchor in self.parsed.findall(".//a"): - if anchor.get("rel") and anchor.get("href"): - found_rels = set(anchor.get("rel").split()) - # Determine the intersection between what rels were found and - # what rels were being looked for - if found_rels & rels: - href = anchor.get("href") - url = self.clean_link(urlparse.urljoin(self.base_url, href)) - yield Link(url, self, trusted=False) - - def scraped_rel_links(self): - # Can we get rid of this horrible horrible method? - for regex in (self._homepage_re, self._download_re): - match = regex.search(self.content) - if not match: - continue - href_match = self._href_re.search(self.content, pos=match.end()) - if not href_match: - continue - url = href_match.group(1) or href_match.group(2) or href_match.group(3) - if not url: - continue - url = self.clean_link(urlparse.urljoin(self.base_url, url)) - yield Link(url, self, trusted=False, _deprecated_regex=True) - - _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) - - def clean_link(self, url): - """Makes sure a link is fully encoded. That is, if a ' ' shows up in - the link, it will be rewritten to %20 (while not over-quoting - % or other characters).""" - return self._clean_re.sub( - lambda match: '%%%2x' % ord(match.group(0)), url) - - -class Link(object): - - def __init__(self, url, comes_from=None, internal=None, trusted=None, - _deprecated_regex=False): - self.url = url - self.comes_from = comes_from - self.internal = internal - self.trusted = trusted - self._deprecated_regex = _deprecated_regex - - def __str__(self): - if self.comes_from: - return '%s (from %s)' % (self.url, self.comes_from) - else: - return str(self.url) - - def __repr__(self): - return '' % self - - def __eq__(self, other): - return self.url == other.url - - def __ne__(self, other): - return self.url != other.url - - def __lt__(self, other): - return self.url < other.url - - def __le__(self, other): - return self.url <= other.url - - def __gt__(self, other): - return self.url > other.url - - def __ge__(self, other): - return self.url >= other.url - - def __hash__(self): - return hash(self.url) - - @property - def filename(self): - _, netloc, path, _, _ = urlparse.urlsplit(self.url) - name = posixpath.basename(path.rstrip('/')) or netloc - assert name, ('URL %r produced no filename' % self.url) - return name - - @property - def scheme(self): - return urlparse.urlsplit(self.url)[0] - - @property - def path(self): - return urlparse.urlsplit(self.url)[2] - - def splitext(self): - return splitext(posixpath.basename(self.path.rstrip('/'))) - - @property - def ext(self): - return self.splitext()[1] - - @property - def url_without_fragment(self): - scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url) - return urlparse.urlunsplit((scheme, netloc, path, query, None)) - - _egg_fragment_re = re.compile(r'#egg=([^&]*)') - - @property - def egg_fragment(self): - match = self._egg_fragment_re.search(self.url) - if not match: - return None - return match.group(1) - - 
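A small usage sketch of the "#egg=" fragment parsing used by the egg_fragment property and _egg_fragment_re above; the URL is a made-up example:

    import re

    _egg_fragment_re = re.compile(r'#egg=([^&]*)')

    url = 'git+https://example.invalid/repo.git#egg=SomeProject'
    match = _egg_fragment_re.search(url)
    print(match.group(1) if match else None)   # -> SomeProject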
_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)') - - @property - def hash(self): - match = self._hash_re.search(self.url) - if match: - return match.group(2) - return None - - @property - def hash_name(self): - match = self._hash_re.search(self.url) - if match: - return match.group(1) - return None - - @property - def show_url(self): - return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) - - @property - def verifiable(self): - """ - Returns True if this link can be verified after download, False if it - cannot, and None if we cannot determine. - """ - trusted = self.trusted or getattr(self.comes_from, "trusted", None) - if trusted is not None and trusted: - # This link came from a trusted source. It *may* be verifiable but - # first we need to see if this page is operating under the new - # API version. - try: - api_version = getattr(self.comes_from, "api_version", None) - api_version = int(api_version) - except (ValueError, TypeError): - api_version = None - - if api_version is None or api_version <= 1: - # This link is either trusted, or it came from a trusted, - # however it is not operating under the API version 2 so - # we can't make any claims about if it's safe or not - return - - if self.hash: - # This link came from a trusted source and it has a hash, so we - # can consider it safe. - return True - else: - # This link came from a trusted source, using the new API - # version, and it does not have a hash. It is NOT verifiable - return False - elif trusted is not None: - # This link came from an untrusted source and we cannot trust it - return False - - -# An object to represent the "link" for the installed version of a requirement. -# Using Inf as the url makes it sort higher. -INSTALLED_VERSION = Link(Inf) - - -def get_requirement_from_url(url): - """Get a requirement from the URL, if possible. This looks for #egg - in the URL""" - link = Link(url) - egg_info = link.egg_fragment - if not egg_info: - egg_info = splitext(link.filename)[0] - return package_to_requirement(egg_info) - - -def package_to_requirement(package_name): - """Translate a name like Foo-1.2 to Foo==1.3""" - match = re.search(r'^(.*?)-(dev|\d.*)', package_name) - if match: - name = match.group(1) - version = match.group(2) - else: - name = package_name - version = '' - if version: - return '%s==%s' % (name, version) - else: - return name diff --git a/Darwin/lib/python3.4/site-packages/pip/locations.py b/Darwin/lib/python3.4/site-packages/pip/locations.py deleted file mode 100644 index 1d40265..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/locations.py +++ /dev/null @@ -1,172 +0,0 @@ -"""Locations where we look for configs, install stuff, etc""" - -import sys -import site -import os -import tempfile -from distutils.command.install import install, SCHEME_KEYS -import getpass -from pip.backwardcompat import get_python_lib, get_path_uid, user_site -import pip.exceptions - - -DELETE_MARKER_MESSAGE = '''\ -This file is placed here by pip to indicate the source was put -here by pip. - -Once this package is successfully installed this source code will be -deleted (unless you remove this file). -''' -PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' - -def write_delete_marker_file(directory): - """ - Write the pip delete marker file into this directory. 
- """ - filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) - marker_fp = open(filepath, 'w') - marker_fp.write(DELETE_MARKER_MESSAGE) - marker_fp.close() - - -def running_under_virtualenv(): - """ - Return True if we're running inside a virtualenv, False otherwise. - - """ - if hasattr(sys, 'real_prefix'): - return True - elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): - return True - - return False - - -def virtualenv_no_global(): - """ - Return True if in a venv and no system site packages. - """ - #this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file - site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) - no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') - if running_under_virtualenv() and os.path.isfile(no_global_file): - return True - -def __get_username(): - """ Returns the effective username of the current process. """ - if sys.platform == 'win32': - return getpass.getuser() - import pwd - return pwd.getpwuid(os.geteuid()).pw_name - -def _get_build_prefix(): - """ Returns a safe build_prefix """ - path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' % - __get_username()) - if sys.platform == 'win32': - """ on windows(tested on 7) temp dirs are isolated """ - return path - try: - os.mkdir(path) - write_delete_marker_file(path) - except OSError: - file_uid = None - try: - # raises OSError for symlinks - # https://github.com/pypa/pip/pull/935#discussion_r5307003 - file_uid = get_path_uid(path) - except OSError: - file_uid = None - - if file_uid != os.geteuid(): - msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \ - % path - print (msg) - print("pip will not work until the temporary folder is " + \ - "either deleted or is a real directory owned by your user account.") - raise pip.exceptions.InstallationError(msg) - return path - -if running_under_virtualenv(): - build_prefix = os.path.join(sys.prefix, 'build') - src_prefix = os.path.join(sys.prefix, 'src') -else: - # Note: intentionally NOT using mkdtemp - # See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp - build_prefix = _get_build_prefix() - - ## FIXME: keep src in cwd for now (it is not a temporary folder) - try: - src_prefix = os.path.join(os.getcwd(), 'src') - except OSError: - # In case the current working directory has been renamed or deleted - sys.exit("The folder you are executing pip from can no longer be found.") - -# under Mac OS X + virtualenv sys.prefix is not properly resolved -# it is something like /path/to/python/bin/.. -# Note: using realpath due to tmp dirs on OSX being symlinks -build_prefix = os.path.abspath(os.path.realpath(build_prefix)) -src_prefix = os.path.abspath(src_prefix) - -# FIXME doesn't account for venv linked to global site-packages - -site_packages = get_python_lib() -user_dir = os.path.expanduser('~') -if sys.platform == 'win32': - bin_py = os.path.join(sys.prefix, 'Scripts') - bin_user = os.path.join(user_site, 'Scripts') if user_site else None - # buildout uses 'bin' on Windows too? 
- if not os.path.exists(bin_py): - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') if user_site else None - default_storage_dir = os.path.join(user_dir, 'pip') - default_config_file = os.path.join(default_storage_dir, 'pip.ini') - default_log_file = os.path.join(default_storage_dir, 'pip.log') -else: - bin_py = os.path.join(sys.prefix, 'bin') - bin_user = os.path.join(user_site, 'bin') if user_site else None - default_storage_dir = os.path.join(user_dir, '.pip') - default_config_file = os.path.join(default_storage_dir, 'pip.conf') - default_log_file = os.path.join(default_storage_dir, 'pip.log') - - # Forcing to use /usr/local/bin for standard Mac OS X framework installs - # Also log to ~/Library/Logs/ for use with the Console.app log viewer - if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': - bin_py = '/usr/local/bin' - default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log') - - -def distutils_scheme(dist_name, user=False, home=None, root=None): - """ - Return a distutils install scheme - """ - from distutils.dist import Distribution - - scheme = {} - d = Distribution({'name': dist_name}) - d.parse_config_files() - i = d.get_command_obj('install', create=True) - # NOTE: setting user or home has the side-effect of creating the home dir or - # user base for installations during finalize_options() - # ideally, we'd prefer a scheme class that has no side-effects. - i.user = user or i.user - i.home = home or i.home - i.root = root or i.root - i.finalize_options() - for key in SCHEME_KEYS: - scheme[key] = getattr(i, 'install_'+key) - - if running_under_virtualenv(): - scheme['headers'] = os.path.join(sys.prefix, - 'include', - 'site', - 'python' + sys.version[:3], - dist_name) - - if root is not None: - scheme["headers"] = os.path.join( - root, - os.path.abspath(scheme["headers"])[1:], - ) - - return scheme diff --git a/Darwin/lib/python3.4/site-packages/pip/log.py b/Darwin/lib/python3.4/site-packages/pip/log.py deleted file mode 100644 index 4e31773..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/log.py +++ /dev/null @@ -1,276 +0,0 @@ -"""Logging -""" - -import sys -import os -import logging - -from pip import backwardcompat -from pip._vendor import colorama, pkg_resources - - -def _color_wrap(*colors): - def wrapped(inp): - return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) - return wrapped - - -def should_color(consumer, environ, std=(sys.stdout, sys.stderr)): - real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32) - else consumer.wrapped) - - # If consumer isn't stdout or stderr we shouldn't colorize it - if real_consumer not in std: - return False - - # If consumer is a tty we should color it - if hasattr(real_consumer, "isatty") and real_consumer.isatty(): - return True - - # If we have an ASNI term we should color it - if environ.get("TERM") == "ANSI": - return True - - # If anything else we should not color it - return False - - -def should_warn(current_version, removal_version): - # Our Significant digits on versions is 2, so remove everything but the - # first two places. 
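A sketch of how an install scheme can be queried from distutils, mirroring the distutils_scheme() helper shown above. It assumes a Python that still ships distutils (the module was removed in 3.12), and the distribution name is arbitrary:

    from distutils.command.install import SCHEME_KEYS
    from distutils.dist import Distribution

    def query_scheme(dist_name):
        d = Distribution({'name': dist_name})
        d.parse_config_files()
        cmd = d.get_command_obj('install', create=True)
        cmd.finalize_options()
        # After finalize_options() the install_* attributes hold the resolved paths
        return {key: getattr(cmd, 'install_' + key) for key in SCHEME_KEYS}

    print(query_scheme('example'))   # paths for purelib, platlib, headers, scripts, data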
- current_version = ".".join(current_version.split(".")[:2]) - removal_version = ".".join(removal_version.split(".")[:2]) - - # Our warning threshold is one minor version before removal, so we - # decrement the minor version by one - major, minor = removal_version.split(".") - minor = str(int(minor) - 1) - warn_version = ".".join([major, minor]) - - # Test if our current_version should be a warn - return (pkg_resources.parse_version(current_version) - < pkg_resources.parse_version(warn_version)) - - -class Logger(object): - """ - Logging object for use in command-line script. Allows ranges of - levels, to avoid some redundancy of displayed information. - """ - VERBOSE_DEBUG = logging.DEBUG - 1 - DEBUG = logging.DEBUG - INFO = logging.INFO - NOTIFY = (logging.INFO + logging.WARN) / 2 - WARN = WARNING = logging.WARN - ERROR = logging.ERROR - FATAL = logging.FATAL - - LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL] - - COLORS = { - WARN: _color_wrap(colorama.Fore.YELLOW), - ERROR: _color_wrap(colorama.Fore.RED), - FATAL: _color_wrap(colorama.Fore.RED), - } - - def __init__(self): - self.consumers = [] - self.indent = 0 - self.explicit_levels = False - self.in_progress = None - self.in_progress_hanging = False - - def add_consumers(self, *consumers): - if sys.platform.startswith("win"): - for level, consumer in consumers: - if hasattr(consumer, "write"): - self.consumers.append( - (level, colorama.AnsiToWin32(consumer)), - ) - else: - self.consumers.append((level, consumer)) - else: - self.consumers.extend(consumers) - - def debug(self, msg, *args, **kw): - self.log(self.DEBUG, msg, *args, **kw) - - def info(self, msg, *args, **kw): - self.log(self.INFO, msg, *args, **kw) - - def notify(self, msg, *args, **kw): - self.log(self.NOTIFY, msg, *args, **kw) - - def warn(self, msg, *args, **kw): - self.log(self.WARN, msg, *args, **kw) - - def error(self, msg, *args, **kw): - self.log(self.ERROR, msg, *args, **kw) - - def fatal(self, msg, *args, **kw): - self.log(self.FATAL, msg, *args, **kw) - - def deprecated(self, removal_version, msg, *args, **kwargs): - """ - Logs deprecation message which is log level WARN if the - ``removal_version`` is > 1 minor release away and log level ERROR - otherwise. - - removal_version should be the version that the deprecated feature is - expected to be removed in, so something that will not exist in - version 1.7, but will in 1.6 would have a removal_version of 1.7. - """ - from pip import __version__ - - if should_warn(__version__, removal_version): - self.warn(msg, *args, **kwargs) - else: - self.error(msg, *args, **kwargs) - - def log(self, level, msg, *args, **kw): - if args: - if kw: - raise TypeError( - "You may give positional or keyword arguments, not both") - args = args or kw - - # render - if args: - rendered = msg % args - else: - rendered = msg - rendered = ' ' * self.indent + rendered - if self.explicit_levels: - ## FIXME: should this be a name, not a level number? 
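A simplified sketch of the deprecation-warning window computed by should_warn() above, using plain integer tuples instead of pkg_resources.parse_version:

    def should_warn(current_version, removal_version):
        current = tuple(int(p) for p in current_version.split('.')[:2])
        major, minor = (int(p) for p in removal_version.split('.')[:2])
        warn_from = (major, minor - 1)   # warn starting one minor release before removal
        return current < warn_from

    print(should_warn('1.5', '1.7'))   # True  -> Logger.deprecated() emits a warning
    print(should_warn('1.6', '1.7'))   # False -> Logger.deprecated() emits an error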
- rendered = '%02i %s' % (level, rendered) - - for consumer_level, consumer in self.consumers: - if self.level_matches(level, consumer_level): - if (self.in_progress_hanging - and consumer in (sys.stdout, sys.stderr)): - self.in_progress_hanging = False - sys.stdout.write('\n') - sys.stdout.flush() - if hasattr(consumer, 'write'): - write_content = rendered + '\n' - if should_color(consumer, os.environ): - # We are printing to stdout or stderr and it supports - # colors so render our text colored - colorizer = self.COLORS.get(level, lambda x: x) - write_content = colorizer(write_content) - - consumer.write(write_content) - if hasattr(consumer, 'flush'): - consumer.flush() - else: - consumer(rendered) - - def _show_progress(self): - """Should we display download progress?""" - return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty()) - - def start_progress(self, msg): - assert not self.in_progress, ( - "Tried to start_progress(%r) while in_progress %r" - % (msg, self.in_progress)) - if self._show_progress(): - sys.stdout.write(' ' * self.indent + msg) - sys.stdout.flush() - self.in_progress_hanging = True - else: - self.in_progress_hanging = False - self.in_progress = msg - self.last_message = None - - def end_progress(self, msg='done.'): - assert self.in_progress, ( - "Tried to end_progress without start_progress") - if self._show_progress(): - if not self.in_progress_hanging: - # Some message has been printed out since start_progress - sys.stdout.write('...' + self.in_progress + msg + '\n') - sys.stdout.flush() - else: - # These erase any messages shown with show_progress (besides .'s) - logger.show_progress('') - logger.show_progress('') - sys.stdout.write(msg + '\n') - sys.stdout.flush() - self.in_progress = None - self.in_progress_hanging = False - - def show_progress(self, message=None): - """If we are in a progress scope, and no log messages have been - shown, write out another '.'""" - if self.in_progress_hanging: - if message is None: - sys.stdout.write('.') - sys.stdout.flush() - else: - if self.last_message: - padding = ' ' * max(0, len(self.last_message) - len(message)) - else: - padding = '' - sys.stdout.write('\r%s%s%s%s' % - (' ' * self.indent, self.in_progress, message, padding)) - sys.stdout.flush() - self.last_message = message - - def stdout_level_matches(self, level): - """Returns true if a message at this level will go to stdout""" - return self.level_matches(level, self._stdout_level()) - - def _stdout_level(self): - """Returns the level that stdout runs at""" - for level, consumer in self.consumers: - if consumer is sys.stdout: - return level - return self.FATAL - - def level_matches(self, level, consumer_level): - """ - >>> l = Logger() - >>> l.level_matches(3, 4) - False - >>> l.level_matches(3, 2) - True - >>> l.level_matches(slice(None, 3), 3) - False - >>> l.level_matches(slice(None, 3), 2) - True - >>> l.level_matches(slice(1, 3), 1) - True - >>> l.level_matches(slice(2, 3), 1) - False - """ - if isinstance(level, slice): - start, stop = level.start, level.stop - if start is not None and start > consumer_level: - return False - if stop is not None or stop <= consumer_level: - return False - return True - else: - return level >= consumer_level - - @classmethod - def level_for_integer(cls, level): - levels = cls.LEVELS - if level < 0: - return levels[0] - if level >= len(levels): - return levels[-1] - return levels[level] - - def move_stdout_to_stderr(self): - to_remove = [] - to_add = [] - for consumer_level, consumer in self.consumers: - if 
consumer == sys.stdout: - to_remove.append((consumer_level, consumer)) - to_add.append((consumer_level, sys.stderr)) - for item in to_remove: - self.consumers.remove(item) - self.consumers.extend(to_add) - -logger = Logger() diff --git a/Darwin/lib/python3.4/site-packages/pip/req.py b/Darwin/lib/python3.4/site-packages/pip/req.py deleted file mode 100644 index e9ea20f..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/req.py +++ /dev/null @@ -1,1931 +0,0 @@ -from email.parser import FeedParser -import os -import imp -import locale -import re -import sys -import shutil -import tempfile -import textwrap -import zipfile - -from distutils.util import change_root -from pip.locations import (bin_py, running_under_virtualenv,PIP_DELETE_MARKER_FILENAME, - write_delete_marker_file, bin_user) -from pip.exceptions import (InstallationError, UninstallationError, UnsupportedWheel, - BestVersionAlreadyInstalled, InvalidWheelFilename, - DistributionNotFound, PreviousBuildDirError) -from pip.vcs import vcs -from pip.log import logger -from pip.util import (display_path, rmtree, ask, ask_path_exists, backup_dir, - is_installable_dir, is_local, dist_is_local, - dist_in_usersite, dist_in_site_packages, renames, - normalize_path, egg_link_path, make_path_relative, - call_subprocess, is_prerelease, normalize_name) -from pip.backwardcompat import (urlparse, urllib, uses_pycache, - ConfigParser, string_types, HTTPError, - get_python_version, b) -from pip.index import Link -from pip.locations import build_prefix -from pip.download import (PipSession, get_file_content, is_url, url_to_path, - path_to_url, is_archive_file, - unpack_vcs_link, is_vcs_url, is_file_url, - unpack_file_url, unpack_http_url) -import pip.wheel -from pip.wheel import move_wheel_files, Wheel, wheel_ext -from pip._vendor import pkg_resources, six - - -def read_text_file(filename): - """Return the contents of *filename*. - - Try to decode the file contents with utf-8, the preffered system encoding - (e.g., cp1252 on some Windows machines) and latin1, in that order. Decoding - a byte string with latin1 will never raise an error. In the worst case, the - returned string will contain some garbage characters. - - """ - with open(filename, 'rb') as fp: - data = fp.read() - - encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] - for enc in encodings: - try: - data = data.decode(enc) - except UnicodeDecodeError: - continue - break - - assert type(data) != bytes # Latin1 should have worked. 
- return data - - -class InstallRequirement(object): - - def __init__(self, req, comes_from, source_dir=None, editable=False, - url=None, as_egg=False, update=True, prereleases=None, - editable_options=None, from_bundle=False, pycompile=True): - self.extras = () - if isinstance(req, string_types): - req = pkg_resources.Requirement.parse(req) - self.extras = req.extras - self.req = req - self.comes_from = comes_from - self.source_dir = source_dir - self.editable = editable - - if editable_options is None: - editable_options = {} - - self.editable_options = editable_options - self.url = url - self.as_egg = as_egg - self._egg_info_path = None - # This holds the pkg_resources.Distribution object if this requirement - # is already available: - self.satisfied_by = None - # This hold the pkg_resources.Distribution object if this requirement - # conflicts with another installed distribution: - self.conflicts_with = None - self._temp_build_dir = None - self._is_bundle = None - # True if the editable should be updated: - self.update = update - # Set to True after successful installation - self.install_succeeded = None - # UninstallPathSet of uninstalled distribution (for possible rollback) - self.uninstalled = None - self.use_user_site = False - self.target_dir = None - self.from_bundle = from_bundle - - self.pycompile = pycompile - - # True if pre-releases are acceptable - if prereleases: - self.prereleases = True - elif self.req is not None: - self.prereleases = any([is_prerelease(x[1]) and x[0] != "!=" for x in self.req.specs]) - else: - self.prereleases = False - - @classmethod - def from_editable(cls, editable_req, comes_from=None, default_vcs=None): - name, url, extras_override = parse_editable(editable_req, default_vcs) - if url.startswith('file:'): - source_dir = url_to_path(url) - else: - source_dir = None - - res = cls(name, comes_from, source_dir=source_dir, - editable=True, - url=url, - editable_options=extras_override, - prereleases=True) - - if extras_override is not None: - res.extras = extras_override - - return res - - @classmethod - def from_line(cls, name, comes_from=None, prereleases=None): - """Creates an InstallRequirement from a name, which might be a - requirement, directory containing 'setup.py', filename, or URL. - """ - url = None - name = name.strip() - req = None - path = os.path.normpath(os.path.abspath(name)) - link = None - - if is_url(name): - link = Link(name) - elif os.path.isdir(path) and (os.path.sep in name or name.startswith('.')): - if not is_installable_dir(path): - raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % name) - link = Link(path_to_url(name)) - elif is_archive_file(path): - if not os.path.isfile(path): - logger.warn('Requirement %r looks like a filename, but the file does not exist', name) - link = Link(path_to_url(name)) - - # If the line has an egg= definition, but isn't editable, pull the requirement out. - # Otherwise, assume the name is the req for the non URL/path/archive case. 
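A rough sketch of the classification that InstallRequirement.from_line() above performs on a requirement line; classify() and its return labels are hypothetical, and the URL-scheme and archive checks are simplified stand-ins for pip's is_url()/is_archive_file() helpers:

    import os

    ARCHIVE_EXTS = ('.tar.gz', '.tar.bz2', '.tgz', '.tar', '.zip', '.whl')

    def classify(line):
        line = line.strip()
        if '://' in line or line.startswith('file:'):
            return 'url'
        path = os.path.abspath(line)
        if os.path.isdir(path) and (os.path.sep in line or line.startswith('.')):
            return ('local directory' if os.path.isfile(os.path.join(path, 'setup.py'))
                    else 'not installable')
        if line.lower().endswith(ARCHIVE_EXTS):
            return 'archive'
        return 'plain requirement'

    print(classify('requests>=2.0'))                     # plain requirement
    print(classify('https://example.invalid/p.tar.gz'))  # url
    print(classify('./my-project'))  # depends on whether ./my-project exists with a setup.py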
- if link and req is None: - url = link.url_without_fragment - req = link.egg_fragment #when fragment is None, this will become an 'unnamed' requirement - - # Handle relative file URLs - if link.scheme == 'file' and re.search(r'\.\./', url): - url = path_to_url(os.path.normpath(os.path.abspath(link.path))) - - # fail early for invalid or unsupported wheels - if link.ext == wheel_ext: - wheel = Wheel(link.filename) # can raise InvalidWheelFilename - if not wheel.supported(): - raise UnsupportedWheel("%s is not a supported wheel on this platform." % wheel.filename) - - else: - req = name - - return cls(req, comes_from, url=url, prereleases=prereleases) - - def __str__(self): - if self.req: - s = str(self.req) - if self.url: - s += ' from %s' % self.url - else: - s = self.url - if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) - if self.comes_from: - if isinstance(self.comes_from, string_types): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += ' (from %s)' % comes_from - return s - - def from_path(self): - if self.req is None: - return None - s = str(self.req) - if self.comes_from: - if isinstance(self.comes_from, string_types): - comes_from = self.comes_from - else: - comes_from = self.comes_from.from_path() - if comes_from: - s += '->' + comes_from - return s - - def build_location(self, build_dir, unpack=True): - if self._temp_build_dir is not None: - return self._temp_build_dir - if self.req is None: - self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') - self._ideal_build_dir = build_dir - return self._temp_build_dir - if self.editable: - name = self.name.lower() - else: - name = self.name - # FIXME: Is there a better place to create the build_dir? 
(hg and bzr need this) - if not os.path.exists(build_dir): - _make_build_dir(build_dir) - return os.path.join(build_dir, name) - - def correct_build_location(self): - """If the build location was a temporary directory, this will move it - to a new more permanent location""" - if self.source_dir is not None: - return - assert self.req is not None - assert self._temp_build_dir - old_location = self._temp_build_dir - new_build_dir = self._ideal_build_dir - del self._ideal_build_dir - if self.editable: - name = self.name.lower() - else: - name = self.name - new_location = os.path.join(new_build_dir, name) - if not os.path.exists(new_build_dir): - logger.debug('Creating directory %s' % new_build_dir) - _make_build_dir(new_build_dir) - if os.path.exists(new_location): - raise InstallationError( - 'A package already exists in %s; please remove it to continue' - % display_path(new_location)) - logger.debug('Moving package %s from %s to new location %s' - % (self, display_path(old_location), display_path(new_location))) - shutil.move(old_location, new_location) - self._temp_build_dir = new_location - self.source_dir = new_location - self._egg_info_path = None - - @property - def name(self): - if self.req is None: - return None - return self.req.project_name - - @property - def url_name(self): - if self.req is None: - return None - return urllib.quote(self.req.unsafe_name) - - @property - def setup_py(self): - try: - import setuptools - except ImportError: - # Setuptools is not available - raise InstallationError( - "setuptools must be installed to install from a source " - "distribution" - ) - - setup_file = 'setup.py' - - if self.editable_options and 'subdirectory' in self.editable_options: - setup_py = os.path.join(self.source_dir, - self.editable_options['subdirectory'], - setup_file) - - else: - setup_py = os.path.join(self.source_dir, setup_file) - - # Python2 __file__ should not be unicode - if six.PY2 and isinstance(setup_py, six.text_type): - setup_py = setup_py.encode(sys.getfilesystemencoding()) - - return setup_py - - def run_egg_info(self, force_root_egg_info=False): - assert self.source_dir - if self.name: - logger.notify('Running setup.py (path:%s) egg_info for package %s' % (self.setup_py, self.name)) - else: - logger.notify('Running setup.py (path:%s) egg_info for package from %s' % (self.setup_py, self.url)) - logger.indent += 2 - try: - - # if it's distribute>=0.7, it won't contain an importable - # setuptools, and having an egg-info dir blocks the ability of - # setup.py to find setuptools plugins, so delete the egg-info dir if - # no setuptools. 
it will get recreated by the run of egg_info - # NOTE: this self.name check only works when installing from a specifier - # (not archive path/urls) - # TODO: take this out later - if self.name == 'distribute' and not os.path.isdir(os.path.join(self.source_dir, 'setuptools')): - rmtree(os.path.join(self.source_dir, 'distribute.egg-info')) - - script = self._run_setup_py - script = script.replace('__SETUP_PY__', repr(self.setup_py)) - script = script.replace('__PKG_NAME__', repr(self.name)) - egg_info_cmd = [sys.executable, '-c', script, 'egg_info'] - # We can't put the .egg-info files at the root, because then the source code will be mistaken - # for an installed egg, causing problems - if self.editable or force_root_egg_info: - egg_base_option = [] - else: - egg_info_dir = os.path.join(self.source_dir, 'pip-egg-info') - if not os.path.exists(egg_info_dir): - os.makedirs(egg_info_dir) - egg_base_option = ['--egg-base', 'pip-egg-info'] - call_subprocess( - egg_info_cmd + egg_base_option, - cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False, - command_level=logger.VERBOSE_DEBUG, - command_desc='python setup.py egg_info') - finally: - logger.indent -= 2 - if not self.req: - self.req = pkg_resources.Requirement.parse( - "%(Name)s==%(Version)s" % self.pkg_info()) - self.correct_build_location() - - ## FIXME: This is a lame hack, entirely for PasteScript which has - ## a self-provided entry point that causes this awkwardness - _run_setup_py = """ -__file__ = __SETUP_PY__ -from setuptools.command import egg_info -import pkg_resources -import os -import tokenize -def replacement_run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in pkg_resources.iter_entry_points('egg_info.writers'): - # require=False is the change we're making: - writer = ep.load(require=False) - if writer: - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - self.find_sources() -egg_info.egg_info.run = replacement_run -exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec')) -""" - - def egg_info_data(self, filename): - if self.satisfied_by is not None: - if not self.satisfied_by.has_metadata(filename): - return None - return self.satisfied_by.get_metadata(filename) - assert self.source_dir - filename = self.egg_info_path(filename) - if not os.path.exists(filename): - return None - data = read_text_file(filename) - return data - - def egg_info_path(self, filename): - if self._egg_info_path is None: - if self.editable: - base = self.source_dir - else: - base = os.path.join(self.source_dir, 'pip-egg-info') - filenames = os.listdir(base) - if self.editable: - filenames = [] - for root, dirs, files in os.walk(base): - for dir in vcs.dirnames: - if dir in dirs: - dirs.remove(dir) - # Iterate over a copy of ``dirs``, since mutating - # a list while iterating over it can cause trouble. - # (See https://github.com/pypa/pip/pull/462.) 
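A general illustration of the os.walk() pruning idiom referenced in the comment above: mutate `dirs` in place to skip subtrees, but iterate over a copy while deciding what to remove. The helper name and skip list are made up:

    import os

    SKIP = {'.git', '.hg', '.svn', 'test', 'tests'}

    def find_egg_info(base):
        for root, dirs, files in os.walk(base):
            for d in list(dirs):        # iterate over a copy; safe to mutate `dirs`
                if d in SKIP:
                    dirs.remove(d)      # prunes the subtree from the walk
            for d in dirs:
                if d.endswith('.egg-info'):
                    yield os.path.join(root, d)

    print(list(find_egg_info('.')))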
- for dir in list(dirs): - # Don't search in anything that looks like a virtualenv environment - if (os.path.exists(os.path.join(root, dir, 'bin', 'python')) - or os.path.exists(os.path.join(root, dir, 'Scripts', 'Python.exe'))): - dirs.remove(dir) - # Also don't search through tests - if dir == 'test' or dir == 'tests': - dirs.remove(dir) - filenames.extend([os.path.join(root, dir) - for dir in dirs]) - filenames = [f for f in filenames if f.endswith('.egg-info')] - - if not filenames: - raise InstallationError('No files/directories in %s (from %s)' % (base, filename)) - assert filenames, "No files/directories in %s (from %s)" % (base, filename) - - # if we have more than one match, we pick the toplevel one. This can - # easily be the case if there is a dist folder which contains an - # extracted tarball for testing purposes. - if len(filenames) > 1: - filenames.sort(key=lambda x: x.count(os.path.sep) + - (os.path.altsep and - x.count(os.path.altsep) or 0)) - self._egg_info_path = os.path.join(base, filenames[0]) - return os.path.join(self._egg_info_path, filename) - - def egg_info_lines(self, filename): - data = self.egg_info_data(filename) - if not data: - return [] - result = [] - for line in data.splitlines(): - line = line.strip() - if not line or line.startswith('#'): - continue - result.append(line) - return result - - def pkg_info(self): - p = FeedParser() - data = self.egg_info_data('PKG-INFO') - if not data: - logger.warn('No PKG-INFO file found in %s' % display_path(self.egg_info_path('PKG-INFO'))) - p.feed(data or '') - return p.close() - - @property - def dependency_links(self): - return self.egg_info_lines('dependency_links.txt') - - _requirements_section_re = re.compile(r'\[(.*?)\]') - - def requirements(self, extras=()): - in_extra = None - for line in self.egg_info_lines('requires.txt'): - match = self._requirements_section_re.match(line.lower()) - if match: - in_extra = match.group(1) - continue - if in_extra and in_extra not in extras: - logger.debug('skipping extra %s' % in_extra) - # Skip requirement for an extra we aren't requiring - continue - yield line - - @property - def absolute_versions(self): - for qualifier, version in self.req.specs: - if qualifier == '==': - yield version - - @property - def installed_version(self): - return self.pkg_info()['version'] - - def assert_source_matches_version(self): - assert self.source_dir - version = self.installed_version - if version not in self.req: - logger.warn('Requested %s, but installing version %s' % (self, self.installed_version)) - else: - logger.debug('Source in %s has version %s, which satisfies requirement %s' - % (display_path(self.source_dir), version, self)) - - def update_editable(self, obtain=True): - if not self.url: - logger.info("Cannot update repository at %s; repository location is unknown" % self.source_dir) - return - assert self.editable - assert self.source_dir - if self.url.startswith('file:'): - # Static paths don't get updated - return - assert '+' in self.url, "bad url: %r" % self.url - if not self.update: - return - vc_type, url = self.url.split('+', 1) - backend = vcs.get_backend(vc_type) - if backend: - vcs_backend = backend(self.url) - if obtain: - vcs_backend.obtain(self.source_dir) - else: - vcs_backend.export(self.source_dir) - else: - assert 0, ( - 'Unexpected version control type (in %s): %s' - % (self.url, vc_type)) - - def uninstall(self, auto_confirm=False): - """ - Uninstall the distribution currently satisfying this requirement. 
- - Prompts before removing or modifying files unless - ``auto_confirm`` is True. - - Refuses to delete or modify files outside of ``sys.prefix`` - - thus uninstallation within a virtual environment can only - modify that virtual environment, even if the virtualenv is - linked to global site-packages. - - """ - if not self.check_if_exists(): - raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,)) - dist = self.satisfied_by or self.conflicts_with - - paths_to_remove = UninstallPathSet(dist) - - pip_egg_info_path = os.path.join(dist.location, - dist.egg_name()) + '.egg-info' - dist_info_path = os.path.join(dist.location, - '-'.join(dist.egg_name().split('-')[:2]) - ) + '.dist-info' - # workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367 - debian_egg_info_path = pip_egg_info_path.replace( - '-py%s' % pkg_resources.PY_MAJOR, '') - easy_install_egg = dist.egg_name() + '.egg' - develop_egg_link = egg_link_path(dist) - - pip_egg_info_exists = os.path.exists(pip_egg_info_path) - debian_egg_info_exists = os.path.exists(debian_egg_info_path) - dist_info_exists = os.path.exists(dist_info_path) - if pip_egg_info_exists or debian_egg_info_exists: - # package installed by pip - if pip_egg_info_exists: - egg_info_path = pip_egg_info_path - else: - egg_info_path = debian_egg_info_path - paths_to_remove.add(egg_info_path) - if dist.has_metadata('installed-files.txt'): - for installed_file in dist.get_metadata('installed-files.txt').splitlines(): - path = os.path.normpath(os.path.join(egg_info_path, installed_file)) - paths_to_remove.add(path) - #FIXME: need a test for this elif block - #occurs with --single-version-externally-managed/--record outside of pip - elif dist.has_metadata('top_level.txt'): - if dist.has_metadata('namespace_packages.txt'): - namespaces = dist.get_metadata('namespace_packages.txt') - else: - namespaces = [] - for top_level_pkg in [p for p - in dist.get_metadata('top_level.txt').splitlines() - if p and p not in namespaces]: - path = os.path.join(dist.location, top_level_pkg) - paths_to_remove.add(path) - paths_to_remove.add(path + '.py') - paths_to_remove.add(path + '.pyc') - - elif dist.location.endswith(easy_install_egg): - # package installed by easy_install - paths_to_remove.add(dist.location) - easy_install_pth = os.path.join(os.path.dirname(dist.location), - 'easy-install.pth') - paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) - - elif develop_egg_link: - # develop egg - fh = open(develop_egg_link, 'r') - link_pointer = os.path.normcase(fh.readline().strip()) - fh.close() - assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location) - paths_to_remove.add(develop_egg_link) - easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), - 'easy-install.pth') - paths_to_remove.add_pth(easy_install_pth, dist.location) - elif dist_info_exists: - for path in pip.wheel.uninstallation_paths(dist): - paths_to_remove.add(path) - - # find distutils scripts= scripts - if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): - for script in dist.metadata_listdir('scripts'): - if dist_in_usersite(dist): - bin_dir = bin_user - else: - bin_dir = bin_py - paths_to_remove.add(os.path.join(bin_dir, script)) - if sys.platform == 'win32': - paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') - - # find console_scripts - if dist.has_metadata('entry_points.txt'): - config = ConfigParser.SafeConfigParser() - 
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt'))) - if config.has_section('console_scripts'): - for name, value in config.items('console_scripts'): - if dist_in_usersite(dist): - bin_dir = bin_user - else: - bin_dir = bin_py - paths_to_remove.add(os.path.join(bin_dir, name)) - if sys.platform == 'win32': - paths_to_remove.add(os.path.join(bin_dir, name) + '.exe') - paths_to_remove.add(os.path.join(bin_dir, name) + '.exe.manifest') - paths_to_remove.add(os.path.join(bin_dir, name) + '-script.py') - - paths_to_remove.remove(auto_confirm) - self.uninstalled = paths_to_remove - - def rollback_uninstall(self): - if self.uninstalled: - self.uninstalled.rollback() - else: - logger.error("Can't rollback %s, nothing uninstalled." - % (self.project_name,)) - - def commit_uninstall(self): - if self.uninstalled: - self.uninstalled.commit() - else: - logger.error("Can't commit %s, nothing uninstalled." - % (self.project_name,)) - - def archive(self, build_dir): - assert self.source_dir - create_archive = True - archive_name = '%s-%s.zip' % (self.name, self.installed_version) - archive_path = os.path.join(build_dir, archive_name) - if os.path.exists(archive_path): - response = ask_path_exists( - 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % - display_path(archive_path), ('i', 'w', 'b')) - if response == 'i': - create_archive = False - elif response == 'w': - logger.warn('Deleting %s' % display_path(archive_path)) - os.remove(archive_path) - elif response == 'b': - dest_file = backup_dir(archive_path) - logger.warn('Backing up %s to %s' - % (display_path(archive_path), display_path(dest_file))) - shutil.move(archive_path, dest_file) - if create_archive: - zip = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED) - dir = os.path.normcase(os.path.abspath(self.source_dir)) - for dirpath, dirnames, filenames in os.walk(dir): - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') - for dirname in dirnames: - dirname = os.path.join(dirpath, dirname) - name = self._clean_zip_name(dirname, dir) - zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') - zipdir.external_attr = 0x1ED << 16 # 0o755 - zip.writestr(zipdir, '') - for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - filename = os.path.join(dirpath, filename) - name = self._clean_zip_name(filename, dir) - zip.write(filename, self.name + '/' + name) - zip.close() - logger.indent -= 2 - logger.notify('Saved %s' % display_path(archive_path)) - - def _clean_zip_name(self, name, prefix): - assert name.startswith(prefix+os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix)) - name = name[len(prefix)+1:] - name = name.replace(os.path.sep, '/') - return name - - def install(self, install_options, global_options=(), root=None): - if self.editable: - self.install_editable(install_options, global_options) - return - if self.is_wheel: - version = pip.wheel.wheel_version(self.source_dir) - pip.wheel.check_compatibility(version, self.name) - - self.move_wheel_files(self.source_dir, root=root) - self.install_succeeded = True - return - - temp_location = tempfile.mkdtemp('-record', 'pip-') - record_filename = os.path.join(temp_location, 'install-record.txt') - try: - install_args = [sys.executable] - install_args.append('-c') - install_args.append( - "import setuptools, tokenize;__file__=%r;"\ - "exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py) - install_args += list(global_options) + 
['install','--record', record_filename] - - if not self.as_egg: - install_args += ['--single-version-externally-managed'] - - if root is not None: - install_args += ['--root', root] - - if self.pycompile: - install_args += ["--compile"] - else: - install_args += ["--no-compile"] - - if running_under_virtualenv(): - ## FIXME: I'm not sure if this is a reasonable location; probably not - ## but we can't put it in the default location, as that is a virtualenv symlink that isn't writable - install_args += ['--install-headers', - os.path.join(sys.prefix, 'include', 'site', - 'python' + get_python_version())] - logger.notify('Running setup.py install for %s' % self.name) - logger.indent += 2 - try: - call_subprocess(install_args + install_options, - cwd=self.source_dir, filter_stdout=self._filter_install, show_stdout=False) - finally: - logger.indent -= 2 - if not os.path.exists(record_filename): - logger.notify('Record file %s not found' % record_filename) - return - self.install_succeeded = True - if self.as_egg: - # there's no --always-unzip option we can pass to install command - # so we unable to save the installed-files.txt - return - - def prepend_root(path): - if root is None or not os.path.isabs(path): - return path - else: - return change_root(root, path) - - f = open(record_filename) - for line in f: - line = line.strip() - if line.endswith('.egg-info'): - egg_info_dir = prepend_root(line) - break - else: - logger.warn('Could not find .egg-info directory in install record for %s' % self) - ## FIXME: put the record somewhere - ## FIXME: should this be an error? - return - f.close() - new_lines = [] - f = open(record_filename) - for line in f: - filename = line.strip() - if os.path.isdir(filename): - filename += os.path.sep - new_lines.append(make_path_relative(prepend_root(filename), egg_info_dir)) - f.close() - f = open(os.path.join(egg_info_dir, 'installed-files.txt'), 'w') - f.write('\n'.join(new_lines)+'\n') - f.close() - finally: - if os.path.exists(record_filename): - os.remove(record_filename) - os.rmdir(temp_location) - - def remove_temporary_source(self): - """Remove the source files from this requirement, if they are marked - for deletion""" - if self.is_bundle or os.path.exists(self.delete_marker_filename): - logger.info('Removing source in %s' % self.source_dir) - if self.source_dir: - rmtree(self.source_dir) - self.source_dir = None - if self._temp_build_dir and os.path.exists(self._temp_build_dir): - rmtree(self._temp_build_dir) - self._temp_build_dir = None - - def install_editable(self, install_options, global_options=()): - logger.notify('Running setup.py develop for %s' % self.name) - logger.indent += 2 - try: - ## FIXME: should we do --install-headers here too? 
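
The install() method above shells out to "setup.py install --record" and then rewrites the recorded absolute paths relative to the package's .egg-info directory before saving installed-files.txt. A minimal sketch of that rewrite, using os.path.relpath in place of pip's own make_path_relative helper; the paths are invented.

import os

def record_to_installed_files(record_lines, egg_info_dir):
    # Convert the absolute paths written by "setup.py install --record" into
    # paths relative to the .egg-info directory, as installed-files.txt expects.
    rel_lines = []
    for line in record_lines:
        path = line.strip()
        if not path:
            continue
        if os.path.isdir(path):
            path += os.path.sep
        rel_lines.append(os.path.relpath(path, egg_info_dir))
    return '\n'.join(rel_lines) + '\n'

record = [
    '/prefix/lib/python3.4/site-packages/demo/__init__.py',
    '/prefix/lib/python3.4/site-packages/demo-1.0.egg-info/PKG-INFO',
]
print(record_to_installed_files(
    record, '/prefix/lib/python3.4/site-packages/demo-1.0.egg-info'))
# ../demo/__init__.py
# PKG-INFO
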
- call_subprocess( - [sys.executable, '-c', - "import setuptools, tokenize; __file__=%r; exec(compile(getattr(tokenize, 'open', open)(__file__).read().replace('\\r\\n', '\\n'), __file__, 'exec'))" % self.setup_py] - + list(global_options) + ['develop', '--no-deps'] + list(install_options), - - cwd=self.source_dir, filter_stdout=self._filter_install, - show_stdout=False) - finally: - logger.indent -= 2 - self.install_succeeded = True - - def _filter_install(self, line): - level = logger.NOTIFY - for regex in [r'^running .*', r'^writing .*', '^creating .*', '^[Cc]opying .*', - r'^reading .*', r"^removing .*\.egg-info' \(and everything under it\)$", - r'^byte-compiling ', - # Not sure what this warning is, but it seems harmless: - r"^warning: manifest_maker: standard file '-c' not found$"]: - if re.search(regex, line.strip()): - level = logger.INFO - break - return (level, line) - - def check_if_exists(self): - """Find an installed distribution that satisfies or conflicts - with this requirement, and set self.satisfied_by or - self.conflicts_with appropriately.""" - - if self.req is None: - return False - try: - # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts) - # if we've already set distribute as a conflict to setuptools - # then this check has already run before. we don't want it to - # run again, and return False, since it would block the uninstall - # TODO: remove this later - if (self.req.project_name == 'setuptools' - and self.conflicts_with - and self.conflicts_with.project_name == 'distribute'): - return True - else: - self.satisfied_by = pkg_resources.get_distribution(self.req) - except pkg_resources.DistributionNotFound: - return False - except pkg_resources.VersionConflict: - existing_dist = pkg_resources.get_distribution(self.req.project_name) - if self.use_user_site: - if dist_in_usersite(existing_dist): - self.conflicts_with = existing_dist - elif running_under_virtualenv() and dist_in_site_packages(existing_dist): - raise InstallationError("Will not install to the user site because it will lack sys.path precedence to %s in %s" - %(existing_dist.project_name, existing_dist.location)) - else: - self.conflicts_with = existing_dist - return True - - @property - def is_wheel(self): - return self.url and '.whl' in self.url - - @property - def is_bundle(self): - if self._is_bundle is not None: - return self._is_bundle - base = self._temp_build_dir - if not base: - ## FIXME: this doesn't seem right: - return False - self._is_bundle = (os.path.exists(os.path.join(base, 'pip-manifest.txt')) - or os.path.exists(os.path.join(base, 'pyinstall-manifest.txt'))) - return self._is_bundle - - def bundle_requirements(self): - for dest_dir in self._bundle_editable_dirs: - package = os.path.basename(dest_dir) - ## FIXME: svnism: - for vcs_backend in vcs.backends: - url = rev = None - vcs_bundle_file = os.path.join( - dest_dir, vcs_backend.bundle_file) - if os.path.exists(vcs_bundle_file): - vc_type = vcs_backend.name - fp = open(vcs_bundle_file) - content = fp.read() - fp.close() - url, rev = vcs_backend().parse_vcs_bundle_file(content) - break - if url: - url = '%s+%s@%s' % (vc_type, url, rev) - else: - url = None - yield InstallRequirement( - package, self, editable=True, url=url, - update=False, source_dir=dest_dir, from_bundle=True) - for dest_dir in self._bundle_build_dirs: - package = os.path.basename(dest_dir) - yield InstallRequirement(package, self,source_dir=dest_dir, from_bundle=True) - - def move_bundle_files(self, dest_build_dir, dest_src_dir): - base = self._temp_build_dir - 
assert base - src_dir = os.path.join(base, 'src') - build_dir = os.path.join(base, 'build') - bundle_build_dirs = [] - bundle_editable_dirs = [] - for source_dir, dest_dir, dir_collection in [ - (src_dir, dest_src_dir, bundle_editable_dirs), - (build_dir, dest_build_dir, bundle_build_dirs)]: - if os.path.exists(source_dir): - for dirname in os.listdir(source_dir): - dest = os.path.join(dest_dir, dirname) - dir_collection.append(dest) - if os.path.exists(dest): - logger.warn('The directory %s (containing package %s) already exists; cannot move source from bundle %s' - % (dest, dirname, self)) - continue - if not os.path.exists(dest_dir): - logger.info('Creating directory %s' % dest_dir) - os.makedirs(dest_dir) - shutil.move(os.path.join(source_dir, dirname), dest) - if not os.listdir(source_dir): - os.rmdir(source_dir) - self._temp_build_dir = None - self._bundle_build_dirs = bundle_build_dirs - self._bundle_editable_dirs = bundle_editable_dirs - - def move_wheel_files(self, wheeldir, root=None): - move_wheel_files( - self.name, self.req, wheeldir, - user=self.use_user_site, - home=self.target_dir, - root=root, - pycompile=self.pycompile, - ) - - @property - def delete_marker_filename(self): - assert self.source_dir - return os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME) - - -class Requirements(object): - - def __init__(self): - self._keys = [] - self._dict = {} - - def keys(self): - return self._keys - - def values(self): - return [self._dict[key] for key in self._keys] - - def __contains__(self, item): - return item in self._keys - - def __setitem__(self, key, value): - if key not in self._keys: - self._keys.append(key) - self._dict[key] = value - - def __getitem__(self, key): - return self._dict[key] - - def __repr__(self): - values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()] - return 'Requirements({%s})' % ', '.join(values) - - -class RequirementSet(object): - - def __init__(self, build_dir, src_dir, download_dir, download_cache=None, - upgrade=False, ignore_installed=False, as_egg=False, - target_dir=None, ignore_dependencies=False, - force_reinstall=False, use_user_site=False, session=None, - pycompile=True, wheel_download_dir=None): - self.build_dir = build_dir - self.src_dir = src_dir - self.download_dir = download_dir - if download_cache: - download_cache = os.path.expanduser(download_cache) - self.download_cache = download_cache - self.upgrade = upgrade - self.ignore_installed = ignore_installed - self.force_reinstall = force_reinstall - self.requirements = Requirements() - # Mapping of alias: real_name - self.requirement_aliases = {} - self.unnamed_requirements = [] - self.ignore_dependencies = ignore_dependencies - self.successfully_downloaded = [] - self.successfully_installed = [] - self.reqs_to_cleanup = [] - self.as_egg = as_egg - self.use_user_site = use_user_site - self.target_dir = target_dir #set from --target option - self.session = session or PipSession() - self.pycompile = pycompile - self.wheel_download_dir = wheel_download_dir - - def __str__(self): - reqs = [req for req in self.requirements.values() - if not req.comes_from] - reqs.sort(key=lambda req: req.name.lower()) - return ' '.join([str(req.req) for req in reqs]) - - def add_requirement(self, install_req): - name = install_req.name - install_req.as_egg = self.as_egg - install_req.use_user_site = self.use_user_site - install_req.target_dir = self.target_dir - install_req.pycompile = self.pycompile - if not name: - #url or path requirement w/o an egg fragment - 
self.unnamed_requirements.append(install_req) - else: - if self.has_requirement(name): - raise InstallationError( - 'Double requirement given: %s (already in %s, name=%r)' - % (install_req, self.get_requirement(name), name)) - self.requirements[name] = install_req - ## FIXME: what about other normalizations? E.g., _ vs. -? - if name.lower() != name: - self.requirement_aliases[name.lower()] = name - - def has_requirement(self, project_name): - for name in project_name, project_name.lower(): - if name in self.requirements or name in self.requirement_aliases: - return True - return False - - @property - def has_requirements(self): - return list(self.requirements.values()) or self.unnamed_requirements - - @property - def has_editables(self): - if any(req.editable for req in self.requirements.values()): - return True - if any(req.editable for req in self.unnamed_requirements): - return True - return False - - @property - def is_download(self): - if self.download_dir: - self.download_dir = os.path.expanduser(self.download_dir) - if os.path.exists(self.download_dir): - return True - else: - logger.fatal('Could not find download directory') - raise InstallationError( - "Could not find or access download directory '%s'" - % display_path(self.download_dir)) - return False - - def get_requirement(self, project_name): - for name in project_name, project_name.lower(): - if name in self.requirements: - return self.requirements[name] - if name in self.requirement_aliases: - return self.requirements[self.requirement_aliases[name]] - raise KeyError("No project with the name %r" % project_name) - - def uninstall(self, auto_confirm=False): - for req in self.requirements.values(): - req.uninstall(auto_confirm=auto_confirm) - req.commit_uninstall() - - def locate_files(self): - ## FIXME: duplicates code from prepare_files; relevant code should - ## probably be factored out into a separate method - unnamed = list(self.unnamed_requirements) - reqs = list(self.requirements.values()) - while reqs or unnamed: - if unnamed: - req_to_install = unnamed.pop(0) - else: - req_to_install = reqs.pop(0) - install_needed = True - if not self.ignore_installed and not req_to_install.editable: - req_to_install.check_if_exists() - if req_to_install.satisfied_by: - if self.upgrade: - #don't uninstall conflict if user install and and conflict is not user install - if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)): - req_to_install.conflicts_with = req_to_install.satisfied_by - req_to_install.satisfied_by = None - else: - install_needed = False - if req_to_install.satisfied_by: - logger.notify('Requirement already satisfied ' - '(use --upgrade to upgrade): %s' - % req_to_install) - - if req_to_install.editable: - if req_to_install.source_dir is None: - req_to_install.source_dir = req_to_install.build_location(self.src_dir) - elif install_needed: - req_to_install.source_dir = req_to_install.build_location(self.build_dir, not self.is_download) - - if req_to_install.source_dir is not None and not os.path.isdir(req_to_install.source_dir): - raise InstallationError('Could not install requirement %s ' - 'because source folder %s does not exist ' - '(perhaps --no-download was used without first running ' - 'an equivalent install with --no-install?)' - % (req_to_install, req_to_install.source_dir)) - - def prepare_files(self, finder, force_root_egg_info=False, bundle=False): - """Prepare process. 
Create temp directories, download and/or unpack files.""" - unnamed = list(self.unnamed_requirements) - reqs = list(self.requirements.values()) - while reqs or unnamed: - if unnamed: - req_to_install = unnamed.pop(0) - else: - req_to_install = reqs.pop(0) - install = True - best_installed = False - not_found = None - if not self.ignore_installed and not req_to_install.editable: - req_to_install.check_if_exists() - if req_to_install.satisfied_by: - if self.upgrade: - if not self.force_reinstall and not req_to_install.url: - try: - url = finder.find_requirement( - req_to_install, self.upgrade) - except BestVersionAlreadyInstalled: - best_installed = True - install = False - except DistributionNotFound: - not_found = sys.exc_info()[1] - else: - # Avoid the need to call find_requirement again - req_to_install.url = url.url - - if not best_installed: - #don't uninstall conflict if user install and conflict is not user install - if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)): - req_to_install.conflicts_with = req_to_install.satisfied_by - req_to_install.satisfied_by = None - else: - install = False - if req_to_install.satisfied_by: - if best_installed: - logger.notify('Requirement already up-to-date: %s' - % req_to_install) - else: - logger.notify('Requirement already satisfied ' - '(use --upgrade to upgrade): %s' - % req_to_install) - if req_to_install.editable: - logger.notify('Obtaining %s' % req_to_install) - elif install: - if req_to_install.url and req_to_install.url.lower().startswith('file:'): - logger.notify('Unpacking %s' % display_path(url_to_path(req_to_install.url))) - else: - logger.notify('Downloading/unpacking %s' % req_to_install) - logger.indent += 2 - try: - is_bundle = False - is_wheel = False - if req_to_install.editable: - if req_to_install.source_dir is None: - location = req_to_install.build_location(self.src_dir) - req_to_install.source_dir = location - else: - location = req_to_install.source_dir - if not os.path.exists(self.build_dir): - _make_build_dir(self.build_dir) - req_to_install.update_editable(not self.is_download) - if self.is_download: - req_to_install.run_egg_info() - req_to_install.archive(self.download_dir) - else: - req_to_install.run_egg_info() - elif install: - ##@@ if filesystem packages are not marked - ##editable in a req, a non deterministic error - ##occurs when the script attempts to unpack the - ##build directory - - # NB: This call can result in the creation of a temporary build directory - location = req_to_install.build_location(self.build_dir, not self.is_download) - unpack = True - url = None - - # In the case where the req comes from a bundle, we should - # assume a build dir exists and move on - if req_to_install.from_bundle: - pass - # If a checkout exists, it's unwise to keep going. version - # inconsistencies are logged later, but do not fail the - # installation. - elif os.path.exists(os.path.join(location, 'setup.py')): - raise PreviousBuildDirError(textwrap.dedent(""" - pip can't proceed with requirement '%s' due to a pre-existing build directory. - location: %s - This is likely due to a previous installation that failed. - pip is being responsible and not assuming it can delete this. - Please delete it and try again. 
- """ % (req_to_install, location))) - else: - ## FIXME: this won't upgrade when there's an existing package unpacked in `location` - if req_to_install.url is None: - if not_found: - raise not_found - url = finder.find_requirement(req_to_install, upgrade=self.upgrade) - else: - ## FIXME: should req_to_install.url already be a link? - url = Link(req_to_install.url) - assert url - if url: - try: - - if ( - url.filename.endswith(wheel_ext) - and self.wheel_download_dir - ): - # when doing 'pip wheel` - download_dir = self.wheel_download_dir - do_download = True - else: - download_dir = self.download_dir - do_download = self.is_download - self.unpack_url( - url, location, download_dir, - do_download, - ) - except HTTPError as exc: - logger.fatal( - 'Could not install requirement %s because ' - 'of error %s' % (req_to_install, exc) - ) - raise InstallationError( - 'Could not install requirement %s because of HTTP error %s for URL %s' - % (req_to_install, e, url)) - else: - unpack = False - if unpack: - is_bundle = req_to_install.is_bundle - is_wheel = url and url.filename.endswith(wheel_ext) - if is_bundle: - req_to_install.move_bundle_files(self.build_dir, self.src_dir) - for subreq in req_to_install.bundle_requirements(): - reqs.append(subreq) - self.add_requirement(subreq) - elif self.is_download: - req_to_install.source_dir = location - if not is_wheel: - # FIXME: see https://github.com/pypa/pip/issues/1112 - req_to_install.run_egg_info() - if url and url.scheme in vcs.all_schemes: - req_to_install.archive(self.download_dir) - elif is_wheel: - req_to_install.source_dir = location - req_to_install.url = url.url - else: - req_to_install.source_dir = location - req_to_install.run_egg_info() - if force_root_egg_info: - # We need to run this to make sure that the .egg-info/ - # directory is created for packing in the bundle - req_to_install.run_egg_info(force_root_egg_info=True) - req_to_install.assert_source_matches_version() - #@@ sketchy way of identifying packages not grabbed from an index - if bundle and req_to_install.url: - self.copy_to_build_dir(req_to_install) - install = False - # req_to_install.req is only avail after unpack for URL pkgs - # repeat check_if_exists to uninstall-on-upgrade (#14) - if not self.ignore_installed: - req_to_install.check_if_exists() - if req_to_install.satisfied_by: - if self.upgrade or self.ignore_installed: - #don't uninstall conflict if user install and and conflict is not user install - if not (self.use_user_site and not dist_in_usersite(req_to_install.satisfied_by)): - req_to_install.conflicts_with = req_to_install.satisfied_by - req_to_install.satisfied_by = None - else: - logger.notify( - 'Requirement already satisfied (use ' - '--upgrade to upgrade): %s' % - req_to_install - ) - install = False - if is_wheel: - dist = list( - pkg_resources.find_distributions(location) - )[0] - if not req_to_install.req: - req_to_install.req = dist.as_requirement() - self.add_requirement(req_to_install) - if not self.ignore_dependencies: - for subreq in dist.requires( - req_to_install.extras): - if self.has_requirement( - subreq.project_name): - continue - subreq = InstallRequirement(str(subreq), - req_to_install) - reqs.append(subreq) - self.add_requirement(subreq) - - # sdists - elif not is_bundle: - ## FIXME: shouldn't be globally added: - finder.add_dependency_links(req_to_install.dependency_links) - if (req_to_install.extras): - logger.notify("Installing extra requirements: %r" % ','.join(req_to_install.extras)) - if not self.ignore_dependencies: - for req in 
req_to_install.requirements(req_to_install.extras): - try: - name = pkg_resources.Requirement.parse(req).project_name - except ValueError: - e = sys.exc_info()[1] - ## FIXME: proper warning - logger.error('Invalid requirement: %r (%s) in requirement %s' % (req, e, req_to_install)) - continue - if self.has_requirement(name): - ## FIXME: check for conflict - continue - subreq = InstallRequirement(req, req_to_install) - reqs.append(subreq) - self.add_requirement(subreq) - if not self.has_requirement(req_to_install.name): - #'unnamed' requirements will get added here - self.add_requirement(req_to_install) - - # cleanup tmp src - if not is_bundle: - if ( - self.is_download or - req_to_install._temp_build_dir is not None - ): - self.reqs_to_cleanup.append(req_to_install) - - if install: - self.successfully_downloaded.append(req_to_install) - if bundle and (req_to_install.url and req_to_install.url.startswith('file:///')): - self.copy_to_build_dir(req_to_install) - finally: - logger.indent -= 2 - - def cleanup_files(self, bundle=False): - """Clean up files, remove builds.""" - logger.notify('Cleaning up...') - logger.indent += 2 - for req in self.reqs_to_cleanup: - req.remove_temporary_source() - - remove_dir = [] - if self._pip_has_created_build_dir(): - remove_dir.append(self.build_dir) - - # The source dir of a bundle can always be removed. - # FIXME: not if it pre-existed the bundle! - if bundle: - remove_dir.append(self.src_dir) - - for dir in remove_dir: - if os.path.exists(dir): - logger.info('Removing temporary dir %s...' % dir) - rmtree(dir) - - logger.indent -= 2 - - def _pip_has_created_build_dir(self): - return (self.build_dir == build_prefix and - os.path.exists(os.path.join(self.build_dir, PIP_DELETE_MARKER_FILENAME))) - - def copy_to_build_dir(self, req_to_install): - target_dir = req_to_install.editable and self.src_dir or self.build_dir - logger.info("Copying %s to %s" % (req_to_install.name, target_dir)) - dest = os.path.join(target_dir, req_to_install.name) - shutil.copytree(req_to_install.source_dir, dest) - call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest, - command_desc='python setup.py clean') - - def unpack_url(self, link, location, download_dir=None, - only_download=False): - if download_dir is None: - download_dir = self.download_dir - - # non-editable vcs urls - if is_vcs_url(link): - if only_download: - loc = download_dir - else: - loc = location - unpack_vcs_link(link, loc, only_download) - - # file urls - elif is_file_url(link): - unpack_file_url(link, location, download_dir) - if only_download: - write_delete_marker_file(location) - - # http urls - else: - unpack_http_url( - link, - location, - self.download_cache, - download_dir, - self.session, - ) - if only_download: - write_delete_marker_file(location) - - def install(self, install_options, global_options=(), *args, **kwargs): - """Install everything in this set (after having downloaded and unpacked the packages)""" - to_install = [r for r in self.requirements.values() - if not r.satisfied_by] - - # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts) - # move the distribute-0.7.X wrapper to the end because it does not - # install a setuptools package. 
by moving it to the end, we ensure it's - # setuptools dependency is handled first, which will provide the - # setuptools package - # TODO: take this out later - distribute_req = pkg_resources.Requirement.parse("distribute>=0.7") - for req in to_install: - if req.name == 'distribute' and req.installed_version in distribute_req: - to_install.remove(req) - to_install.append(req) - - if to_install: - logger.notify('Installing collected packages: %s' % ', '.join([req.name for req in to_install])) - logger.indent += 2 - try: - for requirement in to_install: - - # DISTRIBUTE TO SETUPTOOLS UPGRADE HACK (1 of 3 parts) - # when upgrading from distribute-0.6.X to the new merged - # setuptools in py2, we need to force setuptools to uninstall - # distribute. In py3, which is always using distribute, this - # conversion is already happening in distribute's pkg_resources. - # It's ok *not* to check if setuptools>=0.7 because if someone - # were actually trying to ugrade from distribute to setuptools - # 0.6.X, then all this could do is actually help, although that - # upgade path was certainly never "supported" - # TODO: remove this later - if requirement.name == 'setuptools': - try: - # only uninstall distribute<0.7. For >=0.7, setuptools - # will also be present, and that's what we need to - # uninstall - distribute_requirement = pkg_resources.Requirement.parse("distribute<0.7") - existing_distribute = pkg_resources.get_distribution("distribute") - if existing_distribute in distribute_requirement: - requirement.conflicts_with = existing_distribute - except pkg_resources.DistributionNotFound: - # distribute wasn't installed, so nothing to do - pass - - if requirement.conflicts_with: - logger.notify('Found existing installation: %s' - % requirement.conflicts_with) - logger.indent += 2 - try: - requirement.uninstall(auto_confirm=True) - finally: - logger.indent -= 2 - try: - requirement.install(install_options, global_options, *args, **kwargs) - except: - # if install did not succeed, rollback previous uninstall - if requirement.conflicts_with and not requirement.install_succeeded: - requirement.rollback_uninstall() - raise - else: - if requirement.conflicts_with and requirement.install_succeeded: - requirement.commit_uninstall() - requirement.remove_temporary_source() - finally: - logger.indent -= 2 - self.successfully_installed = to_install - - def create_bundle(self, bundle_filename): - ## FIXME: can't decide which is better; zip is easier to read - ## random files from, but tar.bz2 is smaller and not as lame a - ## format. - - ## FIXME: this file should really include a manifest of the - ## packages, maybe some other metadata files. It would make - ## it easier to detect as well. 
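
The distribute-to-setuptools ordering hack above can be hard to follow in place; the sketch below shows the same reordering in isolation, with a namedtuple standing in for InstallRequirement (an invented stand-in, not pip's class).

import collections
import pkg_resources

Req = collections.namedtuple('Req', 'name installed_version')

def reorder_for_distribute(to_install):
    # Push any distribute-0.7.x wrapper to the end of the list so that its
    # setuptools dependency gets installed first.
    distribute_req = pkg_resources.Requirement.parse('distribute>=0.7')
    for req in list(to_install):
        if req.name == 'distribute' and req.installed_version in distribute_req:
            to_install.remove(req)
            to_install.append(req)
    return to_install

reqs = [Req('distribute', '0.7.3'), Req('six', '1.9.0')]
assert [r.name for r in reorder_for_distribute(reqs)] == ['six', 'distribute']
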
- zip = zipfile.ZipFile(bundle_filename, 'w', zipfile.ZIP_DEFLATED) - vcs_dirs = [] - for dir, basename in (self.build_dir, 'build'), (self.src_dir, 'src'): - dir = os.path.normcase(os.path.abspath(dir)) - for dirpath, dirnames, filenames in os.walk(dir): - for backend in vcs.backends: - vcs_backend = backend() - vcs_url = vcs_rev = None - if vcs_backend.dirname in dirnames: - for vcs_dir in vcs_dirs: - if dirpath.startswith(vcs_dir): - # vcs bundle file already in parent directory - break - else: - vcs_url, vcs_rev = vcs_backend.get_info( - os.path.join(dir, dirpath)) - vcs_dirs.append(dirpath) - vcs_bundle_file = vcs_backend.bundle_file - vcs_guide = vcs_backend.guide % {'url': vcs_url, - 'rev': vcs_rev} - dirnames.remove(vcs_backend.dirname) - break - if 'pip-egg-info' in dirnames: - dirnames.remove('pip-egg-info') - for dirname in dirnames: - dirname = os.path.join(dirpath, dirname) - name = self._clean_zip_name(dirname, dir) - zip.writestr(basename + '/' + name + '/', '') - for filename in filenames: - if filename == PIP_DELETE_MARKER_FILENAME: - continue - filename = os.path.join(dirpath, filename) - name = self._clean_zip_name(filename, dir) - zip.write(filename, basename + '/' + name) - if vcs_url: - name = os.path.join(dirpath, vcs_bundle_file) - name = self._clean_zip_name(name, dir) - zip.writestr(basename + '/' + name, vcs_guide) - - zip.writestr('pip-manifest.txt', self.bundle_requirements()) - zip.close() - - BUNDLE_HEADER = '''\ -# This is a pip bundle file, that contains many source packages -# that can be installed as a group. You can install this like: -# pip this_file.zip -# The rest of the file contains a list of all the packages included: -''' - - def bundle_requirements(self): - parts = [self.BUNDLE_HEADER] - for req in [req for req in self.requirements.values() - if not req.comes_from]: - parts.append('%s==%s\n' % (req.name, req.installed_version)) - parts.append('# These packages were installed to satisfy the above requirements:\n') - for req in [req for req in self.requirements.values() - if req.comes_from]: - parts.append('%s==%s\n' % (req.name, req.installed_version)) - ## FIXME: should we do something with self.unnamed_requirements? 
- return ''.join(parts) - - def _clean_zip_name(self, name, prefix): - assert name.startswith(prefix+os.path.sep), ( - "name %r doesn't start with prefix %r" % (name, prefix)) - name = name[len(prefix)+1:] - name = name.replace(os.path.sep, '/') - return name - - -def _make_build_dir(build_dir): - os.makedirs(build_dir) - write_delete_marker_file(build_dir) - - -_scheme_re = re.compile(r'^(http|https|file):', re.I) - - -def parse_requirements(filename, finder=None, comes_from=None, options=None, - session=None): - if session is None: - session = PipSession() - - skip_match = None - skip_regex = options.skip_requirements_regex if options else None - if skip_regex: - skip_match = re.compile(skip_regex) - reqs_file_dir = os.path.dirname(os.path.abspath(filename)) - filename, content = get_file_content(filename, - comes_from=comes_from, - session=session, - ) - for line_number, line in enumerate(content.splitlines()): - line_number += 1 - line = line.strip() - - # Remove comments from file - line = re.sub(r"(^|\s)#.*$", "", line) - - if not line or line.startswith('#'): - continue - if skip_match and skip_match.search(line): - continue - if line.startswith('-r') or line.startswith('--requirement'): - if line.startswith('-r'): - req_url = line[2:].strip() - else: - req_url = line[len('--requirement'):].strip().strip('=') - if _scheme_re.search(filename): - # Relative to a URL - req_url = urlparse.urljoin(filename, req_url) - elif not _scheme_re.search(req_url): - req_url = os.path.join(os.path.dirname(filename), req_url) - for item in parse_requirements(req_url, finder, comes_from=filename, options=options, session=session): - yield item - elif line.startswith('-Z') or line.startswith('--always-unzip'): - # No longer used, but previously these were used in - # requirement files, so we'll ignore. 
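
parse_requirements above resolves a nested "-r other.txt" reference relative to the requirements file (or URL) that contains it. A small Python 3 sketch of that branch, with invented example inputs:

import os
import re
from urllib.parse import urljoin

_scheme_re = re.compile(r'^(http|https|file):', re.I)

def resolve_nested_requirement(parent, ref):
    # Mirror of the "-r"/"--requirement" resolution above: join against the
    # parent URL if the parent is remote, otherwise against its directory.
    if _scheme_re.search(parent):
        return urljoin(parent, ref)
    if _scheme_re.search(ref):
        return ref
    return os.path.join(os.path.dirname(parent), ref)

print(resolve_nested_requirement('https://example.invalid/reqs/base.txt', 'extra.txt'))
# https://example.invalid/reqs/extra.txt
print(resolve_nested_requirement('/tmp/requirements.txt', 'dev.txt'))
# /tmp/dev.txt (on a POSIX layout)
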
- pass - elif line.startswith('-f') or line.startswith('--find-links'): - if line.startswith('-f'): - line = line[2:].strip() - else: - line = line[len('--find-links'):].strip().lstrip('=') - ## FIXME: it would be nice to keep track of the source of - ## the find_links: - # support a find-links local path relative to a requirements file - relative_to_reqs_file = os.path.join(reqs_file_dir, line) - if os.path.exists(relative_to_reqs_file): - line = relative_to_reqs_file - if finder: - finder.find_links.append(line) - elif line.startswith('-i') or line.startswith('--index-url'): - if line.startswith('-i'): - line = line[2:].strip() - else: - line = line[len('--index-url'):].strip().lstrip('=') - if finder: - finder.index_urls = [line] - elif line.startswith('--extra-index-url'): - line = line[len('--extra-index-url'):].strip().lstrip('=') - if finder: - finder.index_urls.append(line) - elif line.startswith('--use-wheel'): - finder.use_wheel = True - elif line.startswith('--no-index'): - finder.index_urls = [] - elif line.startswith("--allow-external"): - line = line[len("--allow-external"):].strip().lstrip("=") - finder.allow_external |= set([normalize_name(line).lower()]) - elif line.startswith("--allow-all-external"): - finder.allow_all_external = True - # Remove in 1.7 - elif line.startswith("--no-allow-external"): - pass - # Remove in 1.7 - elif line.startswith("--no-allow-insecure"): - pass - # Remove after 1.7 - elif line.startswith("--allow-insecure"): - line = line[len("--allow-insecure"):].strip().lstrip("=") - finder.allow_unverified |= set([normalize_name(line).lower()]) - elif line.startswith("--allow-unverified"): - line = line[len("--allow-unverified"):].strip().lstrip("=") - finder.allow_unverified |= set([normalize_name(line).lower()]) - else: - comes_from = '-r %s (line %s)' % (filename, line_number) - if line.startswith('-e') or line.startswith('--editable'): - if line.startswith('-e'): - line = line[2:].strip() - else: - line = line[len('--editable'):].strip().lstrip('=') - req = InstallRequirement.from_editable( - line, comes_from=comes_from, default_vcs=options.default_vcs if options else None) - else: - req = InstallRequirement.from_line(line, comes_from, prereleases=getattr(options, "pre", None)) - yield req - -def _strip_postfix(req): - """ - Strip req postfix ( -dev, 0.2, etc ) - """ - ## FIXME: use package_to_requirement? - match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) - if match: - # Strip off -dev, -0.2, etc. - req = match.group(1) - return req - -def _build_req_from_url(url): - - parts = [p for p in url.split('#', 1)[0].split('/') if p] - - req = None - if parts[-2] in ('tags', 'branches', 'tag', 'branch'): - req = parts[-3] - elif parts[-1] == 'trunk': - req = parts[-2] - return req - -def _build_editable_options(req): - - """ - This method generates a dictionary of the query string - parameters contained in a given editable URL. - """ - regexp = re.compile(r"[\?#&](?P[^&=]+)=(?P[^&=]+)") - matched = regexp.findall(req) - - if matched: - ret = dict() - for option in matched: - (name, value) = option - if name in ret: - raise Exception("%s option already defined" % name) - ret[name] = value - return ret - return None - - -def parse_editable(editable_req, default_vcs=None): - """Parses svn+http://blahblah@rev#egg=Foobar into a requirement - (Foobar) and a URL""" - - url = editable_req - extras = None - - # If a file path is specified with extras, strip off the extras. 
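
The _build_editable_options regex above appears to have lost its named-group labels in this copy (presumably ?P<name> and ?P<value>); the sketch below shows the likely intended pattern and how the fragment options of an editable URL are collected. The example URL is invented, and ValueError stands in for the bare Exception used above.

import re

_OPTION_RE = re.compile(r"[\?#&](?P<name>[^&=]+)=(?P<value>[^&=]+)")

def build_editable_options(url):
    # Collect "#egg=...&key=value" style options from an editable URL.
    options = {}
    for name, value in _OPTION_RE.findall(url):
        if name in options:
            raise ValueError('%s option already defined' % name)
        options[name] = value
    return options or None

url = 'git+https://example.invalid/repo.git@v1.2#egg=Demo&subdirectory=pkg'
print(build_editable_options(url))
# {'egg': 'Demo', 'subdirectory': 'pkg'}
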
- m = re.match(r'^(.+)(\[[^\]]+\])$', url) - if m: - url_no_extras = m.group(1) - extras = m.group(2) - else: - url_no_extras = url - - if os.path.isdir(url_no_extras): - if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): - raise InstallationError("Directory %r is not installable. File 'setup.py' not found." % url_no_extras) - # Treating it as code that has already been checked out - url_no_extras = path_to_url(url_no_extras) - - if url_no_extras.lower().startswith('file:'): - if extras: - return None, url_no_extras, pkg_resources.Requirement.parse('__placeholder__' + extras).extras - else: - return None, url_no_extras, None - - for version_control in vcs: - if url.lower().startswith('%s:' % version_control): - url = '%s+%s' % (version_control, url) - break - - if '+' not in url: - if default_vcs: - url = default_vcs + '+' + url - else: - raise InstallationError( - '%s should either be a path to a local project or a VCS url beginning with svn+, git+, hg+, or bzr+' % editable_req) - - vc_type = url.split('+', 1)[0].lower() - - if not vcs.get_backend(vc_type): - error_message = 'For --editable=%s only ' % editable_req + \ - ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ - ' is currently supported' - raise InstallationError(error_message) - - try: - options = _build_editable_options(editable_req) - except Exception: - message = sys.exc_info()[1] - raise InstallationError( - '--editable=%s error in editable options:%s' % (editable_req, message)) - - if not options or 'egg' not in options: - req = _build_req_from_url(editable_req) - if not req: - raise InstallationError('--editable=%s is not the right format; it must have #egg=Package' % editable_req) - else: - req = options['egg'] - - package = _strip_postfix(req) - return package, url, options - - -class UninstallPathSet(object): - """A set of file paths to be removed in the uninstallation of a - requirement.""" - def __init__(self, dist): - self.paths = set() - self._refuse = set() - self.pth = {} - self.dist = dist - self.save_dir = None - self._moved_paths = [] - - def _permitted(self, path): - """ - Return True if the given path is one we are permitted to - remove/modify, False otherwise. - - """ - return is_local(path) - - def _can_uninstall(self): - if not dist_is_local(self.dist): - logger.notify("Not uninstalling %s at %s, outside environment %s" - % (self.dist.project_name, normalize_path(self.dist.location), sys.prefix)) - return False - return True - - def add(self, path): - path = normalize_path(path) - if not os.path.exists(path): - return - if self._permitted(path): - self.paths.add(path) - else: - self._refuse.add(path) - - # __pycache__ files can show up after 'installed-files.txt' is created, due to imports - if os.path.splitext(path)[1] == '.py' and uses_pycache: - self.add(imp.cache_from_source(path)) - - - def add_pth(self, pth_file, entry): - pth_file = normalize_path(pth_file) - if self._permitted(pth_file): - if pth_file not in self.pth: - self.pth[pth_file] = UninstallPthEntries(pth_file) - self.pth[pth_file].add(entry) - else: - self._refuse.add(pth_file) - - def compact(self, paths): - """Compact a path set to contain the minimal number of paths - necessary to contain all paths in the set. 
If /a/path/ and - /a/path/to/a/file.txt are both in the set, leave only the - shorter path.""" - short_paths = set() - for path in sorted(paths, key=len): - if not any([(path.startswith(shortpath) and - path[len(shortpath.rstrip(os.path.sep))] == os.path.sep) - for shortpath in short_paths]): - short_paths.add(path) - return short_paths - - def _stash(self, path): - return os.path.join( - self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep)) - - def remove(self, auto_confirm=False): - """Remove paths in ``self.paths`` with confirmation (unless - ``auto_confirm`` is True).""" - if not self._can_uninstall(): - return - if not self.paths: - logger.notify("Can't uninstall '%s'. No files were found to uninstall." % self.dist.project_name) - return - logger.notify('Uninstalling %s:' % self.dist.project_name) - logger.indent += 2 - paths = sorted(self.compact(self.paths)) - try: - if auto_confirm: - response = 'y' - else: - for path in paths: - logger.notify(path) - response = ask('Proceed (y/n)? ', ('y', 'n')) - if self._refuse: - logger.notify('Not removing or modifying (outside of prefix):') - for path in self.compact(self._refuse): - logger.notify(path) - if response == 'y': - self.save_dir = tempfile.mkdtemp(suffix='-uninstall', - prefix='pip-') - for path in paths: - new_path = self._stash(path) - logger.info('Removing file or directory %s' % path) - self._moved_paths.append(path) - renames(path, new_path) - for pth in self.pth.values(): - pth.remove() - logger.notify('Successfully uninstalled %s' % self.dist.project_name) - - finally: - logger.indent -= 2 - - def rollback(self): - """Rollback the changes previously made by remove().""" - if self.save_dir is None: - logger.error("Can't roll back %s; was not uninstalled" % self.dist.project_name) - return False - logger.notify('Rolling back uninstall of %s' % self.dist.project_name) - for path in self._moved_paths: - tmp_path = self._stash(path) - logger.info('Replacing %s' % path) - renames(tmp_path, path) - for pth in self.pth: - pth.rollback() - - def commit(self): - """Remove temporary save dir: rollback will no longer be possible.""" - if self.save_dir is not None: - rmtree(self.save_dir) - self.save_dir = None - self._moved_paths = [] - - -class UninstallPthEntries(object): - def __init__(self, pth_file): - if not os.path.isfile(pth_file): - raise UninstallationError("Cannot remove entries from nonexistent file %s" % pth_file) - self.file = pth_file - self.entries = set() - self._saved_lines = None - - def add(self, entry): - entry = os.path.normcase(entry) - # On Windows, os.path.normcase converts the entry to use - # backslashes. This is correct for entries that describe absolute - # paths outside of site-packages, but all the others use forward - # slashes. 
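
The compact() helper above keeps only the shortest paths needed to cover the whole set; here is a self-contained sketch of the same idea, with invented example paths.

import os

def compact(paths):
    # Keep a path only if no shorter path in the result already covers it.
    short_paths = set()
    for path in sorted(paths, key=len):
        covered = any(
            path.startswith(shortpath) and
            path[len(shortpath.rstrip(os.path.sep))] == os.path.sep
            for shortpath in short_paths)
        if not covered:
            short_paths.add(path)
    return short_paths

print(sorted(compact({'/a/path', '/a/path/to/a/file.txt', '/b/other.txt'})))
# ['/a/path', '/b/other.txt'] (on a POSIX layout)
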
- if sys.platform == 'win32' and not os.path.splitdrive(entry)[0]: - entry = entry.replace('\\', '/') - self.entries.add(entry) - - def remove(self): - logger.info('Removing pth entries from %s:' % self.file) - fh = open(self.file, 'rb') - # windows uses '\r\n' with py3k, but uses '\n' with py2.x - lines = fh.readlines() - self._saved_lines = lines - fh.close() - if any(b('\r\n') in line for line in lines): - endline = '\r\n' - else: - endline = '\n' - for entry in self.entries: - try: - logger.info('Removing entry: %s' % entry) - lines.remove(b(entry + endline)) - except ValueError: - pass - fh = open(self.file, 'wb') - fh.writelines(lines) - fh.close() - - def rollback(self): - if self._saved_lines is None: - logger.error('Cannot roll back changes to %s, none were made' % self.file) - return False - logger.info('Rolling %s back to previous state' % self.file) - fh = open(self.file, 'wb') - fh.writelines(self._saved_lines) - fh.close() - return True - - -class FakeFile(object): - """Wrap a list of lines in an object with readline() to make - ConfigParser happy.""" - def __init__(self, lines): - self._gen = (l for l in lines) - - def readline(self): - try: - try: - return next(self._gen) - except NameError: - return self._gen.next() - except StopIteration: - return '' - - def __iter__(self): - return self._gen diff --git a/Darwin/lib/python3.4/site-packages/pip/runner.py b/Darwin/lib/python3.4/site-packages/pip/runner.py deleted file mode 100644 index be830ad..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/runner.py +++ /dev/null @@ -1,18 +0,0 @@ -import sys -import os - - -def run(): - base = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - ## FIXME: this is kind of crude; if we could create a fake pip - ## module, then exec into it and update pip.__path__ properly, we - ## wouldn't have to update sys.path: - sys.path.insert(0, base) - import pip - return pip.main() - - -if __name__ == '__main__': - exit = run() - if exit: - sys.exit(exit) diff --git a/Darwin/lib/python3.4/site-packages/pip/vcs/__init__.py b/Darwin/lib/python3.4/site-packages/pip/vcs/__init__.py deleted file mode 100644 index a56dd20..0000000 --- a/Darwin/lib/python3.4/site-packages/pip/vcs/__init__.py +++ /dev/null @@ -1,251 +0,0 @@ -"""Handles all VCS (version control) support""" - -import os -import shutil - -from pip.backwardcompat import urlparse, urllib -from pip.log import logger -from pip.util import (display_path, backup_dir, find_command, - rmtree, ask_path_exists) - - -__all__ = ['vcs', 'get_src_requirement'] - - -class VcsSupport(object): - _registry = {} - schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] - - def __init__(self): - # Register more schemes with urlparse for various version control systems - urlparse.uses_netloc.extend(self.schemes) - # Python >= 2.7.4, 3.3 doesn't have uses_fragment - if getattr(urlparse, 'uses_fragment', None): - urlparse.uses_fragment.extend(self.schemes) - super(VcsSupport, self).__init__() - - def __iter__(self): - return self._registry.__iter__() - - @property - def backends(self): - return list(self._registry.values()) - - @property - def dirnames(self): - return [backend.dirname for backend in self.backends] - - @property - def all_schemes(self): - schemes = [] - for backend in self.backends: - schemes.extend(backend.schemes) - return schemes - - def register(self, cls): - if not hasattr(cls, 'name'): - logger.warn('Cannot register VCS %s' % cls.__name__) - return - if cls.name not in self._registry: - self._registry[cls.name] = cls - - def 
unregister(self, cls=None, name=None): - if name in self._registry: - del self._registry[name] - elif cls in self._registry.values(): - del self._registry[cls.name] - else: - logger.warn('Cannot unregister because no class or name given') - - def get_backend_name(self, location): - """ - Return the name of the version control backend if found at given - location, e.g. vcs.get_backend_name('/path/to/vcs/checkout') - """ - for vc_type in self._registry.values(): - path = os.path.join(location, vc_type.dirname) - if os.path.exists(path): - return vc_type.name - return None - - def get_backend(self, name): - name = name.lower() - if name in self._registry: - return self._registry[name] - - def get_backend_from_location(self, location): - vc_type = self.get_backend_name(location) - if vc_type: - return self.get_backend(vc_type) - return None - - -vcs = VcsSupport() - - -class VersionControl(object): - name = '' - dirname = '' - - def __init__(self, url=None, *args, **kwargs): - self.url = url - self._cmd = None - super(VersionControl, self).__init__(*args, **kwargs) - - def _filter(self, line): - return (logger.INFO, line) - - def _is_local_repository(self, repo): - """ - posix absolute paths start with os.path.sep, - win32 ones ones start with drive (like c:\\folder) - """ - drive, tail = os.path.splitdrive(repo) - return repo.startswith(os.path.sep) or drive - - @property - def cmd(self): - if self._cmd is not None: - return self._cmd - command = find_command(self.name) - logger.info('Found command %r at %r' % (self.name, command)) - self._cmd = command - return command - - def get_url_rev(self): - """ - Returns the correct repository URL and revision by parsing the given - repository URL - """ - error_message = ( - "Sorry, '%s' is a malformed VCS url. " - "The format is +://, " - "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp") - assert '+' in self.url, error_message % self.url - url = self.url.split('+', 1)[1] - scheme, netloc, path, query, frag = urlparse.urlsplit(url) - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - url = urlparse.urlunsplit((scheme, netloc, path, query, '')) - return url, rev - - def get_info(self, location): - """ - Returns (url, revision), where both are strings - """ - assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location - return self.get_url(location), self.get_revision(location) - - def normalize_url(self, url): - """ - Normalize a URL for comparison by unquoting it and removing any trailing slash. - """ - return urllib.unquote(url).rstrip('/') - - def compare_urls(self, url1, url2): - """ - Compare two repo URLs for identity, ignoring incidental differences. - """ - return (self.normalize_url(url1) == self.normalize_url(url2)) - - def parse_vcs_bundle_file(self, content): - """ - Takes the contents of the bundled text file that explains how to revert - the stripped off version control data of the given package and returns - the URL and revision of it. - """ - raise NotImplementedError - - def obtain(self, dest): - """ - Called when installing or updating an editable package, takes the - source path of the checkout. - """ - raise NotImplementedError - - def switch(self, dest, url, rev_options): - """ - Switch the repo at ``dest`` to point to ``URL``. - """ - raise NotImplemented - - def update(self, dest, rev_options): - """ - Update an already-existing repo to the given ``rev_options``. 
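
VersionControl.get_url_rev above parses pip's "<vcs>+<scheme>://...@<rev>" convention; a Python 3 sketch of the same split follows, with an invented URL.

from urllib.parse import urlsplit, urlunsplit

def split_vcs_url(editable_url):
    # Split "git+https://host/repo.git@rev#egg=Name" into (vcs, url, rev).
    assert '+' in editable_url, 'expected <vcs>+<scheme>://... : %r' % editable_url
    vc_type, url = editable_url.split('+', 1)
    scheme, netloc, path, query, _frag = urlsplit(url)
    rev = None
    if '@' in path:
        path, rev = path.rsplit('@', 1)
    return vc_type, urlunsplit((scheme, netloc, path, query, '')), rev

print(split_vcs_url('git+https://example.invalid/repo.git@v1.2#egg=Demo'))
# ('git', 'https://example.invalid/repo.git', 'v1.2')
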
- """ - raise NotImplementedError - - def check_destination(self, dest, url, rev_options, rev_display): - """ - Prepare a location to receive a checkout/clone. - - Return True if the location is ready for (and requires) a - checkout/clone, False otherwise. - """ - checkout = True - prompt = False - if os.path.exists(dest): - checkout = False - if os.path.exists(os.path.join(dest, self.dirname)): - existing_url = self.get_url(dest) - if self.compare_urls(existing_url, url): - logger.info('%s in %s exists, and has correct URL (%s)' % - (self.repo_name.title(), display_path(dest), - url)) - logger.notify('Updating %s %s%s' % - (display_path(dest), self.repo_name, - rev_display)) - self.update(dest, rev_options) - else: - logger.warn('%s %s in %s exists with URL %s' % - (self.name, self.repo_name, - display_path(dest), existing_url)) - prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', - ('s', 'i', 'w', 'b')) - else: - logger.warn('Directory %s already exists, ' - 'and is not a %s %s.' % - (dest, self.name, self.repo_name)) - prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) - if prompt: - logger.warn('The plan is to install the %s repository %s' % - (self.name, url)) - response = ask_path_exists('What to do? %s' % prompt[0], - prompt[1]) - - if response == 's': - logger.notify('Switching %s %s to %s%s' % - (self.repo_name, display_path(dest), url, - rev_display)) - self.switch(dest, url, rev_options) - elif response == 'i': - # do nothing - pass - elif response == 'w': - logger.warn('Deleting %s' % display_path(dest)) - rmtree(dest) - checkout = True - elif response == 'b': - dest_dir = backup_dir(dest) - logger.warn('Backing up %s to %s' - % (display_path(dest), dest_dir)) - shutil.move(dest, dest_dir) - checkout = True - return checkout - - def unpack(self, location): - if os.path.exists(location): - rmtree(location) - self.obtain(location) - - def get_src_requirement(self, dist, location, find_tags=False): - raise NotImplementedError - - -def get_src_requirement(dist, location, find_tags): - version_control = vcs.get_backend_from_location(location) - if version_control: - return version_control().get_src_requirement(dist, location, find_tags) - logger.warn('cannot determine version of editable source in %s (is not SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location) - return dist.as_requirement() diff --git a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index a89c12e..0000000 --- a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,58 +0,0 @@ -ChangeLog -INSTALL -LICENSE -MANIFEST.in -README -TODO -setup.cfg -setup.py -OpenSSL/RATIONALE -OpenSSL/SSL.py -OpenSSL/__init__.py -OpenSSL/_util.py -OpenSSL/crypto.py -OpenSSL/rand.py -OpenSSL/tsafe.py -OpenSSL/version.py -OpenSSL/test/__init__.py -OpenSSL/test/test_crypto.py -OpenSSL/test/test_rand.py -OpenSSL/test/test_ssl.py -OpenSSL/test/util.py -doc/Makefile -doc/Quotes -doc/README -doc/api.rst -doc/conf.py -doc/index.rst -doc/internals.rst -doc/introduction.rst -doc/make.bat -doc/api/crypto.rst -doc/api/rand.rst -doc/api/ssl.rst -doc/images/pyopenssl-brand.png -doc/images/pyopenssl-icon.png -doc/images/pyopenssl-logo.png -doc/images/pyopenssl.svg -examples/README -examples/SecureXMLRPCServer.py -examples/certgen.py -examples/mk_simple_certs.py -examples/proxy.py -examples/simple/README -examples/simple/client.py 
-examples/simple/server.py -examples/sni/README -examples/sni/another.invalid.crt -examples/sni/another.invalid.key -examples/sni/client.py -examples/sni/example.invalid.crt -examples/sni/example.invalid.key -examples/sni/server.py -pyOpenSSL.egg-info/PKG-INFO -pyOpenSSL.egg-info/SOURCES.txt -pyOpenSSL.egg-info/dependency_links.txt -pyOpenSSL.egg-info/requires.txt -pyOpenSSL.egg-info/top_level.txt -rpm/build_script \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/installed-files.txt deleted file mode 100644 index bbfe5d2..0000000 --- a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,30 +0,0 @@ -../OpenSSL/__init__.py -../OpenSSL/tsafe.py -../OpenSSL/rand.py -../OpenSSL/crypto.py -../OpenSSL/SSL.py -../OpenSSL/version.py -../OpenSSL/test/__init__.py -../OpenSSL/test/util.py -../OpenSSL/test/test_crypto.py -../OpenSSL/test/test_rand.py -../OpenSSL/test/test_ssl.py -../OpenSSL/_util.py -../OpenSSL/__init__.pyc -../OpenSSL/tsafe.pyc -../OpenSSL/rand.pyc -../OpenSSL/crypto.pyc -../OpenSSL/SSL.pyc -../OpenSSL/version.pyc -../OpenSSL/test/__init__.pyc -../OpenSSL/test/util.pyc -../OpenSSL/test/test_crypto.pyc -../OpenSSL/test/test_rand.pyc -../OpenSSL/test/test_ssl.pyc -../OpenSSL/_util.pyc -./ -dependency_links.txt -PKG-INFO -requires.txt -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/requires.txt b/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/requires.txt deleted file mode 100644 index d8589dd..0000000 --- a/Darwin/lib/python3.4/site-packages/pyOpenSSL-0.14-py3.4.egg-info/requires.txt +++ /dev/null @@ -1,2 +0,0 @@ -cryptography>=0.2.1 -six>=1.5.2 \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/PKG-INFO deleted file mode 100644 index da5af06..0000000 --- a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,17 +0,0 @@ -Metadata-Version: 1.1 -Name: pycparser -Version: 2.10 -Summary: C parser in Python -Home-page: https://github.com/eliben/pycparser -Author: Eli Bendersky -Author-email: eliben@gmail.com -License: BSD -Description: - pycparser is a complete parser of the C language, written in - pure Python using the PLY parsing library. - It parses C code into an AST and can serve as a front-end for - C compilers or analysis tools. 
- -Platform: Cross Platform -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 diff --git a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 88cea85..0000000 --- a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,22 +0,0 @@ -README.rst -pycparser/__init__.py -pycparser/_ast_gen.py -pycparser/_build_tables.py -pycparser/_c_ast.cfg -pycparser/ast_transforms.py -pycparser/c_ast.py -pycparser/c_generator.py -pycparser/c_lexer.py -pycparser/c_parser.py -pycparser/lextab.py -pycparser/plyparser.py -pycparser/yacctab.py -pycparser.egg-info/PKG-INFO -pycparser.egg-info/SOURCES.txt -pycparser.egg-info/dependency_links.txt -pycparser.egg-info/top_level.txt -pycparser/ply/__init__.py -pycparser/ply/cpp.py -pycparser/ply/ctokens.py -pycparser/ply/lex.py -pycparser/ply/yacc.py \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 26c72ce..0000000 --- a/Darwin/lib/python3.4/site-packages/pycparser-2.10-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,38 +0,0 @@ -../pycparser/__init__.py -../pycparser/_ast_gen.py -../pycparser/_build_tables.py -../pycparser/ast_transforms.py -../pycparser/c_ast.py -../pycparser/c_generator.py -../pycparser/c_lexer.py -../pycparser/c_parser.py -../pycparser/lextab.py -../pycparser/plyparser.py -../pycparser/yacctab.py -../pycparser/ply/__init__.py -../pycparser/ply/cpp.py -../pycparser/ply/ctokens.py -../pycparser/ply/lex.py -../pycparser/ply/yacc.py -../pycparser/_c_ast.cfg -../pycparser/__init__.pyc -../pycparser/_ast_gen.pyc -../pycparser/_build_tables.pyc -../pycparser/ast_transforms.pyc -../pycparser/c_ast.pyc -../pycparser/c_generator.pyc -../pycparser/c_lexer.pyc -../pycparser/c_parser.pyc -../pycparser/lextab.pyc -../pycparser/plyparser.pyc -../pycparser/yacctab.pyc -../pycparser/ply/__init__.pyc -../pycparser/ply/cpp.pyc -../pycparser/ply/ctokens.pyc -../pycparser/ply/lex.pyc -../pycparser/ply/yacc.pyc -./ -dependency_links.txt -PKG-INFO -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/pycparser/lextab.py b/Darwin/lib/python3.4/site-packages/pycparser/lextab.py deleted file mode 100644 index 4241902..0000000 --- a/Darwin/lib/python3.4/site-packages/pycparser/lextab.py +++ /dev/null @@ -1,9 +0,0 @@ -# pycparser.lextab.py. This file automatically created by PLY (version 3.4). Don't edit! 
-_tabversion = '3.4' -_lextokens = {'VOID': 1, 'LBRACKET': 1, 'WCHAR_CONST': 1, 'FLOAT_CONST': 1, 'MINUS': 1, 'RPAREN': 1, 'LONG': 1, 'PLUS': 1, 'ELLIPSIS': 1, 'GT': 1, 'GOTO': 1, 'ENUM': 1, 'PERIOD': 1, 'GE': 1, 'INT_CONST_DEC': 1, 'ARROW': 1, 'HEX_FLOAT_CONST': 1, 'DOUBLE': 1, 'MINUSEQUAL': 1, 'INT_CONST_OCT': 1, 'TIMESEQUAL': 1, 'OR': 1, 'SHORT': 1, 'RETURN': 1, 'RSHIFTEQUAL': 1, 'RESTRICT': 1, 'STATIC': 1, 'SIZEOF': 1, 'UNSIGNED': 1, 'UNION': 1, 'COLON': 1, 'WSTRING_LITERAL': 1, 'DIVIDE': 1, 'FOR': 1, 'PLUSPLUS': 1, 'EQUALS': 1, 'ELSE': 1, 'INLINE': 1, 'EQ': 1, 'AND': 1, 'TYPEID': 1, 'LBRACE': 1, 'PPHASH': 1, 'INT': 1, 'SIGNED': 1, 'CONTINUE': 1, 'NOT': 1, 'OREQUAL': 1, 'MOD': 1, 'RSHIFT': 1, 'DEFAULT': 1, 'CHAR': 1, 'WHILE': 1, 'DIVEQUAL': 1, 'EXTERN': 1, 'CASE': 1, 'LAND': 1, 'REGISTER': 1, 'MODEQUAL': 1, 'NE': 1, 'SWITCH': 1, 'INT_CONST_HEX': 1, '_COMPLEX': 1, 'PLUSEQUAL': 1, 'STRUCT': 1, 'CONDOP': 1, 'BREAK': 1, 'VOLATILE': 1, 'ANDEQUAL': 1, 'DO': 1, 'LNOT': 1, 'CONST': 1, 'LOR': 1, 'CHAR_CONST': 1, 'LSHIFT': 1, 'RBRACE': 1, '_BOOL': 1, 'LE': 1, 'SEMI': 1, 'LT': 1, 'COMMA': 1, 'TYPEDEF': 1, 'XOR': 1, 'AUTO': 1, 'TIMES': 1, 'LPAREN': 1, 'MINUSMINUS': 1, 'ID': 1, 'IF': 1, 'STRING_LITERAL': 1, 'FLOAT': 1, 'XOREQUAL': 1, 'LSHIFTEQUAL': 1, 'RBRACKET': 1} -_lexreflags = 0 -_lexliterals = '' -_lexstateinfo = {'ppline': 'exclusive', 'pppragma': 'exclusive', 'INITIAL': 'inclusive'} -_lexstatere = {'ppline': [('(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\\n)|(?Pline)', [None, ('t_ppline_FILENAME', 'FILENAME'), None, None, None, None, None, None, ('t_ppline_LINE_NUMBER', 'LINE_NUMBER'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ppline_NEWLINE', 'NEWLINE'), ('t_ppline_PPLINE', 'PPLINE')])], 'pppragma': [('(?P\\n)|(?Ppragma)|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P[a-zA-Z_$][0-9a-zA-Z_$]*)', [None, ('t_pppragma_NEWLINE', 'NEWLINE'), ('t_pppragma_PPPRAGMA', 'PPPRAGMA'), ('t_pppragma_STR', 'STR'), None, None, None, None, None, None, ('t_pppragma_ID', 'ID')])], 'INITIAL': [('(?P[ \\t]*\\#)|(?P\\n+)|(?P\\{)|(?P\\})|(?P((((([0-9]*\\.[0-9]+)|([0-9]+\\.))([eE][-+]?[0-9]+)?)|([0-9]+([eE][-+]?[0-9]+)))[FfLl]?))|(?P(0[xX]([0-9a-fA-F]+|((([0-9a-fA-F]+)?\\.[0-9a-fA-F]+)|([0-9a-fA-F]+\\.)))([pP][+-]?[0-9]+)[FfLl]?))|(?P0[xX][0-9a-fA-F]+(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)', [None, ('t_PPHASH', 'PPHASH'), ('t_NEWLINE', 'NEWLINE'), ('t_LBRACE', 'LBRACE'), ('t_RBRACE', 'RBRACE'), ('t_FLOAT_CONST', 'FLOAT_CONST'), None, None, None, None, None, None, None, None, None, ('t_HEX_FLOAT_CONST', 'HEX_FLOAT_CONST'), None, None, None, None, None, None, None, ('t_INT_CONST_HEX', 'INT_CONST_HEX')]), 
('(?P0[0-7]*[89])|(?P0[0-7]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|(?P(0(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?)|([1-9][0-9]*(([uU]ll)|([uU]LL)|(ll[uU]?)|(LL[uU]?)|([uU][lL])|([lL][uU]?)|[uU])?))|(?P\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?PL\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))\')|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*\\n)|(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*$))|(?P(\'([^\'\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))[^\'\n]+\')|(\'\')|(\'([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])[^\'\\n]*\'))|(?PL"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")', [None, ('t_BAD_CONST_OCT', 'BAD_CONST_OCT'), ('t_INT_CONST_OCT', 'INT_CONST_OCT'), None, None, None, None, None, None, None, ('t_INT_CONST_DEC', 'INT_CONST_DEC'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_CHAR_CONST', 'CHAR_CONST'), None, None, None, None, None, None, ('t_WCHAR_CONST', 'WCHAR_CONST'), None, None, None, None, None, None, ('t_UNMATCHED_QUOTE', 'UNMATCHED_QUOTE'), None, None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_BAD_CHAR_CONST', 'BAD_CHAR_CONST'), None, None, None, None, None, None, None, None, None, None, ('t_WSTRING_LITERAL', 'WSTRING_LITERAL')]), ('(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*([\\\\][^a-zA-Z._~^!=&\\^\\-\\\\?\'"x0-7])([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P[a-zA-Z_$][0-9a-zA-Z_$]*)|(?P"([^"\\\\\\n]|(\\\\(([a-zA-Z._~!=&\\^\\-\\\\?\'"])|(\\d+)|(x[0-9a-fA-F]+))))*")|(?P\\.\\.\\.)|(?P\\+\\+)|(?P\\|\\|)|(?P\\^=)|(?P\\|=)|(?P<<=)|(?P>>=)|(?P\\+=)|(?P\\*=)|(?P\\+)|(?P%=)|(?P/=)|(?P\\])', [None, ('t_BAD_STRING_LITERAL', 'BAD_STRING_LITERAL'), None, None, None, None, None, None, None, None, None, None, None, None, None, ('t_ID', 'ID'), (None, 'STRING_LITERAL'), None, None, None, None, None, None, (None, 'ELLIPSIS'), (None, 'PLUSPLUS'), (None, 'LOR'), (None, 'XOREQUAL'), (None, 'OREQUAL'), (None, 'LSHIFTEQUAL'), (None, 'RSHIFTEQUAL'), (None, 'PLUSEQUAL'), (None, 'TIMESEQUAL'), (None, 'PLUS'), (None, 'MODEQUAL'), (None, 'DIVEQUAL'), (None, 'RBRACKET')]), ('(?P\\?)|(?P\\^)|(?P<<)|(?P<=)|(?P\\()|(?P->)|(?P==)|(?P!=)|(?P--)|(?P\\|)|(?P\\*)|(?P\\[)|(?P>=)|(?P\\))|(?P&&)|(?P>>)|(?P&=)|(?P-=)|(?P\\.)|(?P=)|(?P<)|(?P,)|(?P/)|(?P&)|(?P%)|(?P;)|(?P-)|(?P>)|(?P:)|(?P~)|(?P!)', [None, (None, 'CONDOP'), (None, 'XOR'), (None, 'LSHIFT'), (None, 'LE'), (None, 'LPAREN'), (None, 'ARROW'), (None, 'EQ'), (None, 'NE'), (None, 'MINUSMINUS'), (None, 'OR'), (None, 'TIMES'), (None, 'LBRACKET'), (None, 'GE'), (None, 'RPAREN'), (None, 'LAND'), (None, 'RSHIFT'), (None, 'ANDEQUAL'), (None, 'MINUSEQUAL'), (None, 'PERIOD'), (None, 'EQUALS'), (None, 'LT'), (None, 'COMMA'), (None, 'DIVIDE'), (None, 'AND'), (None, 'MOD'), (None, 'SEMI'), (None, 'MINUS'), (None, 'GT'), (None, 'COLON'), (None, 'NOT'), (None, 'LNOT')])]} -_lexstateignore = {'ppline': ' \t', 'pppragma': ' \t<>.-{}();+-*/$%@&^~!?:,0123456789', 'INITIAL': ' \t'} -_lexstateerrorf = {'ppline': 't_ppline_error', 'pppragma': 't_pppragma_error', 'INITIAL': 't_error'} diff --git a/Darwin/lib/python3.4/site-packages/pycparser/yacctab.py b/Darwin/lib/python3.4/site-packages/pycparser/yacctab.py deleted file mode 100644 index f14693b..0000000 --- 
a/Darwin/lib/python3.4/site-packages/pycparser/yacctab.py +++ /dev/null @@ -1,286 +0,0 @@ - -# yacctab.py -# This file is automatically generated. Do not edit. -_tabversion = '3.2' - -_lr_method = 'LALR' - -_lr_signature = '"\xce\xf2\x9e\xca\x17\xf7\xe0\x81\x1f\r\xc4\x0b+;\x87' - -_lr_action_items = {'VOID':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[6,6,-61,-72,-71,-58,-54,-55,-33,-29,-59,6,-34,-53,-68,-63,-52,6,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,6,-67,6,-70,-74,6,-57,-84,-255,-83,6,-111,-110,-30,6,-100,-99,6,6,-45,-46,6,-113,6,6,6,6,-90,6,6,6,6,-36,6,-47,6,6,-85,-91,-256,6,-114,6,6,-115,-117,-116,6,-101,-37,-39,-42,-38,-40,6,-152,-151,-43,-153,-41,-87,-86,-92,-93,6,-103,-102,-171,-170,6,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'LBRACKET':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,24,25,27,28,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,64,65,67,68,69,70,74,78,81,83,84,86,90,94,96,97,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,136,145,146,150,154,163,164,171,173,174,175,176,177,197,200,201,203,207,213,217,238,239,249,252,253,257,263,264,265,293,295,296,305,306,307,308,311,315,319,320,343,344,347,350,352,354,355,356,377,378,384,386,411,412,419,],[-257,-61,-72,-71,-58,-54,-55,-59,-257,-53,-68,-63,-52,-56,-174,62,-66,-257,-69,72,-73,-112,-64,-60,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-255,-83,-49,62,-100,-99,-26,-118,-120,-25,72,72,161,-48,-51,72,-113,-257,-257,72,-239,-249,-253,-250,-247,-237,-238,205,-246,-224,-243,-251,-244,-236,-248,-245,72,-121,-119,161,259,72,72,-85,-256,-21,-82,-22,-81,-254,-252,-233,-232,-114,-115,72,-117,-116,-101,-146,-148,-136,259,-150,-144,-243,-87,-86,-231,-230,-229,-228,-227,-240,72,72,-103,-102,-139,259,-137,-145,-147,-149,-225,-226,259,-138,259,-234,-235,]),'WCHAR_CONST':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,103,103,103,-45,-223,103,-221,103,-220,103,-219,103,103,-218,-222,-219,103,-257,-219,103,103,-256,103,-180,-183,-181,-177,-178,-182,-184,103,-186,-187,-179,-185,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,103,-12,103,103,-11,-219,-39,-42,-38,103,-40,103,103,-152,-151,-43,-153,103,-41,103,103,103,-257,-135,-171,-170,103,-168,103,103,-154,103,-167,-155,103,103,103,103,-257,103,103,-166,-169,103,-158,103,-156,103,103,-157,103,103,103,-257,103,-162,-161,-159,103,103,103,-163,-160,103,-165,-164,]),'FLOAT_CONST':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274
,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,104,104,104,-45,-223,104,-221,104,-220,104,-219,104,104,-218,-222,-219,104,-257,-219,104,104,-256,104,-180,-183,-181,-177,-178,-182,-184,104,-186,-187,-179,-185,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,104,-12,104,104,-11,-219,-39,-42,-38,104,-40,104,104,-152,-151,-43,-153,104,-41,104,104,104,-257,-135,-171,-170,104,-168,104,104,-154,104,-167,-155,104,104,104,104,-257,104,104,-166,-169,104,-158,104,-156,104,104,-157,104,104,104,-257,104,-162,-161,-159,104,104,104,-163,-160,104,-165,-164,]),'MINUS':([55,62,72,77,82,98,99,100,101,102,103,104,105,106,107,108,109,111,112,113,115,116,117,118,119,120,121,122,123,124,125,126,127,128,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,203,205,206,208,209,210,211,212,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,302,305,306,307,308,311,315,316,317,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,350,353,358,359,361,362,363,364,367,368,369,371,372,373,376,377,378,379,380,383,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[-255,107,107,107,-45,-223,-210,-239,-249,-253,-250,-247,-237,107,-221,-238,-212,-191,107,-220,107,-246,-219,-224,107,107,-243,-251,-218,-244,-236,222,-248,-245,-222,-219,107,-257,-219,107,107,-256,107,-180,-183,-181,-177,-178,-182,-184,107,-186,-187,-179,-185,-254,107,-216,-252,-233,-232,107,107,107,-210,-215,107,-213,-214,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,107,-12,107,107,-11,-219,-39,-42,-38,107,-40,107,107,-152,-151,-43,-153,107,-41,-243,107,-231,-230,-229,-228,-227,-240,107,107,222,222,222,-196,222,222,222,-195,222,222,-193,-192,222,222,222,222,222,-194,-257,-135,-171,-170,107,-168,107,107,-154,107,-167,-155,107,107,-217,-225,-226,107,107,-211,-257,107,107,-166,-169,107,-158,107,-156,107,107,-157,107,107,107,-257,-234,107,-162,-161,-159,-235,107,107,107,-163,-160,107,-165,-164,]),'RPAREN':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,24,25,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,53,54,56,58,59,60,63,64,65,67,68,69,70,74,78,81,83,84,90,94,96,99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,130,132,133,134,135,136,137,138,139,145,146,150,157,158,159,160,162,163,164,171,173,174,175,176,177,197,199,200,201,203,206,207,209,210,212,213,214,215,216,217,218,238,239,240,241,242,243,249,252,253,264,265,268,278,295,296,303,304,305,306,307,308,310,311,312,313,314,315,318,319,320,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,340,341,342,343,344,354,355,356,366,376,377,378,382,383,394,396,399,400,402,412,414,417,419,420,421,424,],[-257,-61,-72,-71,-58,-54,-55,-59,-257,-53,-68,-63,-52,-56,-174,-109,-66,-257,-69,-73,-112,-64,-60,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,90,-84,-83,-49,-111,-110,-257,-100,-99,-26,-118,-120,-25,-141,-257,-143,-48,-51,-113,-257,-257,-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,-189,-248,-245,-175,238,-15,239,-124,-257,-16,-122,-128,-121,-119,-142,-19,-20,264,265,-257,-141,-257,-85,-256,-21,-82,-22,-81,-254,-216,-252,-233,-232,311,-114,-21
0,-215,-213,-115,315,317,-172,-257,-214,-117,-116,-127,-2,-126,-1,-101,-146,-148,-150,-144,356,-14,-87,-86,-176,376,-231,-230,-229,-228,-241,-227,378,380,381,-240,-140,-257,-141,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-129,-123,-125,-103,-102,-145,-147,-149,-13,-217,-225,-226,-173,-211,406,408,410,-242,-190,-234,-257,422,-235,-257,425,428,]),'LONG':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[19,19,-61,-72,-71,-58,-54,-55,-33,-29,-59,19,-34,-53,-68,-63,-52,19,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,19,-67,19,-70,-74,19,-57,-84,-255,-83,19,-111,-110,-30,19,-100,-99,19,19,-45,-46,19,-113,19,19,19,19,-90,19,19,19,19,-36,19,-47,19,19,-85,-91,-256,19,-114,19,19,-115,-117,-116,19,-101,-37,-39,-42,-38,-40,19,-152,-151,-43,-153,-41,-87,-86,-92,-93,19,-103,-102,-171,-170,19,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'PLUS':([55,62,72,77,82,98,99,100,101,102,103,104,105,106,107,108,109,111,112,113,115,116,117,118,119,120,121,122,123,124,125,126,127,128,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,203,205,206,208,209,210,211,212,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,302,305,306,307,308,311,315,316,317,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,350,353,358,359,361,362,363,364,367,368,369,371,372,373,376,377,378,379,380,383,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[-255,113,113,113,-45,-223,-210,-239,-249,-253,-250,-247,-237,113,-221,-238,-212,-191,113,-220,113,-246,-219,-224,113,113,-243,-251,-218,-244,-236,226,-248,-245,-222,-219,113,-257,-219,113,113,-256,113,-180,-183,-181,-177,-178,-182,-184,113,-186,-187,-179,-185,-254,113,-216,-252,-233,-232,113,113,113,-210,-215,113,-213,-214,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,113,-12,113,113,-11,-219,-39,-42,-38,113,-40,113,113,-152,-151,-43,-153,113,-41,-243,113,-231,-230,-229,-228,-227,-240,113,113,226,226,226,-196,226,226,226,-195,226,226,-193,-192,226,226,226,226,226,-194,-257,-135,-171,-170,113,-168,113,113,-154,113,-167,-155,113,113,-217,-225,-226,113,113,-211,-257,113,113,-166,-169,113,-158,113,-156,113,113,-157,113,113,113,-257,-234,113,-162,-161,-159,-235,113,113,113,-163,-160,113,-165,-164,]),'ELLIPSIS':([245,],[341,]),'GT':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,227,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,227,-198,-196,-200,227,-199,-195,-202,227,-193,-192,-201,227,227,227,227,-194,-217,-225,-226,-211,-234,-235,]),'GOTO':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,
395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,271,-256,-39,-42,-38,-40,271,-152,-151,-43,-153,271,-41,-171,-170,-168,271,-154,-167,-155,271,-166,-169,-158,271,-156,271,-157,271,271,-162,-161,-159,271,271,-163,-160,271,-165,-164,]),'ENUM':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[26,26,-61,-72,-71,-58,-54,-55,-33,-29,-59,26,-34,-53,-68,-63,-52,26,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,26,-67,26,-70,-74,26,-57,-84,-255,-83,26,-111,-110,-30,26,-100,-99,26,26,-45,-46,26,-113,26,26,26,26,-90,26,26,26,26,-36,26,-47,26,26,-85,-91,-256,26,-114,26,26,-115,-117,-116,26,-101,-37,-39,-42,-38,-40,26,-152,-151,-43,-153,-41,-87,-86,-92,-93,26,-103,-102,-171,-170,26,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'PERIOD':([55,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,154,173,197,200,201,203,257,263,293,305,306,307,308,311,315,347,350,352,377,378,384,386,411,412,419,],[-255,-239,-249,-253,-250,-247,-237,-238,204,-246,-224,-243,-251,-244,-236,-248,-245,258,-256,-254,-252,-233,-232,-136,258,-243,-231,-230,-229,-228,-227,-240,-139,258,-137,-225,-226,258,-138,258,-234,-235,]),'GE':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,231,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,231,-198,-196,-200,231,-199,-195,-202,231,-193,-192,-201,231,231,231,231,-194,-217,-225,-226,-211,-234,-235,]),'INT_CONST_DEC':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,124,124,124,-45,-223,124,-221,124,-220,124,-219,124,124,-218,-222,-219,124,-257,-219,124,124,-256,124,-180,-183,-181,-177,-178,-182,-184,124,-186,-187,-179,-185,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,-12,124,124,-11,-219,-39,-42,-38,124,-40,124,124,-152,-151,-43,-153,124,-41,124,124,124,-257,-135,-171,-170,124,-168,124,124,-154,124,-167,-155,124,124,124,124,-257,124,124,-166,-169,124,-158,124,-156,124,124,-157,124,124,124,-257,124,-162,-161,-159,124,124,124,-163,-160,124,-165,-164,]),'ARROW':([100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,200,201,203,293,305,306,307,308,311,315,377,378,412,419,],[-239,-249,-253,-250,-247,-237,-238,202,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-252,-233,-232,-243,-231,-230,-229,-228,-227,-240,-225,-226,-234,-235,]),'HEX_FLOAT_CONST':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,1
54,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,127,127,127,-45,-223,127,-221,127,-220,127,-219,127,127,-218,-222,-219,127,-257,-219,127,127,-256,127,-180,-183,-181,-177,-178,-182,-184,127,-186,-187,-179,-185,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,-12,127,127,-11,-219,-39,-42,-38,127,-40,127,127,-152,-151,-43,-153,127,-41,127,127,127,-257,-135,-171,-170,127,-168,127,127,-154,127,-167,-155,127,127,127,127,-257,127,127,-166,-169,127,-158,127,-156,127,127,-157,127,127,127,-257,127,-162,-161,-159,127,127,127,-163,-160,127,-165,-164,]),'DOUBLE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[40,40,-61,-72,-71,-58,-54,-55,-33,-29,-59,40,-34,-53,-68,-63,-52,40,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,40,-67,40,-70,-74,40,-57,-84,-255,-83,40,-111,-110,-30,40,-100,-99,40,40,-45,-46,40,-113,40,40,40,40,-90,40,40,40,40,-36,40,-47,40,40,-85,-91,-256,40,-114,40,40,-115,-117,-116,40,-101,-37,-39,-42,-38,-40,40,-152,-151,-43,-153,-41,-87,-86,-92,-93,40,-103,-102,-171,-170,40,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'MINUSEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[186,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'INT_CONST_OCT':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,128,128,128,-45,-223,128,-221,128,-220,128,-219,128,128,-218,-222,-219,128,-257,-219,128,128,-256,128,-180,-183,-181,-177,-178,-182,-184,128,-186,-187,-179,-185,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,-12,128,128,-11,-219,-39,-42,-38,128,-40,128,128,-152,-151,-43,-153,128,-41,128,128,128,-257,-135,-171,-170,128,-168,128,128,-154,128,-167,-155,128,128,128,128,-257,128,128,-166,-169,128,-158,128,-156,128,128,-157,128,128,128,-257,128,-162,-161,-159,128,128,128,-163,-160,128,-165,-164,]),'TIMESEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[195,-239,-249,-253,-250,-247,-237,-2
38,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'OR':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,236,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,236,-203,-205,-206,-194,-217,-225,-226,-211,-234,-235,]),'SHORT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[2,2,-61,-72,-71,-58,-54,-55,-33,-29,-59,2,-34,-53,-68,-63,-52,2,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,2,-67,2,-70,-74,2,-57,-84,-255,-83,2,-111,-110,-30,2,-100,-99,2,2,-45,-46,2,-113,2,2,2,2,-90,2,2,2,2,-36,2,-47,2,2,-85,-91,-256,2,-114,2,2,-115,-117,-116,2,-101,-37,-39,-42,-38,-40,2,-152,-151,-43,-153,-41,-87,-86,-92,-93,2,-103,-102,-171,-170,2,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'RETURN':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,274,-256,-39,-42,-38,-40,274,-152,-151,-43,-153,274,-41,-171,-170,-168,274,-154,-167,-155,274,-166,-169,-158,274,-156,274,-157,274,274,-162,-161,-159,274,274,-163,-160,274,-165,-164,]),'RSHIFTEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[196,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'RESTRICT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,67,69,76,78,82,87,89,90,91,92,93,94,95,96,119,145,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[32,32,-61,-72,-71,-58,-54,-55,-33,-29,-59,32,-34,-53,-68,-63,-52,32,-56,-174,-109,-66,32,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,32,-67,32,-70,-74,32,-57,-84,-255,-83,32,-111,-110,-30,32,-100,-99,32,-120,32,32,-45,-46,32,-113,32,32,32,32,-90,32,32,-121,32,32,-36,32,-47,32,32,-85,-91,-256,32,-114,32,32,-115,-117,-116,32,-101,-37,-39,-42,-38,-40,32,-152,-151,-43,-153,-41,-87,-86,-92,-93,32,-103,-102,-171,-170,32,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'STATIC':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,
359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[9,9,-61,-72,-71,-58,-54,-55,-33,-29,-59,9,-34,-53,-68,-63,-52,9,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,9,-67,9,-70,-74,9,-57,-84,-255,-83,-111,-110,-30,9,-100,-99,9,9,-45,-46,9,-113,9,9,-36,9,-47,-85,-256,-114,-115,-117,-116,9,-101,-37,-39,-42,-38,-40,9,-152,-151,-43,-153,-41,-87,-86,9,-103,-102,-171,-170,9,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'SIZEOF':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,106,106,106,-45,-223,106,-221,106,-220,106,-219,106,106,-218,-222,-219,106,-257,-219,106,106,-256,106,-180,-183,-181,-177,-178,-182,-184,106,-186,-187,-179,-185,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,106,-12,106,106,-11,-219,-39,-42,-38,106,-40,106,106,-152,-151,-43,-153,106,-41,106,106,106,-257,-135,-171,-170,106,-168,106,106,-154,106,-167,-155,106,106,106,106,-257,106,106,-166,-169,106,-158,106,-156,106,106,-157,106,106,106,-257,106,-162,-161,-159,106,106,106,-163,-160,106,-165,-164,]),'UNSIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[18,18,-61,-72,-71,-58,-54,-55,-33,-29,-59,18,-34,-53,-68,-63,-52,18,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,18,-67,18,-70,-74,18,-57,-84,-255,-83,18,-111,-110,-30,18,-100,-99,18,18,-45,-46,18,-113,18,18,18,18,-90,18,18,18,18,-36,18,-47,18,18,-85,-91,-256,18,-114,18,18,-115,-117,-116,18,-101,-37,-39,-42,-38,-40,18,-152,-151,-43,-153,-41,-87,-86,-92,-93,18,-103,-102,-171,-170,18,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'UNION':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[20,20,-61,-72,-71,-58,-54,-55,-33,-29,-59,20,-34,-53,-68,-63,-52,20,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,20,-67,20,-70,-74,20,-57,-84,-255,-83,20,-111,-110,-30,20,-100,-99,20,20,-45,-46,20,-113,20,20,20,20,-90,20,20,20,20,-36,20,-47,20,20,-85,-91,-256,20,-114,20,20,-115,-117,-116,20,-101,-37,-39,-42,-38,-40,20,-152,-151,-43,-153,-41,-87,-86,-92,-93,20,-103,-102,-171,-170,20,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'COLON':([2,3,5,6,13,18,19,24,25,27,29,32,33,35,37,39,40,43,45,46,54,56,59,60,64,65,90,94,96,97,99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,130,171,173,174,175,176,177,1
84,197,199,200,201,203,207,209,210,212,213,216,218,238,239,249,279,293,295,296,298,299,303,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,343,344,365,376,377,378,382,383,402,412,419,],[-61,-72,-71,-58,-59,-68,-63,-174,-109,-66,-69,-73,-112,-64,-60,-62,-65,-67,-70,-74,-84,-83,-111,-110,-100,-99,-113,-257,-257,178,-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,-189,-248,-245,-175,-85,-256,-21,-82,-22,-81,302,-254,-216,-252,-233,-232,-114,-210,-215,-213,-115,-172,-214,-117,-116,-101,363,372,-87,-86,-188,178,-176,-231,-230,-229,-228,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,385,-206,-194,-103,-102,395,-217,-225,-226,-173,-211,-190,-234,-235,]),'$end':([0,8,11,12,15,22,31,36,38,47,61,82,166,173,255,371,],[-257,0,-33,-29,-34,-27,-32,-31,-35,-28,-30,-45,-36,-256,-37,-155,]),'WSTRING_LITERAL':([55,62,72,77,82,98,100,102,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,102,102,102,-45,-223,197,-253,102,-221,102,-220,102,-219,102,102,-218,-222,-219,102,-257,-219,102,102,-256,102,-180,-183,-181,-177,-178,-182,-184,102,-186,-187,-179,-185,-254,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,102,-12,102,102,-11,-219,-39,-42,-38,102,-40,102,102,-152,-151,-43,-153,102,-41,102,102,102,-257,-135,-171,-170,102,-168,102,102,-154,102,-167,-155,102,102,102,102,-257,102,102,-166,-169,102,-158,102,-156,102,102,-157,102,102,102,-257,102,-162,-161,-159,102,102,102,-163,-160,102,-165,-164,]),'DIVIDE':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,229,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,229,229,229,229,229,229,229,229,229,229,-193,-192,229,229,229,229,229,-194,-217,-225,-226,-211,-234,-235,]),'FOR':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,276,-256,-39,-42,-38,-40,276,-152,-151,-43,-153,276,-41,-171,-170,-168,276,-154,-167,-155,276,-166,-169,-158,276,-156,276,-157,276,276,-162,-161,-159,276,276,-163,-160,276,-165,-164,]),'PLUSPLUS':([55,62,72,77,82,98,100,101,102,103,104,105,106,107,108,109,112,113,115,116,117,118,119,120,121,122,123,124,125,127,128,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,203,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,302,305,306,307,308,311,315,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,377,378,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429
,430,],[-255,115,115,115,-45,-223,-239,-249,-253,-250,-247,-237,115,-221,-238,203,115,-220,115,-246,-219,-224,115,115,-243,-251,-218,-244,-236,-248,-245,-222,-219,115,-257,-219,115,115,-256,115,-180,-183,-181,-177,-178,-182,-184,115,-186,-187,-179,-185,-254,115,-252,-233,-232,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,115,-12,115,115,-11,-219,-39,-42,-38,115,-40,115,115,-152,-151,-43,-153,115,-41,-243,115,-231,-230,-229,-228,-227,-240,115,115,-257,-135,-171,-170,115,-168,115,115,-154,115,-167,-155,115,115,-225,-226,115,115,-257,115,115,-166,-169,115,-158,115,-156,115,115,-157,115,115,115,-257,-234,115,-162,-161,-159,-235,115,115,115,-163,-160,115,-165,-164,]),'EQUALS':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,24,25,27,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,64,65,76,83,84,86,90,99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,144,165,171,173,197,199,200,201,203,207,209,210,212,213,218,238,239,249,257,263,293,295,296,305,306,307,308,311,315,343,344,347,352,376,377,378,383,386,412,419,],[-257,-61,-72,-71,-58,-54,-55,-59,-257,-53,-68,-63,-52,-56,-174,-109,-66,-69,77,-73,-112,-64,-60,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-83,-49,-111,-110,-100,-99,151,-48,-51,77,-113,188,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,250,151,-85,-256,-254,-216,-252,-233,-232,-114,-210,-215,-213,-115,-214,-117,-116,-101,-136,353,-243,-87,-86,-231,-230,-229,-228,-227,-240,-103,-102,-139,-137,-217,-225,-226,-211,-138,-234,-235,]),'ELSE':([173,269,270,273,275,286,291,358,359,362,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[-256,-39,-42,-38,-40,-43,-41,-171,-170,-168,-167,-155,-166,-169,-158,-156,-157,-162,-161,423,-163,-160,-165,-164,]),'ANDEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[193,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'EQ':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,233,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,233,-198,-196,-200,-204,-199,-195,-202,233,-193,-192,-201,233,-203,233,233,-194,-217,-225,-226,-211,-234,-235,]),'AND':([55,62,72,77,82,98,99,100,101,102,103,104,105,106,107,108,109,111,112,113,115,116,117,118,119,120,121,122,123,124,125,126,127,128,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,203,205,206,208,209,210,211,212,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,302,305,306,307,308,311,315,316,317,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,350,353,358,359,361,362,363,364,367,368,369,371,372,373,376,377,378,379,380,383,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[-255,123,123,123,-45,-223,-210,-239,-249,-253,-250,-247,-237,123,-221,-238,-212,-191,123,-220,123,-24
6,-219,-224,123,123,-243,-251,-218,-244,-236,234,-248,-245,-222,-219,123,-257,-219,123,123,-256,123,-180,-183,-181,-177,-178,-182,-184,123,-186,-187,-179,-185,-254,123,-216,-252,-233,-232,123,123,123,-210,-215,123,-213,-214,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,-12,123,123,-11,-219,-39,-42,-38,123,-40,123,123,-152,-151,-43,-153,123,-41,-243,123,-231,-230,-229,-228,-227,-240,123,123,-197,234,-198,-196,-200,-204,-199,-195,-202,234,-193,-192,-201,234,-203,-205,234,-194,-257,-135,-171,-170,123,-168,123,123,-154,123,-167,-155,123,123,-217,-225,-226,123,123,-211,-257,123,123,-166,-169,123,-158,123,-156,123,123,-157,123,123,123,-257,-234,123,-162,-161,-159,-235,123,123,123,-163,-160,123,-165,-164,]),'TYPEID':([0,1,2,3,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,31,32,33,34,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,67,68,69,70,74,76,78,82,87,89,90,91,92,93,94,95,96,119,145,146,162,163,164,166,167,168,169,170,171,172,173,198,202,204,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[24,24,-61,-72,-71,-58,54,-54,-55,-33,-29,-59,24,-34,59,-53,-68,-63,-89,-52,24,-56,-174,-109,64,-66,-257,-69,-32,-73,-112,-88,-64,-31,-60,-35,-62,-65,24,-67,24,-70,-74,24,-57,-84,-255,-83,24,-111,-110,-30,24,-100,-99,-26,-118,-120,-25,59,24,24,-45,-46,24,-113,24,24,24,24,-90,24,24,-121,-119,24,59,24,-36,24,-47,24,24,-85,-91,-256,24,305,307,-114,24,24,-115,-117,-116,24,-101,-37,-39,-42,-38,-40,24,-152,-151,-43,-153,-41,-87,-86,-92,-93,24,-103,-102,-171,-170,24,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'LBRACE':([7,20,25,26,33,34,48,54,55,56,59,60,64,65,76,77,82,85,87,88,89,90,151,152,154,167,168,173,207,213,238,239,256,260,261,269,270,273,275,282,284,285,286,288,290,291,317,350,353,358,359,362,363,367,369,371,372,376,380,381,384,387,389,390,393,395,398,406,407,408,410,411,415,416,418,423,425,426,427,428,429,430,],[55,-89,-109,55,-112,-88,-257,55,-255,55,-111,-110,55,55,-257,55,-45,-7,-46,55,-8,-113,55,55,-257,55,-47,-256,-114,-115,-117,-116,-12,55,-11,-39,-42,-38,-40,55,-152,-151,-43,-153,55,-41,55,-257,-135,-171,-170,-168,55,-154,-167,-155,55,55,55,55,-257,55,-166,-169,-158,55,-156,55,-157,55,55,-257,-162,-161,-159,55,55,-163,-160,55,-165,-164,]),'PPHASH':([0,11,12,15,22,31,36,38,61,82,166,173,255,371,],[38,-33,-29,-34,38,-32,-31,-35,-30,-45,-36,-256,-37,-155,]),'INT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[39,39,-61,-72,-71,-58,-54,-55,-33,-29,-59,39,-34,-53,-68,-63,-52,39,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,39,-67,39,-70,-74,39,-57,-84,-255,-83,39,-111,-110,-30,39,-100,-99,39,39,-45,-46,39,-113,39,39,39,39,-90,39,39,39,39,-36,39,-47,39,39,-85,-91,-256,39,-114,39,39,-115,-117,-116,39,-101,-37,-39,-42,-38,-40,39,-152,-151,-43,-153,-41,-87,-86,-92,-93,39,-103,-102,-171,-170,39,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'SIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,
54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[43,43,-61,-72,-71,-58,-54,-55,-33,-29,-59,43,-34,-53,-68,-63,-52,43,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,43,-67,43,-70,-74,43,-57,-84,-255,-83,43,-111,-110,-30,43,-100,-99,43,43,-45,-46,43,-113,43,43,43,43,-90,43,43,43,43,-36,43,-47,43,43,-85,-91,-256,43,-114,43,43,-115,-117,-116,43,-101,-37,-39,-42,-38,-40,43,-152,-151,-43,-153,-41,-87,-86,-92,-93,43,-103,-102,-171,-170,43,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'CONTINUE':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,277,-256,-39,-42,-38,-40,277,-152,-151,-43,-153,277,-41,-171,-170,-168,277,-154,-167,-155,277,-166,-169,-158,277,-156,277,-157,277,277,-162,-161,-159,277,277,-163,-160,277,-165,-164,]),'NOT':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,131,131,131,-45,-223,131,-221,131,-220,131,-219,131,131,-218,-222,-219,131,-257,-219,131,131,-256,131,-180,-183,-181,-177,-178,-182,-184,131,-186,-187,-179,-185,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,131,-12,131,131,-11,-219,-39,-42,-38,131,-40,131,131,-152,-151,-43,-153,131,-41,131,131,131,-257,-135,-171,-170,131,-168,131,131,-154,131,-167,-155,131,131,131,131,-257,131,131,-166,-169,131,-158,131,-156,131,131,-157,131,131,131,-257,131,-162,-161,-159,131,131,131,-163,-160,131,-165,-164,]),'OREQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[194,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'MOD':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,237,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,237,237,237,237,237,237,237,237,237,237,-193,-192,237,237,237,237,237,-194,-217,-225,-226,-211,-234,-235,]),'RSHIFT':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,219,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-
227,-240,-197,219,-198,-196,219,219,219,-195,219,219,-193,-192,219,219,219,219,219,-194,-217,-225,-226,-211,-234,-235,]),'DEFAULT':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,279,-256,-39,-42,-38,-40,279,-152,-151,-43,-153,279,-41,-171,-170,-168,279,-154,-167,-155,279,-166,-169,-158,279,-156,279,-157,279,279,-162,-161,-159,279,279,-163,-160,279,-165,-164,]),'CHAR':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[37,37,-61,-72,-71,-58,-54,-55,-33,-29,-59,37,-34,-53,-68,-63,-52,37,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,37,-67,37,-70,-74,37,-57,-84,-255,-83,37,-111,-110,-30,37,-100,-99,37,37,-45,-46,37,-113,37,37,37,37,-90,37,37,37,37,-36,37,-47,37,37,-85,-91,-256,37,-114,37,37,-115,-117,-116,37,-101,-37,-39,-42,-38,-40,37,-152,-151,-43,-153,-41,-87,-86,-92,-93,37,-103,-102,-171,-170,37,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'WHILE':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,370,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,280,-256,-39,-42,-38,-40,280,-152,-151,-43,-153,280,-41,-171,-170,-168,280,-154,-167,397,-155,280,-166,-169,-158,280,-156,280,-157,280,280,-162,-161,-159,280,280,-163,-160,280,-165,-164,]),'DIVEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[185,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'EXTERN':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[10,10,-61,-72,-71,-58,-54,-55,-33,-29,-59,10,-34,-53,-68,-63,-52,10,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,10,-67,10,-70,-74,10,-57,-84,-255,-83,-111,-110,-30,10,-100,-99,10,10,-45,-46,10,-113,10,10,-36,10,-47,-85,-256,-114,-115,-117,-116,10,-101,-37,-39,-42,-38,-40,10,-152,-151,-43,-153,-41,-87,-86,10,-103,-102,-171,-170,10,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'CASE':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,281,-256,-39,-42,-38,-40,281,-152,-151,-43,-153,281,-41,-171,-170,-168,281,-154,-167,-155,281,-166,-169,-158,281,-156,281,-157,281,281,-162,-161,-159,281,281,-163,-160,281,-165,-164,]),'LAND':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,
412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,232,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,232,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-217,-225,-226,-211,-234,-235,]),'REGISTER':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[17,17,-61,-72,-71,-58,-54,-55,-33,-29,-59,17,-34,-53,-68,-63,-52,17,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,17,-67,17,-70,-74,17,-57,-84,-255,-83,-111,-110,-30,17,-100,-99,17,17,-45,-46,17,-113,17,17,-36,17,-47,-85,-256,-114,-115,-117,-116,17,-101,-37,-39,-42,-38,-40,17,-152,-151,-43,-153,-41,-87,-86,17,-103,-102,-171,-170,17,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'MODEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[187,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'NE':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,224,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,224,-198,-196,-200,-204,-199,-195,-202,224,-193,-192,-201,224,-203,224,224,-194,-217,-225,-226,-211,-234,-235,]),'SWITCH':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,283,-256,-39,-42,-38,-40,283,-152,-151,-43,-153,283,-41,-171,-170,-168,283,-154,-167,-155,283,-166,-169,-158,283,-156,283,-157,283,283,-162,-161,-159,283,283,-163,-160,283,-165,-164,]),'INT_CONST_HEX':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,116,116,116,-45,-223,116,-221,116,-220,116,-219,116,116,-218,-222,-219,116,-257,-219,116,116,-256,116,-180,-183,-181,-177,-178,-182,-184,116,-186,-187,-179,-185,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,116,-12,116,116,-11,-219,-39,-42,-38,116,-40,116,116,-152,-151,-43,-153,116,-41,116,116,116,-257,-135,-171,-170,116,-168,116,116,-154,116,-167,-155,116,116,116,116,-257,116,116,-166,-169,116,-158,116,-156,116,116,-157,116,116,116,-257,116,-162,-161,-159,116,116,116,-163,-160,116,-165,-164,]),'_COMPLEX':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,3
2,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[27,27,-61,-72,-71,-58,-54,-55,-33,-29,-59,27,-34,-53,-68,-63,-52,27,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,27,-67,27,-70,-74,27,-57,-84,-255,-83,27,-111,-110,-30,27,-100,-99,27,27,-45,-46,27,-113,27,27,27,27,-90,27,27,27,27,-36,27,-47,27,27,-85,-91,-256,27,-114,27,27,-115,-117,-116,27,-101,-37,-39,-42,-38,-40,27,-152,-151,-43,-153,-41,-87,-86,-92,-93,27,-103,-102,-171,-170,27,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'PLUSEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[190,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'STRUCT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[34,34,-61,-72,-71,-58,-54,-55,-33,-29,-59,34,-34,-53,-68,-63,-52,34,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,34,-67,34,-70,-74,34,-57,-84,-255,-83,34,-111,-110,-30,34,-100,-99,34,34,-45,-46,34,-113,34,34,34,34,-90,34,34,34,34,-36,34,-47,34,34,-85,-91,-256,34,-114,34,34,-115,-117,-116,34,-101,-37,-39,-42,-38,-40,34,-152,-151,-43,-153,-41,-87,-86,-92,-93,34,-103,-102,-171,-170,34,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'CONDOP':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,235,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-217,-225,-226,-211,-234,-235,]),'BREAK':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,287,-256,-39,-42,-38,-40,287,-152,-151,-43,-153,287,-41,-171,-170,-168,287,-154,-167,-155,287,-166,-169,-158,287,-156,287,-157,287,287,-162,-161,-159,287,287,-163,-160,287,-165,-164,]),'VOLATILE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,67,69,76,78,82,87,89,90,91,92,93,94,95,96,119,145,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[46,46,-61,-72,-71,-58,-54,-55,-33,-29,-59,46,-34,-53,-68,-63,-52,46,-56,-174,-109,-66,46,-69,
-32,-73,-112,-64,-31,-60,-35,-62,-65,46,-67,46,-70,-74,46,-57,-84,-255,-83,46,-111,-110,-30,46,-100,-99,46,-120,46,46,-45,-46,46,-113,46,46,46,46,-90,46,46,-121,46,46,-36,46,-47,46,46,-85,-91,-256,46,-114,46,46,-115,-117,-116,46,-101,-37,-39,-42,-38,-40,46,-152,-151,-43,-153,-41,-87,-86,-92,-93,46,-103,-102,-171,-170,46,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'INLINE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[49,49,-61,-72,-71,-58,-54,-55,-33,-29,-59,49,-34,-53,-68,-63,-52,49,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,49,-67,49,-70,-74,49,-57,-84,-255,-83,-111,-110,-30,49,-100,-99,49,49,-45,-46,49,-113,49,49,-36,49,-47,-85,-256,-114,-115,-117,-116,49,-101,-37,-39,-42,-38,-40,49,-152,-151,-43,-153,-41,-87,-86,49,-103,-102,-171,-170,49,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'DO':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,290,-256,-39,-42,-38,-40,290,-152,-151,-43,-153,290,-41,-171,-170,-168,290,-154,-167,-155,290,-166,-169,-158,290,-156,290,-157,290,290,-162,-161,-159,290,290,-163,-160,290,-165,-164,]),'LNOT':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,98,98,98,-45,-223,98,-221,98,-220,98,-219,98,98,-218,-222,-219,98,-257,-219,98,98,-256,98,-180,-183,-181,-177,-178,-182,-184,98,-186,-187,-179,-185,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,98,-12,98,98,-11,-219,-39,-42,-38,98,-40,98,98,-152,-151,-43,-153,98,-41,98,98,98,-257,-135,-171,-170,98,-168,98,98,-154,98,-167,-155,98,98,98,98,-257,98,98,-166,-169,98,-158,98,-156,98,98,-157,98,98,98,-257,98,-162,-161,-159,98,98,98,-163,-160,98,-165,-164,]),'CONST':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,67,69,76,78,82,87,89,90,91,92,93,94,95,96,119,145,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[3,3,-61,-72,-71,-58,-54,-55,-33,-29,-59,3,-34,-53,-68,-63,-52,3,-56,-174,-109,-66,3,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,3,-67,3,-70,-74,3,-57,-84,-255,-83,3,-111,-110,-30,3,-100,-99,3,-120,3,3,-45,-46,3,-113,3,3,3,3,-90,3,3,-121,3,3,-36,3,-47,3,3,-85,-91,-256,3,-114,3,3,-115,-117,-116,3,-101,-37,-39,-42,-38,-40,3,-152,-151,-43,-153,-41,-87,-86,-92,-93,3,-103,-102,-171,-170,3,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'LOR':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,20
0,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,220,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-217,-225,-226,-211,-234,-235,]),'CHAR_CONST':([55,62,72,77,82,98,106,107,112,113,115,117,119,120,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,101,101,101,-45,-223,101,-221,101,-220,101,-219,101,101,-218,-222,-219,101,-257,-219,101,101,-256,101,-180,-183,-181,-177,-178,-182,-184,101,-186,-187,-179,-185,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,101,-12,101,101,-11,-219,-39,-42,-38,101,-40,101,101,-152,-151,-43,-153,101,-41,101,101,101,-257,-135,-171,-170,101,-168,101,101,-154,101,-167,-155,101,101,101,101,-257,101,101,-166,-169,101,-158,101,-156,101,101,-157,101,101,101,-257,101,-162,-161,-159,101,101,101,-163,-160,101,-165,-164,]),'LSHIFT':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,221,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,221,-198,-196,221,221,221,-195,221,221,-193,-192,221,221,221,221,221,-194,-217,-225,-226,-211,-234,-235,]),'RBRACE':([55,82,93,95,99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,130,142,143,144,155,167,169,170,172,173,197,199,200,201,203,209,210,212,218,246,247,248,262,269,270,273,275,282,284,285,286,288,289,291,292,298,300,301,303,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,345,346,349,350,351,358,359,362,367,369,371,376,377,378,383,388,389,390,393,398,401,402,403,407,411,412,415,416,418,419,426,427,429,430,],[-255,-45,173,-90,-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,-189,-248,-245,-175,-104,173,-107,-130,-257,173,173,-91,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,173,173,-105,173,-39,-42,-38,-40,-6,-152,-151,-43,-153,-5,-41,173,-188,-92,-93,-176,-231,-230,-229,-228,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-106,-108,-133,173,-131,-171,-170,-168,-154,-167,-155,-217,-225,-226,-211,-132,-166,-169,-158,-156,173,-190,-134,-157,173,-234,-162,-161,-159,-235,-163,-160,-165,-164,]),'_BOOL':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[13
,13,-61,-72,-71,-58,-54,-55,-33,-29,-59,13,-34,-53,-68,-63,-52,13,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,13,-67,13,-70,-74,13,-57,-84,-255,-83,13,-111,-110,-30,13,-100,-99,13,13,-45,-46,13,-113,13,13,13,13,-90,13,13,13,13,-36,13,-47,13,13,-85,-91,-256,13,-114,13,13,-115,-117,-116,13,-101,-37,-39,-42,-38,-40,13,-152,-151,-43,-153,-41,-87,-86,-92,-93,13,-103,-102,-171,-170,13,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'LE':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,223,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,223,-198,-196,-200,223,-199,-195,-202,223,-193,-192,-201,223,223,223,223,-194,-217,-225,-226,-211,-234,-235,]),'SEMI':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,49,50,51,52,54,55,56,58,59,60,61,64,65,67,68,69,70,71,73,74,75,76,79,80,81,82,83,84,86,90,94,96,97,99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,130,145,146,150,153,155,163,165,166,167,171,173,174,175,176,177,179,180,181,182,183,184,197,199,200,201,203,207,209,210,212,213,216,218,238,239,249,251,252,253,254,255,264,265,269,270,272,273,274,275,277,278,282,284,285,286,287,288,289,290,291,293,295,296,297,298,303,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,343,344,351,354,355,356,357,358,359,360,361,362,363,366,367,369,371,372,374,375,376,377,378,382,383,388,389,390,391,392,393,395,398,402,404,405,406,407,408,410,412,413,415,416,418,419,422,423,425,426,427,428,429,430,],[15,-257,-61,-72,-71,-58,-54,-55,-33,-29,-59,-257,-34,-53,-68,-63,-52,15,-56,-174,-109,-66,-257,-69,-257,-32,-73,-112,-64,-31,-60,-35,-62,-65,82,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-255,-83,-49,-111,-110,-30,-100,-99,-26,-118,-120,-25,-18,-44,-141,-17,-79,-78,-75,-143,-45,-48,-51,-257,-113,-257,-257,-257,-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,-189,-248,-245,-175,-121,-119,-142,-77,-130,-141,-79,-36,-257,-85,-256,-21,-82,-22,-81,-24,300,-94,301,-23,-96,-254,-216,-252,-233,-232,-114,-210,-215,-213,-115,-172,-214,-117,-116,-101,-76,-146,-148,-80,-37,-150,-144,-39,-42,358,-38,359,-40,362,-14,-257,-152,-151,-43,369,-153,-13,-257,-41,-243,-87,-86,-98,-188,-176,-231,-230,-229,-228,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,-103,-102,-131,-145,-147,-149,389,-171,-170,390,-257,-168,-257,-13,-154,-167,-155,-257,-95,-97,-217,-225,-226,-173,-211,-132,-166,-169,404,-257,-158,-257,-156,-190,-257,414,-257,-157,-257,-257,-234,420,-162,-161,-159,-235,426,-257,-257,-163,-160,-257,-165,-164,]),'LT':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,225,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,225,-198,-196,-200,225,-199,-195,-202,225,-193,-192,-201,225,225,225,225,-194,-217,-225,-226,-211,-234,-235,]),'COMMA':([1,2,3,
5,6,9,10,13,14,17,18,19,21,23,24,25,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,64,65,67,68,69,70,71,74,76,79,80,81,83,84,90,99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,130,135,136,137,138,139,142,143,144,145,146,150,153,155,163,165,171,173,179,181,184,197,199,200,201,203,207,209,210,212,213,214,216,218,238,239,240,241,242,243,246,247,248,249,251,252,253,254,262,264,265,278,293,295,296,297,298,303,305,306,307,308,309,310,311,312,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,337,338,339,340,342,343,344,345,346,349,351,354,355,356,360,374,375,376,377,378,382,383,388,394,396,399,400,401,402,403,412,417,419,],[-257,-61,-72,-71,-58,-54,-55,-59,-257,-53,-68,-63,-52,-56,-174,-109,-66,-257,-69,-73,-112,-64,-60,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-83,-49,-111,-110,-100,-99,-26,-118,-120,-25,147,-141,-79,-78,-75,-143,-48,-51,-113,-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,-189,-248,-245,-175,-124,-257,244,245,-128,-104,248,-107,-121,-119,-142,-77,-130,-141,-79,-85,-256,299,-94,-96,-254,-216,-252,-233,-232,-114,-210,-215,-213,-115,316,-172,-214,-117,-116,-127,-2,-126,-1,248,248,-105,-101,-76,-146,-148,-80,350,-150,-144,316,-243,-87,-86,-98,-188,-176,-231,-230,-229,-228,316,-241,-227,379,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,316,-206,-194,-129,-125,-103,-102,-106,-108,-133,-131,-145,-147,-149,316,-95,-97,-217,-225,-226,-173,-211,-132,316,316,316,-242,411,-190,-134,-234,316,-235,]),'TYPEDEF':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[23,23,-61,-72,-71,-58,-54,-55,-33,-29,-59,23,-34,-53,-68,-63,-52,23,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,23,-67,23,-70,-74,23,-57,-84,-255,-83,-111,-110,-30,23,-100,-99,23,23,-45,-46,23,-113,23,23,-36,23,-47,-85,-256,-114,-115,-117,-116,23,-101,-37,-39,-42,-38,-40,23,-152,-151,-43,-153,-41,-87,-86,23,-103,-102,-171,-170,23,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'XOR':([99,100,101,102,103,104,105,108,109,111,116,118,121,122,124,125,126,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,376,377,378,383,412,419,],[-210,-239,-249,-253,-250,-247,-237,-238,-212,-191,-246,-224,-243,-251,-244,-236,228,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-197,228,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,228,-203,-205,228,-194,-217,-225,-226,-211,-234,-235,]),'AUTO':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,63,64,65,76,78,82,87,89,90,162,164,166,167,168,171,173,207,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[21,21,-61,-72,-71,-58,-54,-55,-33,-29,-59,21,-34,-53,-68,-63,-52,21,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,21,-67,21,-70,-74,21,-57,-84,-255,-83,-111,-110,-30,21,-100,-99,21,21,-45,-46,21,-113,21,21,-36,21,-47,-85,-256,-114,-115,-117,-116,21,-101,-37,-39,-42,-38,-40,2
1,-152,-151,-43,-153,-41,-87,-86,21,-103,-102,-171,-170,21,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'TIMES':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,27,28,29,30,31,32,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,64,65,67,68,69,70,72,77,78,82,83,84,86,94,96,97,98,99,100,101,102,103,104,105,106,107,108,109,111,112,113,115,116,117,118,119,120,121,122,123,124,125,126,127,128,131,136,145,147,149,151,154,156,161,164,166,167,171,173,174,175,176,177,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,199,200,201,203,205,206,208,209,210,211,212,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,249,250,255,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,295,296,299,302,305,306,307,308,311,315,316,317,319,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,343,344,350,353,358,359,361,362,363,364,367,368,369,371,372,373,376,377,378,379,380,383,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[28,-257,-61,-72,28,-71,-58,-54,-55,-33,-29,-59,-257,-34,-53,-68,-63,-52,28,-56,-174,-66,-257,-69,28,-32,-73,-64,-31,-60,-35,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-255,-83,-49,-30,117,-100,-99,-26,28,-120,-25,149,156,28,-45,-48,-51,28,-257,-257,28,-223,-210,-239,-249,-253,-250,-247,-237,156,-221,-238,-212,-191,156,-220,156,-246,-219,-224,156,156,-243,-251,-218,-244,-236,230,-248,-245,-222,28,-121,28,-219,156,-257,-219,267,28,-36,156,-85,-256,-21,-82,-22,-81,156,-180,-183,-181,-177,-178,-182,-184,156,-186,-187,-179,-185,-254,156,-216,-252,-233,-232,156,156,156,-210,-215,156,-213,28,-214,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,156,-101,156,-37,-12,156,156,-11,-219,-39,-42,-38,156,-40,156,156,-152,-151,-43,-153,156,-41,-243,-87,-86,28,156,-231,-230,-229,-228,-227,-240,156,156,28,230,230,230,230,230,230,230,230,230,230,-193,-192,230,230,230,230,230,-194,-103,-102,-257,-135,-171,-170,156,-168,156,156,-154,156,-167,-155,156,156,-217,-225,-226,156,156,-211,-257,156,156,-166,-169,156,-158,156,-156,156,156,-157,156,156,156,-257,-234,156,-162,-161,-159,-235,156,156,156,-163,-160,156,-165,-164,]),'LPAREN':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,16,17,18,19,21,22,23,24,25,27,28,29,30,31,32,33,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,61,62,64,65,67,68,69,70,72,74,77,78,81,82,83,84,86,90,94,96,97,98,100,101,102,103,104,105,106,107,108,109,112,113,115,116,117,118,119,120,121,122,123,124,125,127,128,131,136,145,146,147,149,150,151,154,156,161,163,164,166,167,171,173,174,175,176,177,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,203,205,206,207,208,211,213,217,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,238,239,249,250,252,253,255,256,259,260,261,264,265,267,269,270,273,274,275,276,280,281,282,283,284,285,286,288,290,291,293,294,295,296,299,302,305,306,307,308,311,315,316,317,319,320,343,344,350,353,354,355,356,358,359,361,362,363,364,367,368,369,371,372,373,377,378,379,380,384,385,387,389,390,392,393,395,397,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[4,-257,-61,-72,4,-71,-58,-54,-55,-33,-29,-59,-257,-34,4,-53,-68,-63,-52,4,-56,-174,63,-66,-257,-69,78,-32,-73,-112,-64,-31,-60,-35,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-255,-83,-49,63,-30,119,-100,-99,-26,-118,-120,-25,119,78,119,78,162,-45,-48,-51,164,-113,-257,-257,164,-223,-239,-
249,-253,-250,-247,-237,198,-221,-238,206,208,-220,211,-246,-219,-224,119,211,-243,-251,-218,-244,-236,-248,-245,-222,78,-121,-119,4,-219,162,119,-257,-219,119,164,164,-36,119,-85,-256,-21,-82,-22,-81,208,-180,-183,-181,-177,-178,-182,-184,119,-186,-187,-179,-185,-254,119,-252,-233,-232,119,119,-114,119,119,-115,319,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,208,119,208,208,-117,-116,-101,208,-146,-148,-37,-12,208,119,-11,-150,-144,-219,-39,-42,-38,119,-40,361,364,208,119,368,-152,-151,-43,-153,119,-41,-243,373,-87,-86,4,208,-231,-230,-229,-228,-227,-240,119,208,319,319,-103,-102,-257,-135,-145,-147,-149,-171,-170,119,-168,119,119,-154,119,-167,-155,119,119,-225,-226,119,208,-257,208,119,-166,-169,119,-158,119,409,-156,119,119,-157,119,119,119,-257,-234,119,-162,-161,-159,-235,119,119,119,-163,-160,119,-165,-164,]),'MINUSMINUS':([55,62,72,77,82,98,100,101,102,103,104,105,106,107,108,109,112,113,115,116,117,118,119,120,121,122,123,124,125,127,128,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,197,198,200,201,203,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,293,302,305,306,307,308,311,315,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,377,378,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,412,414,415,416,418,419,420,423,425,426,427,428,429,430,],[-255,120,120,120,-45,-223,-239,-249,-253,-250,-247,-237,120,-221,-238,201,120,-220,120,-246,-219,-224,120,120,-243,-251,-218,-244,-236,-248,-245,-222,-219,120,-257,-219,120,120,-256,120,-180,-183,-181,-177,-178,-182,-184,120,-186,-187,-179,-185,-254,120,-252,-233,-232,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,120,-12,120,120,-11,-219,-39,-42,-38,120,-40,120,120,-152,-151,-43,-153,120,-41,-243,120,-231,-230,-229,-228,-227,-240,120,120,-257,-135,-171,-170,120,-168,120,120,-154,120,-167,-155,120,120,-225,-226,120,120,-257,120,120,-166,-169,120,-158,120,-156,120,120,-157,120,120,120,-257,-234,120,-162,-161,-159,-235,120,120,120,-163,-160,120,-165,-164,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,26,27,28,29,30,31,32,34,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,63,64,65,66,67,68,69,70,72,74,77,78,82,83,84,86,94,96,97,98,106,107,112,113,115,117,119,120,123,131,136,140,141,145,146,147,149,151,154,156,161,163,164,166,167,171,173,174,175,176,177,178,185,186,187,188,189,190,191,192,193,194,195,196,198,202,204,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,244,248,249,250,255,256,258,259,260,261,267,269,270,271,273,274,275,281,282,284,285,286,288,290,291,295,296,299,302,316,317,343,344,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[33,-257,-61,-72,33,-71,-58,56,-54,-55,-33,-29,-59,-257,-34,33,-53,-68,-63,-89,-52,33,-56,-174,65,-66,-257,-69,33,-32,-73,-88,-64,-31,-60,-35,-62,-65,-257,-67,-257,-70,-74,-57,-50,-9,-10,-84,-255,-83,-49,-30,121,121,-100,-99,144,-26,-118,-120,-25,121,33,121,33,-45,-48,-51,33,-257,-257,33,-223,121,-221,121,-220,121,-219,121,121,-218,-222,33,144,144,-121,-119,33,-219,121,-257,-219,121,33,33,-36,293,-85,-256,-21,-82,-22,-81,121,-180,-183,-181,-177,-178,-182,-184,121,-186,-187,-179,-185,121,306,308,121,121,121,121,121,121,121,121,121,121,121,121,121,121
,121,121,121,121,121,121,121,121,121,121,144,-101,121,-37,-12,121,121,121,-11,-219,-39,-42,357,-38,121,-40,121,293,-152,-151,-43,-153,293,-41,-87,-86,33,121,121,121,-103,-102,-257,-135,-171,-170,121,-168,293,121,-154,121,-167,-155,293,121,121,121,-257,121,121,-166,-169,121,-158,293,-156,121,293,-157,293,121,293,-257,121,-162,-161,-159,121,293,293,-163,-160,293,-165,-164,]),'IF':([55,82,167,173,269,270,273,275,282,284,285,286,288,290,291,358,359,362,363,367,369,371,372,389,390,393,395,398,406,407,408,410,415,416,418,423,425,426,427,428,429,430,],[-255,-45,294,-256,-39,-42,-38,-40,294,-152,-151,-43,-153,294,-41,-171,-170,-168,294,-154,-167,-155,294,-166,-169,-158,294,-156,294,-157,294,294,-162,-161,-159,294,294,-163,-160,294,-165,-164,]),'STRING_LITERAL':([55,62,72,77,82,98,106,107,108,112,113,115,117,119,120,122,123,131,149,151,154,156,161,167,173,178,185,186,187,188,189,190,191,192,193,194,195,196,198,200,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,256,259,260,261,267,269,270,273,274,275,281,282,284,285,286,288,290,291,302,316,317,350,353,358,359,361,362,363,364,367,368,369,371,372,373,379,380,384,385,387,389,390,392,393,395,398,404,406,407,408,409,410,411,414,415,416,418,420,423,425,426,427,428,429,430,],[-255,122,122,122,-45,-223,122,-221,200,122,-220,122,-219,122,122,-251,-218,-222,-219,122,-257,-219,122,122,-256,122,-180,-183,-181,-177,-178,-182,-184,122,-186,-187,-179,-185,122,-252,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,-12,122,122,-11,-219,-39,-42,-38,122,-40,122,122,-152,-151,-43,-153,122,-41,122,122,122,-257,-135,-171,-170,122,-168,122,122,-154,122,-167,-155,122,122,122,122,-257,122,122,-166,-169,122,-158,122,-156,122,122,-157,122,122,122,-257,122,-162,-161,-159,122,122,122,-163,-160,122,-165,-164,]),'FLOAT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,24,25,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,63,64,65,76,78,82,87,89,90,91,92,93,94,95,96,119,162,164,166,167,168,169,170,171,172,173,198,207,208,211,213,238,239,245,249,255,269,270,273,275,282,284,285,286,288,291,295,296,300,301,319,343,344,358,359,361,362,367,369,371,389,390,393,398,407,415,416,418,426,427,429,430,],[35,35,-61,-72,-71,-58,-54,-55,-33,-29,-59,35,-34,-53,-68,-63,-52,35,-56,-174,-109,-66,-69,-32,-73,-112,-64,-31,-60,-35,-62,-65,35,-67,35,-70,-74,35,-57,-84,-255,-83,35,-111,-110,-30,35,-100,-99,35,35,-45,-46,35,-113,35,35,35,35,-90,35,35,35,35,-36,35,-47,35,35,-85,-91,-256,35,-114,35,35,-115,-117,-116,35,-101,-37,-39,-42,-38,-40,35,-152,-151,-43,-153,-41,-87,-86,-92,-93,35,-103,-102,-171,-170,35,-168,-154,-167,-155,-166,-169,-158,-156,-157,-162,-161,-159,-163,-160,-165,-164,]),'XOREQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[189,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'LSHIFTEQUAL':([99,100,101,102,103,104,105,108,109,116,118,121,122,124,125,127,128,173,197,199,200,201,203,209,210,212,218,293,305,306,307,308,311,315,376,377,378,383,412,419,],[191,-239,-249,-253,-250,-247,-237,-238,-212,-246,-224,-243,-251,-244,-236,-248,-245,-256,-254,-216,-252,-233,-232,-210,-215,-213,-214,-243,-231,-230,-229,-228,-227,-240,-217,-225,-226,-211,-234,-235,]),'RBRACKET':([62,72,99,100,101,102,103,104,105,108,10
9,110,111,114,116,117,118,121,122,124,125,126,127,128,129,130,148,149,161,173,197,199,200,201,203,209,210,212,216,218,266,267,298,303,305,306,307,308,309,311,315,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,338,339,348,376,377,378,382,383,402,412,419,],[-257,-257,-210,-239,-249,-253,-250,-247,-237,-238,-212,207,-191,-4,-246,213,-224,-243,-251,-244,-236,-189,-248,-245,-3,-175,252,253,-257,-256,-254,-216,-252,-233,-232,-210,-215,-213,-172,-214,354,355,-188,-176,-231,-230,-229,-228,377,-227,-240,-197,-209,-198,-196,-200,-204,-199,-195,-202,-207,-193,-192,-201,-208,-203,-205,-206,-194,386,-217,-225,-226,-173,-211,-190,-234,-235,]),} - -_lr_action = { } -for _k, _v in _lr_action_items.items(): - for _x,_y in zip(_v[0],_v[1]): - if not _x in _lr_action: _lr_action[_x] = { } - _lr_action[_x][_k] = _y -del _lr_action_items - -_lr_goto_items = {'storage_class_specifier':([0,1,14,22,42,44,48,63,76,78,89,162,164,167,245,282,319,361,],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]),'identifier_list_opt':([63,],[132,]),'parameter_declaration':([63,78,162,164,245,319,],[135,135,135,135,342,135,]),'selection_statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[291,291,291,291,291,291,291,291,291,291,291,291,]),'constant':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,105,]),'unary_expression':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[99,99,99,199,209,212,99,218,99,99,99,209,99,99,99,99,99,99,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,209,99,209,209,209,209,99,99,209,99,99,209,99,209,99,99,99,99,99,99,99,209,209,99,99,99,99,99,99,99,99,99,99,99,99,99,]),'conditional_expression':([62,72,77,119,151,161,167,178,192,198,205,206,208,211,235,250,259,260,274,281,282,290,302,316,361,363,364,368,372,373,379,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[130,130,130,130,130,130,130,298,130,130,130,130,130,130,130,298,298,130,130,298,130,130,298,130,130,130,130,130,130,130,130,402,130,130,130,130,130,130,130,130,130,130,130,130,130,]),'brace_close':([93,143,169,170,246,247,262,292,350,401,411,],[171,249,295,296,343,344,351,371,388,412,419,]),'struct_or_union_specifier':([0,1,14,22,42,44,48,57,63,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'unified_wstring_literal':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,1
00,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,]),'abstract_declarator_opt':([136,217,],[240,318,]),'iteration_statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[270,270,270,270,270,270,270,270,270,270,270,270,]),'init_declarator_list':([30,86,],[71,71,]),'translation_unit_or_empty':([0,],[8,]),'struct_declaration_list':([57,91,92,],[93,169,170,]),'block_item_list_opt':([167,],[292,]),'enumerator':([66,140,141,248,],[142,142,142,345,]),'pp_directive':([0,22,],[11,11,]),'abstract_declarator':([30,78,86,97,136,164,217,319,],[79,160,79,182,241,160,241,160,]),'declaration_specifiers_opt':([1,14,42,44,],[50,58,83,84,]),'external_declaration':([0,22,],[12,61,]),'type_specifier':([0,1,14,22,42,44,48,57,63,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[14,14,14,14,14,14,14,94,14,14,14,14,94,94,94,94,94,94,14,14,14,94,94,94,94,94,14,14,14,14,]),'designation':([154,350,384,411,],[256,256,256,256,]),'compound_statement':([88,152,167,282,290,363,372,395,406,408,410,423,425,428,],[166,255,275,275,275,275,275,275,275,275,275,275,275,275,]),'pointer':([0,4,22,30,68,78,86,97,136,147,164,217,299,319,],[16,16,16,74,146,74,163,163,74,16,163,320,16,320,]),'type_name':([119,198,208,211,],[215,304,313,314,]),'unified_string_literal':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,108,]),'postfix_expression':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,109,]),'assignment_expression_opt':([62,72,161,],[110,148,266,]),'designation_opt':([154,350,384,411,],[260,387,260,387,]),'expression_statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[269,269,269,269,269,269,269,269,269,269,269,269,]),'unary_operator':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,]),'cast_expression':([62,72,77,112,119,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[111,111,111,210,111,111,111
,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,111,383,111,111,111,111,111,111,111,383,111,111,111,111,111,111,111,111,111,111,111,111,111,111,]),'init_declarator':([30,86,147,],[80,80,251,]),'struct_declarator_list':([97,],[179,]),'brace_open':([7,26,54,56,64,65,77,88,151,152,167,260,282,290,317,363,372,376,380,381,387,395,406,408,410,423,425,428,],[57,66,91,92,140,141,154,167,154,167,167,154,167,167,384,167,167,384,384,384,154,167,167,167,167,167,167,167,]),'assignment_operator':([99,],[192,]),'struct_or_union':([0,1,14,22,42,44,48,57,63,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,]),'identifier':([62,63,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,244,250,258,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[125,139,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,340,125,347,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,]),'struct_declaration':([57,91,92,93,169,170,],[95,95,95,172,172,172,]),'assignment_expression':([62,72,77,119,151,161,167,192,198,205,206,208,211,235,260,274,282,290,316,361,363,364,368,372,373,379,387,392,395,404,406,408,409,410,414,420,423,425,428,],[114,114,155,216,155,114,216,303,216,216,310,216,216,216,155,216,216,216,382,216,216,216,216,216,216,400,155,216,216,216,216,216,216,216,216,216,216,216,216,]),'parameter_type_list':([63,78,162,164,319,],[134,158,158,158,158,]),'type_qualifier_list_opt':([28,],[68,]),'direct_declarator':([0,4,16,22,30,74,78,86,97,136,147,163,164,299,],[25,25,60,25,25,60,25,25,25,25,25,60,25,25,]),'type_qualifier_list':([28,],[67,]),'designator':([154,263,350,384,411,],[257,352,257,257,257,]),'argument_expression_list':([206,],[312,]),'initializer':([77,151,260,387,],[153,254,349,403,]),'specifier_qualifier_list_opt':([94,96,],[175,177,]),'constant_expression':([178,250,259,281,302,],[297,346,348,365,375,]),'expression_opt':([167,282,290,361,363,372,392,395,404,406,408,410,414,420,423,425,428,],[272,272,272,391,272,272,405,272,413,272,272,272,421,424,272,272,272,]),'primary_expression':([62,72,77,106,112,115,119,120,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,317,361,363,364,368,372,373,379,380,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,]),'declaration_specifiers':([0,1,14,22,42,44,48,63,76,78,89,162,164,167,245,282,319,361,],[30,52,52,30,52,52,86,136,86,136,86,136,136,86,136,86,136,86,]),'declaration':([0,22,48,76,89,167,282,361,],[31,31,87,87,168,285,285,392,]),'struct_declarator_list_opt':([97,],[180,]),'identifier_list':([63,],[137,]),'typedef_name':([0,1,14,22,42,44,48,57,63,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29
,29,29,29,29,29,29,29,29,]),'parameter_type_list_opt':([78,162,164,319,],[159,268,159,159,]),'jump_statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[286,286,286,286,286,286,286,286,286,286,286,286,]),'declaration_list_opt':([48,76,],[88,152,]),'struct_declarator':([97,299,],[181,374,]),'function_definition':([0,22,],[36,36,]),'binary_expression':([62,72,77,119,151,161,167,178,192,198,205,206,208,211,219,220,221,222,223,224,225,226,227,228,229,230,231,232,233,234,235,236,237,250,259,260,274,281,282,290,302,316,361,363,364,368,372,373,379,385,387,392,395,404,406,408,409,410,414,420,423,425,428,],[126,126,126,126,126,126,126,126,126,126,126,126,126,126,321,322,323,324,325,326,327,328,329,330,331,332,333,334,335,336,126,338,339,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,]),'parameter_list':([63,78,162,164,319,],[138,138,138,138,138,]),'init_declarator_list_opt':([30,86,],[73,73,]),'enum_specifier':([0,1,14,22,42,44,48,57,63,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,]),'decl_body':([0,22,48,76,89,167,282,361,],[41,41,41,41,41,41,41,41,]),'type_qualifier':([0,1,14,22,28,42,44,48,57,63,67,76,78,89,91,92,93,94,96,119,162,164,167,169,170,198,208,211,245,282,319,361,],[42,42,42,42,69,42,42,42,96,42,145,42,42,42,96,96,96,96,96,96,42,42,42,96,96,96,96,96,42,42,42,42,]),'statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[284,284,370,393,398,407,415,416,418,427,429,430,]),'enumerator_list':([66,140,141,],[143,246,247,]),'labeled_statement':([167,282,290,363,372,395,406,408,410,423,425,428,],[273,273,273,273,273,273,273,273,273,273,273,273,]),'function_specifier':([0,1,14,22,42,44,48,63,76,78,89,162,164,167,245,282,319,361,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'specifier_qualifier_list':([57,91,92,93,94,96,119,169,170,198,208,211,],[97,97,97,97,176,176,217,97,97,217,217,217,]),'block_item':([167,282,],[288,367,]),'block_item_list':([167,],[282,]),'empty':([0,1,14,28,30,42,44,48,62,63,72,76,78,86,94,96,97,136,154,161,162,164,167,217,282,290,319,350,361,363,372,384,392,395,404,406,408,410,411,414,420,423,425,428,],[47,51,51,70,75,51,51,85,129,133,129,85,157,75,174,174,183,243,261,129,157,157,289,243,366,366,157,261,366,366,366,261,366,366,366,366,366,366,261,366,366,366,366,366,]),'translation_unit':([0,],[22,]),'initializer_list':([154,384,],[262,401,]),'declarator':([0,4,22,30,78,86,97,136,147,164,299,],[48,53,48,76,53,165,184,242,165,53,184,]),'direct_abstract_declarator':([30,74,78,86,97,136,163,164,217,319,320,],[81,150,81,81,81,81,150,81,81,81,150,]),'designator_list':([154,350,384,411,],[263,263,263,263,]),'declaration_list':([48,76,],[89,89,]),'expression':([119,167,198,205,208,211,235,274,282,290,361,363,364,368,372,373,392,395,404,406,408,409,410,414,420,423,425,428,],[214,278,214,309,214,214,337,360,278,278,278,278,394,396,278,399,278,278,278,278,278,417,278,278,278,278,278,278,]),} - -_lr_goto = { } -for _k, _v in _lr_goto_items.items(): - for _x,_y in zip(_v[0],_v[1]): - if not _x in _lr_goto: _lr_goto[_x] = { } - _lr_goto[_x][_k] = _y -del _lr_goto_items -_lr_productions = [ - ("S' -> translation_unit_or_empty","S'",1,None,None,None), - ('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',41), - ('abstract_declarator_opt -> 
abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',42), - ('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',41), - ('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',42), - ('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',41), - ('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',42), - ('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',41), - ('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',42), - ('declaration_specifiers_opt -> empty','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',41), - ('declaration_specifiers_opt -> declaration_specifiers','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',42), - ('designation_opt -> empty','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',41), - ('designation_opt -> designation','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',42), - ('expression_opt -> empty','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',41), - ('expression_opt -> expression','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',42), - ('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',41), - ('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',42), - ('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',41), - ('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',42), - ('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',41), - ('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',42), - ('specifier_qualifier_list_opt -> empty','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',41), - ('specifier_qualifier_list_opt -> specifier_qualifier_list','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',42), - ('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',41), - ('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',42), - ('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',41), - ('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',42), - ('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',496), - ('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',497), - ('translation_unit -> 
external_declaration','translation_unit',1,'p_translation_unit_1','../pycparser/c_parser.py',505), - ('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','../pycparser/c_parser.py',512), - ('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','../pycparser/c_parser.py',524), - ('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','../pycparser/c_parser.py',529), - ('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','../pycparser/c_parser.py',534), - ('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','../pycparser/c_parser.py',539), - ('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','../pycparser/c_parser.py',544), - ('function_definition -> declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','../pycparser/c_parser.py',553), - ('function_definition -> declaration_specifiers declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','../pycparser/c_parser.py',570), - ('statement -> labeled_statement','statement',1,'p_statement','../pycparser/c_parser.py',581), - ('statement -> expression_statement','statement',1,'p_statement','../pycparser/c_parser.py',582), - ('statement -> compound_statement','statement',1,'p_statement','../pycparser/c_parser.py',583), - ('statement -> selection_statement','statement',1,'p_statement','../pycparser/c_parser.py',584), - ('statement -> iteration_statement','statement',1,'p_statement','../pycparser/c_parser.py',585), - ('statement -> jump_statement','statement',1,'p_statement','../pycparser/c_parser.py',586), - ('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','../pycparser/c_parser.py',600), - ('declaration -> decl_body SEMI','declaration',2,'p_declaration','../pycparser/c_parser.py',659), - ('declaration_list -> declaration','declaration_list',1,'p_declaration_list','../pycparser/c_parser.py',668), - ('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','../pycparser/c_parser.py',669), - ('declaration_specifiers -> type_qualifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_1','../pycparser/c_parser.py',674), - ('declaration_specifiers -> type_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_2','../pycparser/c_parser.py',679), - ('declaration_specifiers -> storage_class_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_3','../pycparser/c_parser.py',684), - ('declaration_specifiers -> function_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_4','../pycparser/c_parser.py',689), - ('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',694), - ('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',695), - ('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',696), - ('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',697), - ('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',698), - 
('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','../pycparser/c_parser.py',703), - ('type_specifier -> VOID','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',708), - ('type_specifier -> _BOOL','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',709), - ('type_specifier -> CHAR','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',710), - ('type_specifier -> SHORT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',711), - ('type_specifier -> INT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',712), - ('type_specifier -> LONG','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',713), - ('type_specifier -> FLOAT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',714), - ('type_specifier -> DOUBLE','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',715), - ('type_specifier -> _COMPLEX','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',716), - ('type_specifier -> SIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',717), - ('type_specifier -> UNSIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',718), - ('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',723), - ('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',724), - ('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',725), - ('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',730), - ('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',731), - ('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',732), - ('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list_1','../pycparser/c_parser.py',737), - ('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list_1','../pycparser/c_parser.py',738), - ('init_declarator_list -> EQUALS initializer','init_declarator_list',2,'p_init_declarator_list_2','../pycparser/c_parser.py',748), - ('init_declarator_list -> abstract_declarator','init_declarator_list',1,'p_init_declarator_list_3','../pycparser/c_parser.py',756), - ('init_declarator -> declarator','init_declarator',1,'p_init_declarator','../pycparser/c_parser.py',764), - ('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','../pycparser/c_parser.py',765), - ('specifier_qualifier_list -> type_qualifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','../pycparser/c_parser.py',770), - ('specifier_qualifier_list -> type_specifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','../pycparser/c_parser.py',775), - ('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',783), - ('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',784), - ('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','../pycparser/c_parser.py',793), - ('struct_or_union_specifier -> struct_or_union 
ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',802), - ('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',803), - ('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',812), - ('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',813), - ('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','../pycparser/c_parser.py',820), - ('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','../pycparser/c_parser.py',821), - ('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','../pycparser/c_parser.py',826), - ('struct_declaration -> specifier_qualifier_list abstract_declarator SEMI','struct_declaration',3,'p_struct_declaration_2','../pycparser/c_parser.py',864), - ('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','../pycparser/c_parser.py',878), - ('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','../pycparser/c_parser.py',879), - ('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','../pycparser/c_parser.py',887), - ('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','../pycparser/c_parser.py',892), - ('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','../pycparser/c_parser.py',893), - ('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',901), - ('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',902), - ('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','../pycparser/c_parser.py',907), - ('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',912), - ('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',913), - ('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','../pycparser/c_parser.py',918), - ('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','../pycparser/c_parser.py',919), - ('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','../pycparser/c_parser.py',920), - ('enumerator -> ID','enumerator',1,'p_enumerator','../pycparser/c_parser.py',931), - ('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','../pycparser/c_parser.py',932), - ('declarator -> direct_declarator','declarator',1,'p_declarator_1','../pycparser/c_parser.py',947), - ('declarator -> pointer direct_declarator','declarator',2,'p_declarator_2','../pycparser/c_parser.py',952), - ('declarator -> pointer TYPEID','declarator',2,'p_declarator_3','../pycparser/c_parser.py',961), - ('direct_declarator -> ID','direct_declarator',1,'p_direct_declarator_1','../pycparser/c_parser.py',972), - ('direct_declarator -> LPAREN declarator 
RPAREN','direct_declarator',3,'p_direct_declarator_2','../pycparser/c_parser.py',981), - ('direct_declarator -> direct_declarator LBRACKET assignment_expression_opt RBRACKET','direct_declarator',4,'p_direct_declarator_3','../pycparser/c_parser.py',986), - ('direct_declarator -> direct_declarator LBRACKET TIMES RBRACKET','direct_declarator',4,'p_direct_declarator_4','../pycparser/c_parser.py',998), - ('direct_declarator -> direct_declarator LPAREN parameter_type_list RPAREN','direct_declarator',4,'p_direct_declarator_5','../pycparser/c_parser.py',1008), - ('direct_declarator -> direct_declarator LPAREN identifier_list_opt RPAREN','direct_declarator',4,'p_direct_declarator_5','../pycparser/c_parser.py',1009), - ('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','../pycparser/c_parser.py',1036), - ('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','../pycparser/c_parser.py',1037), - ('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','../pycparser/c_parser.py',1047), - ('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','../pycparser/c_parser.py',1048), - ('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','../pycparser/c_parser.py',1053), - ('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','../pycparser/c_parser.py',1054), - ('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','../pycparser/c_parser.py',1062), - ('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','../pycparser/c_parser.py',1063), - ('parameter_declaration -> declaration_specifiers declarator','parameter_declaration',2,'p_parameter_declaration_1','../pycparser/c_parser.py',1072), - ('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','../pycparser/c_parser.py',1083), - ('identifier_list -> identifier','identifier_list',1,'p_identifier_list','../pycparser/c_parser.py',1113), - ('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','../pycparser/c_parser.py',1114), - ('initializer -> assignment_expression','initializer',1,'p_initializer_1','../pycparser/c_parser.py',1123), - ('initializer -> brace_open initializer_list brace_close','initializer',3,'p_initializer_2','../pycparser/c_parser.py',1128), - ('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','../pycparser/c_parser.py',1129), - ('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','../pycparser/c_parser.py',1134), - ('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','../pycparser/c_parser.py',1135), - ('designation -> designator_list EQUALS','designation',2,'p_designation','../pycparser/c_parser.py',1146), - ('designator_list -> designator','designator_list',1,'p_designator_list','../pycparser/c_parser.py',1154), - ('designator_list -> designator_list designator','designator_list',2,'p_designator_list','../pycparser/c_parser.py',1155), - ('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','../pycparser/c_parser.py',1160), - ('designator -> PERIOD identifier','designator',2,'p_designator','../pycparser/c_parser.py',1161), - ('type_name -> specifier_qualifier_list 
abstract_declarator_opt','type_name',2,'p_type_name','../pycparser/c_parser.py',1166), - ('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','../pycparser/c_parser.py',1182), - ('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','../pycparser/c_parser.py',1190), - ('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','../pycparser/c_parser.py',1195), - ('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','../pycparser/c_parser.py',1205), - ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','../pycparser/c_parser.py',1209), - ('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','../pycparser/c_parser.py',1219), - ('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','../pycparser/c_parser.py',1227), - ('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','../pycparser/c_parser.py',1237), - ('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','../pycparser/c_parser.py',1245), - ('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','../pycparser/c_parser.py',1255), - ('block_item -> declaration','block_item',1,'p_block_item','../pycparser/c_parser.py',1266), - ('block_item -> statement','block_item',1,'p_block_item','../pycparser/c_parser.py',1267), - ('block_item_list -> block_item','block_item_list',1,'p_block_item_list','../pycparser/c_parser.py',1274), - ('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','../pycparser/c_parser.py',1275), - ('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','../pycparser/c_parser.py',1281), - ('labeled_statement -> ID COLON statement','labeled_statement',3,'p_labeled_statement_1','../pycparser/c_parser.py',1287), - ('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement_2','../pycparser/c_parser.py',1291), - ('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement_3','../pycparser/c_parser.py',1295), - ('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_1','../pycparser/c_parser.py',1299), - ('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement_2','../pycparser/c_parser.py',1303), - ('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_3','../pycparser/c_parser.py',1307), - ('iteration_statement -> WHILE LPAREN expression RPAREN statement','iteration_statement',5,'p_iteration_statement_1','../pycparser/c_parser.py',1312), - ('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','../pycparser/c_parser.py',1316), - ('iteration_statement -> FOR LPAREN 
expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','../pycparser/c_parser.py',1320), - ('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement','iteration_statement',8,'p_iteration_statement_4','../pycparser/c_parser.py',1324), - ('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','../pycparser/c_parser.py',1328), - ('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','../pycparser/c_parser.py',1332), - ('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','../pycparser/c_parser.py',1336), - ('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','../pycparser/c_parser.py',1340), - ('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','../pycparser/c_parser.py',1341), - ('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','../pycparser/c_parser.py',1346), - ('expression -> assignment_expression','expression',1,'p_expression','../pycparser/c_parser.py',1353), - ('expression -> expression COMMA assignment_expression','expression',3,'p_expression','../pycparser/c_parser.py',1354), - ('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','../pycparser/c_parser.py',1366), - ('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','../pycparser/c_parser.py',1370), - ('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','../pycparser/c_parser.py',1371), - ('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1384), - ('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1385), - ('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1386), - ('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1387), - ('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1388), - ('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1389), - ('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1390), - ('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1391), - ('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1392), - ('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1393), - ('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1394), - ('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','../pycparser/c_parser.py',1399), - ('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','../pycparser/c_parser.py',1403), - ('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','../pycparser/c_parser.py',1404), - ('binary_expression -> 
cast_expression','binary_expression',1,'p_binary_expression','../pycparser/c_parser.py',1412), - ('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1413), - ('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1414), - ('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1415), - ('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1416), - ('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1417), - ('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1418), - ('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1419), - ('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1420), - ('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1421), - ('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1422), - ('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1423), - ('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1424), - ('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1425), - ('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1426), - ('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1427), - ('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1428), - ('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1429), - ('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1430), - ('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','../pycparser/c_parser.py',1438), - ('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','../pycparser/c_parser.py',1442), - ('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','../pycparser/c_parser.py',1446), - ('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1450), - ('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1451), - ('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1452), - ('unary_expression -> SIZEOF 
unary_expression','unary_expression',2,'p_unary_expression_3','../pycparser/c_parser.py',1457), - ('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','../pycparser/c_parser.py',1458), - ('unary_operator -> AND','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1466), - ('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1467), - ('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1468), - ('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1469), - ('unary_operator -> NOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1470), - ('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1471), - ('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','../pycparser/c_parser.py',1476), - ('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','../pycparser/c_parser.py',1480), - ('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','../pycparser/c_parser.py',1484), - ('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','../pycparser/c_parser.py',1485), - ('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1490), - ('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1491), - ('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1492), - ('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1493), - ('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1499), - ('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1500), - ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','../pycparser/c_parser.py',1505), - ('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','../pycparser/c_parser.py',1506), - ('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','../pycparser/c_parser.py',1511), - ('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','../pycparser/c_parser.py',1515), - ('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1519), - ('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1520), - ('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','../pycparser/c_parser.py',1525), - ('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','../pycparser/c_parser.py',1529), - ('argument_expression_list -> argument_expression_list COMMA 
assignment_expression','argument_expression_list',3,'p_argument_expression_list','../pycparser/c_parser.py',1530), - ('identifier -> ID','identifier',1,'p_identifier','../pycparser/c_parser.py',1539), - ('constant -> INT_CONST_DEC','constant',1,'p_constant_1','../pycparser/c_parser.py',1543), - ('constant -> INT_CONST_OCT','constant',1,'p_constant_1','../pycparser/c_parser.py',1544), - ('constant -> INT_CONST_HEX','constant',1,'p_constant_1','../pycparser/c_parser.py',1545), - ('constant -> FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1551), - ('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1552), - ('constant -> CHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1558), - ('constant -> WCHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1559), - ('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','../pycparser/c_parser.py',1570), - ('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','../pycparser/c_parser.py',1571), - ('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','../pycparser/c_parser.py',1581), - ('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','../pycparser/c_parser.py',1582), - ('brace_open -> LBRACE','brace_open',1,'p_brace_open','../pycparser/c_parser.py',1592), - ('brace_close -> RBRACE','brace_close',1,'p_brace_close','../pycparser/c_parser.py',1597), - ('empty -> ','empty',0,'p_empty','../pycparser/c_parser.py',1602), -] diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/DESCRIPTION.rst deleted file mode 100644 index 8577264..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/DESCRIPTION.rst +++ /dev/null @@ -1,1742 +0,0 @@ -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -------------------------- -Installation Instructions -------------------------- - -Upgrading from Distribute -========================= - -Currently, Distribute disallows installing Setuptools 0.7+ over Distribute. -You must first uninstall any active version of Distribute first (see -`Uninstalling`_). - -Upgrading from Setuptools 0.6 -============================= - -Upgrading from prior versions of Setuptools is supported. Initial reports -good success in this regard. - -Windows -======= - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate .egg -file and install it for you. - -.. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. - - -Unix-based Systems including Mac OS X -===================================== - -Download `ez_setup.py`_ and run it using the target Python version. 
The script -will download the appropriate version and install it for you:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python - -Note that you may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | sudo python - -Alternatively, on Python 2.6 and later, Setuptools may be installed to a -user-local path:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - > python ez_setup.py --user - - -Python 2.4 and Python 2.5 support -================================= - -Setuptools 2.0 and later requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the bootstrap script for Setuptools 1.x: -https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py. - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory.
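The bootstrap script described above can also be driven from a project's own ``setup.py``. The following is a minimal sketch, not part of the original file, that assumes ``ez_setup.py`` has been saved next to ``setup.py``; ``use_setuptools()`` is the function the bootstrap script exposes for this purpose (it is mentioned later in this changelog), and the project name and version shown are hypothetical placeholders::

    # setup.py -- sketch: bootstrap Setuptools before importing it
    from ez_setup import use_setuptools
    use_setuptools()  # downloads and installs Setuptools if it is not already present

    from setuptools import setup, find_packages

    setup(
        name='example-project',    # hypothetical
        version='0.1',             # hypothetical
        packages=find_packages(),
    )

With this pattern, ``python setup.py install`` works even on a machine where Setuptools is not yet installed, because the bootstrap step fetches it first.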
- --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `Package Compatibility Notes`_ (user-maintained) -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - -======= -CHANGES -======= - ---- -2.1 ---- - -* `Issue #129 `_: Suppress inspection of '*.whl' files when searching for files - in a zip-imported file. -* `Issue #131 `_: Fix RuntimeError when constructing an egg fetcher. - ------ -2.0.2 ------ - -* Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. 
-* Fix NameError in ``sdist:re_finder``. - ------ -2.0.1 ------ - -* `Issue #124 `_: Fixed error in list detection in upload_docs. - ---- -2.0 ---- - -* `Issue #121 `_: Exempt lib2to3 pickled grammars from DirectorySandbox. -* `Issue #41 `_: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. -* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. -* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - ------ -1.4.2 ------ - -* `Issue #116 `_: Correct TypeError when reading a local package index on Python - 3. - ------ -1.4.1 ------ - -* `Issue #114 `_: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. - -* `Issue #105 `_ and `Issue #113 `_: Establish a more robust technique for - determining the terminal encoding:: - - 1. Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - ---- -1.4 ---- - -* `Issue #27 `_: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. -* `Pull Request #21 `_: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - ------ -1.3.2 ------ - -* `Issue #99 `_: Fix filename encoding issues in SVN support. - ------ -1.3.1 ------ - -* Remove exuberant warning in SVN support when SVN is not used. - ---- -1.3 ---- - -* Address security vulnerability in SSL match_hostname check as reported in - `Python #17997 `_. -* Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. -* Correct NameError in ``ssl_support`` module (``socket.error``). - ---- -1.2 ---- - -* `Issue #26 `_: Add support for SVN 1.7. Special thanks to Philip Thiem for the - contribution. -* `Issue #93 `_: Wheels are now distributed with every release. Note that as - reported in `Issue #108 `_, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. -* Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - ------ -1.1.7 ------ - -* Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). 
-* ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. -* `Distribute #363 `_ and `Issue #55 `_: Skip an sdist test that fails on locales - other than UTF-8. - ------ -1.1.6 ------ - -* `Distribute #349 `_: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. - ------ -1.1.5 ------ - -* `Issue #69 `_: Second attempt at fix (logic was reversed). - ------ -1.1.4 ------ - -* `Issue #77 `_: Fix error in upload command (Python 2.4). - ------ -1.1.3 ------ - -* Fix NameError in previous patch. - ------ -1.1.2 ------ - -* `Issue #69 `_: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - ------ -1.1.1 ------ - -* `Issue #75 `_: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. -* `Issue #76 `_: Fix AttributeError in upload command with Python 2.4. - ---- -1.1 ---- - -* `Issue #71 `_ (`Distribute #333 `_): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. -* `Issue #72 `_: Restored Python 2.4 compatibility in ``ez_setup.py``. - ---- -1.0 ---- - -* `Issue #60 `_: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. -* `Issue #63 `_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. -* `Issue #65 `_: Deprecated the ``Features`` functionality. -* `Issue #52 `_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. - -Backward-Incompatible Changes -============================= - -This release includes a couple of backward-incompatible changes, but most if -not all users will find 1.0 a drop-in replacement for 0.9. - -* `Issue #50 `_: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. -* `Issue #50 `_: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. -* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - ------ -0.9.8 ------ - -* `Issue #53 `_: Fix NameErrors in `_vcs_split_rev_from_url`. - ------ -0.9.7 ------ - -* `Issue #49 `_: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. -* `Issue #34 `_: Documentation now refers to bootstrap script in code repository - referenced by bookmark. -* Add underscore-separated keys to environment markers (markerlib). 
- ------ -0.9.6 ------ - -* `Issue #44 `_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - ------ -0.9.5 ------ - -* `Python #17980 `_: Fix security vulnerability in SSL certificate validation. - ------ -0.9.4 ------ - -* `Issue #43 `_: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - ------ -0.9.3 ------ - -* `Issue #42 `_: Fix new ``AttributeError`` introduced in last fix. - ------ -0.9.2 ------ - -* `Issue #42 `_: Fix regression where blank checksums would trigger an - ``AttributeError``. - ------ -0.9.1 ------ - -* `Distribute #386 `_: Allow other positional and keyword arguments to os.open. -* Corrected dependency on certifi mis-referenced in 0.9. - ---- -0.9 ---- - -* `package_index` now validates hashes other than MD5 in download links. - ---- -0.8 ---- - -* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - ------ -0.7.8 ------ - -* `Distribute #375 `_: Yet another fix for yet another regression. - ------ -0.7.7 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release (redo). -* `Issue #30 `_: Added test for get_cache_path. - ------ -0.7.6 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release. - ------ -0.7.5 ------ - -* `Issue #21 `_: Restore Python 2.4 compatibility in ``test_easy_install``. -* `Distribute #375 `_: Merged additional warning from Distribute 0.6.46. -* Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - ------ -0.7.4 ------ - -* `Issue #20 `_: Fix comparison of parsed SVN version on Python 3. - ------ -0.7.3 ------ - -* `Issue #1 `_: Disable installation of Windows-specific files on non-Windows systems. -* Use new sysconfig module with Python 2.7 or >=3.2. - ------ -0.7.2 ------ - -* `Issue #14 `_: Use markerlib when the `parser` module is not available. -* `Issue #10 `_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - ------ -0.7.1 ------ - -* Fix NameError (`Issue #3 `_) again - broken in bad merge. - ---- -0.7 ---- - -* Merged Setuptools and Distribute. See docs/merge.txt for details. - -Added several features that were slated for setuptools 0.6c12: - -* Index URL now defaults to HTTPS. -* Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. -* Added support for SSL certificate validation when installing packages from - an HTTPS service. - ------ -0.7b4 ------ - -* `Issue #3 `_: Fixed NameError in SSL support. - ------- -0.6.49 ------- - -* Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in `Distribute #375 `_. - ------- -0.6.48 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). - ------- -0.6.47 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. 
- ------- -0.6.46 ------- - -* `Distribute #375 `_: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - ------- -0.6.45 ------- - -* `Distribute #379 `_: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - ------- -0.6.44 ------- - -* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. - ------- -0.6.43 ------- - -* `Distribute #378 `_: Restore support for Python 2.4 Syntax (regression in 0.6.42). - ------- -0.6.42 ------- - -* External links finder no longer yields duplicate links. -* `Distribute #337 `_: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR `#31 `_). - ------- -0.6.41 ------- - -* `Distribute #27 `_: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. -* Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. - ------- -0.6.40 ------- - -* `Distribute #376 `_: brought back cli.exe and gui.exe that were deleted in the - previous release. - ------- -0.6.39 ------- - -* Add support for console launchers on ARM platforms. -* Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. -* Launcher build script now refactored for robustness. -* `Distribute #375 `_: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - ------- -0.6.38 ------- - -* `Distribute #371 `_: The launcher manifest file is now installed properly. - ------- -0.6.37 ------- - -* `Distribute #143 `_: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - ------- -0.6.36 ------- - -* `Pull Request #35 `_: In `Buildout #64 `_, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. - ------- -0.6.35 ------- - - -Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in -how it parses version numbers. - -* `Distribute #278 `_: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - ------- -0.6.34 ------- - -* `Distribute #341 `_: 0.6.33 fails to build under Python 2.4. - ------- -0.6.33 ------- - -* Fix 2 errors with Jython 2.5. -* Fix 1 failure with Jython 2.5 and 2.7. -* Disable workaround for Jython scripts on Linux systems. -* `Distribute #336 `_: `setup.py` no longer masks failure exit code when tests fail. -* Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. -* `Distribute #341 `_: Fix a ResourceWarning. - ------- -0.6.32 ------- - -* Fix test suite with Python 2.6. -* Fix some DeprecationWarnings and ResourceWarnings. 
-* `Distribute #335 `_: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - ------- -0.6.31 ------- - -* `Distribute #303 `_: Make sure the manifest only ever contains UTF-8 in Python 3. -* `Distribute #329 `_: Properly close files created by tests for compatibility with - Jython. -* Work around `Jython #1980 `_ and `Jython #1981 `_. -* `Distribute #334 `_: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv `#359 `_ `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - -* Fix for encoding issue when installing from Windows executable on Python 3. -* `Distribute #323 `_: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - ------- -0.6.30 ------- - -* `Distribute #328 `_: Clean up temporary directories in distribute_setup.py. -* Fix fatal bug in distribute_setup.py. - ------- -0.6.29 ------- - -* `Pull Request #14 `_: Honor file permissions in zip files. -* `Distribute #327 `_: Merged pull request `#24 `_ to fix a dependency problem with pip. -* Merged pull request `#23 `_ to fix https://github.com/pypa/virtualenv/issues/301. -* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. -* `Distribute #326 `_: `upload_docs` provided mangled auth credentials under Python 3. -* `Distribute #320 `_: Fix check for "createable" in distribute_setup.py. -* `Distribute #305 `_: Remove a warning that was triggered during normal operations. -* `Distribute #311 `_: Print metadata in UTF-8 independent of platform. -* `Distribute #303 `_: Read manifest file with UTF-8 encoding under Python 3. -* `Distribute #301 `_: Allow to run tests of namespace packages when using 2to3. -* `Distribute #304 `_: Prevent import loop in site.py under Python 3.3. -* `Distribute #283 `_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. -* `Distribute #299 `_: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. -* `Distribute #306 `_: Even if 2to3 is used, we build in-place under Python 2. -* `Distribute #307 `_: Prints the full path when .svn/entries is broken. -* `Distribute #313 `_: Support for sdist subcommands (Python 2.7) -* `Distribute #314 `_: test_local_index() would fail an OS X. -* `Distribute #310 `_: Non-ascii characters in a namespace __init__.py causes errors. -* `Distribute #218 `_: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. -* `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. - ------- -0.6.28 ------- - -* `Distribute #294 `_: setup.py can now be invoked from any directory. -* Scripts are now installed honoring the umask. -* Added support for .dist-info directories. 
-* `Distribute #283 `_: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - ------- -0.6.27 ------- - -* Support current snapshots of CPython 3.3. -* Distribute now recognizes README.rst as a standard, default readme file. -* Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for `#285 `_. -* `Distribute #231 `_: Don't fiddle with system python when used with buildout - (bootstrap.py) - ------- -0.6.26 ------- - -* `Distribute #183 `_: Symlinked files are now extracted from source distributions. -* `Distribute #227 `_: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - ------- -0.6.25 ------- - -* `Distribute #258 `_: Workaround a cache issue -* `Distribute #260 `_: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. -* `Distribute #262 `_: package_index.open_with_auth no longer throws LookupError - on Python 3. -* `Distribute #269 `_: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. -* `Distribute #272 `_: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. -* `Distribute #273 `_: Legacy script launchers now install with Python2/3 support. - ------- -0.6.24 ------- - -* `Distribute #249 `_: Added options to exclude 2to3 fixers - ------- -0.6.23 ------- - -* `Distribute #244 `_: Fixed a test -* `Distribute #243 `_: Fixed a test -* `Distribute #239 `_: Fixed a test -* `Distribute #240 `_: Fixed a test -* `Distribute #241 `_: Fixed a test -* `Distribute #237 `_: Fixed a test -* `Distribute #238 `_: easy_install now uses 64bit executable wrappers on 64bit Python -* `Distribute #208 `_: Fixed parsed_versions, it now honors post-releases as noted in the documentation -* `Distribute #207 `_: Windows cli and gui wrappers pass CTRL-C to child python process -* `Distribute #227 `_: easy_install now passes its arguments to setup.py bdist_egg -* `Distribute #225 `_: Fixed a NameError on Python 2.5, 2.4 - ------- -0.6.21 ------- - -* `Distribute #225 `_: FIxed a regression on py2.4 - ------- -0.6.20 ------- - -* `Distribute #135 `_: Include url in warning when processing URLs in package_index. -* `Distribute #212 `_: Fix issue where easy_instal fails on Python 3 on windows installer. -* `Distribute #213 `_: Fix typo in documentation. - ------- -0.6.19 ------- - -* `Distribute #206 `_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - ------- -0.6.18 ------- - -* `Distribute #210 `_: Fixed a regression introduced by `Distribute #204 `_ fix. - ------- -0.6.17 ------- - -* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. -* Support Python >=3.1.4 and >=3.2.1. -* `Distribute #204 `_: Don't try to import the parent of a namespace package in - declare_namespace -* `Distribute #196 `_: Tolerate responses with multiple Content-Length headers -* `Distribute #205 `_: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - ------- -0.6.16 ------- - -* Builds sdist gztar even on Windows (avoiding `Distribute #193 `_). -* `Distribute #192 `_: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. -* `Distribute #195 `_: Cython build support. 
-* `Distribute #200 `_: Issues with recognizing 64-bit packages on Windows. - ------- -0.6.15 ------- - -* Fixed typo in bdist_egg -* Several issues under Python 3 has been solved. -* `Distribute #146 `_: Fixed missing DLL files after easy_install of windows exe package. - ------- -0.6.14 ------- - -* `Distribute #170 `_: Fixed unittest failure. Thanks to Toshio. -* `Distribute #171 `_: Fixed race condition in unittests cause deadlocks in test suite. -* `Distribute #143 `_: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. -* `Distribute #174 `_: Fixed the edit mode when its used with setuptools itself - ------- -0.6.13 ------- - -* `Distribute #160 `_: 2.7 gives ValueError("Invalid IPv6 URL") -* `Distribute #150 `_: Fixed using ~/.local even in a --no-site-packages virtualenv -* `Distribute #163 `_: scan index links before external links, and don't use the md5 when - comparing two distributions - ------- -0.6.12 ------- - -* `Distribute #149 `_: Fixed various failures on 2.3/2.4 - ------- -0.6.11 ------- - -* Found another case of SandboxViolation - fixed -* `Distribute #15 `_ and `Distribute #48 `_: Introduced a socket timeout of 15 seconds on url openings -* Added indexsidebar.html into MANIFEST.in -* `Distribute #108 `_: Fixed TypeError with Python3.1 -* `Distribute #121 `_: Fixed --help install command trying to actually install. -* `Distribute #112 `_: Added an os.makedirs so that Tarek's solution will work. -* `Distribute #133 `_: Added --no-find-links to easy_install -* Added easy_install --user -* `Distribute #100 `_: Fixed develop --user not taking '.' in PYTHONPATH into account -* `Distribute #134 `_: removed spurious UserWarnings. Patch by VanLindberg -* `Distribute #138 `_: cant_write_to_target error when setup_requires is used. -* `Distribute #147 `_: respect the sys.dont_write_bytecode flag - ------- -0.6.10 ------- - -* Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - ------ -0.6.9 ------ - -* `Distribute #90 `_: unknown setuptools version can be added in the working set -* `Distribute #87 `_: setupt.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. -* `Distribute #89 `_: added a side bar with a download link to the doc. -* `Distribute #86 `_: fixed missing sentence in pkg_resources doc. -* Added a nicer error message when a DistributionNotFound is raised. -* `Distribute #80 `_: test_develop now works with Python 3.1 -* `Distribute #93 `_: upload_docs now works if there is an empty sub-directory. -* `Distribute #70 `_: exec bit on non-exec files -* `Distribute #99 `_: now the standalone easy_install command doesn't uses a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). -* `Distribute #101 `_: Allowing ``os.devnull`` in Sandbox -* `Distribute #92 `_: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) -* `Distribute #103 `_: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX local. Contributed by Arfrever. -* `Distribute #104 `_: remvoved the assertion when the installation fails, - with a nicer message for the end user. -* `Distribute #100 `_: making sure there's no SandboxViolation when - the setup script patches setuptools. - ------ -0.6.8 ------ - -* Added "check_packages" in dist. (added in Setuptools 0.6c11) -* Fixed the DONT_PATCH_SETUPTOOLS state. 
- ------ -0.6.7 ------ - -* `Distribute #58 `_: Added --user support to the develop command -* `Distribute #11 `_: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" -* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. -* Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. -* `Distribute #21 `_: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. -* Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. -* `Distribute #64 `_: use_setuptools no longer rebuilds the distribute egg every - time it is run -* use_setuptools now properly respects the requested version -* use_setuptools will no longer try to import a distribute egg for the - wrong Python version -* `Distribute #74 `_: no_fake should be True by default. -* `Distribute #72 `_: avoid a bootstrapping issue with easy_install -U - ------ -0.6.6 ------ - -* Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - ------ -0.6.5 ------ - -* `Distribute #65 `_: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - -* `Distribute #67 `_: Fixed doc typo (PEP 381/382) - -* Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - -* When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - -* Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - ------ -0.6.4 ------ - -* Added the generation of `distribute_setup_3k.py` during the release. - This closes `Distribute #52 `_. - -* Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This close issue `Distribute #56 `_. - -* Fixed a bootstrap bug on the use_setuptools() API. - ------ -0.6.3 ------ - -setuptools -========== - -* Fixed a bunch of calls to file() that caused crashes on Python 3. - -bootstrapping -============= - -* Fixed a bug in sorting that caused bootstrap to fail on Python 3. - ------ -0.6.2 ------ - -setuptools -========== - -* Added Python 3 support; see docs/python3.txt. - This closes `Old Setuptools #39 `_. - -* Added option to run 2to3 automatically when installing on Python 3. - This closes issue `Distribute #31 `_. - -* Fixed invalid usage of requirement.parse, that broke develop -d. - This closes `Old Setuptools #44 `_. - -* Fixed script launcher for 64-bit Windows. - This closes `Old Setuptools #2 `_. - -* KeyError when compiling extensions. - This closes `Old Setuptools #41 `_. - -bootstrapping -============= - -* Fixed bootstrap not working on Windows. This closes issue `Distribute #49 `_. - -* Fixed 2.6 dependencies. This closes issue `Distribute #50 `_. - -* Make sure setuptools is patched when running through easy_install - This closes `Old Setuptools #40 `_. - ------ -0.6.1 ------ - -setuptools -========== - -* package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes `Distribute #16 `_ and `Distribute #18 `_. 
- -* zip_ok is now False by default. This closes `Old Setuptools #33 `_. - -* Fixed invalid URL error catching. `Old Setuptools #20 `_. - -* Fixed invalid bootstraping with easy_install installation (`Distribute #40 `_). - Thanks to Florian Schulze for the help. - -* Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. - - -bootstrapping -============= - -* The boostrap process leave setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes `Distribute #10 `_. - ---- -0.6 ---- - -setuptools -========== - -* Packages required at build time where not fully present at install time. - This closes `Distribute #12 `_. - -* Protected against failures in tarfile extraction. This closes `Distribute #10 `_. - -* Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 `_. - -* sandbox.py replaced builtin type file with builtin function open. This - closes `Distribute #6 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -* Added compatibility with Subversion 1.6. This references `Distribute #1 `_. - -pkg_resources -============= - -* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue `#5 `_. - -* Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes `Distribute #13 `_. - -* Allow to find_on_path on systems with tight permissions to fail gracefully. - This closes `Distribute #9 `_. - -* Corrected inconsistency between documentation and code of add_entry. - This closes `Distribute #8 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -easy_install -============ - -* Immediately close all file handles. This closes `Distribute #3 `_. - ------ -0.6c9 ------ - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. - - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. - - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. (Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - ------ -0.6c7 ------ - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. 
- - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - ------ -0.6c6 ------ - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. - - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). - - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - ------ -0.6c5 ------ - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - ------ -0.6c4 ------ - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. - Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. - - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. - ------ -0.6c3 ------ - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - ------ -0.6c2 ------ - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - ------ -0.6c1 ------ - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. - - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. 
- - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - ------ -0.6b4 ------ - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. - - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - ------ -0.6b3 ------ - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. - - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - ------ -0.6b1 ------ - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) - ------- -0.6a11 ------- - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. - - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - ------- -0.6a10 ------- - - * Fixed the ``develop`` command ignoring ``--find-links``. - ------ -0.6a9 ------ - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. 
- - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. - - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. - - ------ -0.6a8 ------ - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - ------ -0.6a5 ------ - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. 
- ------ -0.6a3 ------ - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - ------ -0.6a2 ------ - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - ------ -0.6a1 ------ - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. - - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. - - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. - - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - ------- -0.5a12 ------- - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - ------- -0.5a11 ------- - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - ------ -0.5a9 ------ - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. 
- - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. - Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - ------ -0.5a8 ------ - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. - - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - ------ -0.5a7 ------ - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. - ------ -0.5a6 ------ - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). 
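As an illustration of the "[aliases]" support added in 0.5a8 above, a project
might carry a ``setup.cfg`` along these lines; the alias name and its
expansion are only an example, built from the ``egg_info`` options mentioned
elsewhere in this changelog::

    [aliases]
    # "setup.py release" expands to the command sequence on the right
    release = egg_info --no-date --no-svn-revision sdist bdist_egg

Because an alias is expanded only once per command-line invocation, an alias
may even reuse the name of the command it wraps; the same file can also be
written for you by the "setopt" and "saveopts" commands described above.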
- ------ -0.5a5 ------ - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. (To uninstall the link, use - use ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) - - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. - - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - ------ -0.5a4 ------ - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - ------ -0.5a1 ------ - - * Added support for "self-installation" bootstrapping. Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... - ------ -0.4a2 ------ - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. The class has also - been moved to the new ``setuptools.package_index`` module. - - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. 
These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - ------ -0.3a2 ------ - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. bug fixes - ------ -0.3a1 ------ - - * Initial release. - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/METADATA b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/METADATA deleted file mode 100644 index d9c0d2d..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/METADATA +++ /dev/null @@ -1,1772 +0,0 @@ -Metadata-Version: 2.0 -Name: setuptools -Version: 2.1 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://pypi.python.org/pypi/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: PSF or ZPL -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities -Provides-Extra: ssl:sys_platform=='win32' -Requires-Dist: wincertstore (==0.1); extra == "ssl:sys_platform=='win32'" -Provides-Extra: certs -Requires-Dist: certifi (==0.0.8); extra == 'certs' - -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -------------------------- -Installation Instructions -------------------------- - -Upgrading from Distribute -========================= - -Currently, Distribute disallows installing Setuptools 0.7+ over Distribute. -You must first uninstall any active version of Distribute first (see -`Uninstalling`_). - -Upgrading from Setuptools 0.6 -============================= - -Upgrading from prior versions of Setuptools is supported. Initial reports -good success in this regard. - -Windows -======= - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate .egg -file and install it for you. - -.. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. 
For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. - - -Unix-based Systems including Mac OS X -===================================== - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | sudo python - -Alternatively, on Python 2.6 and later, Setuptools may be installed to a -user-local path:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - > python ez_setup.py --user - - -Python 2.4 and Python 2.5 support -================================= - -Setuptools 2.0 and later requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the bootstrap script for Setuptools 1.x: -https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py. - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. 
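When removing or upgrading by hand as described above, it can help to see
exactly which setuptools (or Distribute) installation is active on
``sys.path``. A small illustrative snippet, not part of setuptools itself,
that lists the candidates and their on-disk locations::

    import pkg_resources

    for dist in pkg_resources.working_set:
        if dist.project_name.lower() in ('setuptools', 'distribute'):
            # dist.location is the directory whose setuptools*/distribute*
            # entries would need deleting for a manual uninstall
            print(dist.project_name, dist.version, dist.location)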
- --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `Package Compatibility Notes`_ (user-maintained) -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. _files: - -======= -CHANGES -======= - ---- -2.1 ---- - -* `Issue #129 `_: Suppress inspection of '*.whl' files when searching for files - in a zip-imported file. -* `Issue #131 `_: Fix RuntimeError when constructing an egg fetcher. - ------ -2.0.2 ------ - -* Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. 
-* Fix NameError in ``sdist:re_finder``. - ------ -2.0.1 ------ - -* `Issue #124 `_: Fixed error in list detection in upload_docs. - ---- -2.0 ---- - -* `Issue #121 `_: Exempt lib2to3 pickled grammars from DirectorySandbox. -* `Issue #41 `_: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. -* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. -* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - ------ -1.4.2 ------ - -* `Issue #116 `_: Correct TypeError when reading a local package index on Python - 3. - ------ -1.4.1 ------ - -* `Issue #114 `_: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. - -* `Issue #105 `_ and `Issue #113 `_: Establish a more robust technique for - determining the terminal encoding:: - - 1. Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - ---- -1.4 ---- - -* `Issue #27 `_: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. -* `Pull Request #21 `_: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - ------ -1.3.2 ------ - -* `Issue #99 `_: Fix filename encoding issues in SVN support. - ------ -1.3.1 ------ - -* Remove exuberant warning in SVN support when SVN is not used. - ---- -1.3 ---- - -* Address security vulnerability in SSL match_hostname check as reported in - `Python #17997 `_. -* Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. -* Correct NameError in ``ssl_support`` module (``socket.error``). - ---- -1.2 ---- - -* `Issue #26 `_: Add support for SVN 1.7. Special thanks to Philip Thiem for the - contribution. -* `Issue #93 `_: Wheels are now distributed with every release. Note that as - reported in `Issue #108 `_, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. -* Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - ------ -1.1.7 ------ - -* Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). 
-* ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. -* `Distribute #363 `_ and `Issue #55 `_: Skip an sdist test that fails on locales - other than UTF-8. - ------ -1.1.6 ------ - -* `Distribute #349 `_: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. - ------ -1.1.5 ------ - -* `Issue #69 `_: Second attempt at fix (logic was reversed). - ------ -1.1.4 ------ - -* `Issue #77 `_: Fix error in upload command (Python 2.4). - ------ -1.1.3 ------ - -* Fix NameError in previous patch. - ------ -1.1.2 ------ - -* `Issue #69 `_: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - ------ -1.1.1 ------ - -* `Issue #75 `_: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. -* `Issue #76 `_: Fix AttributeError in upload command with Python 2.4. - ---- -1.1 ---- - -* `Issue #71 `_ (`Distribute #333 `_): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. -* `Issue #72 `_: Restored Python 2.4 compatibility in ``ez_setup.py``. - ---- -1.0 ---- - -* `Issue #60 `_: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. -* `Issue #63 `_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. -* `Issue #65 `_: Deprecated the ``Features`` functionality. -* `Issue #52 `_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. - -Backward-Incompatible Changes -============================= - -This release includes a couple of backward-incompatible changes, but most if -not all users will find 1.0 a drop-in replacement for 0.9. - -* `Issue #50 `_: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. -* `Issue #50 `_: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. -* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - ------ -0.9.8 ------ - -* `Issue #53 `_: Fix NameErrors in `_vcs_split_rev_from_url`. - ------ -0.9.7 ------ - -* `Issue #49 `_: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. -* `Issue #34 `_: Documentation now refers to bootstrap script in code repository - referenced by bookmark. -* Add underscore-separated keys to environment markers (markerlib). 
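The marker-normalization note under 1.0 above can be made concrete with a
short, illustrative use of ``pkg_resources.invalid_marker``; the marker string
is just an example, and the sketch assumes (per the 1.0 notes) that an invalid
marker yields a returned ``SyntaxError`` rather than a raised one::

    import pkg_resources

    marker = "sys_platform=='win32'"
    error = pkg_resources.invalid_marker(marker)
    if error:
        # a SyntaxError instance is returned, now without filename/line number
        print('bad marker:', error)
    else:
        print('marker accepted')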
- ------ -0.9.6 ------ - -* `Issue #44 `_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - ------ -0.9.5 ------ - -* `Python #17980 `_: Fix security vulnerability in SSL certificate validation. - ------ -0.9.4 ------ - -* `Issue #43 `_: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - ------ -0.9.3 ------ - -* `Issue #42 `_: Fix new ``AttributeError`` introduced in last fix. - ------ -0.9.2 ------ - -* `Issue #42 `_: Fix regression where blank checksums would trigger an - ``AttributeError``. - ------ -0.9.1 ------ - -* `Distribute #386 `_: Allow other positional and keyword arguments to os.open. -* Corrected dependency on certifi mis-referenced in 0.9. - ---- -0.9 ---- - -* `package_index` now validates hashes other than MD5 in download links. - ---- -0.8 ---- - -* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - ------ -0.7.8 ------ - -* `Distribute #375 `_: Yet another fix for yet another regression. - ------ -0.7.7 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release (redo). -* `Issue #30 `_: Added test for get_cache_path. - ------ -0.7.6 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release. - ------ -0.7.5 ------ - -* `Issue #21 `_: Restore Python 2.4 compatibility in ``test_easy_install``. -* `Distribute #375 `_: Merged additional warning from Distribute 0.6.46. -* Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - ------ -0.7.4 ------ - -* `Issue #20 `_: Fix comparison of parsed SVN version on Python 3. - ------ -0.7.3 ------ - -* `Issue #1 `_: Disable installation of Windows-specific files on non-Windows systems. -* Use new sysconfig module with Python 2.7 or >=3.2. - ------ -0.7.2 ------ - -* `Issue #14 `_: Use markerlib when the `parser` module is not available. -* `Issue #10 `_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - ------ -0.7.1 ------ - -* Fix NameError (`Issue #3 `_) again - broken in bad merge. - ---- -0.7 ---- - -* Merged Setuptools and Distribute. See docs/merge.txt for details. - -Added several features that were slated for setuptools 0.6c12: - -* Index URL now defaults to HTTPS. -* Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. -* Added support for SSL certificate validation when installing packages from - an HTTPS service. - ------ -0.7b4 ------ - -* `Issue #3 `_: Fixed NameError in SSL support. - ------- -0.6.49 ------- - -* Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in `Distribute #375 `_. - ------- -0.6.48 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). - ------- -0.6.47 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. 
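The environment-marker support described under 0.7 above can be sketched with
an ``extras_require`` entry of the kind setuptools' own metadata (shown
earlier in this file) uses for its optional SSL and certificate support; the
surrounding ``setup()`` arguments are invented for the example, and the marker
syntax was still tentative at the time::

    from setuptools import setup

    setup(
        name='examplepkg',              # hypothetical project
        version='0.1',
        py_modules=['examplepkg'],
        extras_require={
            # plain extra: installed only when requested as examplepkg[certs]
            'certs': ['certifi==0.0.8'],
            # marker-qualified extra: applies only where the marker is true
            "ssl:sys_platform=='win32'": ['wincertstore==0.1'],
        },
    )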
- ------- -0.6.46 ------- - -* `Distribute #375 `_: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - ------- -0.6.45 ------- - -* `Distribute #379 `_: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - ------- -0.6.44 ------- - -* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. - ------- -0.6.43 ------- - -* `Distribute #378 `_: Restore support for Python 2.4 Syntax (regression in 0.6.42). - ------- -0.6.42 ------- - -* External links finder no longer yields duplicate links. -* `Distribute #337 `_: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR `#31 `_). - ------- -0.6.41 ------- - -* `Distribute #27 `_: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. -* Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. - ------- -0.6.40 ------- - -* `Distribute #376 `_: brought back cli.exe and gui.exe that were deleted in the - previous release. - ------- -0.6.39 ------- - -* Add support for console launchers on ARM platforms. -* Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. -* Launcher build script now refactored for robustness. -* `Distribute #375 `_: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - ------- -0.6.38 ------- - -* `Distribute #371 `_: The launcher manifest file is now installed properly. - ------- -0.6.37 ------- - -* `Distribute #143 `_: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - ------- -0.6.36 ------- - -* `Pull Request #35 `_: In `Buildout #64 `_, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. - ------- -0.6.35 ------- - - -Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in -how it parses version numbers. - -* `Distribute #278 `_: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - ------- -0.6.34 ------- - -* `Distribute #341 `_: 0.6.33 fails to build under Python 2.4. - ------- -0.6.33 ------- - -* Fix 2 errors with Jython 2.5. -* Fix 1 failure with Jython 2.5 and 2.7. -* Disable workaround for Jython scripts on Linux systems. -* `Distribute #336 `_: `setup.py` no longer masks failure exit code when tests fail. -* Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. -* `Distribute #341 `_: Fix a ResourceWarning. - ------- -0.6.32 ------- - -* Fix test suite with Python 2.6. -* Fix some DeprecationWarnings and ResourceWarnings. 
-* `Distribute #335 `_: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - ------- -0.6.31 ------- - -* `Distribute #303 `_: Make sure the manifest only ever contains UTF-8 in Python 3. -* `Distribute #329 `_: Properly close files created by tests for compatibility with - Jython. -* Work around `Jython #1980 `_ and `Jython #1981 `_. -* `Distribute #334 `_: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv `#359 `_ `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - -* Fix for encoding issue when installing from Windows executable on Python 3. -* `Distribute #323 `_: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - ------- -0.6.30 ------- - -* `Distribute #328 `_: Clean up temporary directories in distribute_setup.py. -* Fix fatal bug in distribute_setup.py. - ------- -0.6.29 ------- - -* `Pull Request #14 `_: Honor file permissions in zip files. -* `Distribute #327 `_: Merged pull request `#24 `_ to fix a dependency problem with pip. -* Merged pull request `#23 `_ to fix https://github.com/pypa/virtualenv/issues/301. -* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. -* `Distribute #326 `_: `upload_docs` provided mangled auth credentials under Python 3. -* `Distribute #320 `_: Fix check for "createable" in distribute_setup.py. -* `Distribute #305 `_: Remove a warning that was triggered during normal operations. -* `Distribute #311 `_: Print metadata in UTF-8 independent of platform. -* `Distribute #303 `_: Read manifest file with UTF-8 encoding under Python 3. -* `Distribute #301 `_: Allow to run tests of namespace packages when using 2to3. -* `Distribute #304 `_: Prevent import loop in site.py under Python 3.3. -* `Distribute #283 `_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. -* `Distribute #299 `_: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. -* `Distribute #306 `_: Even if 2to3 is used, we build in-place under Python 2. -* `Distribute #307 `_: Prints the full path when .svn/entries is broken. -* `Distribute #313 `_: Support for sdist subcommands (Python 2.7) -* `Distribute #314 `_: test_local_index() would fail an OS X. -* `Distribute #310 `_: Non-ascii characters in a namespace __init__.py causes errors. -* `Distribute #218 `_: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. -* `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. - ------- -0.6.28 ------- - -* `Distribute #294 `_: setup.py can now be invoked from any directory. -* Scripts are now installed honoring the umask. -* Added support for .dist-info directories. 
-* `Distribute #283 `_: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - ------- -0.6.27 ------- - -* Support current snapshots of CPython 3.3. -* Distribute now recognizes README.rst as a standard, default readme file. -* Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for `#285 `_. -* `Distribute #231 `_: Don't fiddle with system python when used with buildout - (bootstrap.py) - ------- -0.6.26 ------- - -* `Distribute #183 `_: Symlinked files are now extracted from source distributions. -* `Distribute #227 `_: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - ------- -0.6.25 ------- - -* `Distribute #258 `_: Workaround a cache issue -* `Distribute #260 `_: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. -* `Distribute #262 `_: package_index.open_with_auth no longer throws LookupError - on Python 3. -* `Distribute #269 `_: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. -* `Distribute #272 `_: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. -* `Distribute #273 `_: Legacy script launchers now install with Python2/3 support. - ------- -0.6.24 ------- - -* `Distribute #249 `_: Added options to exclude 2to3 fixers - ------- -0.6.23 ------- - -* `Distribute #244 `_: Fixed a test -* `Distribute #243 `_: Fixed a test -* `Distribute #239 `_: Fixed a test -* `Distribute #240 `_: Fixed a test -* `Distribute #241 `_: Fixed a test -* `Distribute #237 `_: Fixed a test -* `Distribute #238 `_: easy_install now uses 64bit executable wrappers on 64bit Python -* `Distribute #208 `_: Fixed parsed_versions, it now honors post-releases as noted in the documentation -* `Distribute #207 `_: Windows cli and gui wrappers pass CTRL-C to child python process -* `Distribute #227 `_: easy_install now passes its arguments to setup.py bdist_egg -* `Distribute #225 `_: Fixed a NameError on Python 2.5, 2.4 - ------- -0.6.21 ------- - -* `Distribute #225 `_: FIxed a regression on py2.4 - ------- -0.6.20 ------- - -* `Distribute #135 `_: Include url in warning when processing URLs in package_index. -* `Distribute #212 `_: Fix issue where easy_instal fails on Python 3 on windows installer. -* `Distribute #213 `_: Fix typo in documentation. - ------- -0.6.19 ------- - -* `Distribute #206 `_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - ------- -0.6.18 ------- - -* `Distribute #210 `_: Fixed a regression introduced by `Distribute #204 `_ fix. - ------- -0.6.17 ------- - -* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. -* Support Python >=3.1.4 and >=3.2.1. -* `Distribute #204 `_: Don't try to import the parent of a namespace package in - declare_namespace -* `Distribute #196 `_: Tolerate responses with multiple Content-Length headers -* `Distribute #205 `_: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - ------- -0.6.16 ------- - -* Builds sdist gztar even on Windows (avoiding `Distribute #193 `_). -* `Distribute #192 `_: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. -* `Distribute #195 `_: Cython build support. 
-* `Distribute #200 `_: Issues with recognizing 64-bit packages on Windows. - ------- -0.6.15 ------- - -* Fixed typo in bdist_egg -* Several issues under Python 3 has been solved. -* `Distribute #146 `_: Fixed missing DLL files after easy_install of windows exe package. - ------- -0.6.14 ------- - -* `Distribute #170 `_: Fixed unittest failure. Thanks to Toshio. -* `Distribute #171 `_: Fixed race condition in unittests cause deadlocks in test suite. -* `Distribute #143 `_: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. -* `Distribute #174 `_: Fixed the edit mode when its used with setuptools itself - ------- -0.6.13 ------- - -* `Distribute #160 `_: 2.7 gives ValueError("Invalid IPv6 URL") -* `Distribute #150 `_: Fixed using ~/.local even in a --no-site-packages virtualenv -* `Distribute #163 `_: scan index links before external links, and don't use the md5 when - comparing two distributions - ------- -0.6.12 ------- - -* `Distribute #149 `_: Fixed various failures on 2.3/2.4 - ------- -0.6.11 ------- - -* Found another case of SandboxViolation - fixed -* `Distribute #15 `_ and `Distribute #48 `_: Introduced a socket timeout of 15 seconds on url openings -* Added indexsidebar.html into MANIFEST.in -* `Distribute #108 `_: Fixed TypeError with Python3.1 -* `Distribute #121 `_: Fixed --help install command trying to actually install. -* `Distribute #112 `_: Added an os.makedirs so that Tarek's solution will work. -* `Distribute #133 `_: Added --no-find-links to easy_install -* Added easy_install --user -* `Distribute #100 `_: Fixed develop --user not taking '.' in PYTHONPATH into account -* `Distribute #134 `_: removed spurious UserWarnings. Patch by VanLindberg -* `Distribute #138 `_: cant_write_to_target error when setup_requires is used. -* `Distribute #147 `_: respect the sys.dont_write_bytecode flag - ------- -0.6.10 ------- - -* Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - ------ -0.6.9 ------ - -* `Distribute #90 `_: unknown setuptools version can be added in the working set -* `Distribute #87 `_: setupt.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. -* `Distribute #89 `_: added a side bar with a download link to the doc. -* `Distribute #86 `_: fixed missing sentence in pkg_resources doc. -* Added a nicer error message when a DistributionNotFound is raised. -* `Distribute #80 `_: test_develop now works with Python 3.1 -* `Distribute #93 `_: upload_docs now works if there is an empty sub-directory. -* `Distribute #70 `_: exec bit on non-exec files -* `Distribute #99 `_: now the standalone easy_install command doesn't uses a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). -* `Distribute #101 `_: Allowing ``os.devnull`` in Sandbox -* `Distribute #92 `_: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) -* `Distribute #103 `_: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX local. Contributed by Arfrever. -* `Distribute #104 `_: remvoved the assertion when the installation fails, - with a nicer message for the end user. -* `Distribute #100 `_: making sure there's no SandboxViolation when - the setup script patches setuptools. - ------ -0.6.8 ------ - -* Added "check_packages" in dist. (added in Setuptools 0.6c11) -* Fixed the DONT_PATCH_SETUPTOOLS state. 
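The 0.6.10 note above about ``DistributionNotFound`` is easiest to see with a
tiny example: tools such as zc.buildout rely on the exception text naming the
unsatisfied requirement, which is why the message format was left alone. The
requirement below is deliberately bogus::

    import pkg_resources

    try:
        pkg_resources.require('no-such-project>=1.0')
    except pkg_resources.DistributionNotFound as exc:
        # the message carries the missing requirement, which callers parse
        print('missing distribution:', exc)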
- ------ -0.6.7 ------ - -* `Distribute #58 `_: Added --user support to the develop command -* `Distribute #11 `_: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" -* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. -* Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. -* `Distribute #21 `_: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. -* Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. -* `Distribute #64 `_: use_setuptools no longer rebuilds the distribute egg every - time it is run -* use_setuptools now properly respects the requested version -* use_setuptools will no longer try to import a distribute egg for the - wrong Python version -* `Distribute #74 `_: no_fake should be True by default. -* `Distribute #72 `_: avoid a bootstrapping issue with easy_install -U - ------ -0.6.6 ------ - -* Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - ------ -0.6.5 ------ - -* `Distribute #65 `_: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - -* `Distribute #67 `_: Fixed doc typo (PEP 381/382) - -* Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - -* When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - -* Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - ------ -0.6.4 ------ - -* Added the generation of `distribute_setup_3k.py` during the release. - This closes `Distribute #52 `_. - -* Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This close issue `Distribute #56 `_. - -* Fixed a bootstrap bug on the use_setuptools() API. - ------ -0.6.3 ------ - -setuptools -========== - -* Fixed a bunch of calls to file() that caused crashes on Python 3. - -bootstrapping -============= - -* Fixed a bug in sorting that caused bootstrap to fail on Python 3. - ------ -0.6.2 ------ - -setuptools -========== - -* Added Python 3 support; see docs/python3.txt. - This closes `Old Setuptools #39 `_. - -* Added option to run 2to3 automatically when installing on Python 3. - This closes issue `Distribute #31 `_. - -* Fixed invalid usage of requirement.parse, that broke develop -d. - This closes `Old Setuptools #44 `_. - -* Fixed script launcher for 64-bit Windows. - This closes `Old Setuptools #2 `_. - -* KeyError when compiling extensions. - This closes `Old Setuptools #41 `_. - -bootstrapping -============= - -* Fixed bootstrap not working on Windows. This closes issue `Distribute #49 `_. - -* Fixed 2.6 dependencies. This closes issue `Distribute #50 `_. - -* Make sure setuptools is patched when running through easy_install - This closes `Old Setuptools #40 `_. - ------ -0.6.1 ------ - -setuptools -========== - -* package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes `Distribute #16 `_ and `Distribute #18 `_. 
- -* zip_ok is now False by default. This closes `Old Setuptools #33 `_. - -* Fixed invalid URL error catching. `Old Setuptools #20 `_. - -* Fixed invalid bootstraping with easy_install installation (`Distribute #40 `_). - Thanks to Florian Schulze for the help. - -* Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. - - -bootstrapping -============= - -* The boostrap process leave setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes `Distribute #10 `_. - ---- -0.6 ---- - -setuptools -========== - -* Packages required at build time where not fully present at install time. - This closes `Distribute #12 `_. - -* Protected against failures in tarfile extraction. This closes `Distribute #10 `_. - -* Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 `_. - -* sandbox.py replaced builtin type file with builtin function open. This - closes `Distribute #6 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -* Added compatibility with Subversion 1.6. This references `Distribute #1 `_. - -pkg_resources -============= - -* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue `#5 `_. - -* Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes `Distribute #13 `_. - -* Allow to find_on_path on systems with tight permissions to fail gracefully. - This closes `Distribute #9 `_. - -* Corrected inconsistency between documentation and code of add_entry. - This closes `Distribute #8 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -easy_install -============ - -* Immediately close all file handles. This closes `Distribute #3 `_. - ------ -0.6c9 ------ - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. - - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. - - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. (Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - ------ -0.6c7 ------ - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. 
- - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - ------ -0.6c6 ------ - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. - - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). - - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - ------ -0.6c5 ------ - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - ------ -0.6c4 ------ - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. - Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. - - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. - ------ -0.6c3 ------ - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - ------ -0.6c2 ------ - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - ------ -0.6c1 ------ - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. - - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. 
- - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - ------ -0.6b4 ------ - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. - - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - ------ -0.6b3 ------ - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. - - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - ------ -0.6b1 ------ - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) - ------- -0.6a11 ------- - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. - - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - ------- -0.6a10 ------- - - * Fixed the ``develop`` command ignoring ``--find-links``. - ------ -0.6a9 ------ - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. 
- - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. - - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. - - ------ -0.6a8 ------ - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - ------ -0.6a5 ------ - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. 
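
The 0.6a9 data-file and testing keywords above combine naturally in a single ``setup()`` call. A minimal sketch, assuming a hypothetical project named ``example`` (package names and file patterns are placeholders)::

    from setuptools import setup, find_packages

    setup(
        name='example',
        version='0.1',
        packages=find_packages(exclude=['tests', 'tests.*']),
        # include any package data listed in revision control or MANIFEST.in
        include_package_data=True,
        # ...but trim unwanted matches back out of the built distributions
        exclude_package_data={'': ['*.orig', '*.rej']},
        # only needed (and fetched) when the "test" command actually runs
        tests_require=['nose'],
        test_suite='example.tests',
    )
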
- ------ -0.6a3 ------ - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - ------ -0.6a2 ------ - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - ------ -0.6a1 ------ - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. - - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. - - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. - - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - ------- -0.5a12 ------- - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - ------- -0.5a11 ------- - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - ------ -0.5a9 ------ - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. 
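
The ``console_scripts`` and ``gui_scripts`` groups introduced in 0.6a2 and 0.6a3 above are declared through the ``entry_points`` argument to ``setup()``. A minimal sketch with invented project, module and function names::

    from setuptools import setup, find_packages

    setup(
        name='example',
        version='0.1',
        packages=find_packages(),
        entry_points={
            # installed as "example-tool"; gets an .exe wrapper on Windows
            'console_scripts': [
                'example-tool = example.cli:main',
            ],
            # same mechanism, but launched without a console window on Windows
            'gui_scripts': [
                'example-gui = example.gui:main',
            ],
        },
    )

On other platforms the generated scripts are written without a file extension, as described in the 0.6a2 entry.
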
- - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. - Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - ------ -0.5a8 ------ - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. - - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - ------ -0.5a7 ------ - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. - ------ -0.5a6 ------ - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). 
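
The ``zip_safe`` and ``namespace_packages`` arguments described in the 0.5a9 notes above fit together as follows. A minimal sketch for an invented ``foo.bar`` distribution, using the classic ``pkg_resources`` namespace idiom that the ``declare_namespace()`` warning mentioned earlier refers to::

    # setup.py of a hypothetical distribution that ships only foo.bar
    from setuptools import setup, find_packages

    setup(
        name='foo.bar',
        version='0.1',
        packages=find_packages(),
        namespace_packages=['foo'],   # 'foo' is shared with other distributions
        zip_safe=False,               # explicit flag, so the automatic
                                      # zip-safety analysis is skipped
    )

    # foo/__init__.py in every distribution sharing the 'foo' namespace
    __import__('pkg_resources').declare_namespace(__name__)
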
- ------ -0.5a5 ------ - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. (To uninstall the link, use - use ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) - - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. - - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - ------ -0.5a4 ------ - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - ------ -0.5a1 ------ - - * Added support for "self-installation" bootstrapping. Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... - ------ -0.4a2 ------ - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. The class has also - been moved to the new ``setuptools.package_index`` module. - - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. 
These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - ------ -0.3a2 ------ - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. bug fixes - ------ -0.3a1 ------ - - * Initial release. - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/RECORD b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/RECORD deleted file mode 100644 index 6d2846e..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/RECORD +++ /dev/null @@ -1,149 +0,0 @@ -easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 -pkg_resources.py,sha256=5T_ifvAqE8BT7wva7_BBd1YD6SqWLSxE089VNdOzQ40,98631 -setuptools/archive_util.py,sha256=4knsPzl_EjgtRu5-3G4tN5m0GIRcsiZOGJ1vjqaV3jg,6601 -setuptools/cli-32.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/cli-64.exe,sha256=KLABu5pyrnokJCv6skjXZ6GsXeyYHGcqOUT3oHI3Xpo,74752 -setuptools/cli-arm-32.exe,sha256=0pFNIi2SmY2gdY91Y4LRhj1wuBsnv5cG1fus3iBJv40,69120 -setuptools/cli.exe,sha256=dfEuovMNnA2HLa3jRfMPVi5tk4R7alCbpTvuxtCyw0Y,65536 -setuptools/compat.py,sha256=DhySZMeNA-fmaMiESpefR2T0G1lV1k47xgHpXsB1NGU,2556 -setuptools/depends.py,sha256=MZYC62GbkiR-H-w56YonoPCVttPfYxhTK6kZ07FpGXc,6221 -setuptools/dist.py,sha256=HHqUocSP9DWOCiCT9mx6gYy_m3QQJ_hqwCn8rdizzvo,32547 -setuptools/extension.py,sha256=Lxj67VzDEYORY-oEW9U-SP9JL3s-O9Sgk9BOwtvVLdQ,1404 -setuptools/gui-32.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/gui-64.exe,sha256=aYKMhX1IJLn4ULHgWX0sE0yREUt6B3TEHf_jOw6yNyE,75264 -setuptools/gui-arm-32.exe,sha256=R5gRWLkY7wvO_CVGxoi7LZVTv0h-DKsKScy6fkbp4XI,69120 -setuptools/gui.exe,sha256=XBr0bHMA6Hpz2s9s9Bzjl-PwXfa9nH4ie0rFn4V2kWA,65536 -setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 -setuptools/package_index.py,sha256=VKTKT2NijHEWrxmrqa57QfDHMaaI44KeSKhJeFZCfcw,38925 -setuptools/py26compat.py,sha256=ggKS8_aZWWHHS792vF3uXp5vmUkGNk3vjLreLTHr_-Q,431 -setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 -setuptools/py31compat.py,sha256=95J21c4_2pcNOrScsHvAMDA-HUO-93qDWn9W_71Sv8o,396 -setuptools/sandbox.py,sha256=wXVdRCIL4pVWiGVgYjQKKoP0UmIduT5nZDO2rKH8Y2k,9994 -setuptools/script template (dev).py,sha256=fZZQIwfK2-F1tF0ld6Sh141zltAD6ubIa525WH6cMz8,311 -setuptools/script template.py,sha256=VpRgZSPN7tulpU94pWiFkKPzhHXonxnBZh2hjPIGAxY,163 -setuptools/site-patch.py,sha256=SeMejq4jWSvvCdCPktirf2278IwLWET5FQ8c0R9F2zo,2418 -setuptools/ssl_support.py,sha256=wnK1ffiiA7TrLYseT5A1qwNJuKelxMu9o3L4E270mD4,7663 -setuptools/svn_utils.py,sha256=GMqvFLBQyD3dx06qQuA8H-fTUhAYK5ma4USzoR1iKfk,17848 -setuptools/version.py,sha256=-yjKj08H9NSe_5N5qBYWmC9d9NK1uYPTPT5FuTBHiZg,20 -setuptools/__init__.py,sha256=Sn0OUGFc_YlwGw2A3glq08IkDCcqssSoJTNwZK3cnJA,3317 -setuptools/command/alias.py,sha256=tO3iC_Kem0gj2QQEFVi3E5HXGewhaD5WoCeBrK7VuWY,2486 -setuptools/command/bdist_egg.py,sha256=U0cNWkqDHAppQFBwl8cB2tWxEYz7dTGDMlPpagmU0jc,18718 -setuptools/command/bdist_rpm.py,sha256=knEihWuGchIAzJlxp3iuJomX2kiTlgWnbw7dM38bXQk,2025 
-setuptools/command/bdist_wininst.py,sha256=mU8EplmcWQ71cyE6S-fXCF4p3wh7UaOHu3q8qgKR8UI,2283 -setuptools/command/build_ext.py,sha256=k65J3ZUZzQw62U-5Xt7NuQ0UuWMUhU0-sIUpuxcqA7M,11854 -setuptools/command/build_py.py,sha256=DrLl-zFXlM0BX2vAS4LS_arxKbCYVlDPczTFecrGJpo,8440 -setuptools/command/develop.py,sha256=svjNIfZbyne63KcNMjxwr8L_rw8XZW8j0T09stYLOGE,6447 -setuptools/command/easy_install.py,sha256=YEhO3asUlChAmOhwsmPSL44DOmyLZJ6-CpQRiQIOS6w,72706 -setuptools/command/egg_info.py,sha256=tv-uFbNLnypUOUkwM4HZ_MjBAQiYcz654Pj5vgRouZw,14320 -setuptools/command/install.py,sha256=bRsMtbrAEU-GfztsHeYE_YCw6IHotGu5AndqewvGFw4,4052 -setuptools/command/install_egg_info.py,sha256=_QGULrbm0ueuGxldzPoGB7QdSd2g4bhU8qhzL8fR17w,3833 -setuptools/command/install_lib.py,sha256=y7jrhIP0DG7_hO-MiYHCoBwf4ctdUhQcST9RtyvAkG4,2486 -setuptools/command/install_scripts.py,sha256=va2sHUlpGZNxOB3V5Z4ZuqnD-1mrccE9M2n6014xSXM,2068 -setuptools/command/launcher manifest.xml,sha256=Fyr-X1kjKTWDGYd6CN0DMaTciOeUlJD782waCjBz_w4,550 -setuptools/command/register.py,sha256=a3b1P-yqZJGAh6ONEtmgzeSyCZ6hdItlCL9eFKnCYbo,277 -setuptools/command/rotate.py,sha256=zBjgakDOmqXL2abaxS9qpZu-jNBWVH0L3oaAUvZFwHE,2062 -setuptools/command/saveopts.py,sha256=r9oreFEGkaEn3a6FuWKvaIpigz6oQFayKcD7Jhn-RgI,705 -setuptools/command/sdist.py,sha256=VphymRSyNS5_vP4v-1dWEPBICVjCFMrvXjaF-jFwH7U,8346 -setuptools/command/setopt.py,sha256=akZNG_5z74x6NmhgNm0Opxc-cPO-6jV7SRC5i98TAVE,5068 -setuptools/command/test.py,sha256=rIOoFLV990UvprR50fLT5mUZvRu7O3q9reHc1IV76z0,5932 -setuptools/command/upload_docs.py,sha256=rz-PwQfbJlPZo6GwkJmcLselx5sQrPwn-4d_4JX6alY,6811 -setuptools/command/__init__.py,sha256=XeM2FtxUGjx9ipxX4xnHsfMOSplYVQpgXvXDEe2cEz0,553 -setuptools/tests/doctest.py,sha256=7TssbcXpZFInzIQYlF1A0_grPCR8IfWq2Vd3z7PUaRM,99828 -setuptools/tests/environment.py,sha256=Sl9Pok7ZEakC6YXP8urDkh9-XxQpoJAzyzzZUqOGOXQ,4658 -setuptools/tests/py26compat.py,sha256=i_JBukWMEat4AM1FtU8DAd06r0gjZR3uma_jb_gxEXU,267 -setuptools/tests/script-with-bom.py,sha256=nWOGL62VEQBsH5GaZvCyRyYqobziynGaqQJisffifsc,46 -setuptools/tests/server.py,sha256=Fqk53860mwB_wDNOzixZLjCq_c8_1kaGiXBm5fHco2c,2651 -setuptools/tests/test_bdist_egg.py,sha256=reUNnPid0M8A46IaQY6G96Z69xGOXIRSCcV5onn6YMA,1954 -setuptools/tests/test_build_ext.py,sha256=hDmYSmlFILM1kA2isuwXLdeZr8USRuqE05241imWRqk,673 -setuptools/tests/test_develop.py,sha256=a3oQdMFHaJCfIvmX5KZgYF-cxJfn4ZXmYN5WYCGJbXs,3605 -setuptools/tests/test_dist_info.py,sha256=ZgVLERe6WZpWUcwrLGk2b_cSOMZMASLV1hTV4hgQLG0,2615 -setuptools/tests/test_easy_install.py,sha256=BUp2alsOENDy65JIfrdCQPf2l8AQ8Axloxsw-43hEMw,13214 -setuptools/tests/test_egg_info.py,sha256=ki4KmxB8jZRN-_WZLZvJTKbHJfMIvAzlYf99Gr1XvUE,5401 -setuptools/tests/test_markerlib.py,sha256=UYBTjaug56cWxIwlCubdSTGZ-s9bqB1co54636x0xfo,2506 -setuptools/tests/test_packageindex.py,sha256=8vay-a5Dry_cg71WFA1rlofIWO5c9KI6D6PKN4WKKiI,7625 -setuptools/tests/test_resources.py,sha256=gJJbOVd1IEIMGB3rmeb-AJaIVbdruteEOQNa5153bHk,23973 -setuptools/tests/test_sandbox.py,sha256=O0rWNNqktqFA7z6Ry6Qs1w1jfTHFAywfQ3qWTDdbrN0,2170 -setuptools/tests/test_sdist.py,sha256=w8lri1vGqJ2CGICorRLmy4gRwxKrODauynf6TYky72A,17816 -setuptools/tests/test_svn.py,sha256=oQntZvH7wBS3KDJx0X8s4AETBz8_yPdGfQE-Qc-F070,7806 -setuptools/tests/test_test.py,sha256=qiOxi_gsPKzpinA1ejMphEuIGgWBL42lma0L3AnTlxQ,3710 -setuptools/tests/test_upload_docs.py,sha256=N__IVGihRBRqA3PetRcIDmNFu1XOhR7ix0e50xMuq_M,2139 -setuptools/tests/__init__.py,sha256=CBp9iInJ-yM6whHqEMQsch8MjqbFeNSpcdWGwFe2QRI,12582 
-setuptools-2.1.data/scripts/easy_install-3.4.pya,sha256=us16WEnNSEPQR6t4Z7JIWt8lbGEy2AAHl6MFatFAtk8,323 -setuptools-2.1.data/scripts/easy_install.pya,sha256=G2Up65bgEaUsmaG28QEGjHvdOMeNOzMFR1f7OIQPG2w,315 -setuptools-2.1.dist-info/dependency_links.txt,sha256=H84NpZJcSa-8h9mn-ZONhSwqx6F9qdpEAzLiHqI7NX8,221 -setuptools-2.1.dist-info/DESCRIPTION.rst,sha256=usmm7d1IwRoXYq06OpWR652VD-EWQXwUNaZeNB5kc7I,70462 -setuptools-2.1.dist-info/entry_points.txt,sha256=e7MxvQ-4HIrLyB8dHxDDFnfT7te7AXJGbFU-JDGLF6Q,2773 -setuptools-2.1.dist-info/entry_points.txt.orig,sha256=cSjzEAaZY3oFy5lcmzSXdFmB57Bgssvf7oopPGIX8qw,2773 -setuptools-2.1.dist-info/METADATA,sha256=Pcc_rI2vZKHZBemU5Ul9vmvlWQgj9Ro9WRHU_UWboPo,71792 -setuptools-2.1.dist-info/pydist.json,sha256=dCAAJ8lmNn3Qv96z0HyWvcvxhFfPA9vIwHHL67GYQTc,4544 -setuptools-2.1.dist-info/RECORD,, -setuptools-2.1.dist-info/requires.txt.orig,sha256=MUdW-45Nj8mMUWcHoTNr3ABZT_aLbZTJ5wi7EA4FL2Y,71 -setuptools-2.1.dist-info/top_level.txt,sha256=pzTRJWMVhyj_jNyTDsLjehfL_CrUlpf8Z_eP00s2X8w,49 -setuptools-2.1.dist-info/WHEEL,sha256=2QrAqABq1X2mxKM4JY32oZUIM268GesCrPKhzE9RspQ,116 -setuptools-2.1.dist-info/zip-safe,sha256=frcCV1k9oG9oKj3dpUqdJg1PxRT2RSN_XKdLCPjaYaY,2 -_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 -_markerlib/__init__.py,sha256=GSmhZqvAitLJHhSgtqqusfq2nJ_ClP3oy3Lm0uZLIsU,552 -/Users/build/platform_darwin/bin/easy_install-3.4,sha256=6yqxfxMR1TM8fjv61x67GiIQRoF5RE4QJrHbpFBypMo,259 -setuptools/command/__pycache__/setopt.cpython-34.pyc,, -setuptools/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/build_ext.cpython-34.pyc,, -setuptools/tests/__pycache__/doctest.cpython-34.pyc,, -setuptools/tests/__pycache__/test_resources.cpython-34.pyc,, -setuptools/__pycache__/script template (dev).cpython-34.pyc,, -setuptools/tests/__pycache__/test_easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/__init__.cpython-34.pyc,, -setuptools/command/__pycache__/develop.cpython-34.pyc,, -setuptools/tests/__pycache__/test_markerlib.cpython-34.pyc,, -setuptools/__pycache__/ssl_support.cpython-34.pyc,, -setuptools/__pycache__/py31compat.cpython-34.pyc,, -setuptools/command/__pycache__/rotate.cpython-34.pyc,, -setuptools/command/__pycache__/register.cpython-34.pyc,, -setuptools/command/__pycache__/build_py.cpython-34.pyc,, -setuptools/__pycache__/compat.cpython-34.pyc,, -setuptools/command/__pycache__/test.cpython-34.pyc,, -setuptools/__pycache__/archive_util.cpython-34.pyc,, -setuptools/__pycache__/dist.cpython-34.pyc,, -setuptools/command/__pycache__/saveopts.cpython-34.pyc,, -setuptools/tests/__pycache__/script-with-bom.cpython-34.pyc,, -setuptools/tests/__pycache__/test_upload_docs.cpython-34.pyc,, -setuptools/__pycache__/py27compat.cpython-34.pyc,, -setuptools/tests/__pycache__/py26compat.cpython-34.pyc,, -setuptools/tests/__pycache__/test_develop.cpython-34.pyc,, -setuptools/__pycache__/extension.cpython-34.pyc,, -setuptools/tests/__pycache__/server.cpython-34.pyc,, -setuptools/command/__pycache__/sdist.cpython-34.pyc,, -setuptools/command/__pycache__/alias.cpython-34.pyc,, -setuptools/tests/__pycache__/test_packageindex.cpython-34.pyc,, -setuptools/__pycache__/site-patch.cpython-34.pyc,, -setuptools/tests/__pycache__/__init__.cpython-34.pyc,, -setuptools/tests/__pycache__/test_dist_info.cpython-34.pyc,, -setuptools/tests/__pycache__/test_egg_info.cpython-34.pyc,, -setuptools/command/__pycache__/easy_install.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_wininst.cpython-34.pyc,, 
-setuptools/__pycache__/svn_utils.cpython-34.pyc,, -setuptools/command/__pycache__/install_egg_info.cpython-34.pyc,, -setuptools/tests/__pycache__/test_svn.cpython-34.pyc,, -setuptools/tests/__pycache__/test_sdist.cpython-34.pyc,, -_markerlib/__pycache__/__init__.cpython-34.pyc,, -setuptools/__pycache__/lib2to3_ex.cpython-34.pyc,, -setuptools/tests/__pycache__/test_build_ext.cpython-34.pyc,, -setuptools/command/__pycache__/install_lib.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_rpm.cpython-34.pyc,, -setuptools/command/__pycache__/bdist_egg.cpython-34.pyc,, -setuptools/tests/__pycache__/test_test.cpython-34.pyc,, -setuptools/command/__pycache__/install.cpython-34.pyc,, -setuptools/command/__pycache__/egg_info.cpython-34.pyc,, -setuptools/command/__pycache__/upload_docs.cpython-34.pyc,, -setuptools/__pycache__/package_index.cpython-34.pyc,, -setuptools/tests/__pycache__/test_bdist_egg.cpython-34.pyc,, -__pycache__/easy_install.cpython-34.pyc,, -setuptools/__pycache__/version.cpython-34.pyc,, -_markerlib/__pycache__/markers.cpython-34.pyc,, -__pycache__/pkg_resources.cpython-34.pyc,, -setuptools/__pycache__/sandbox.cpython-34.pyc,, -setuptools/command/__pycache__/install_scripts.cpython-34.pyc,, -setuptools/__pycache__/py26compat.cpython-34.pyc,, -setuptools/tests/__pycache__/environment.cpython-34.pyc,, -setuptools/__pycache__/script template.cpython-34.pyc,, -setuptools/__pycache__/depends.cpython-34.pyc,, -setuptools/tests/__pycache__/test_sandbox.cpython-34.pyc,, diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/dependency_links.txt deleted file mode 100644 index b1c9a2c..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/dependency_links.txt +++ /dev/null @@ -1,2 +0,0 @@ -https://pypi.python.org/packages/source/c/certifi/certifi-0.0.8.tar.gz#md5=dc5f5e7f0b5fc08d27654b17daa6ecec -https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.1.zip#md5=2f9accbebe8f7b4c06ac7aa83879b81c diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/entry_points.txt.orig b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/entry_points.txt.orig deleted file mode 100644 index 63a3ed9..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/entry_points.txt.orig +++ /dev/null @@ -1,62 +0,0 @@ -[console_scripts] -easy_install-3.4 = setuptools.command.easy_install:main -easy_install = setuptools.command.easy_install:main - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[egg_info.writers] -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -PKG-INFO = setuptools.command.egg_info:write_pkg_info -depends.txt = setuptools.command.egg_info:warn_depends_obsolete -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -entry_points.txt = setuptools.command.egg_info:write_entries -requires.txt = setuptools.command.egg_info:write_requirements -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg - -[distutils.setup_keywords] -use_2to3_fixers = setuptools.dist:assert_string_list -test_suite = setuptools.dist:check_test_suite -packages = setuptools.dist:check_packages -install_requires = setuptools.dist:check_requirements -entry_points = setuptools.dist:check_entry_points -zip_safe = setuptools.dist:assert_bool -tests_require = setuptools.dist:check_requirements -use_2to3_exclude_fixers = 
setuptools.dist:assert_string_list -dependency_links = setuptools.dist:assert_string_list -package_data = setuptools.dist:check_package_data -convert_2to3_doctests = setuptools.dist:assert_string_list -include_package_data = setuptools.dist:assert_bool -exclude_package_data = setuptools.dist:check_package_data -namespace_packages = setuptools.dist:check_nsp -test_loader = setuptools.dist:check_importable -extras_require = setuptools.dist:check_extras -use_2to3 = setuptools.dist:assert_bool -eager_resources = setuptools.dist:assert_string_list - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - -[distutils.commands] -bdist_egg = setuptools.command.bdist_egg:bdist_egg -alias = setuptools.command.alias:alias -sdist = setuptools.command.sdist:sdist -rotate = setuptools.command.rotate:rotate -install = setuptools.command.install:install -build_py = setuptools.command.build_py:build_py -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -egg_info = setuptools.command.egg_info:egg_info -install_egg_info = setuptools.command.install_egg_info:install_egg_info -test = setuptools.command.test:test -register = setuptools.command.register:register -saveopts = setuptools.command.saveopts:saveopts -build_ext = setuptools.command.build_ext:build_ext -easy_install = setuptools.command.easy_install:easy_install -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -setopt = setuptools.command.setopt:setopt -upload_docs = setuptools.command.upload_docs:upload_docs -install_lib = setuptools.command.install_lib:install_lib -develop = setuptools.command.develop:develop -install_scripts = setuptools.command.install_scripts:install_scripts - diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/pydist.json b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/pydist.json deleted file mode 100644 index 77df767..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/pydist.json +++ /dev/null @@ -1 +0,0 @@ -{"document_names": {"description": "DESCRIPTION.rst"}, "name": "setuptools", "commands": {"wrap_console": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.4": "setuptools.command.easy_install:main"}}, "metadata_version": "2.0", "generator": "bdist_wheel (0.22.0)", "project_urls": {"Home": "https://pypi.python.org/pypi/setuptools"}, "license": "PSF or ZPL", "extras": ["certs", "ssl:sys_platform=='win32'"], "summary": "Easily download, build, install, upgrade, and uninstall Python packages", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "License :: OSI Approved :: Zope Public License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "version": "2.1", "exports": {"egg_info.writers": {"requires.txt": "setuptools.command.egg_info:write_requirements", "entry_points.txt": "setuptools.command.egg_info:write_entries", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "eager_resources.txt": 
"setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "console_scripts": {"easy_install": "setuptools.command.easy_install:main", "easy_install-3.4": "setuptools.command.easy_install:main"}, "setuptools.file_finders": {"svn_cvs": "setuptools.command.sdist:_default_revctrl"}, "distutils.setup_keywords": {"exclude_package_data": "setuptools.dist:check_package_data", "include_package_data": "setuptools.dist:assert_bool", "use_2to3": "setuptools.dist:assert_bool", "dependency_links": "setuptools.dist:assert_string_list", "test_loader": "setuptools.dist:check_importable", "convert_2to3_doctests": "setuptools.dist:assert_string_list", "extras_require": "setuptools.dist:check_extras", "package_data": "setuptools.dist:check_package_data", "test_suite": "setuptools.dist:check_test_suite", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "install_requires": "setuptools.dist:check_requirements", "zip_safe": "setuptools.dist:assert_bool", "entry_points": "setuptools.dist:check_entry_points", "namespace_packages": "setuptools.dist:check_nsp", "tests_require": "setuptools.dist:check_requirements", "packages": "setuptools.dist:check_packages", "use_2to3_fixers": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list"}, "distutils.commands": {"upload_docs": "setuptools.command.upload_docs:upload_docs", "egg_info": "setuptools.command.egg_info:egg_info", "register": "setuptools.command.register:register", "saveopts": "setuptools.command.saveopts:saveopts", "install_scripts": "setuptools.command.install_scripts:install_scripts", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "install_lib": "setuptools.command.install_lib:install_lib", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "rotate": "setuptools.command.rotate:rotate", "develop": "setuptools.command.develop:develop", "build_py": "setuptools.command.build_py:build_py", "alias": "setuptools.command.alias:alias", "install": "setuptools.command.install:install", "build_ext": "setuptools.command.build_ext:build_ext", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "sdist": "setuptools.command.sdist:sdist", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "easy_install": "setuptools.command.easy_install:easy_install", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}, "contacts": [{"name": "Python Packaging Authority", "role": "author", "email": "distutils-sig@python.org"}], "keywords": "CPAN PyPI distutils eggs package management", "run_requires": [{"requires": ["wincertstore (==0.1)"], "environment": "extra == \"ssl:sys_platform=='win32'\""}, {"requires": ["certifi (==0.0.8)"], "extra": "certs"}]} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/zip-safe b/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/zip-safe deleted file mode 100644 index d3f5a12..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools-2.1.dist-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/__init__.py b/Darwin/lib/python3.4/site-packages/setuptools/__init__.py deleted file mode 100644 index fc9b7b9..0000000 --- 
a/Darwin/lib/python3.4/site-packages/setuptools/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" - -import os -import sys -import distutils.core -import distutils.filelist -from distutils.core import Command as _Command -from distutils.util import convert_path - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched -from setuptools.depends import Require - -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - -# If we run 2to3 on .py files, should we also convert docstrings? -# Default: yes; assume that we can detect doctests reliably -run_2to3_on_doctests = True -# Standard package names for fixer packages -lib2to3_fixer_packages = ['lib2to3.fixes'] - -def find_packages(where='.', exclude=()): - """Return a list all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - looks_like_package = ( - '.' not in name - and os.path.isdir(fn) - and os.path.isfile(os.path.join(fn, '__init__.py')) - ) - if looks_like_package: - out.append(prefix+name) - stack.append((fn, prefix+name+'.')) - for pat in list(exclude)+['ez_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). - """ - all_files = [] - for base, dirs, files in os.walk(dir): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -distutils.filelist.findall = findall # fix findall bug in distutils. - -# sys.dont_write_bytecode was introduced in Python 2.6. 
-_dont_write_bytecode = getattr(sys, 'dont_write_bytecode', - bool(os.environ.get("PYTHONDONTWRITEBYTECODE"))) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/cli-32.exe b/Darwin/lib/python3.4/site-packages/setuptools/cli-32.exe deleted file mode 100755 index b1487b7..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/cli-32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/cli-64.exe b/Darwin/lib/python3.4/site-packages/setuptools/cli-64.exe deleted file mode 100755 index 675e6bf..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/cli-64.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/cli-arm-32.exe b/Darwin/lib/python3.4/site-packages/setuptools/cli-arm-32.exe deleted file mode 100755 index 2f40402..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/cli-arm-32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/cli.exe b/Darwin/lib/python3.4/site-packages/setuptools/cli.exe deleted file mode 100755 index b1487b7..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/cli.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py b/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py deleted file mode 100644 index 8c48da3..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,82 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. 
- -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - - - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec - - - - - - - - - - - - - - - - - - - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py b/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py deleted file mode 100644 index e8521f8..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys - -class bdist_wininst(_bdist_wininst): - _good_upload = _bad_upload = None - - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, arcname, fullname, bitmap) - installer_name = self.get_installer_filename(fullname) - if self.target_version: - pyversion = self.target_version - # fix 2.5+ bdist_wininst ignoring --target-version spec - self._bad_upload = ('bdist_wininst', 'any', installer_name) - else: - pyversion = 'any' - self._good_upload = ('bdist_wininst', pyversion, installer_name) - - def _fix_upload_names(self): - good, bad = self._good_upload, self._bad_upload - dist_files = getattr(self.distribution, 'dist_files', []) - if bad in dist_files: - dist_files.remove(bad) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug - return cmd - - def run(self): - self._is_running = True - try: - _bdist_wininst.run(self) - self._fix_upload_names() - finally: - self._is_running = False - - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return installer_name - # get_installer_filename() - - - - - 
- - - - - - - - - - - - - - - - - - - - - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/install_egg_info.py b/Darwin/lib/python3.4/site-packages/setuptools/command/install_egg_info.py deleted file mode 100644 index f44b34b..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,125 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. - for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wt') - for pkg in nsp: - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' 
in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,types,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() - - def _get_all_ns_packages(self): - nsp = {} - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp['.'.join(pkg)] = 1 - pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/install_lib.py b/Darwin/lib/python3.4/site-packages/setuptools/command/install_lib.py deleted file mode 100644 index 82afa14..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/install_lib.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs - - - - - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/launcher manifest.xml b/Darwin/lib/python3.4/site-packages/setuptools/command/launcher manifest.xml deleted file mode 100644 index 844d226..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/launcher manifest.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/command/register.py 
b/Darwin/lib/python3.4/site-packages/setuptools/command/register.py deleted file mode 100644 index 3b2e085..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -from distutils.command.register import register as _register - -class register(_register): - __doc__ = _register.__doc__ - - def run(self): - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - _register.run(self) - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/extension.py b/Darwin/lib/python3.4/site-packages/setuptools/extension.py deleted file mode 100644 index d7892d3..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/extension.py +++ /dev/null @@ -1,46 +0,0 @@ -import sys -import distutils.core -import distutils.extension - -from setuptools.dist import _get_unpatched - -_Extension = _get_unpatched(distutils.core.Extension) - -def have_pyrex(): - """ - Return True if Cython or Pyrex can be imported. - """ - pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' - for pyrex_impl in pyrex_impls: - try: - # from (pyrex_impl) import build_ext - __import__(pyrex_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, *args, **kw): - _Extension.__init__(self, *args, **kw) - if not have_pyrex(): - self._convert_pyx_sources_to_c() - - def _convert_pyx_sources_to_c(self): - "convert .pyx extensions to .c" - def pyx_to_c(source): - if source.endswith('.pyx'): - source = source[:-4] + '.c' - return source - self.sources = list(map(pyx_to_c, self.sources)) - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/Darwin/lib/python3.4/site-packages/setuptools/gui-32.exe b/Darwin/lib/python3.4/site-packages/setuptools/gui-32.exe deleted file mode 100755 index f8d3509..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/gui-32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/gui-64.exe b/Darwin/lib/python3.4/site-packages/setuptools/gui-64.exe deleted file mode 100755 index 330c51a..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/gui-64.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/gui-arm-32.exe b/Darwin/lib/python3.4/site-packages/setuptools/gui-arm-32.exe deleted file mode 100755 index 537aff3..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/gui-arm-32.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/gui.exe b/Darwin/lib/python3.4/site-packages/setuptools/gui.exe deleted file mode 100755 index f8d3509..0000000 Binary files a/Darwin/lib/python3.4/site-packages/setuptools/gui.exe and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/setuptools/py31compat.py b/Darwin/lib/python3.4/site-packages/setuptools/py31compat.py deleted file mode 100644 index dbb324b..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/py31compat.py +++ /dev/null @@ -1,11 +0,0 @@ -__all__ = ['get_config_vars', 'get_path'] - -try: - # Python 2.7 or >=3.2 - from sysconfig import get_config_vars, get_path -except ImportError: - from 
distutils.sysconfig import get_config_vars, get_python_lib - def get_path(name): - if name not in ('platlib', 'purelib'): - raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') diff --git a/Darwin/lib/python3.4/site-packages/setuptools/script template (dev).py b/Darwin/lib/python3.4/site-packages/setuptools/script template (dev).py deleted file mode 100644 index b3fe209..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/script template (dev).py +++ /dev/null @@ -1,11 +0,0 @@ -# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import sys -from pkg_resources import require -require("""%(spec)r""") -del require -__file__ = """%(dev_path)r""" -if sys.version_info < (3, 0): - execfile(__file__) -else: - exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/script template.py b/Darwin/lib/python3.4/site-packages/setuptools/script template.py deleted file mode 100644 index 8dd5d51..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/script template.py +++ /dev/null @@ -1,4 +0,0 @@ -# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import pkg_resources -pkg_resources.run_script("""%(spec)r""", """%(script_name)r""") diff --git a/Darwin/lib/python3.4/site-packages/setuptools/svn_utils.py b/Darwin/lib/python3.4/site-packages/setuptools/svn_utils.py deleted file mode 100644 index a9bdc5c..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/svn_utils.py +++ /dev/null @@ -1,564 +0,0 @@ -import os -import re -import sys -from distutils import log -import xml.dom.pulldom -import shlex -import locale -import codecs -import unicodedata -import warnings -from setuptools.compat import unicode -from xml.sax.saxutils import unescape - -try: - import urlparse -except ImportError: - import urllib.parse as urlparse - -from subprocess import Popen as _Popen, PIPE as _PIPE - -#NOTE: Use of the command line options require SVN 1.3 or newer (December 2005) -# and SVN 1.3 hasn't been supported by the developers since mid 2008. 
- -#subprocess is called several times with shell=(sys.platform=='win32') -#see the follow for more information: -# http://bugs.python.org/issue8557 -# http://stackoverflow.com/questions/5658622/ -# python-subprocess-popen-environment-path - - -def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0): - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(args, stdout=stdout, stderr=stderr, - shell=(sys.platform == 'win32')) - - data = proc.communicate()[stream] - except OSError: - return 1, '' - - #doubled checked and - data = decode_as_string(data, encoding) - - #communciate calls wait() - return proc.returncode, data - - -def _get_entry_schedule(entry): - schedule = entry.getElementsByTagName('schedule')[0] - return "".join([t.nodeValue - for t in schedule.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_target_property(target): - property_text = target.getElementsByTagName('property')[0] - return "".join([t.nodeValue - for t in property_text.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_xml_data(decoded_str): - if sys.version_info < (3, 0): - #old versions want an encoded string - data = decoded_str.encode('utf-8') - else: - data = decoded_str - return data - - -def joinpath(prefix, *suffix): - if not prefix or prefix == '.': - return os.path.join(*suffix) - return os.path.join(prefix, *suffix) - -def determine_console_encoding(): - try: - #try for the preferred encoding - encoding = locale.getpreferredencoding() - - #see if the locale.getdefaultlocale returns null - #some versions of python\platforms return US-ASCII - #when it cannot determine an encoding - if not encoding or encoding == "US-ASCII": - encoding = locale.getdefaultlocale()[1] - - if encoding: - codecs.lookup(encoding) # make sure a lookup error is not made - - except (locale.Error, LookupError): - encoding = None - - is_osx = sys.platform == "darwin" - if not encoding: - return ["US-ASCII", "utf-8"][is_osx] - elif encoding.startswith("mac-") and is_osx: - #certain versions of python would return mac-roman as default - #OSX as a left over of earlier mac versions. - return "utf-8" - else: - return encoding - -_console_encoding = determine_console_encoding() - -def decode_as_string(text, encoding=None): - """ - Decode the console or file output explicitly using getpreferredencoding. - The text paraemeter should be a encoded string, if not no decode occurs - If no encoding is given, getpreferredencoding is used. If encoding is - specified, that is used instead. This would be needed for SVN --xml - output. Unicode is explicitly put in composed NFC form. - - --xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion - DEV List from 2007 seems to indicate the same. 
- """ - #text should be a byte string - - if encoding is None: - encoding = _console_encoding - - if not isinstance(text, unicode): - text = text.decode(encoding) - - text = unicodedata.normalize('NFC', text) - - return text - - -def parse_dir_entries(decoded_str): - '''Parse the entries from a recursive info xml''' - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - entries = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'entry': - doc.expandNode(node) - if not _get_entry_schedule(node).startswith('delete'): - entries.append((node.getAttribute('path'), - node.getAttribute('kind'))) - - return entries[1:] # do not want the root directory - - -def parse_externals_xml(decoded_str, prefix=''): - '''Parse a propget svn:externals xml''' - prefix = os.path.normpath(prefix) - prefix = os.path.normcase(prefix) - - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - externals = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'target': - doc.expandNode(node) - path = os.path.normpath(node.getAttribute('path')) - - if os.path.normcase(path).startswith(prefix): - path = path[len(prefix)+1:] - - data = _get_target_property(node) - #data should be decoded already - for external in parse_external_prop(data): - externals.append(joinpath(path, external)) - - return externals # do not want the root directory - - -def parse_external_prop(lines): - """ - Parse the value of a retrieved svn:externals entry. - - possible token setups (with quotng and backscaping in laters versions) - URL[@#] EXT_FOLDERNAME - [-r#] URL EXT_FOLDERNAME - EXT_FOLDERNAME [-r#] URL - """ - externals = [] - for line in lines.splitlines(): - line = line.lstrip() # there might be a "\ " - if not line: - continue - - if sys.version_info < (3, 0): - #shlex handles NULLs just fine and shlex in 2.7 tries to encode - #as ascii automatiically - line = line.encode('utf-8') - line = shlex.split(line) - if sys.version_info < (3, 0): - line = [x.decode('utf-8') for x in line] - - #EXT_FOLDERNAME is either the first or last depending on where - #the URL falls - if urlparse.urlsplit(line[-1])[0]: - external = line[0] - else: - external = line[-1] - - external = decode_as_string(external, encoding="utf-8") - externals.append(os.path.normpath(external)) - - return externals - - -def parse_prop_file(filename, key): - found = False - f = open(filename, 'rt') - data = '' - try: - for line in iter(f.readline, ''): # can't use direct iter! - parts = line.split() - if len(parts) == 2: - kind, length = parts - data = f.read(int(length)) - if kind == 'K' and data == key: - found = True - elif kind == 'V' and found: - break - finally: - f.close() - - return data - - -class SvnInfo(object): - ''' - Generic svn_info object. No has little knowledge of how to extract - information. Use cls.load to instatiate according svn version. - - Paths are not filesystem encoded. 
- ''' - - @staticmethod - def get_svn_version(): - code, data = _run_command(['svn', '--version', '--quiet']) - if code == 0 and data: - return data.strip() - else: - return '' - - #svnversion return values (previous implementations return max revision) - # 4123:4168 mixed revision working copy - # 4168M modified working copy - # 4123S switched working copy - # 4123:4168MS mixed revision, modified, switched working copy - revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I) - - @classmethod - def load(cls, dirname=''): - normdir = os.path.normpath(dirname) - code, data = _run_command(['svn', 'info', normdir]) - # Must check for some contents, as some use empty directories - # in testcases - svn_dir = os.path.join(normdir, '.svn') - has_svn = (os.path.isfile(os.path.join(svn_dir, 'entries')) or - os.path.isfile(os.path.join(svn_dir, 'dir-props')) or - os.path.isfile(os.path.join(svn_dir, 'dir-prop-base'))) - - svn_version = tuple(cls.get_svn_version().split('.')) - - try: - base_svn_version = tuple(int(x) for x in svn_version[:2]) - except ValueError: - base_svn_version = tuple() - - if not has_svn: - return SvnInfo(dirname) - - if code or not base_svn_version or base_svn_version < (1, 3): - warnings.warn(("No SVN 1.3+ command found: falling back " - "on pre 1.7 .svn parsing"), DeprecationWarning) - return SvnFileInfo(dirname) - - if base_svn_version < (1, 5): - return Svn13Info(dirname) - - return Svn15Info(dirname) - - def __init__(self, path=''): - self.path = path - self._entries = None - self._externals = None - - def get_revision(self): - 'Retrieve the directory revision informatino using svnversion' - code, data = _run_command(['svnversion', '-c', self.path]) - if code: - log.warn("svnversion failed") - return 0 - - parsed = self.revision_re.match(data) - if parsed: - return int(parsed.group(2)) - else: - return 0 - - @property - def entries(self): - if self._entries is None: - self._entries = self.get_entries() - return self._entries - - @property - def externals(self): - if self._externals is None: - self._externals = self.get_externals() - return self._externals - - def iter_externals(self): - ''' - Iterate over the svn:external references in the repository path. - ''' - for item in self.externals: - yield item - - def iter_files(self): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - for item, kind in self.entries: - if kind.lower() == 'file': - yield item - - def iter_dirs(self, include_root=True): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - if include_root: - yield self.path - for item, kind in self.entries: - if kind.lower() == 'dir': - yield item - - def get_entries(self): - return [] - - def get_externals(self): - return [] - - -class Svn13Info(SvnInfo): - def get_entries(self): - code, data = _run_command(['svn', 'info', '-R', '--xml', self.path], - encoding="utf-8") - - if code: - log.debug("svn info failed") - return [] - - return parse_dir_entries(data) - - def get_externals(self): - #Previous to 1.5 --xml was not supported for svn propget and the -R - #output format breaks the shlex compatible semantics. 
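# A small sketch of how SvnInfo.revision_re above digests `svnversion -c` output
# such as "4123:4168MS" (mixed revision, modified, switched working copy);
# group(2) is the upper revision the deleted get_revision() returns.
import re

revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I)
for sample in ('4168', '4168M', '4123:4168MS'):
    match = revision_re.match(sample)
    print(sample, '->', int(match.group(2)) if match else 0)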
- cmd = ['svn', 'propget', 'svn:externals'] - result = [] - for folder in self.iter_dirs(): - code, lines = _run_command(cmd + [folder], encoding="utf-8") - if code != 0: - log.warn("svn propget failed") - return [] - #lines should a str - for external in parse_external_prop(lines): - if folder: - external = os.path.join(folder, external) - result.append(os.path.normpath(external)) - - return result - - -class Svn15Info(Svn13Info): - def get_externals(self): - cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml'] - code, lines = _run_command(cmd, encoding="utf-8") - if code: - log.debug("svn propget failed") - return [] - return parse_externals_xml(lines, prefix=os.path.abspath(self.path)) - - -class SvnFileInfo(SvnInfo): - - def __init__(self, path=''): - super(SvnFileInfo, self).__init__(path) - self._directories = None - self._revision = None - - def _walk_svn(self, base): - entry_file = joinpath(base, '.svn', 'entries') - if os.path.isfile(entry_file): - entries = SVNEntriesFile.load(base) - yield (base, False, entries.parse_revision()) - for path in entries.get_undeleted_records(): - path = decode_as_string(path) - path = joinpath(base, path) - if os.path.isfile(path): - yield (path, True, None) - elif os.path.isdir(path): - for item in self._walk_svn(path): - yield item - - def _build_entries(self): - entries = list() - - rev = 0 - for path, isfile, dir_rev in self._walk_svn(self.path): - if isfile: - entries.append((path, 'file')) - else: - entries.append((path, 'dir')) - rev = max(rev, dir_rev) - - self._entries = entries - self._revision = rev - - def get_entries(self): - if self._entries is None: - self._build_entries() - return self._entries - - def get_revision(self): - if self._revision is None: - self._build_entries() - return self._revision - - def get_externals(self): - prop_files = [['.svn', 'dir-prop-base'], - ['.svn', 'dir-props']] - externals = [] - - for dirname in self.iter_dirs(): - prop_file = None - for rel_parts in prop_files: - filename = joinpath(dirname, *rel_parts) - if os.path.isfile(filename): - prop_file = filename - - if prop_file is not None: - ext_prop = parse_prop_file(prop_file, 'svn:externals') - #ext_prop should be utf-8 coming from svn:externals - ext_prop = decode_as_string(ext_prop, encoding="utf-8") - externals.extend(parse_external_prop(ext_prop)) - - return externals - - -def svn_finder(dirname=''): - #combined externals due to common interface - #combined externals and entries due to lack of dir_props in 1.7 - info = SvnInfo.load(dirname) - for path in info.iter_files(): - yield path - - for path in info.iter_externals(): - sub_info = SvnInfo.load(path) - for sub_path in sub_info.iter_files(): - yield sub_path - - -class SVNEntriesFile(object): - def __init__(self, data): - self.data = data - - @classmethod - def load(class_, base): - filename = os.path.join(base, '.svn', 'entries') - f = open(filename) - try: - result = SVNEntriesFile.read(f) - finally: - f.close() - return result - - @classmethod - def read(class_, fileobj): - data = fileobj.read() - is_xml = data.startswith(' revision_line_number - and section[revision_line_number]) - ] - return rev_numbers - - def get_undeleted_records(self): - undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete') - result = [ - section[0] - for section in self.get_sections() - if undeleted(section) - ] - return result - - -class SVNEntriesFileXML(SVNEntriesFile): - def is_valid(self): - return True - - def get_url(self): - "Get repository URL" - urlre = 
re.compile('url="([^"]+)"') - return urlre.search(self.data).group(1) - - def parse_revision_numbers(self): - revre = re.compile(r'committed-rev="(\d+)"') - return [ - int(m.group(1)) - for m in revre.finditer(self.data) - ] - - def get_undeleted_records(self): - entries_pattern = \ - re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) - results = [ - unescape(match.group(1)) - for match in entries_pattern.finditer(self.data) - ] - return results - - -if __name__ == '__main__': - for name in svn_finder(sys.argv[1]): - print(name) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/__init__.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/__init__.py deleted file mode 100644 index b5328ce..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/__init__.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Tests for the 'setuptools' package""" -import sys -import os -import unittest -from setuptools.tests import doctest -import distutils.core -import distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -from distutils.core import Extension -from distutils.version import LooseVersion -from setuptools.compat import func_code - -from setuptools.compat import func_code -import setuptools.dist -import setuptools.depends as dep -from setuptools import Feature -from setuptools.depends import Require - -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite( - os.path.join('tests', 'api_tests.txt'), - optionflags=doctest.ELLIPSIS, package='pkg_resources', - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! 
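# A sketch of the regex scraping SVNEntriesFileXML above applies to pre-1.7
# XML-format .svn/entries data: repository url, committed revisions, and the
# names of undeleted entries. The sample blob here is fabricated for illustration.
import re
from xml.sax.saxutils import unescape

data = ('<wc-entries>'
        '<entry name="" url="https://svn.example.org/repo/trunk" committed-rev="4168"/>'
        '<entry name="kept.py" committed-rev="4168"/>'
        '<entry name="gone.py" committed-rev="4100" deleted="true"/>'
        '</wc-entries>')

url = re.search('url="([^"]+)"', data).group(1)
revisions = [int(m.group(1)) for m in re.finditer(r'committed-rev="(\d+)"', data)]
undeleted = [unescape(m.group(1))
             for m in re.finditer(r'name="([^"]+)"(?![^>]+deleted="true")', data, re.I)]
print(url, max(revisions), undeleted)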
- args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core._setup_stop_after = None - - -class DependsTests(unittest.TestCase): - - def testExtractConst(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platforms - return - - def f1(): - global x, y, z - x = "test" - y = z - - fc = func_code(f1) - # unrecognized name - self.assertEqual(dep.extract_constant(fc,'q', -1), None) - - # constant assigned - self.assertEqual(dep.extract_constant(fc,'x', -1), "test") - - # expression assigned - self.assertEqual(dep.extract_constant(fc,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(dep.extract_constant(fc,'z', -1), None) - - def testFindModule(self): - self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') - self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') - f,p,i = dep.find_module('setuptools.tests') - f.close() - - def testModuleExtract(self): - if not hasattr(dep, 'get_module_constant'): - # skip on non-bytecode platforms - return - - from email import __version__ - self.assertEqual( - dep.get_module_constant('email','__version__'), __version__ - ) - self.assertEqual( - dep.get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - dep.get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platformsh - return - - req = Require('Email','1.0.3','email') - - self.assertEqual(req.name, 'Email') - self.assertEqual(req.module, 'email') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Email-1.0.3') - - from email import __version__ - self.assertEqual(req.get_version(), __version__) - self.assertTrue(req.version_ok('1.0.9')) - self.assertTrue(not req.version_ok('0.9.1')) - self.assertTrue(not req.version_ok('unknown')) - - self.assertTrue(req.is_present()) - self.assertTrue(req.is_current()) - - req = Require('Email 3000','03000','email',format=LooseVersion) - self.assertTrue(req.is_present()) - self.assertTrue(not req.is_current()) - self.assertTrue(not req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.assertTrue(not req.is_present()) - self.assertTrue(not req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.assertTrue(req.is_present(paths)) - self.assertTrue(req.is_current(paths)) - - -class DistroTests(unittest.TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - def testDistroType(self): - self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') 
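# A minimal equivalent of the makeSetup() helper above: it leans on the private
# distutils.core._setup_stop_after hook so setup() parses the command line and
# hands back the Distribution without running any commands. The hook is private
# and its behaviour can differ across distutils/setuptools versions, so treat
# this as illustrative only.
import distutils.core
import setuptools

def make_dist(**attrs):
    attrs.setdefault('script_args', ['install'])
    distutils.core._setup_stop_after = 'commandline'
    try:
        return setuptools.setup(**attrs)     # returns a Distribution, runs nothing
    finally:
        distutils.core._setup_stop_after = None

dist = make_dist(name='demo', packages=['demo'])
print(type(dist).__name__, dist.packages)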
- self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.assertTrue(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.assertTrue(not self.dist.has_contents_for('a')) - - self.assertTrue(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.assertTrue(not self.dist.has_contents_for('b')) - - self.assertTrue(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.assertTrue(not self.dist.has_contents_for('c')) - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - -class FeatureTests(unittest.TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.assertTrue(not - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.assertTrue( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.assertTrue( - 
('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.assertTrue( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options - ) - self.assertTrue( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - self.assertTrue( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.assertTrue(not 'without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.assertTrue(not 'bar_et' in dist.py_modules) - self.assertTrue(not 'pkg.bar' in dist.packages) - self.assertTrue('pkg.baz' in dist.packages) - self.assertTrue('scripts/baz_it' in dist.scripts) - self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(unittest.TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/doctest.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/doctest.py deleted file mode 100644 index 47293c3..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/doctest.py +++ /dev/null @@ -1,2683 +0,0 @@ -# Module doctest. -# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org). -# Major enhancements and refactoring by: -# Jim Fulton -# Edward Loper - -# Provided as-is; use at your own risk; no warranty; no promises; enjoy! - -try: - basestring -except NameError: - basestring = str - -try: - enumerate -except NameError: - def enumerate(seq): - return zip(range(len(seq)),seq) - -r"""Module doctest -- a framework for running examples in docstrings. 
- -In simplest use, end each module M to be tested with: - -def _test(): - import doctest - doctest.testmod() - -if __name__ == "__main__": - _test() - -Then running the module as a script will cause the examples in the -docstrings to get executed and verified: - -python M.py - -This won't display anything unless an example fails, in which case the -failing example(s) and the cause(s) of the failure(s) are printed to stdout -(why not stderr? because stderr is a lame hack <0.2 wink>), and the final -line of output is "Test failed.". - -Run it with the -v switch instead: - -python M.py -v - -and a detailed report of all examples tried is printed to stdout, along -with assorted summaries at the end. - -You can force verbose mode by passing "verbose=True" to testmod, or prohibit -it by passing "verbose=False". In either of those cases, sys.argv is not -examined by testmod. - -There are a variety of other ways to run doctests, including integration -with the unittest framework, and support for running non-Python text -files containing doctests. There are also many ways to override parts -of doctest's default behaviors. See the Library Reference Manual for -details. -""" - -__docformat__ = 'reStructuredText en' - -__all__ = [ - # 0, Option Flags - 'register_optionflag', - 'DONT_ACCEPT_TRUE_FOR_1', - 'DONT_ACCEPT_BLANKLINE', - 'NORMALIZE_WHITESPACE', - 'ELLIPSIS', - 'IGNORE_EXCEPTION_DETAIL', - 'COMPARISON_FLAGS', - 'REPORT_UDIFF', - 'REPORT_CDIFF', - 'REPORT_NDIFF', - 'REPORT_ONLY_FIRST_FAILURE', - 'REPORTING_FLAGS', - # 1. Utility Functions - 'is_private', - # 2. Example & DocTest - 'Example', - 'DocTest', - # 3. Doctest Parser - 'DocTestParser', - # 4. Doctest Finder - 'DocTestFinder', - # 5. Doctest Runner - 'DocTestRunner', - 'OutputChecker', - 'DocTestFailure', - 'UnexpectedException', - 'DebugRunner', - # 6. Test Functions - 'testmod', - 'testfile', - 'run_docstring_examples', - # 7. Tester - 'Tester', - # 8. Unittest Support - 'DocTestSuite', - 'DocFileSuite', - 'set_unittest_reportflags', - # 9. Debugging Support - 'script_from_examples', - 'testsource', - 'debug_src', - 'debug', -] - -import __future__ - -import sys, traceback, inspect, linecache, os, re, types -import unittest, difflib, pdb, tempfile -import warnings -from setuptools.compat import StringIO, execfile, func_code, im_func - -# Don't whine about the deprecated is_private function in this -# module's tests. -warnings.filterwarnings("ignore", "is_private", DeprecationWarning, - __name__, 0) - -# There are 4 basic classes: -# - Example: a pair, plus an intra-docstring line number. -# - DocTest: a collection of examples, parsed from a docstring, plus -# info about where the docstring came from (name, filename, lineno). -# - DocTestFinder: extracts DocTests from a given object's docstring and -# its contained objects' docstrings. -# - DocTestRunner: runs DocTest cases, and accumulates statistics. -# -# So the basic picture is: -# -# list of: -# +------+ +---------+ +-------+ -# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results| -# +------+ +---------+ +-------+ -# | Example | -# | ... | -# | Example | -# +---------+ - -# Option constants. 
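# A minimal sketch of the testmod() workflow the module docstring above describes;
# the same entry points exist in the stdlib doctest module this vendored copy tracks.
def square(x):
    """Return x squared.

    >>> square(3)
    9
    >>> square(-2)
    4
    """
    return x * x

if __name__ == "__main__":
    import doctest
    doctest.testmod()      # silent unless an example fails; pass verbose=True for detail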
- -OPTIONFLAGS_BY_NAME = {} -def register_optionflag(name): - flag = 1 << len(OPTIONFLAGS_BY_NAME) - OPTIONFLAGS_BY_NAME[name] = flag - return flag - -DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1') -DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE') -NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE') -ELLIPSIS = register_optionflag('ELLIPSIS') -IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL') - -COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 | - DONT_ACCEPT_BLANKLINE | - NORMALIZE_WHITESPACE | - ELLIPSIS | - IGNORE_EXCEPTION_DETAIL) - -REPORT_UDIFF = register_optionflag('REPORT_UDIFF') -REPORT_CDIFF = register_optionflag('REPORT_CDIFF') -REPORT_NDIFF = register_optionflag('REPORT_NDIFF') -REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE') - -REPORTING_FLAGS = (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF | - REPORT_ONLY_FIRST_FAILURE) - -# Special string markers for use in `want` strings: -BLANKLINE_MARKER = '' -ELLIPSIS_MARKER = '...' - -###################################################################### -## Table of Contents -###################################################################### -# 1. Utility Functions -# 2. Example & DocTest -- store test cases -# 3. DocTest Parser -- extracts examples from strings -# 4. DocTest Finder -- extracts test cases from objects -# 5. DocTest Runner -- runs test cases -# 6. Test Functions -- convenient wrappers for testing -# 7. Tester Class -- for backwards compatibility -# 8. Unittest Support -# 9. Debugging Support -# 10. Example Usage - -###################################################################### -## 1. Utility Functions -###################################################################### - -def is_private(prefix, base): - """prefix, base -> true iff name prefix + "." + base is "private". - - Prefix may be an empty string, and base does not contain a period. - Prefix is ignored (although functions you write conforming to this - protocol may make use of it). - Return true iff base begins with an (at least one) underscore, but - does not both begin and end with (at least) two underscores. - - >>> is_private("a.b", "my_func") - False - >>> is_private("____", "_my_func") - True - >>> is_private("someclass", "__init__") - False - >>> is_private("sometypo", "__init_") - True - >>> is_private("x.y.z", "_") - True - >>> is_private("_x.y.z", "__") - False - >>> is_private("", "") # senseless but consistent - False - """ - warnings.warn("is_private is deprecated; it wasn't useful; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning, stacklevel=2) - return base[:1] == "_" and not base[:2] == "__" == base[-2:] - -def _extract_future_flags(globs): - """ - Return the compiler-flags associated with the future features that - have been imported into the given namespace (globs). - """ - flags = 0 - for fname in __future__.all_feature_names: - feature = globs.get(fname, None) - if feature is getattr(__future__, fname): - flags |= feature.compiler_flag - return flags - -def _normalize_module(module, depth=2): - """ - Return the module specified by `module`. In particular: - - If `module` is a module, then return module. - - If `module` is a string, then import and return the - module with that name. - - If `module` is None, then return the calling module. - The calling module is assumed to be the module of - the stack frame at the given depth in the call stack. 
- """ - if inspect.ismodule(module): - return module - elif isinstance(module, basestring): - return __import__(module, globals(), locals(), ["*"]) - elif module is None: - return sys.modules[sys._getframe(depth).f_globals['__name__']] - else: - raise TypeError("Expected a module, string, or None") - -def _indent(s, indent=4): - """ - Add the given number of space characters to the beginning every - non-blank line in `s`, and return the result. - """ - # This regexp matches the start of non-blank lines: - return re.sub('(?m)^(?!$)', indent*' ', s) - -def _exception_traceback(exc_info): - """ - Return a string containing a traceback message for the given - exc_info tuple (as returned by sys.exc_info()). - """ - # Get a traceback message. - excout = StringIO() - exc_type, exc_val, exc_tb = exc_info - traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) - return excout.getvalue() - -# Override some StringIO methods. -class _SpoofOut(StringIO): - def getvalue(self): - result = StringIO.getvalue(self) - # If anything at all was written, make sure there's a trailing - # newline. There's no way for the expected output to indicate - # that a trailing newline is missing. - if result and not result.endswith("\n"): - result += "\n" - # Prevent softspace from screwing up the next test case, in - # case they used print with a trailing comma in an example. - if hasattr(self, "softspace"): - del self.softspace - return result - - def truncate(self, size=None): - StringIO.truncate(self, size) - if hasattr(self, "softspace"): - del self.softspace - -# Worst-case linear-time ellipsis matching. -def _ellipsis_match(want, got): - """ - Essentially the only subtle case: - >>> _ellipsis_match('aa...aa', 'aaa') - False - """ - if want.find(ELLIPSIS_MARKER)==-1: - return want == got - - # Find "the real" strings. - ws = want.split(ELLIPSIS_MARKER) - assert len(ws) >= 2 - - # Deal with exact matches possibly needed at one or both ends. - startpos, endpos = 0, len(got) - w = ws[0] - if w: # starts with exact match - if got.startswith(w): - startpos = len(w) - del ws[0] - else: - return False - w = ws[-1] - if w: # ends with exact match - if got.endswith(w): - endpos -= len(w) - del ws[-1] - else: - return False - - if startpos > endpos: - # Exact end matches required more characters than we have, as in - # _ellipsis_match('aa...aa', 'aaa') - return False - - # For the rest, we only need to find the leftmost non-overlapping - # match for each piece. If there's no overall match that way alone, - # there's no overall match period. - for w in ws: - # w may be '' at times, if there are consecutive ellipses, or - # due to an ellipsis at the start or end of `want`. That's OK. - # Search for an empty string succeeds, and doesn't change startpos. - startpos = got.find(w, startpos, endpos) - if startpos < 0: - return False - startpos += len(w) - - return True - -def _comment_line(line): - "Return a commented form of the given line" - line = line.rstrip() - if line: - return '# '+line - else: - return '#' - -class _OutputRedirectingPdb(pdb.Pdb): - """ - A specialized version of the python debugger that redirects stdout - to a given stream when interacting with the user. Stdout is *not* - redirected when traced code is executed. - """ - def __init__(self, out): - self.__out = out - pdb.Pdb.__init__(self) - - def trace_dispatch(self, *args): - # Redirect stdout to the given stream. - save_stdout = sys.stdout - sys.stdout = self.__out - # Call Pdb's trace dispatch method. 
- try: - return pdb.Pdb.trace_dispatch(self, *args) - finally: - sys.stdout = save_stdout - -# [XX] Normalize with respect to os.path.pardir? -def _module_relative_path(module, path): - if not inspect.ismodule(module): - raise TypeError('Expected a module: %r' % module) - if path.startswith('/'): - raise ValueError('Module-relative files may not have absolute paths') - - # Find the base directory for the path. - if hasattr(module, '__file__'): - # A normal module/package - basedir = os.path.split(module.__file__)[0] - elif module.__name__ == '__main__': - # An interactive session. - if len(sys.argv)>0 and sys.argv[0] != '': - basedir = os.path.split(sys.argv[0])[0] - else: - basedir = os.curdir - else: - # A module w/o __file__ (this includes builtins) - raise ValueError("Can't resolve paths relative to the module " + - module + " (it has no __file__)") - - # Combine the base directory and the path. - return os.path.join(basedir, *(path.split('/'))) - -###################################################################### -## 2. Example & DocTest -###################################################################### -## - An "example" is a pair, where "source" is a -## fragment of source code, and "want" is the expected output for -## "source." The Example class also includes information about -## where the example was extracted from. -## -## - A "doctest" is a collection of examples, typically extracted from -## a string (such as an object's docstring). The DocTest class also -## includes information about where the string was extracted from. - -class Example: - """ - A single doctest example, consisting of source code and expected - output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. `exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. - """ - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Normalize inputs. - if not source.endswith('\n'): - source += '\n' - if want and not want.endswith('\n'): - want += '\n' - if exc_msg is not None and not exc_msg.endswith('\n'): - exc_msg += '\n' - # Store properties. 
- self.source = source - self.want = want - self.lineno = lineno - self.indent = indent - if options is None: options = {} - self.options = options - self.exc_msg = exc_msg - -class DocTest: - """ - A collection of doctest examples that should be run in a single - namespace. Each `DocTest` defines the following attributes: - - - examples: the list of examples. - - - globs: The namespace (aka globals) that the examples should - be run in. - - - name: A name identifying the DocTest (typically, the name of - the object whose docstring this DocTest was extracted from). - - - filename: The name of the file that this DocTest was extracted - from, or `None` if the filename is unknown. - - - lineno: The line number within filename where this DocTest - begins, or `None` if the line number is unavailable. This - line number is zero-based, with respect to the beginning of - the file. - - - docstring: The string that the examples were extracted from, - or `None` if the string is unavailable. - """ - def __init__(self, examples, globs, name, filename, lineno, docstring): - """ - Create a new DocTest containing the given examples. The - DocTest's globals are initialized with a copy of `globs`. - """ - assert not isinstance(examples, basestring), \ - "DocTest no longer accepts str; use DocTestParser instead" - self.examples = examples - self.docstring = docstring - self.globs = globs.copy() - self.name = name - self.filename = filename - self.lineno = lineno - - def __repr__(self): - if len(self.examples) == 0: - examples = 'no examples' - elif len(self.examples) == 1: - examples = '1 example' - else: - examples = '%d examples' % len(self.examples) - return ('' % - (self.name, self.filename, self.lineno, examples)) - - - # This lets us sort tests by name: - def __cmp__(self, other): - if not isinstance(other, DocTest): - return -1 - return cmp((self.name, self.filename, self.lineno, id(self)), - (other.name, other.filename, other.lineno, id(other))) - -###################################################################### -## 3. DocTestParser -###################################################################### - -class DocTestParser: - """ - A class used to parse strings containing doctest examples. - """ - # This regular expression is used to find doctest examples in a - # string. It defines three groups: `source` is the source code - # (including leading indentation and prompts); `indent` is the - # indentation of the first (PS1) line of the source code; and - # `want` is the expected output (including leading indentation). - _EXAMPLE_RE = re.compile(r''' - # Source consists of a PS1 line followed by zero or more PS2 lines. - (?P - (?:^(?P [ ]*) >>> .*) # PS1 line - (?:\n [ ]* \.\.\. .*)*) # PS2 lines - \n? - # Want consists of any non-blank lines that do not start with PS1. - (?P (?:(?![ ]*$) # Not a blank line - (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line - )*) - ''', re.MULTILINE | re.VERBOSE) - - # A regular expression for handling `want` strings that contain - # expected exceptions. It divides `want` into three pieces: - # - the traceback header line (`hdr`) - # - the traceback stack (`stack`) - # - the exception message (`msg`), as generated by - # traceback.format_exception_only() - # `msg` may have multiple lines. We assume/require that the - # exception message is the first non-indented line starting with a word - # character following the traceback header line. - _EXCEPTION_RE = re.compile(r""" - # Grab the traceback header. 
Different versions of Python have - # said different things on the first traceback line. - ^(?P Traceback\ \( - (?: most\ recent\ call\ last - | innermost\ last - ) \) : - ) - \s* $ # toss trailing whitespace on the header. - (?P .*?) # don't blink: absorb stuff until... - ^ (?P \w+ .*) # a line *starts* with alphanum. - """, re.VERBOSE | re.MULTILINE | re.DOTALL) - - # A callable returning a true value iff its argument is a blank line - # or contains a single comment. - _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match - - def parse(self, string, name=''): - """ - Divide the given string into examples and intervening text, - and return them as a list of alternating Examples and strings. - Line numbers for the Examples are 0-based. The optional - argument `name` is a name identifying this string, and is only - used for error messages. - """ - string = string.expandtabs() - # If all lines begin with the same indentation, then strip it. - min_indent = self._min_indent(string) - if min_indent > 0: - string = '\n'.join([l[min_indent:] for l in string.split('\n')]) - - output = [] - charno, lineno = 0, 0 - # Find all doctest examples in the string: - for m in self._EXAMPLE_RE.finditer(string): - # Add the pre-example text to `output`. - output.append(string[charno:m.start()]) - # Update lineno (lines before this example) - lineno += string.count('\n', charno, m.start()) - # Extract info from the regexp match. - (source, options, want, exc_msg) = \ - self._parse_example(m, name, lineno) - # Create an Example, and add it to the list. - if not self._IS_BLANK_OR_COMMENT(source): - output.append( Example(source, want, exc_msg, - lineno=lineno, - indent=min_indent+len(m.group('indent')), - options=options) ) - # Update lineno (lines inside this example) - lineno += string.count('\n', m.start(), m.end()) - # Update charno. - charno = m.end() - # Add any remaining post-example text to `output`. - output.append(string[charno:]) - return output - - def get_doctest(self, string, globs, name, filename, lineno): - """ - Extract all doctest examples from the given string, and - collect them into a `DocTest` object. - - `globs`, `name`, `filename`, and `lineno` are attributes for - the new `DocTest` object. See the documentation for `DocTest` - for more information. - """ - return DocTest(self.get_examples(string, name), globs, - name, filename, lineno, string) - - def get_examples(self, string, name=''): - """ - Extract all doctest examples from the given string, and return - them as a list of `Example` objects. Line numbers are - 0-based, because it's most common in doctests that nothing - interesting appears on the same line as opening triple-quote, - and so the first interesting line is called \"line 1\" then. - - The optional argument `name` is a name identifying this - string, and is only used for error messages. - """ - return [x for x in self.parse(string, name) - if isinstance(x, Example)] - - def _parse_example(self, m, name, lineno): - """ - Given a regular expression match from `_EXAMPLE_RE` (`m`), - return a pair `(source, want)`, where `source` is the matched - example's source code (with prompts and indentation stripped); - and `want` is the example's expected output (with indentation - stripped). - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - # Get the example's indentation level. 
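# A short usage sketch of the parser described above: DocTestParser splits a
# docstring into Example objects (source, expected output, 0-based line number,
# per-example option overrides). The stdlib class exposes the same
# parse()/get_examples() API.
import doctest

SAMPLE = """
Adds numbers.

>>> 1 + 1
2
>>> print('hi')
hi
"""

for example in doctest.DocTestParser().get_examples(SAMPLE, name='demo'):
    print(example.lineno, repr(example.source), '->', repr(example.want))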
- indent = len(m.group('indent')) - - # Divide source into lines; check that they're properly - # indented; and then strip their indentation & prompts. - source_lines = m.group('source').split('\n') - self._check_prompt_blank(source_lines, indent, name, lineno) - self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno) - source = '\n'.join([sl[indent+4:] for sl in source_lines]) - - # Divide want into lines; check that it's properly indented; and - # then strip the indentation. Spaces before the last newline should - # be preserved, so plain rstrip() isn't good enough. - want = m.group('want') - want_lines = want.split('\n') - if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): - del want_lines[-1] # forget final newline & spaces after it - self._check_prefix(want_lines, ' '*indent, name, - lineno + len(source_lines)) - want = '\n'.join([wl[indent:] for wl in want_lines]) - - # If `want` contains a traceback message, then extract it. - m = self._EXCEPTION_RE.match(want) - if m: - exc_msg = m.group('msg') - else: - exc_msg = None - - # Extract options from the source. - options = self._find_options(source, name, lineno) - - return source, options, want, exc_msg - - # This regular expression looks for option directives in the - # source code of an example. Option directives are comments - # starting with "doctest:". Warning: this may give false - # positives for string-literals that contain the string - # "#doctest:". Eliminating these false positives would require - # actually parsing the string; but we limit them by ignoring any - # line containing "#doctest:" that is *followed* by a quote mark. - _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$', - re.MULTILINE) - - def _find_options(self, source, name, lineno): - """ - Return a dictionary containing option overrides extracted from - option directives in the given source string. - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - options = {} - # (note: with the current regexp, this will match at most once:) - for m in self._OPTION_DIRECTIVE_RE.finditer(source): - option_strings = m.group(1).replace(',', ' ').split() - for option in option_strings: - if (option[0] not in '+-' or - option[1:] not in OPTIONFLAGS_BY_NAME): - raise ValueError('line %r of the doctest for %s ' - 'has an invalid option: %r' % - (lineno+1, name, option)) - flag = OPTIONFLAGS_BY_NAME[option[1:]] - options[flag] = (option[0] == '+') - if options and self._IS_BLANK_OR_COMMENT(source): - raise ValueError('line %r of the doctest for %s has an option ' - 'directive on a line with no example: %r' % - (lineno, name, source)) - return options - - # This regular expression finds the indentation of every non-blank - # line in a string. - _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE) - - def _min_indent(self, s): - "Return the minimum indentation of any non-blank line in `s`" - indents = [len(indent) for indent in self._INDENT_RE.findall(s)] - if len(indents) > 0: - return min(indents) - else: - return 0 - - def _check_prompt_blank(self, lines, indent, name, lineno): - """ - Given the lines of a source string (including prompts and - leading indentation), check to make sure that every prompt is - followed by a space character. If any line is not followed by - a space character, then raise ValueError. 
- """ - for i, line in enumerate(lines): - if len(line) >= indent+4 and line[indent+3] != ' ': - raise ValueError('line %r of the docstring for %s ' - 'lacks blank after %s: %r' % - (lineno+i+1, name, - line[indent:indent+3], line)) - - def _check_prefix(self, lines, prefix, name, lineno): - """ - Check that every line in the given list starts with the given - prefix; if any line does not, then raise a ValueError. - """ - for i, line in enumerate(lines): - if line and not line.startswith(prefix): - raise ValueError('line %r of the docstring for %s has ' - 'inconsistent leading whitespace: %r' % - (lineno+i+1, name, line)) - - -###################################################################### -## 4. DocTest Finder -###################################################################### - -class DocTestFinder: - """ - A class used to extract the DocTests that are relevant to a given - object, from its docstring and the docstrings of its contained - objects. Doctests can currently be extracted from the following - object types: modules, functions, classes, methods, staticmethods, - classmethods, and properties. - """ - - def __init__(self, verbose=False, parser=DocTestParser(), - recurse=True, _namefilter=None, exclude_empty=True): - """ - Create a new doctest finder. - - The optional argument `parser` specifies a class or - function that should be used to create new DocTest objects (or - objects that implement the same interface as DocTest). The - signature for this factory function should match the signature - of the DocTest constructor. - - If the optional argument `recurse` is false, then `find` will - only examine the given object, and not any contained objects. - - If the optional argument `exclude_empty` is false, then `find` - will include tests for objects with empty docstrings. - """ - self._parser = parser - self._verbose = verbose - self._recurse = recurse - self._exclude_empty = exclude_empty - # _namefilter is undocumented, and exists only for temporary backward- - # compatibility support of testmod's deprecated isprivate mess. - self._namefilter = _namefilter - - def find(self, obj, name=None, module=None, globs=None, - extraglobs=None): - """ - Return a list of the DocTests that are defined by the given - object's docstring, or by any of its contained objects' - docstrings. - - The optional parameter `module` is the module that contains - the given object. If the module is not specified or is None, then - the test finder will attempt to automatically determine the - correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. - - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - - Contained objects whose module does not match `module` are ignored. - - If `module` is False, no attempt to find the module will be made. - This is obscure, of use mostly in tests: if `module` is False, or - is None but cannot be found automatically, then all objects are - considered to belong to the (non-existent) module, so all contained - objects will (recursively) be searched for doctests. - - The globals for each DocTest is formed by combining `globs` - and `extraglobs` (bindings in `extraglobs` override bindings - in `globs`). A new copy of the globals dictionary is created - for each DocTest. 
If `globs` is not specified, then it - defaults to the module's `__dict__`, if specified, or {} - otherwise. If `extraglobs` is not specified, then it defaults - to {}. - - """ - # If name was not specified, then extract it from the object. - if name is None: - name = getattr(obj, '__name__', None) - if name is None: - raise ValueError("DocTestFinder.find: name must be given " - "when obj.__name__ doesn't exist: %r" % - (type(obj),)) - - # Find the module that contains the given object (if obj is - # a module, then module=obj.). Note: this may fail, in which - # case module will be None. - if module is False: - module = None - elif module is None: - module = inspect.getmodule(obj) - - # Read the module's source code. This is used by - # DocTestFinder._find_lineno to find the line number for a - # given object's docstring. - try: - file = inspect.getsourcefile(obj) or inspect.getfile(obj) - source_lines = linecache.getlines(file) - if not source_lines: - source_lines = None - except TypeError: - source_lines = None - - # Initialize globals, and merge in extraglobs. - if globs is None: - if module is None: - globs = {} - else: - globs = module.__dict__.copy() - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - # Recursively expore `obj`, extracting DocTests. - tests = [] - self._find(tests, obj, name, module, source_lines, globs, {}) - return tests - - def _filter(self, obj, prefix, base): - """ - Return true if the given object should not be examined. - """ - return (self._namefilter is not None and - self._namefilter(prefix, base)) - - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. - """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is func_globals(object) - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. - else: - raise ValueError("object must be a class or function") - - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. - """ - if self._verbose: - print('Finding tests in %s' % name) - - # If we've already processed this object, then ignore it. - if id(obj) in seen: - return - seen[id(obj)] = 1 - - # Find a test for this object, and add it to the list of tests. - test = self._get_test(obj, name, module, globs, source_lines) - if test is not None: - tests.append(test) - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - valname = '%s.%s' % (name, valname) - # Recurse to functions & classes. - if ((inspect.isfunction(val) or inspect.isclass(val)) and - self._from_module(module, val)): - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a module's __test__ dictionary. 
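# A sketch of the usual pairing of the two classes documented above, using the
# stdlib equivalents: DocTestFinder.find() extracts DocTests from an object and
# its contained objects, and DocTestRunner.run() executes them while
# accumulating (failures, tries) statistics.
import doctest

class Accumulator:
    """
    >>> Accumulator(2).add(3)
    5
    """
    def __init__(self, start):
        self.total = start
    def add(self, n):
        """
        >>> Accumulator(0).add(7)
        7
        """
        self.total += n
        return self.total

finder = doctest.DocTestFinder()
runner = doctest.DocTestRunner(verbose=False)
for test in finder.find(Accumulator, name='Accumulator'):
    runner.run(test)
print(runner.summarize())      # e.g. TestResults(failed=0, attempted=2)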
- if inspect.ismodule(obj) and self._recurse: - for valname, val in getattr(obj, '__test__', {}).items(): - if not isinstance(valname, basestring): - raise ValueError("DocTestFinder.find: __test__ keys " - "must be strings: %r" % - (type(valname),)) - if not (inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or inspect.ismodule(val) or - isinstance(val, basestring)): - raise ValueError("DocTestFinder.find: __test__ values " - "must be strings, functions, methods, " - "classes, or modules: %r" % - (type(val),)) - valname = '%s.__test__.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = im_func(getattr(obj, valname)) - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - def _get_test(self, obj, name, module, globs, source_lines): - """ - Return a DocTest for the given object, if it defines a docstring; - otherwise, return None. - """ - # Extract the object's docstring. If it doesn't have one, - # then return None (no test for this object). - if isinstance(obj, basestring): - docstring = obj - else: - try: - if obj.__doc__ is None: - docstring = '' - else: - docstring = obj.__doc__ - if not isinstance(docstring, basestring): - docstring = str(docstring) - except (TypeError, AttributeError): - docstring = '' - - # Find the docstring's location in the file. - lineno = self._find_lineno(obj, source_lines) - - # Don't bother if the docstring is empty. - if self._exclude_empty and not docstring: - return None - - # Return a DocTest for this object. - if module is None: - filename = None - else: - filename = getattr(module, '__file__', module.__name__) - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - return self._parser.get_doctest(docstring, globs, name, - filename, lineno) - - def _find_lineno(self, obj, source_lines): - """ - Return a line number of the given object's docstring. Note: - this method assumes that the object has a docstring. - """ - lineno = None - - # Find the line number for modules. - if inspect.ismodule(obj): - lineno = 0 - - # Find the line number for classes. - # Note: this could be fooled if a class is defined multiple - # times in a single file. - if inspect.isclass(obj): - if source_lines is None: - return None - pat = re.compile(r'^\s*class\s*%s\b' % - getattr(obj, '__name__', '-')) - for i, line in enumerate(source_lines): - if pat.match(line): - lineno = i - break - - # Find the line number for functions & methods. - if inspect.ismethod(obj): obj = im_func(obj) - if inspect.isfunction(obj): obj = func_code(obj) - if inspect.istraceback(obj): obj = obj.tb_frame - if inspect.isframe(obj): obj = obj.f_code - if inspect.iscode(obj): - lineno = getattr(obj, 'co_firstlineno', None)-1 - - # Find the line number where the docstring starts. Assume - # that it's the first line that begins with a quote mark. 
- # Note: this could be fooled by a multiline function - # signature, where a continuation line begins with a quote - # mark. - if lineno is not None: - if source_lines is None: - return lineno+1 - pat = re.compile('(^|.*:)\s*\w*("|\')') - for lineno in range(lineno, len(source_lines)): - if pat.match(source_lines[lineno]): - return lineno - - # We couldn't find the line number. - return None - -###################################################################### -## 5. DocTest Runner -###################################################################### - -class DocTestRunner: - """ - A class used to run DocTest test cases, and accumulate statistics. - The `run` method is used to process a single DocTest case. It - returns a tuple `(f, t)`, where `t` is the number of test cases - tried, and `f` is the number of test cases that failed. - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> for test in tests: - ... print runner.run(test) - (0, 2) - (0, 1) - (0, 2) - (0, 2) - - The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated `(f, t)` - tuple: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 tests in _TestClass.square - 7 tests in 4 items. - 7 passed and 0 failed. - Test passed. - (0, 7) - - The aggregated number of tried examples and failed examples is - also available via the `tries` and `failures` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - - The comparison between expected outputs and actual outputs is done - by an `OutputChecker`. This comparison may be customized with a - number of option flags; see the documentation for `testmod` for - more information. If the option flags are insufficient, then the - comparison may also be customized by passing a subclass of - `OutputChecker` to the constructor. - - The test runner's display output can be controlled in two ways. - First, an output function (`out) can be passed to - `TestRunner.run`; this function will be called with strings that - should be displayed. It defaults to `sys.stdout.write`. If - capturing the output is not sufficient, then the display output - can be also customized by subclassing DocTestRunner, and - overriding the methods `report_start`, `report_success`, - `report_unexpected_exception`, and `report_failure`. - """ - # This divider string is used to separate failure messages, and to - # separate sections of the summary. - DIVIDER = "*" * 70 - - def __init__(self, checker=None, verbose=None, optionflags=0): - """ - Create a new test runner. - - Optional keyword arg `checker` is the `OutputChecker` that - should be used to compare the expected outputs and actual - outputs of doctest examples. - - Optional keyword arg 'verbose' prints lots of stuff if true, - only failures if false; by default, it's true iff '-v' is in - sys.argv. - - Optional argument `optionflags` can be used to control how the - test runner compares expected output to actual output, and how - it displays failures. See the documentation for `testmod` for - more information. - """ - self._checker = checker or OutputChecker() - if verbose is None: - verbose = '-v' in sys.argv - self._verbose = verbose - self.optionflags = optionflags - self.original_optionflags = optionflags - - # Keep track of the examples we've run. 
- self.tries = 0 - self.failures = 0 - self._name2ft = {} - - # Create a fake output target for capturing doctest output. - self._fakeout = _SpoofOut() - - #///////////////////////////////////////////////////////////////// - # Reporting methods - #///////////////////////////////////////////////////////////////// - - def report_start(self, out, test, example): - """ - Report that the test runner is about to process the given - example. (Only displays a message if verbose=True) - """ - if self._verbose: - if example.want: - out('Trying:\n' + _indent(example.source) + - 'Expecting:\n' + _indent(example.want)) - else: - out('Trying:\n' + _indent(example.source) + - 'Expecting nothing\n') - - def report_success(self, out, test, example, got): - """ - Report that the given example ran successfully. (Only - displays a message if verbose=True) - """ - if self._verbose: - out("ok\n") - - def report_failure(self, out, test, example, got): - """ - Report that the given example failed. - """ - out(self._failure_header(test, example) + - self._checker.output_difference(example, got, self.optionflags)) - - def report_unexpected_exception(self, out, test, example, exc_info): - """ - Report that the given example raised an unexpected exception. - """ - out(self._failure_header(test, example) + - 'Exception raised:\n' + _indent(_exception_traceback(exc_info))) - - def _failure_header(self, test, example): - out = [self.DIVIDER] - if test.filename: - if test.lineno is not None and example.lineno is not None: - lineno = test.lineno + example.lineno + 1 - else: - lineno = '?' - out.append('File "%s", line %s, in %s' % - (test.filename, lineno, test.name)) - else: - out.append('Line %s, in %s' % (example.lineno+1, test.name)) - out.append('Failed example:') - source = example.source - out.append(_indent(source)) - return '\n'.join(out) - - #///////////////////////////////////////////////////////////////// - # DocTest Running - #///////////////////////////////////////////////////////////////// - - def __run(self, test, compileflags, out): - """ - Run the examples in `test`. Write the outcome of each example - with one of the `DocTestRunner.report_*` methods, using the - writer function `out`. `compileflags` is the set of compiler - flags that should be used to execute examples. Return a tuple - `(f, t)`, where `t` is the number of examples tried, and `f` - is the number of examples that failed. The examples are run - in the namespace `test.globs`. - """ - # Keep track of the number of failures and tries. - failures = tries = 0 - - # Save the option flags (since option directives can be used - # to modify them). - original_optionflags = self.optionflags - - SUCCESS, FAILURE, BOOM = range(3) # `outcome` state - - check = self._checker.check_output - - # Process each example. - for examplenum, example in enumerate(test.examples): - - # If REPORT_ONLY_FIRST_FAILURE is set, then supress - # reporting after the first failure. - quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and - failures > 0) - - # Merge in the example's options. - self.optionflags = original_optionflags - if example.options: - for (optionflag, val) in example.options.items(): - if val: - self.optionflags |= optionflag - else: - self.optionflags &= ~optionflag - - # Record that we started this example. - tries += 1 - if not quiet: - self.report_start(out, test, example) - - # Use a special filename for compile(), so we can retrieve - # the source code during interactive debugging (see - # __patched_linecache_getlines). 
- filename = '' % (test.name, examplenum) - - # Run the example in the given context (globs), and record - # any exception that gets raised. (But don't intercept - # keyboard interrupts.) - try: - # Don't blink! This is where the user's code gets run. - exec(compile(example.source, filename, "single", - compileflags, 1), test.globs) - self.debugger.set_continue() # ==== Example Finished ==== - exception = None - except KeyboardInterrupt: - raise - except: - exception = sys.exc_info() - self.debugger.set_continue() # ==== Example Finished ==== - - got = self._fakeout.getvalue() # the actual output - self._fakeout.truncate(0) - outcome = FAILURE # guilty until proved innocent or insane - - # If the example executed without raising any exceptions, - # verify its output. - if exception is None: - if check(example.want, got, self.optionflags): - outcome = SUCCESS - - # The example raised an exception: check if it was expected. - else: - exc_info = sys.exc_info() - exc_msg = traceback.format_exception_only(*exc_info[:2])[-1] - if not quiet: - got += _exception_traceback(exc_info) - - # If `example.exc_msg` is None, then we weren't expecting - # an exception. - if example.exc_msg is None: - outcome = BOOM - - # We expected an exception: see whether it matches. - elif check(example.exc_msg, exc_msg, self.optionflags): - outcome = SUCCESS - - # Another chance if they didn't care about the detail. - elif self.optionflags & IGNORE_EXCEPTION_DETAIL: - m1 = re.match(r'[^:]*:', example.exc_msg) - m2 = re.match(r'[^:]*:', exc_msg) - if m1 and m2 and check(m1.group(0), m2.group(0), - self.optionflags): - outcome = SUCCESS - - # Report the outcome. - if outcome is SUCCESS: - if not quiet: - self.report_success(out, test, example, got) - elif outcome is FAILURE: - if not quiet: - self.report_failure(out, test, example, got) - failures += 1 - elif outcome is BOOM: - if not quiet: - self.report_unexpected_exception(out, test, example, - exc_info) - failures += 1 - else: - assert False, ("unknown outcome", outcome) - - # Restore the option flags (in case they were modified) - self.optionflags = original_optionflags - - # Record and return the number of failures and tries. - self.__record_outcome(test, failures, tries) - return failures, tries - - def __record_outcome(self, test, f, t): - """ - Record the fact that the given DocTest (`test`) generated `f` - failures out of `t` tried examples. - """ - f2, t2 = self._name2ft.get(test.name, (0,0)) - self._name2ft[test.name] = (f+f2, t+t2) - self.failures += f - self.tries += t - - __LINECACHE_FILENAME_RE = re.compile(r'[\w\.]+)' - r'\[(?P\d+)\]>$') - def __patched_linecache_getlines(self, filename, module_globals=None): - m = self.__LINECACHE_FILENAME_RE.match(filename) - if m and m.group('name') == self.test.name: - example = self.test.examples[int(m.group('examplenum'))] - return example.source.splitlines(True) - elif func_code(self.save_linecache_getlines).co_argcount > 1: - return self.save_linecache_getlines(filename, module_globals) - else: - return self.save_linecache_getlines(filename) - - def run(self, test, compileflags=None, out=None, clear_globs=True): - """ - Run the examples in `test`, and display the results using the - writer function `out`. - - The examples are run in the namespace `test.globs`. If - `clear_globs` is true (the default), then this namespace will - be cleared after the test runs, to help with garbage - collection. If you would like to examine the namespace after - the test completes, then use `clear_globs=False`. 
- - `compileflags` gives the set of flags that should be used by - the Python compiler when running the examples. If not - specified, then it will default to the set of future-import - flags that apply to `globs`. - - The output of each example is checked using - `DocTestRunner.check_output`, and the results are formatted by - the `DocTestRunner.report_*` methods. - """ - self.test = test - - if compileflags is None: - compileflags = _extract_future_flags(test.globs) - - save_stdout = sys.stdout - if out is None: - out = save_stdout.write - sys.stdout = self._fakeout - - # Patch pdb.set_trace to restore sys.stdout during interactive - # debugging (so it's not still redirected to self._fakeout). - # Note that the interactive output will go to *our* - # save_stdout, even if that's not the real sys.stdout; this - # allows us to write test cases for the set_trace behavior. - save_set_trace = pdb.set_trace - self.debugger = _OutputRedirectingPdb(save_stdout) - self.debugger.reset() - pdb.set_trace = self.debugger.set_trace - - # Patch linecache.getlines, so we can see the example's source - # when we're inside the debugger. - self.save_linecache_getlines = linecache.getlines - linecache.getlines = self.__patched_linecache_getlines - - try: - return self.__run(test, compileflags, out) - finally: - sys.stdout = save_stdout - pdb.set_trace = save_set_trace - linecache.getlines = self.save_linecache_getlines - if clear_globs: - test.globs.clear() - - #///////////////////////////////////////////////////////////////// - # Summarization - #///////////////////////////////////////////////////////////////// - def summarize(self, verbose=None): - """ - Print a summary of all the test cases that have been run by - this DocTestRunner, and return a tuple `(f, t)`, where `f` is - the total number of failed examples, and `t` is the total - number of tried examples. - - The optional `verbose` argument controls how detailed the - summary is. If the verbosity is not specified, then the - DocTestRunner's verbosity is used. - """ - if verbose is None: - verbose = self._verbose - notests = [] - passed = [] - failed = [] - totalt = totalf = 0 - for x in self._name2ft.items(): - name, (f, t) = x - assert f <= t - totalt += t - totalf += f - if t == 0: - notests.append(name) - elif f == 0: - passed.append( (name, t) ) - else: - failed.append(x) - if verbose: - if notests: - print(len(notests), "items had no tests:") - notests.sort() - for thing in notests: - print(" ", thing) - if passed: - print(len(passed), "items passed all tests:") - passed.sort() - for thing, count in passed: - print(" %3d tests in %s" % (count, thing)) - if failed: - print(self.DIVIDER) - print(len(failed), "items had failures:") - failed.sort() - for thing, (f, t) in failed: - print(" %3d of %3d in %s" % (f, t, thing)) - if verbose: - print(totalt, "tests in", len(self._name2ft), "items.") - print(totalt - totalf, "passed and", totalf, "failed.") - if totalf: - print("***Test Failed***", totalf, "failures.") - elif verbose: - print("Test passed.") - return totalf, totalt - - #///////////////////////////////////////////////////////////////// - # Backward compatibility cruft to maintain doctest.master. 
- #///////////////////////////////////////////////////////////////// - def merge(self, other): - d = self._name2ft - for name, (f, t) in other._name2ft.items(): - if name in d: - print("*** DocTestRunner.merge: '" + name + "' in both" \ - " testers; summing outcomes.") - f2, t2 = d[name] - f = f + f2 - t = t + t2 - d[name] = f, t - -class OutputChecker: - """ - A class used to check the whether the actual output from a doctest - example matches the expected output. `OutputChecker` defines two - methods: `check_output`, which compares a given pair of outputs, - and returns true if they match; and `output_difference`, which - returns a string describing the differences between two outputs. - """ - def check_output(self, want, got, optionflags): - """ - Return True iff the actual output from an example (`got`) - matches the expected output (`want`). These strings are - always considered to match if they are identical; but - depending on what option flags the test runner is using, - several non-exact match types are also possible. See the - documentation for `TestRunner` for more information about - option flags. - """ - # Handle the common case first, for efficiency: - # if they're string-identical, always return true. - if got == want: - return True - - # The values True and False replaced 1 and 0 as the return - # value for boolean comparisons in Python 2.3. - if not (optionflags & DONT_ACCEPT_TRUE_FOR_1): - if (got,want) == ("True\n", "1\n"): - return True - if (got,want) == ("False\n", "0\n"): - return True - - # can be used as a special sequence to signify a - # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - # Replace in want with a blank line. - want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER), - '', want) - # If a line in got contains only spaces, then remove the - # spaces. - got = re.sub('(?m)^\s*?$', '', got) - if got == want: - return True - - # This flag causes doctest to ignore any differences in the - # contents of whitespace strings. Note that this can be used - # in conjunction with the ELLIPSIS flag. - if optionflags & NORMALIZE_WHITESPACE: - got = ' '.join(got.split()) - want = ' '.join(want.split()) - if got == want: - return True - - # The ELLIPSIS flag says to let the sequence "..." in `want` - # match any substring in `got`. - if optionflags & ELLIPSIS: - if _ellipsis_match(want, got): - return True - - # We didn't find any match; return false. - return False - - # Should we do a fancy diff? - def _do_a_fancy_diff(self, want, got, optionflags): - # Not unless they asked for a fancy diff. - if not optionflags & (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF): - return False - - # If expected output uses ellipsis, a meaningful fancy diff is - # too hard ... or maybe not. In two real-life failures Tim saw, - # a diff was a major help anyway, so this is commented out. - # [todo] _ellipsis_match() knows which pieces do and don't match, - # and could be the basis for a kick-ass diff in this case. - ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want: - ## return False - - # ndiff does intraline difference marking, so can be useful even - # for 1-line differences. - if optionflags & REPORT_NDIFF: - return True - - # The other diff types need at least a few lines to be helpful. 
- return want.count('\n') > 2 and got.count('\n') > 2 - - def output_difference(self, example, got, optionflags): - """ - Return a string describing the differences between the - expected output for a given example (`example`) and the actual - output (`got`). `optionflags` is the set of option flags used - to compare `want` and `got`. - """ - want = example.want - # If s are being used, then replace blank lines - # with in the actual output string. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got) - - # Check if we should use diff. - if self._do_a_fancy_diff(want, got, optionflags): - # Split want & got into lines. - want_lines = want.splitlines(True) # True == keep line ends - got_lines = got.splitlines(True) - # Use difflib to find their differences. - if optionflags & REPORT_UDIFF: - diff = difflib.unified_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'unified diff with -expected +actual' - elif optionflags & REPORT_CDIFF: - diff = difflib.context_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'context diff with expected followed by actual' - elif optionflags & REPORT_NDIFF: - engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK) - diff = list(engine.compare(want_lines, got_lines)) - kind = 'ndiff with -expected +actual' - else: - assert 0, 'Bad diff option' - # Remove trailing whitespace on diff output. - diff = [line.rstrip() + '\n' for line in diff] - return 'Differences (%s):\n' % kind + _indent(''.join(diff)) - - # If we're not using diff, then simply list the expected - # output followed by the actual output. - if want and got: - return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got)) - elif want: - return 'Expected:\n%sGot nothing\n' % _indent(want) - elif got: - return 'Expected nothing\nGot:\n%s' % _indent(got) - else: - return 'Expected nothing\nGot nothing\n' - -class DocTestFailure(Exception): - """A DocTest example has failed in debugging mode. - - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - got: the actual output - """ - def __init__(self, test, example, got): - self.test = test - self.example = example - self.got = got - - def __str__(self): - return str(self.test) - -class UnexpectedException(Exception): - """A DocTest example has encountered an unexpected exception - - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - exc_info: the exception info - """ - def __init__(self, test, example, exc_info): - self.test = test - self.example = example - self.exc_info = exc_info - - def __str__(self): - return str(self.test) - -class DebugRunner(DocTestRunner): - r"""Run doc tests but raise an exception as soon as there is a failure. - - If an unexpected exception occurs, an UnexpectedException is raised. - It contains the test, the example, and the original exception: - - >>> runner = DebugRunner(verbose=False) - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except UnexpectedException, failure: - ... pass - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... 
- KeyError - - We wrap the original exception to give the calling application - access to the test and example information. - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> try: - ... runner.run(test) - ... except DocTestFailure, failure: - ... pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - If a failure or error occurs, the globals are left intact: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 1} - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... >>> raise KeyError - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - Traceback (most recent call last): - ... - UnexpectedException: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 2} - - But the globals are cleared if there is no error: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - (0, 1) - - >>> test.globs - {} - - """ - - def run(self, test, compileflags=None, out=None, clear_globs=True): - r = DocTestRunner.run(self, test, compileflags, out, False) - if clear_globs: - test.globs.clear() - return r - - def report_unexpected_exception(self, out, test, example, exc_info): - raise UnexpectedException(test, example, exc_info) - - def report_failure(self, out, test, example, got): - raise DocTestFailure(test, example, got) - -###################################################################### -## 6. Test Functions -###################################################################### -# These should be backwards compatible. - -# For backward compatibility, a global instance of a DocTestRunner -# class, updated by testmod. -master = None - -def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, - raise_on_error=False, exclude_empty=False): - """m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, raise_on_error=False, - exclude_empty=False - - Test examples in docstrings in functions and classes reachable - from module m (or the current module if m is not supplied), starting - with m.__doc__. Unless isprivate is specified, private names - are not skipped. - - Also test examples reachable from dict m.__test__ if it exists and is - not None. m.__test__ maps names to functions, classes and strings; - function and class docstrings are tested even if the name is private; - strings are tested directly, as if they were docstrings. - - Return (#failures, #tests). - - See doctest.__doc__ for an overview. - - Optional keyword arg "name" gives the name of the module; by default - use m.__name__. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use m.__dict__. A copy of this - dict is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. This is new in 2.4. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. 
- - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. This is new in 2.3. Possible values (see the - docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Deprecated in Python 2.4: - Optional keyword arg "isprivate" specifies a function used to - determine whether a name is private. The default function is - treat all functions as public. Optionally, "isprivate" can be - set to doctest.is_private to skip over functions marked as private - using the underscore naming convention; see its docs for details. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - global master - - if isprivate is not None: - warnings.warn("the isprivate argument is deprecated; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning) - - # If no module was given, then use __main__. - if m is None: - # DWA - m will still be None if this wasn't invoked from the command - # line, in which case the following TypeError is about as good an error - # as we should expect - m = sys.modules.get('__main__') - - # Check that we were actually given a module. - if not inspect.ismodule(m): - raise TypeError("testmod: module required; %r" % (m,)) - - # If no name was given, then use the module's name. - if name is None: - name = m.__name__ - - # Find, parse, and run all tests in the given module. - finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - for test in finder.find(m, name, globs=globs, extraglobs=extraglobs): - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def testfile(filename, module_relative=True, name=None, package=None, - globs=None, verbose=None, report=True, optionflags=0, - extraglobs=None, raise_on_error=False, parser=DocTestParser()): - """ - Test examples in the given file. Return (#failures, #tests). - - Optional keyword arg "module_relative" specifies how filenames - should be interpreted: - - - If "module_relative" is True (the default), then "filename" - specifies a module-relative path. By default, this path is - relative to the calling module's directory; but if the - "package" argument is specified, then it is relative to that - package. To ensure os-independence, "filename" should use - "/" characters to separate path segments, and should not - be an absolute path (i.e., it may not begin with "/"). 
- - - If "module_relative" is False, then "filename" specifies an - os-specific path. The path may be absolute or relative (to - the current working directory). - - Optional keyword arg "name" gives the name of the test; by default - use the file's basename. - - Optional keyword argument "package" is a Python package or the - name of a Python package whose directory should be used as the - base directory for a module relative filename. If no package is - specified, then the calling module's directory is used as the base - directory for module relative filenames. It is an error to - specify "package" if "module_relative" is False. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use {}. A copy of this dict - is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. Possible values (see the docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Optional keyword arg "parser" specifies a DocTestParser (or - subclass) that should be used to extract tests from the files. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - global master - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path - if module_relative: - package = _normalize_module(package) - filename = _module_relative_path(package, filename) - - # If no name was given, then use the file's name. - if name is None: - name = os.path.basename(filename) - - # Assemble the globals. - if globs is None: - globs = {} - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - # Read the file, convert it to a test, and run it. 
- f = open(filename) - s = f.read() - f.close() - test = parser.get_doctest(s, globs, name, filename, 0) - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def run_docstring_examples(f, globs, verbose=False, name="NoName", - compileflags=None, optionflags=0): - """ - Test examples in the given object's docstring (`f`), using `globs` - as globals. Optional argument `name` is used in failure messages. - If the optional argument `verbose` is true, then generate output - even if there are no failures. - - `compileflags` gives the set of flags that should be used by the - Python compiler when running the examples. If not specified, then - it will default to the set of future-import flags that apply to - `globs`. - - Optional keyword arg `optionflags` specifies options for the - testing and output. See the documentation for `testmod` for more - information. - """ - # Find, parse, and run all tests in the given module. - finder = DocTestFinder(verbose=verbose, recurse=False) - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - for test in finder.find(f, name, globs=globs): - runner.run(test, compileflags=compileflags) - -###################################################################### -## 7. Tester -###################################################################### -# This is provided only for backwards compatibility. It's not -# actually used in any way. - -class Tester: - def __init__(self, mod=None, globs=None, verbose=None, - isprivate=None, optionflags=0): - - warnings.warn("class Tester is deprecated; " - "use class doctest.DocTestRunner instead", - DeprecationWarning, stacklevel=2) - if mod is None and globs is None: - raise TypeError("Tester.__init__: must specify mod or globs") - if mod is not None and not inspect.ismodule(mod): - raise TypeError("Tester.__init__: mod must be a module; %r" % - (mod,)) - if globs is None: - globs = mod.__dict__ - self.globs = globs - - self.verbose = verbose - self.isprivate = isprivate - self.optionflags = optionflags - self.testfinder = DocTestFinder(_namefilter=isprivate) - self.testrunner = DocTestRunner(verbose=verbose, - optionflags=optionflags) - - def runstring(self, s, name): - test = DocTestParser().get_doctest(s, self.globs, name, None, None) - if self.verbose: - print("Running string", name) - (f,t) = self.testrunner.run(test) - if self.verbose: - print(f, "of", t, "examples failed in string", name) - return (f,t) - - def rundoc(self, object, name=None, module=None): - f = t = 0 - tests = self.testfinder.find(object, name, module=module, - globs=self.globs) - for test in tests: - (f2, t2) = self.testrunner.run(test) - (f,t) = (f+f2, t+t2) - return (f,t) - - def rundict(self, d, name, module=None): - import types - m = types.ModuleType(name) - m.__dict__.update(d) - if module is None: - module = False - return self.rundoc(m, name, module) - - def run__test__(self, d, name): - import types - m = types.ModuleType(name) - m.__test__ = d - return self.rundoc(m, name) - - def summarize(self, verbose=None): - return self.testrunner.summarize(verbose) - - def merge(self, other): - self.testrunner.merge(other.testrunner) - -###################################################################### -## 8. Unittest Support -###################################################################### - -_unittest_reportflags = 0 - -def set_unittest_reportflags(flags): - """Sets the unittest option flags. 
- - The old flag is returned so that a runner could restore the old - value if it wished to: - - >>> old = _unittest_reportflags - >>> set_unittest_reportflags(REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) == old - True - - >>> import doctest - >>> doctest._unittest_reportflags == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - - Only reporting flags can be set: - - >>> set_unittest_reportflags(ELLIPSIS) - Traceback (most recent call last): - ... - ValueError: ('Only reporting flags allowed', 8) - - >>> set_unittest_reportflags(old) == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - """ - global _unittest_reportflags - - if (flags & REPORTING_FLAGS) != flags: - raise ValueError("Only reporting flags allowed", flags) - old = _unittest_reportflags - _unittest_reportflags = flags - return old - - -class DocTestCase(unittest.TestCase): - - def __init__(self, test, optionflags=0, setUp=None, tearDown=None, - checker=None): - - unittest.TestCase.__init__(self) - self._dt_optionflags = optionflags - self._dt_checker = checker - self._dt_test = test - self._dt_setUp = setUp - self._dt_tearDown = tearDown - - def setUp(self): - test = self._dt_test - - if self._dt_setUp is not None: - self._dt_setUp(test) - - def tearDown(self): - test = self._dt_test - - if self._dt_tearDown is not None: - self._dt_tearDown(test) - - test.globs.clear() - - def runTest(self): - test = self._dt_test - old = sys.stdout - new = StringIO() - optionflags = self._dt_optionflags - - if not (optionflags & REPORTING_FLAGS): - # The option flags don't include any reporting flags, - # so add the default reporting flags - optionflags |= _unittest_reportflags - - runner = DocTestRunner(optionflags=optionflags, - checker=self._dt_checker, verbose=False) - - try: - runner.DIVIDER = "-"*70 - failures, tries = runner.run( - test, out=new.write, clear_globs=False) - finally: - sys.stdout = old - - if failures: - raise self.failureException(self.format_failure(new.getvalue())) - - def format_failure(self, err): - test = self._dt_test - if test.lineno is None: - lineno = 'unknown line number' - else: - lineno = '%s' % test.lineno - lname = '.'.join(test.name.split('.')[-1:]) - return ('Failed doctest test for %s\n' - ' File "%s", line %s, in %s\n\n%s' - % (test.name, test.filename, lineno, lname, err) - ) - - def debug(self): - r"""Run the test case without results and without catching exceptions - - The unit test framework includes a debug method on test cases - and test suites to support post-mortem debugging. The test code - is run in such a way that errors are not caught. This way a - caller can catch the errors and initiate post-mortem debugging. - - The DocTestCase provides a debug method that raises - UnexpectedException errors if there is an unexepcted - exception: - - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - >>> try: - ... case.debug() - ... except UnexpectedException, failure: - ... pass - - The UnexpectedException contains the test, the example, and - the original exception: - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... - KeyError - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - - >>> try: - ... 
case.debug() - ... except DocTestFailure, failure: - ... pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - """ - - self.setUp() - runner = DebugRunner(optionflags=self._dt_optionflags, - checker=self._dt_checker, verbose=False) - runner.run(self._dt_test) - self.tearDown() - - def id(self): - return self._dt_test.name - - def __repr__(self): - name = self._dt_test.name.split('.') - return "%s (%s)" % (name[-1], '.'.join(name[:-1])) - - __str__ = __repr__ - - def shortDescription(self): - return "Doctest: " + self._dt_test.name - -def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, - **options): - """ - Convert doctest tests for a module to a unittest test suite. - - This converts each documentation string in a module that - contains doctest tests to a unittest test case. If any of the - tests in a doc string fail, then the test case fails. An exception - is raised showing the name of the file containing the test and a - (sometimes approximate) line number. - - The `module` argument provides the module to be tested. The argument - can be either a module or a module name. - - If no argument is given, the calling module is used. - - A number of options may be provided as keyword arguments: - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - """ - - if test_finder is None: - test_finder = DocTestFinder() - - module = _normalize_module(module) - tests = test_finder.find(module, globs=globs, extraglobs=extraglobs) - if globs is None: - globs = module.__dict__ - if not tests: - # Why do we want to do this? Because it reveals a bug that might - # otherwise be hidden. - raise ValueError(module, "has no tests") - - tests.sort() - suite = unittest.TestSuite() - for test in tests: - if len(test.examples) == 0: - continue - if not test.filename: - filename = module.__file__ - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - test.filename = filename - suite.addTest(DocTestCase(test, **options)) - - return suite - -class DocFileCase(DocTestCase): - - def id(self): - return '_'.join(self._dt_test.name.split('.')) - - def __repr__(self): - return self._dt_test.filename - __str__ = __repr__ - - def format_failure(self, err): - return ('Failed doctest test for %s\n File "%s", line 0\n\n%s' - % (self._dt_test.name, self._dt_test.filename, err) - ) - -def DocFileTest(path, module_relative=True, package=None, - globs=None, parser=DocTestParser(), **options): - if globs is None: - globs = {} - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path. - if module_relative: - package = _normalize_module(package) - path = _module_relative_path(package, path) - - # Find the file and read it. 
- name = os.path.basename(path) - f = open(path) - doc = f.read() - f.close() - - # Convert it to a test, and wrap it in a DocFileCase. - test = parser.get_doctest(doc, globs, name, path, 0) - return DocFileCase(test, **options) - -def DocFileSuite(*paths, **kw): - """A unittest suite for one or more doctest files. - - The path to each doctest file is given as a string; the - interpretation of that string depends on the keyword argument - "module_relative". - - A number of options may be provided as keyword arguments: - - module_relative - If "module_relative" is True, then the given file paths are - interpreted as os-independent module-relative paths. By - default, these paths are relative to the calling module's - directory; but if the "package" argument is specified, then - they are relative to that package. To ensure os-independence, - "filename" should use "/" characters to separate path - segments, and may not be an absolute path (i.e., it may not - begin with "/"). - - If "module_relative" is False, then the given file paths are - interpreted as os-specific paths. These paths may be absolute - or relative (to the current working directory). - - package - A Python package or the name of a Python package whose directory - should be used as the base directory for module relative paths. - If "package" is not specified, then the calling module's - directory is used as the base directory for module relative - filenames. It is an error to specify "package" if - "module_relative" is False. - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - - parser - A DocTestParser (or subclass) that should be used to extract - tests from the files. - """ - suite = unittest.TestSuite() - - # We do this here so that _normalize_module is called at the right - # level. If it were called in DocFileTest, then this function - # would be the caller and we might guess the package incorrectly. - if kw.get('module_relative', True): - kw['package'] = _normalize_module(kw.get('package')) - - for path in paths: - suite.addTest(DocFileTest(path, **kw)) - - return suite - -###################################################################### -## 9. Debugging Support -###################################################################### - -def script_from_examples(s): - r"""Extract script from text with examples. - - Converts text with examples to a Python script. Example input is - converted to regular code. Example output and all other words - are converted to comments: - - >>> text = ''' - ... Here are examples of simple math. - ... - ... Python has super accurate integer addition - ... - ... >>> 2 + 2 - ... 5 - ... - ... And very friendly error messages: - ... - ... >>> 1/0 - ... To Infinity - ... And - ... Beyond - ... - ... You can use logic if you want: - ... - ... >>> if 0: - ... ... blah - ... ... blah - ... ... - ... - ... Ho hum - ... ''' - - >>> print script_from_examples(text) - # Here are examples of simple math. 
- # - # Python has super accurate integer addition - # - 2 + 2 - # Expected: - ## 5 - # - # And very friendly error messages: - # - 1/0 - # Expected: - ## To Infinity - ## And - ## Beyond - # - # You can use logic if you want: - # - if 0: - blah - blah - # - # Ho hum - """ - output = [] - for piece in DocTestParser().parse(s): - if isinstance(piece, Example): - # Add the example's source code (strip trailing NL) - output.append(piece.source[:-1]) - # Add the expected output: - want = piece.want - if want: - output.append('# Expected:') - output += ['## '+l for l in want.split('\n')[:-1]] - else: - # Add non-example text. - output += [_comment_line(l) - for l in piece.split('\n')[:-1]] - - # Trim junk on both ends. - while output and output[-1] == '#': - output.pop() - while output and output[0] == '#': - output.pop(0) - # Combine the output, and return it. - return '\n'.join(output) - -def testsource(module, name): - """Extract the test sources from a doctest docstring as a script. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the doc string with tests to be debugged. - """ - module = _normalize_module(module) - tests = DocTestFinder().find(module) - test = [t for t in tests if t.name == name] - if not test: - raise ValueError(name, "not found in tests") - test = test[0] - testsrc = script_from_examples(test.docstring) - return testsrc - -def debug_src(src, pm=False, globs=None): - """Debug a single doctest docstring, in argument `src`'""" - testsrc = script_from_examples(src) - debug_script(testsrc, pm, globs) - -def debug_script(src, pm=False, globs=None): - "Debug a test script. `src` is the script, as a string." - import pdb - - # Note that tempfile.NameTemporaryFile() cannot be used. As the - # docs say, a file so created cannot be opened by name a second time - # on modern Windows boxes, and execfile() needs to open it. - srcfilename = tempfile.mktemp(".py", "doctestdebug") - f = open(srcfilename, 'w') - f.write(src) - f.close() - - try: - if globs: - globs = globs.copy() - else: - globs = {} - - if pm: - try: - execfile(srcfilename, globs, globs) - except: - print(sys.exc_info()[1]) - pdb.post_mortem(sys.exc_info()[2]) - else: - # Note that %r is vital here. '%s' instead can, e.g., cause - # backslashes to get treated as metacharacters on Windows. - pdb.run("execfile(%r)" % srcfilename, globs, globs) - - finally: - os.remove(srcfilename) - -def debug(module, name, pm=False): - """Debug a single doctest docstring. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the docstring with tests to be debugged. - """ - module = _normalize_module(module) - testsrc = testsource(module, name) - debug_script(testsrc, pm, module.__dict__) - -###################################################################### -## 10. Example Usage -###################################################################### -class _TestClass: - """ - A pointless class, for sanity-checking of docstring testing. - - Methods: - square() - get() - - >>> _TestClass(13).get() + _TestClass(-12).get() - 1 - >>> hex(_TestClass(13).square().get()) - '0xa9' - """ - - def __init__(self, val): - """val -> _TestClass object with associated value val. 
- - >>> t = _TestClass(123) - >>> print t.get() - 123 - """ - - self.val = val - - def square(self): - """square() -> square TestClass's associated value - - >>> _TestClass(13).square().get() - 169 - """ - - self.val = self.val ** 2 - return self - - def get(self): - """get() -> return TestClass's associated value. - - >>> x = _TestClass(-42) - >>> print x.get() - -42 - """ - - return self.val - -__test__ = {"_TestClass": _TestClass, - "string": r""" - Example of a string object, searched as-is. - >>> x = 1; y = 2 - >>> x + y, x * y - (3, 2) - """, - - "bool-int equivalence": r""" - In 2.2, boolean expressions displayed - 0 or 1. By default, we still accept - them. This can be disabled by passing - DONT_ACCEPT_TRUE_FOR_1 to the new - optionflags argument. - >>> 4 == 4 - 1 - >>> 4 == 4 - True - >>> 4 > 4 - 0 - >>> 4 > 4 - False - """, - - "blank lines": r""" - Blank lines can be marked with : - >>> print 'foo\n\nbar\n' - foo - - bar - - """, - - "ellipsis": r""" - If the ellipsis flag is used, then '...' can be used to - elide substrings in the desired output: - >>> print range(1000) #doctest: +ELLIPSIS - [0, 1, 2, ..., 999] - """, - - "whitespace normalization": r""" - If the whitespace normalization flag is used, then - differences in whitespace are ignored. - >>> print range(30) #doctest: +NORMALIZE_WHITESPACE - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29] - """, - } - -def _test(): - r = unittest.TextTestRunner() - r.run(DocTestSuite()) - -if __name__ == "__main__": - _test() - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/environment.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/environment.py deleted file mode 100644 index 476d280..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/environment.py +++ /dev/null @@ -1,165 +0,0 @@ -import os -import zipfile -import sys -import tempfile -import unittest -import shutil -import stat -import unicodedata - -from subprocess import Popen as _Popen, PIPE as _PIPE - - -def _extract(self, member, path=None, pwd=None): - """for zipfile py2.5 borrowed from cpython""" - if not isinstance(member, zipfile.ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return _extract_member(self, member, path, pwd) - - -def _extract_from_zip(self, name, dest_path): - dest_file = open(dest_path, 'wb') - try: - dest_file.write(self.read(name)) - finally: - dest_file.close() - - -def _extract_member(self, member, targetpath, pwd): - """for zipfile py2.5 borrowed from cpython""" - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. 
- upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - _extract_from_zip(self, member.filename, targetpath) - - return targetpath - - -def _remove_dir(target): - - #on windows this seems to a problem - for dir_path, dirs, files in os.walk(target): - os.chmod(dir_path, stat.S_IWRITE) - for filename in files: - os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE) - shutil.rmtree(target) - - -class ZippedEnvironment(unittest.TestCase): - - datafile = None - dataname = None - old_cwd = None - - def setUp(self): - if self.datafile is None or self.dataname is None: - return - - if not os.path.isfile(self.datafile): - self.old_cwd = None - return - - self.old_cwd = os.getcwd() - - self.temp_dir = tempfile.mkdtemp() - zip_file, source, target = [None, None, None] - try: - zip_file = zipfile.ZipFile(self.datafile) - for files in zip_file.namelist(): - _extract(zip_file, files, self.temp_dir) - finally: - if zip_file: - zip_file.close() - del zip_file - - os.chdir(os.path.join(self.temp_dir, self.dataname)) - - def tearDown(self): - #Assume setUp was never completed - if self.dataname is None or self.datafile is None: - return - - try: - if self.old_cwd: - os.chdir(self.old_cwd) - _remove_dir(self.temp_dir) - except OSError: - #sigh? - pass - - -def _which_dirs(cmd): - result = set() - for path in os.environ.get('PATH', '').split(os.pathsep): - filename = os.path.join(path, cmd) - if os.access(filename, os.X_OK): - result.add(path) - return result - - -def run_setup_py(cmd, pypath=None, path=None, - data_stream=0, env=None): - """ - Execution command for tests, separate from those used by the - code directly to prevent accidental behavior issues - """ - if env is None: - env = dict() - for envname in os.environ: - env[envname] = os.environ[envname] - - #override the python path if needed - if pypath is not None: - env["PYTHONPATH"] = pypath - - #overide the execution path if needed - if path is not None: - env["PATH"] = path - if not env.get("PATH", ""): - env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip")) - env["PATH"] = os.pathsep.join(env["PATH"]) - - cmd = [sys.executable, "setup.py"] + list(cmd) - - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE, - shell=(sys.platform == 'win32'), env=env) - - data = proc.communicate()[data_stream] - except OSError: - return 1, '' - - #decode the console string if needed - if hasattr(data, "decode"): - data = data.decode() # should use the preffered encoding - data = unicodedata.normalize('NFC', data) - - #communciate calls wait() - return proc.returncode, data diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/py26compat.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/py26compat.py deleted file mode 100644 index d4fb891..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/py26compat.py +++ /dev/null @@ -1,14 +0,0 @@ -import unittest - -try: - # provide skipIf for Python 2.4-2.6 - skipIf = unittest.skipIf -except AttributeError: - def skipIf(condition, reason): - def skipper(func): - def skip(*args, **kwargs): - return - if condition: - return skip - return func - return skipper diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py deleted file mode 100644 
index 22dee0d..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/script-with-bom.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -result = 'passed' diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/server.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/server.py deleted file mode 100644 index ae2381e..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/server.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Basic http server for tests to simulate PyPI or custom indexes -""" -import sys -import time -import threading -from setuptools.compat import BaseHTTPRequestHandler -from setuptools.compat import (urllib2, URLError, HTTPServer, - SimpleHTTPRequestHandler) - -class IndexServer(HTTPServer): - """Basic single-threaded http server simulating a package index - - You can use this server in unittest like this:: - s = IndexServer() - s.start() - index_url = s.base_url() + 'mytestindex' - # do some test requests to the index - # The index files should be located in setuptools/tests/indexes - s.stop() - """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=SimpleHTTPRequestHandler): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - self._run = True - - def serve(self): - while self._run: - self.handle_request() - - def start(self): - self.thread = threading.Thread(target=self.serve) - self.thread.start() - - def stop(self): - "Stop the server" - - # Let the server finish the last request and wait for a new one. - time.sleep(0.1) - - # self.shutdown is not supported on python < 2.6, so just - # set _run to false, and make a request, causing it to - # terminate. - self._run = False - url = 'http://127.0.0.1:%(server_port)s/' % vars(self) - try: - if sys.version_info >= (2, 6): - urllib2.urlopen(url, timeout=5) - else: - urllib2.urlopen(url) - except URLError: - # ignore any errors; all that's important is the request - pass - self.thread.join() - self.socket.close() - - def base_url(self): - port = self.server_port - return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port - -class RequestRecorder(BaseHTTPRequestHandler): - def do_GET(self): - requests = vars(self.server).setdefault('requests', []) - requests.append(self) - self.send_response(200, 'OK') - -class MockServer(HTTPServer, threading.Thread): - """ - A simple HTTP Server that records the requests made to it. 
- """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=RequestRecorder): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - threading.Thread.__init__(self) - self.setDaemon(True) - self.requests = [] - - def run(self): - self.serve_forever() - - def url(self): - return 'http://localhost:%(server_port)s/' % vars(self) - url = property(url) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py deleted file mode 100644 index 1a12218..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_bdist_egg.py +++ /dev/null @@ -1,69 +0,0 @@ -"""develop tests -""" -import sys -import os, re, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.bdist_egg import bdist_egg -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', py_modules=['hi']) -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - f = open('setup.py', 'w') - f.write(SETUP_PY) - f.close() - f = open('hi.py', 'w') - f.write('1\n') - f.close() - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_bdist_egg(self): - dist = Distribution(dict( - script_name='setup.py', - script_args=['bdist_egg'], - name='foo', - py_modules=['hi'] - )) - os.makedirs(os.path.join('build', 'src')) - old_stdout = sys.stdout - sys.stdout = o = StringIO() - try: - dist.parse_command_line() - dist.run_commands() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - [content] = os.listdir('dist') - self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) - -def test_suite(): - return unittest.makeSuite(TestDevelopTest) - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py deleted file mode 100644 index a520ced..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_build_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -"""build_ext tests -""" -import os, shutil, tempfile, unittest -from distutils.command.build_ext import build_ext as distutils_build_ext -from setuptools.command.build_ext import build_ext -from setuptools.dist import Distribution - -class TestBuildExtTest(unittest.TestCase): - - def test_get_ext_filename(self): - # setuptools needs to give back the same - # result than distutils, even if the fullname - # is not in ext_map - dist = Distribution() - cmd = build_ext(dist) - cmd.ext_map['foo/bar'] = '' - res = cmd.get_ext_filename('foo') - wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') - assert res == wanted - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_develop.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_develop.py deleted file mode 100644 index 7b90161..0000000 --- 
a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_develop.py +++ /dev/null @@ -1,122 +0,0 @@ -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.command.develop import develop -from setuptools.command import easy_install as easy_install_pkg -from setuptools.compat import StringIO -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['foo'], - use_2to3=True, -) -""" - -INIT_PY = """print "foo" -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'foo')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # foo/__init__.py - init = os.path.join(self.dir, 'foo', '__init__.py') - f = open(init, 'w') - f.write(INIT_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_develop(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - dist = Distribution( - dict(name='foo', - packages=['foo'], - use_2to3=True, - version='0.0', - )) - dist.script_name = 'setup.py' - cmd = develop(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - #sys.stdout = StringIO() - try: - cmd.run() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - content = os.listdir(site.USER_SITE) - content.sort() - self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) - - # Check that we are using the right code. - egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') - try: - path = egg_link_file.read().split()[0].strip() - finally: - egg_link_file.close() - init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') - try: - init = init_file.read().strip() - finally: - init_file.close() - if sys.version < "3": - self.assertEqual(init, 'print "foo"') - else: - self.assertEqual(init, 'print("foo")') - - def notest_develop_with_setup_requires(self): - - wanted = ("Could not find suitable distribution for " - "Requirement.parse('I-DONT-EXIST')") - old_dir = os.getcwd() - os.chdir(self.dir) - try: - try: - dist = Distribution({'setup_requires': ['I_DONT_EXIST']}) - except DistutilsError: - e = sys.exc_info()[1] - error = str(e) - if error == wanted: - pass - finally: - os.chdir(old_dir) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py deleted file mode 100644 index a8adb68..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_dist_info.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Test .dist-info style distributions. 
-""" -import os -import shutil -import tempfile -import unittest -import textwrap - -try: - import ast -except: - pass - -import pkg_resources - -from setuptools.tests.py26compat import skipIf - -def DALS(s): - "dedent and left-strip" - return textwrap.dedent(s).lstrip() - -class TestDistInfo(unittest.TestCase): - - def test_distinfo(self): - dists = {} - for d in pkg_resources.find_distributions(self.tmpdir): - dists[d.project_name] = d - - assert len(dists) == 2, dists - - unversioned = dists['UnversionedDistribution'] - versioned = dists['VersionedDistribution'] - - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA - - @skipIf('ast' not in globals(), - "ast is used to test conditional dependencies (Python >= 2.6)") - def test_conditional_dependencies(self): - requires = [pkg_resources.Requirement.parse('splort==4'), - pkg_resources.Requirement.parse('quux>=1.1')] - - for d in pkg_resources.find_distributions(self.tmpdir): - self.assertEqual(d.requires(), requires[:1]) - self.assertEqual(d.requires(extras=('baz',)), requires) - self.assertEqual(d.extras, ['baz']) - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - versioned = os.path.join(self.tmpdir, - 'VersionedDistribution-2.718.dist-info') - os.mkdir(versioned) - metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: VersionedDistribution - Requires-Dist: splort (4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() - unversioned = os.path.join(self.tmpdir, - 'UnversionedDistribution.dist-info') - os.mkdir(unversioned) - metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: UnversionedDistribution - Version: 0.3 - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() - - def tearDown(self): - shutil.rmtree(self.tmpdir) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py deleted file mode 100644 index a90ae23..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_easy_install.py +++ /dev/null @@ -1,394 +0,0 @@ -"""Easy install Tests -""" -import sys -import os -import shutil -import tempfile -import unittest -import site -import contextlib -import textwrap -import tarfile -import logging -import distutils.core - -from setuptools.compat import StringIO, BytesIO, next, urlparse -from setuptools.sandbox import run_setup, SandboxViolation -from setuptools.command.easy_install import ( - easy_install, fix_jython_executable, get_script_args, nt_quote_arg) -from setuptools.command.easy_install import PthDistributions -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution -from pkg_resources import Distribution as PRDistribution -import setuptools.tests.server - -class FakeDist(object): - def get_entry_map(self, group): - if group != 'console_scripts': - return {} - return {'name': 'ep'} - - def as_requirement(self): - return 'spec' - -WANTED = """\ -#!%s -# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' -__requires__ = 'spec' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('spec', 'console_scripts', 'name')() - ) -""" % 
nt_quote_arg(fix_jython_executable(sys.executable, "")) - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestEasyInstallTest(unittest.TestCase): - - def test_install_site_py(self): - dist = Distribution() - cmd = easy_install(dist) - cmd.sitepy_installed = False - cmd.install_dir = tempfile.mkdtemp() - try: - cmd.install_site_py() - sitepy = os.path.join(cmd.install_dir, 'site.py') - self.assertTrue(os.path.exists(sitepy)) - finally: - shutil.rmtree(cmd.install_dir) - - def test_get_script_args(self): - dist = FakeDist() - - old_platform = sys.platform - try: - name, script = [i for i in next(get_script_args(dist))][0:2] - finally: - sys.platform = old_platform - - self.assertEqual(script, WANTED) - - def test_no_find_links(self): - # new option '--no-find-links', that blocks find-links added at - # the project level - dist = Distribution() - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.no_find_links = True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - self.assertEqual(cmd.package_index.scanned_urls, {}) - - # let's try without it (default behavior) - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - keys = sorted(cmd.package_index.scanned_urls.keys()) - self.assertEqual(keys, ['link1', 'link2']) - - -class TestPTHFileWriter(unittest.TestCase): - def test_add_from_cwd_site_sets_dirty(self): - '''a pth file manager should set dirty - if a distribution is in site but also the cwd - ''' - pth = PthDistributions('does-not_exist', [os.getcwd()]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(os.getcwd())) - self.assertTrue(pth.dirty) - - def test_add_from_site_is_ignored(self): - if os.name != 'nt': - location = '/test/location/does-not-have-to-exist' - else: - location = 'c:\\does_not_exist' - pth = PthDistributions('does-not_exist', [location, ]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(location)) - self.assertTrue(not pth.dirty) - - -class TestUserInstallTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.old_enable_site = site.ENABLE_USER_SITE - self.old_file = easy_install_pkg.__file__ - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - easy_install_pkg.__file__ = site.USER_SITE - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - site.ENABLE_USER_SITE = self.old_enable_site - easy_install_pkg.__file__ = self.old_file - - def test_user_install_implied(self): - site.ENABLE_USER_SITE = True # disabled sometimes - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.ensure_finalized() - self.assertTrue(cmd.user, 'user should be implied') - - def test_multiproc_atexit(self): - try: - __import__('multiprocessing') - except ImportError: - # skip the test if multiprocessing is not available - return - - log = 
logging.getLogger('test_easy_install') - logging.basicConfig(level=logging.INFO, stream=sys.stderr) - log.info('this should not break') - - def test_user_install_not_implied_without_usersite_enabled(self): - site.ENABLE_USER_SITE = False # usually enabled - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.initialize_options() - self.assertFalse(cmd.user, 'NOT user should be implied') - - def test_local_index(self): - # make sure the local index is used - # when easy_install looks for installed - # packages - new_location = tempfile.mkdtemp() - target = tempfile.mkdtemp() - egg_file = os.path.join(new_location, 'foo-1.0.egg-info') - f = open(egg_file, 'w') - try: - f.write('Name: foo\n') - finally: - f.close() - - sys.path.append(target) - old_ppath = os.environ.get('PYTHONPATH') - os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path) - try: - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.install_dir = target - cmd.args = ['foo'] - cmd.ensure_finalized() - cmd.local_index.scan([new_location]) - res = cmd.easy_install('foo') - self.assertEqual(os.path.realpath(res.location), - os.path.realpath(new_location)) - finally: - sys.path.remove(target) - for basedir in [new_location, target, ]: - if not os.path.exists(basedir) or not os.path.isdir(basedir): - continue - try: - shutil.rmtree(basedir) - except: - pass - if old_ppath is not None: - os.environ['PYTHONPATH'] = old_ppath - else: - del os.environ['PYTHONPATH'] - - def test_setup_requires(self): - """Regression test for Distribute issue #318 - - Ensure that a package with setup_requires can be installed when - setuptools is installed in the user site-packages without causing a - SandboxViolation. - """ - - test_setup_attrs = { - 'name': 'test_pkg', 'version': '0.0', - 'setup_requires': ['foobar'], - 'dependency_links': [os.path.abspath(self.dir)] - } - - test_pkg = os.path.join(self.dir, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - os.mkdir(test_pkg) - - f = open(test_setup_py, 'w') - f.write(textwrap.dedent("""\ - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - f.close() - - foobar_path = os.path.join(self.dir, 'foobar-0.1.tar.gz') - make_trivial_sdist( - foobar_path, - textwrap.dedent("""\ - import setuptools - setuptools.setup( - name='foobar', - version='0.1' - ) - """)) - - old_stdout = sys.stdout - old_stderr = sys.stderr - sys.stdout = StringIO() - sys.stderr = StringIO() - try: - try: - with reset_setup_stop_context(): - run_setup(test_setup_py, ['install']) - except SandboxViolation: - self.fail('Installation caused SandboxViolation') - finally: - sys.stdout = old_stdout - sys.stderr = old_stderr - - -class TestSetupRequires(unittest.TestCase): - - def test_setup_requires_honors_fetch_params(self): - """ - When easy_install installs a source distribution which specifies - setup_requires, it should honor the fetch parameters (such as - allow-hosts, index-url, and find-links). - """ - # set up a server which will simulate an alternate package index. - p_index = setuptools.tests.server.MockServer() - p_index.start() - netloc = 1 - p_index_loc = urlparse(p_index.url)[netloc] - if p_index_loc.endswith(':0'): - # Some platforms (Jython) don't find a port to which to bind, - # so skip this test for them. - return - # create an sdist that has a build-time dependency. 
- with TestSetupRequires.create_sdist() as dist_file: - with tempdir_context() as temp_install_dir: - with environment_context(PYTHONPATH=temp_install_dir): - ei_params = ['--index-url', p_index.url, - '--allow-hosts', p_index_loc, - '--exclude-scripts', '--install-dir', temp_install_dir, - dist_file] - with reset_setup_stop_context(): - with argv_context(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - self.assertRaises(SystemExit, - easy_install_pkg.main, ei_params) - # there should have been two or three requests to the server - # (three happens on Python 3.3a) - self.assertTrue(2 <= len(p_index.requests) <= 3) - self.assertEqual(p_index.requests[0].path, '/does-not-exist/') - - @staticmethod - @contextlib.contextmanager - def create_sdist(): - """ - Return an sdist with a setup_requires dependency (of something that - doesn't exist) - """ - with tempdir_context() as dir: - dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_trivial_sdist( - dist_path, - textwrap.dedent(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """).lstrip()) - yield dist_path - - -def make_trivial_sdist(dist_path, setup_py): - """Create a simple sdist tarball at dist_path, containing just a - setup.py, the contents of which are provided by the setup_py string. - """ - - setup_py_file = tarfile.TarInfo(name='setup.py') - try: - # Python 3 (StringIO gets converted to io module) - MemFile = BytesIO - except AttributeError: - MemFile = StringIO - setup_py_bytes = MemFile(setup_py.encode('utf-8')) - setup_py_file.size = len(setup_py_bytes.getvalue()) - dist = tarfile.open(dist_path, 'w:gz') - try: - dist.addfile(setup_py_file, fileobj=setup_py_bytes) - finally: - dist.close() - - -@contextlib.contextmanager -def tempdir_context(cd=lambda dir:None): - temp_dir = tempfile.mkdtemp() - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) - -@contextlib.contextmanager -def environment_context(**updates): - old_env = os.environ.copy() - os.environ.update(updates) - try: - yield - finally: - for key in updates: - del os.environ[key] - os.environ.update(old_env) - -@contextlib.contextmanager -def argv_context(repl): - old_argv = sys.argv[:] - sys.argv[:] = repl - yield - sys.argv[:] = old_argv - -@contextlib.contextmanager -def reset_setup_stop_context(): - """ - When the setuptools tests are run using setup.py test, and then - one wants to invoke another setup() command (such as easy_install) - within those tests, it's necessary to reset the global variable - in distutils.core so that the setup() command will run naturally. 
- """ - setup_stop_after = distutils.core._setup_stop_after - distutils.core._setup_stop_after = None - yield - distutils.core._setup_stop_after = setup_stop_after diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py deleted file mode 100644 index 2785436..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_egg_info.py +++ /dev/null @@ -1,173 +0,0 @@ - -import os -import sys -import tempfile -import shutil -import unittest - -import pkg_resources -import warnings -from setuptools.command import egg_info -from setuptools import svn_utils -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') -"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" - - -class TestEggInfo(unittest.TestCase): - - def setUp(self): - self.test_dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.test_dir, '.svn')) - - self.old_cwd = os.getcwd() - os.chdir(self.test_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.test_dir) - - def _write_entries(self, entries): - fn = os.path.join(self.test_dir, '.svn', 'entries') - entries_f = open(fn, 'wb') - entries_f.write(entries) - entries_f.close() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_version_10_format(self): - """ - """ - #keeping this set for 1.6 is a good check on the get_svn_revision - #to ensure I return using svnversion what would had been returned - version_str = svn_utils.SvnInfo.get_svn_version() - version = [int(x) for x in version_str.split('.')[:2]] - if version != [1, 6]: - if hasattr(self, 'skipTest'): - self.skipTest('') - else: - sys.stderr.write('\n Skipping due to SVN Version\n') - return - - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - self.assertEqual(rev, '89000') - - def test_version_10_format_legacy_parser(self): - """ - """ - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable: - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - #catch_warnings not available until py26 - warning_filters = warnings.filters - warnings.filters = warning_filters[:] - try: - warnings.simplefilter("ignore", DeprecationWarning) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - finally: - #restore the warning filters - warnings.filters = warning_filters - #restore the os path - if path_variable: - os.environ[path_variable] = old_path - - self.assertEqual(rev, '89000') - -DUMMY_SOURCE_TXT = """CHANGES.txt -CONTRIBUTORS.txt -HISTORY.txt -LICENSE -MANIFEST.in -README.txt -setup.py -dummy/__init__.py -dummy/test.txt -dummy.egg-info/PKG-INFO -dummy.egg-info/SOURCES.txt -dummy.egg-info/dependency_links.txt -dummy.egg-info/top_level.txt""" - - -class TestSvnDummy(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return None - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "dummy%i%i" % self.base_version - 
self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummy, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -class TestSvnDummyLegacy(environment.ZippedEnvironment): - - def setUp(self): - self.base_version = (1, 6) - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummyLegacy, self).setUp() - - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - path="", - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py deleted file mode 100644 index dae71cb..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_markerlib.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import unittest -from setuptools.tests.py26compat import skipIf - -try: - import ast -except ImportError: - pass - -class TestMarkerlib(unittest.TestCase): - - @skipIf('ast' not in globals(), - "ast not available (Python < 2.6?)") - def test_markers(self): - from _markerlib import interpret, default_environment, compile - - os_name = os.name - - self.assertTrue(interpret("")) - - self.assertTrue(interpret("os.name != 'buuuu'")) - self.assertTrue(interpret("os_name != 'buuuu'")) - self.assertTrue(interpret("python_version > '1.0'")) - self.assertTrue(interpret("python_version < '5.0'")) - self.assertTrue(interpret("python_version <= '5.0'")) - self.assertTrue(interpret("python_version >= '1.0'")) - self.assertTrue(interpret("'%s' in os.name" % os_name)) - self.assertTrue(interpret("'%s' in os_name" % os_name)) - self.assertTrue(interpret("'buuuu' not in os.name")) - - self.assertFalse(interpret("os.name == 'buuuu'")) - self.assertFalse(interpret("os_name == 'buuuu'")) - self.assertFalse(interpret("python_version < '1.0'")) - self.assertFalse(interpret("python_version > '5.0'")) - self.assertFalse(interpret("python_version >= '5.0'")) - self.assertFalse(interpret("python_version <= '1.0'")) - self.assertFalse(interpret("'%s' not in os.name" % os_name)) - self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) - self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) - - environment = default_environment() - environment['extra'] = 'test' - self.assertTrue(interpret("extra == 'test'", environment)) - self.assertFalse(interpret("extra == 'doc'", environment)) - - def raises_nameError(): - try: - interpret("python.version == '42'") - except NameError: - pass - else: - raise Exception("Expected NameError") - - raises_nameError() - - def raises_syntaxError(): - try: - 
interpret("(x for x in (4,))") - except SyntaxError: - pass - else: - raise Exception("Expected SyntaxError") - - raises_syntaxError() - - statement = "python_version == '5'" - self.assertEqual(compile(statement).__doc__, statement) - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py deleted file mode 100644 index 664566a..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_packageindex.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Package Index Tests -""" -import sys -import os -import unittest -import pkg_resources -from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url -import distutils.errors -import setuptools.package_index -from setuptools.tests.server import IndexServer - -class TestPackageIndex(unittest.TestCase): - - def test_bad_url_bad_port(self): - index = setuptools.package_index.PackageIndex() - url = 'http://127.0.0.1:0/nonesuch/test_package_index' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_typo(self): - # issue 16 - # easy_install inquant.contentmirror.plone breaks because of a typo - # in its home URL - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_bad_status_line(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - def _urlopen(*args): - raise httplib.BadStatusLine('line') - - index.opener = _urlopen - url = 'http://example.com' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue('line' in str(v)) - else: - raise AssertionError('Should have raise here!') - - def test_bad_url_double_scheme(self): - """ - A bad URL with a double scheme should raise a DistutilsError. - """ - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue 20 - url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' - try: - index.open_url(url) - except distutils.errors.DistutilsError: - error = sys.exc_info()[1] - msg = unicode(error) - assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg - return - raise RuntimeError("Did not raise") - - def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue #160 - if sys.version_info[0] == 2 and sys.version_info[1] == 7: - # this should not fail - url = 'http://example.com' - page = ('') - index.process_index(url, page) - - def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - url = 'file:///tmp/test_package_index' - self.assertTrue(index.url_ok(url, True)) - - def test_links_priority(self): - """ - Download links from the pypi simple index should be used before - external download links. - https://bitbucket.org/tarek/distribute/issue/163 - - Usecase : - - someone uploads a package on pypi, a md5 is generated - - someone manually copies this link (with the md5 in the url) onto an - external page accessible from the package page. 
- - someone reuploads the package (with a different md5) - - while easy_installing, an MD5 error occurs because the external link - is used - -> Setuptools should use the link from pypi, not the external one. - """ - if sys.platform.startswith('java'): - # Skip this test on jython because binding to :0 fails - return - - # start an index server - server = IndexServer() - server.start() - index_url = server.base_url() + 'test_links_priority/simple/' - - # scan a test index - pi = setuptools.package_index.PackageIndex(index_url) - requirement = pkg_resources.Requirement.parse('foobar') - pi.find_packages(requirement) - server.stop() - - # the distribution has been found - self.assertTrue('foobar' in pi) - # we have only one link, because links are compared without md5 - self.assertTrue(len(pi['foobar'])==1) - # the link should be from the index - self.assertTrue('correct_md5' in pi['foobar'][0].location) - - def test_parse_bdist_wininst(self): - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) - - def test__vcs_split_rev_from_url(self): - """ - Test the basic usage of _vcs_split_rev_from_url - """ - vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url - url, rev = vsrfu('https://example.com/bar@2995') - self.assertEqual(url, 'https://example.com/bar') - self.assertEqual(rev, '2995') - - def test_local_index(self): - """ - local_open should be able to read an index from the file system. - """ - f = open('index.html', 'w') - f.write('
<div>content</div>
') - f.close() - try: - url = 'file:' + pathname2url(os.getcwd()) + '/' - res = setuptools.package_index.local_open(url) - finally: - os.remove('index.html') - assert 'content' in res.read() - - -class TestContentCheckers(unittest.TestCase): - - def test_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - checker.feed('You should probably not be using MD5'.encode('ascii')) - self.assertEqual(checker.hash.hexdigest(), - 'f12895fdffbd45007040d2e44df98478') - self.assertTrue(checker.is_valid()) - - def test_other_fragment(self): - "Content checks should succeed silently if no hash is present" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_blank_md5(self): - "Content checks should succeed if a hash is empty" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_get_hash_name_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - self.assertEqual(checker.hash_name, 'md5') - - def test_report(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - rep = checker.report(lambda x: x, 'My message about %s') - self.assertEqual(rep, 'My message about md5') diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_resources.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_resources.py deleted file mode 100644 index c9fcf76..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_resources.py +++ /dev/null @@ -1,620 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove - -import os -import sys -import tempfile -import shutil -from unittest import TestCase - -import pkg_resources -from pkg_resources import (parse_requirements, VersionConflict, parse_version, - Distribution, EntryPoint, Requirement, safe_version, safe_name, - WorkingSet) - -from setuptools.command.easy_install import (get_script_header, is_sh, - nt_quote_arg) -from setuptools.compat import StringIO, iteritems - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -def safe_repr(obj, short=False): - """ copied from Python2.7""" - try: - result = repr(obj) - except Exception: - result = object.__repr__(obj) - if not short or len(result) < pkg_resources._MAX_LENGTH: - return result - return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' 
- -class Metadata(pkg_resources.EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self,*pairs): - self.metadata = dict(pairs) - - def has_metadata(self,name): - return name in self.metadata - - def get_metadata(self,name): - return self.metadata[name] - - def get_metadata_lines(self,name): - return pkg_resources.yield_lines(self.get_metadata(name)) - -dist_from_fn = pkg_resources.Distribution.from_filename - -class DistroTests(TestCase): - - def testCollection(self): - # empty path should produce no distributions - ad = pkg_resources.Environment([], platform=None, python=None) - self.assertEqual(list(ad), []) - self.assertEqual(ad['FooPkg'],[]) - ad.add(dist_from_fn("FooPkg-1.3_1.egg")) - ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) - ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - self.assertTrue(ad['FooPkg']) - # But only 1 package - self.assertEqual(list(ad), ['foopkg']) - - # Distributions sort by version - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] - ) - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.2'] - ) - # And inserting adds them in order - ad.add(dist_from_fn("FooPkg-1.9.egg")) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] - ) - - ws = WorkingSet([]) - foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") - foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") - req, = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - self.assertEqual(ad.best_match(req,ws).version, '1.9') - # If a matching distro is already installed, should return only that - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]) - ws.add(foo12) - ws.add(foo14) - self.assertRaises(VersionConflict, ad.best_match, req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]) - ws.add(foo14) - ws.add(foo12) - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - def checkFooPkg(self,d): - self.assertEqual(d.project_name, "FooPkg") - self.assertEqual(d.key, "foopkg") - self.assertEqual(d.version, "1.3-1") - self.assertEqual(d.py_version, "2.4") - self.assertEqual(d.platform, "win32") - self.assertEqual(d.parsed_version, parse_version("1.3-1")) - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - self.assertEqual(d.py_version, sys.version[:3]) - self.assertEqual(d.platform, None) - - def testDistroParse(self): - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg") - self.checkFooPkg(d) - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", - metadata = Metadata( - ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") - ) - ) - self.checkFooPkg(d) - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - self.assertEqual( - list(dist.requires(extras)), - list(parse_requirements(txt)) - ) - - def testDistroDependsSimple(self): - for v in 
"Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - def testResolve(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - self.assertEqual(list(ws.resolve([],ad)), []) - # Request something not in the collection -> DistributionNotFound - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad - ) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) - ) - ad.add(Foo) - ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - self.assertEqual(targets, [Foo]) - list(map(ws.add,targets)) - self.assertRaises(VersionConflict, ws.resolve, - parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad - ) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - self.assertEqual( - list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] - ) - # Requests for conflicting versions produce VersionConflict - self.assertRaises(VersionConflict, - ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad) - - def testDistroDependsOptions(self): - d = self.distRequires(""" - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""") - self.checkRequires(d,"Twisted>=1.5") - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen","fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"] - ) - self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"]) - - -class EntryPointTests(TestCase): - - def assertfields(self, ep): - self.assertEqual(ep.name,"foo") - self.assertEqual(ep.module_name,"setuptools.tests.test_resources") - self.assertEqual(ep.attrs, ("EntryPointTests",)) - self.assertEqual(ep.extras, ("x",)) - self.assertTrue(ep.load() is EntryPointTests) - self.assertEqual( - str(ep), - "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ) - - def setUp(self): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) - - def testBasics(self): - ep = EntryPoint( - "foo", "setuptools.tests.test_resources", ["EntryPointTests"], - ["x"], self.dist - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - self.assertEqual(ep.name,"bar baz") - self.assertEqual(ep.module_name,"spammity") - self.assertEqual(ep.attrs, ()) - self.assertEqual(ep.extras, ("ping",)) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - self.assertEqual(ep.name,"fizzly") - self.assertEqual(ep.module_name,"wocka") - self.assertEqual(ep.attrs, ("foo",)) - self.assertEqual(ep.extras, ()) - - def testRejects(self): - for ep in [ - "foo", 
"x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", - ]: - try: EntryPoint.parse(ep) - except ValueError: pass - else: raise AssertionError("Should've been bad", ep) - - def checkSubMap(self, m): - self.assertEqual(len(m), len(self.submap_expect)) - for key, ep in iteritems(self.submap_expect): - self.assertEqual(repr(m.get(key)), repr(ep)) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), - feature3=EntryPoint('feature3', 'this.module', extras=['something']) - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.assertTrue(parse_version('1.2') in r) - self.assertTrue(parse_version('1.1') not in r) - self.assertTrue('1.2' in r) - self.assertTrue('1.1' not in r) - self.assertTrue(foo_dist not in r) - self.assertTrue(twist11 not in r) - self.assertTrue(twist12 in r) - - def testAdvancedContains(self): - r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.assertTrue(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.assertTrue(v not in r, (v,r)) - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized 
- self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - frozenset(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - self.assertTrue(d("foo-0.3a4.egg") not in r1) - self.assertTrue(d("foo-0.3a1.egg") not in r1) - self.assertTrue(d("foo-0.3a4.egg") not in r2) - - self.assertTrue(d("foo-0.3a2.egg") in r1) - self.assertTrue(d("foo-0.3a2.egg") in r2) - self.assertTrue(d("foo-0.3a3.egg") in r2) - self.assertTrue(d("foo-0.3a5.egg") in r2) - - def testSetuptoolsProjectName(self): - """ - The setuptools project should implement the setuptools package. - """ - - self.assertEqual( - Requirement.parse('setuptools').project_name, 'setuptools') - # setuptools 0.7 and higher means setuptools. - self.assertEqual( - Requirement.parse('setuptools == 0.7').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - sample = """ - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - self.assertEqual(list(pkg_resources.split_sections(sample)), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', '0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1') - c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def 
c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertTrue(p1= (3,) and os.environ.get("LC_CTYPE") - in (None, "C", "POSIX")): - return - - class java: - class lang: - class System: - @staticmethod - def getProperty(property): - return "" - sys.modules["java"] = java - - platform = sys.platform - sys.platform = 'java1.5.0_13' - stdout, stderr = sys.stdout, sys.stderr - try: - # A mock sys.executable that uses a shebang line (this file) - exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py') - self.assertEqual( - get_script_header('#!/usr/local/bin/python', executable=exe), - '#!/usr/bin/env %s\n' % exe) - - # Ensure we generate what is basically a broken shebang line - # when there's options, with a warning emitted - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python -x', - executable=exe), - '#!%s -x\n' % exe) - self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe), - '#!%s -x\n' % self.non_ascii_exe) - self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) - finally: - del sys.modules["java"] - sys.platform = platform - sys.stdout, sys.stderr = stdout, stderr - - -class NamespaceTests(TestCase): - - def setUp(self): - self._ns_pkgs = pkg_resources._namespace_packages.copy() - self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-") - os.makedirs(os.path.join(self._tmpdir, "site-pkgs")) - self._prev_sys_path = sys.path[:] - sys.path.append(os.path.join(self._tmpdir, "site-pkgs")) - - def tearDown(self): - shutil.rmtree(self._tmpdir) - pkg_resources._namespace_packages = self._ns_pkgs.copy() - sys.path = self._prev_sys_path[:] - - def _assertIn(self, member, container): - """ assertIn and assertTrue does not exist in Python2.3""" - if member not in container: - standardMsg = '%s not found in %s' % (safe_repr(member), - safe_repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - - def test_two_levels_deep(self): - """ - Test nested namespace packages - Create namespace packages in the following tree : - site-packages-1/pkg1/pkg2 - site-packages-2/pkg1/pkg2 - Check both are in the _namespace_packages dict and that their __path__ - is correct - """ - sys.path.append(os.path.join(self._tmpdir, "site-pkgs2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")) - ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" - for site in ["site-pkgs", "site-pkgs2"]: - pkg1_init = open(os.path.join(self._tmpdir, site, - "pkg1", "__init__.py"), "w") - pkg1_init.write(ns_str) - pkg1_init.close() - pkg2_init = open(os.path.join(self._tmpdir, site, - "pkg1", "pkg2", "__init__.py"), "w") - pkg2_init.write(ns_str) - pkg2_init.close() - import pkg1 - self._assertIn("pkg1", pkg_resources._namespace_packages.keys()) - try: - import pkg1.pkg2 - except ImportError: - self.fail("Setuptools tried to import the parent namespace package") - # check the _namespace_packages dict - self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys()) - self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"]) - # check the __path__ attribute contains both paths - self.assertEqual(pkg1.pkg2.__path__, [ - os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"), - os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")]) diff --git 
a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sandbox.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sandbox.py deleted file mode 100644 index 3dad137..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,79 +0,0 @@ -"""develop tests -""" -import sys -import os -import shutil -import unittest -import tempfile -import types - -import pkg_resources -import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox, SandboxViolation - -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True - -class TestSandbox(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) - sandbox.run(self._file_writer(os.devnull)) - - def _file_writer(path): - def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() - return do_write - - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. - """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) - - def test_setup_py_with_BOM(self): - """ - It should be possible to execute a setup.py with a Byte Order Mark - """ - target = pkg_resources.resource_filename(__name__, - 'script-with-bom.py') - namespace = types.ModuleType('namespace') - setuptools.sandbox.execfile(target, vars(namespace)) - assert namespace.result == 'passed' - -if __name__ == '__main__': - unittest.main() diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sdist.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sdist.py deleted file mode 100644 index 71d1075..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_sdist.py +++ /dev/null @@ -1,535 +0,0 @@ -# -*- coding: utf-8 -*- -"""sdist tests""" - -import locale -import os -import shutil -import sys -import tempfile -import unittest -import unicodedata -import re -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -from setuptools.compat import StringIO, unicode -from setuptools.tests.py26compat import skipIf -from setuptools.command.sdist import sdist, walk_revctrl -from setuptools.command.egg_info import manifest_maker -from setuptools.dist import Distribution -from setuptools import svn_utils - -SETUP_ATTRS = { - 'name': 'sdist_test', - 'version': '0.0', - 'packages': ['sdist_test'], - 'package_data': {'sdist_test': ['*.txt']} -} - - -SETUP_PY = """\ -from setuptools import setup - -setup(**%r) -""" % SETUP_ATTRS - - -if sys.version_info >= (3,): - LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1') -else: - LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py' - - -# Cannot use context manager because of Python 2.4 -def quiet(): - global old_stdout, old_stderr - old_stdout, old_stderr = sys.stdout, sys.stderr - sys.stdout, sys.stderr = StringIO(), StringIO() - -def unquiet(): - 
sys.stdout, sys.stderr = old_stdout, old_stderr - - -# Fake byte literals for Python <= 2.5 -def b(s, encoding='utf-8'): - if sys.version_info >= (3,): - return s.encode(encoding) - return s - - -# Convert to POSIX path -def posix(path): - if sys.version_info >= (3,) and not isinstance(path, str): - return path.replace(os.sep.encode('ascii'), b('/')) - else: - return path.replace(os.sep, '/') - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, unicode): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -class TestSdistTest(unittest.TestCase): - - def setUp(self): - self.temp_dir = tempfile.mkdtemp() - f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') - f.write(SETUP_PY) - f.close() - # Set up the rest of the test package - test_pkg = os.path.join(self.temp_dir, 'sdist_test') - os.mkdir(test_pkg) - # *.rst was not included in package_data, so c.rst should not be - # automatically added to the manifest when not under version control - for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: - # Just touch the files; their contents are irrelevant - open(os.path.join(test_pkg, fname), 'w').close() - - self.old_cwd = os.getcwd() - os.chdir(self.temp_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.temp_dir) - - def test_package_data_in_sdist(self): - """Regression test for pull request #4: ensures that files listed in - package_data are included in the manifest even if they're not added to - version control. - """ - - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # squelch output - quiet() - try: - cmd.run() - finally: - unquiet() - - manifest = cmd.filelist.files - self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest) - - def test_manifest_is_written_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join('sdist_test', 'smörbröd.py') - - # Add UTF-8 filename and write manifest - quiet() - try: - mm.run() - mm.filelist.files.append(filename) - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - u_contents = contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - if sys.version_info >= (3,): - self.assertTrue(posix(filename) in u_contents) - else: - self.assertTrue(posix(filename) in contents) - - # Python 3 only - if sys.version_info >= (3,): - - def test_write_manifest_allows_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - - # Add filename and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - self.assertTrue(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertTrue(u_filename in mm.filelist.files) - - def test_write_manifest_skips_non_utf8_filenames(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - - # Add filename with surrogates and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The Latin-1 filename should have been skipped - self.assertFalse(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertFalse(u_filename in mm.filelist.files) - - def test_manifest_is_read_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add UTF-8 filename to manifest - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - cmd.read_manifest() - finally: - unquiet() - - # The filelist should contain the UTF-8 filename - if sys.version_info >= (3,): - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - - # Python 3 only - if sys.version_info >= (3,): - - def test_read_manifest_skips_non_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add Latin-1 filename to manifest - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - try: - cmd.read_manifest() - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - finally: - unquiet() - - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - - @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8', - 'Unittest fails if locale is not utf-8 but the manifests is recorded correctly') - def test_sdist_with_utf8_encoded_filename(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - open(filename, 'w').close() - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.platform == 'darwin': - filename = decompose(filename) - - if sys.version_info >= (3,): - fs_enc = sys.getfilesystemencoding() - - if sys.platform == 'win32': - if fs_enc == 'cp1252': - # Python 3 mangles the UTF-8 filename - filename = filename.decode('cp1252') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('mbcs') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - else: - self.assertTrue(filename in cmd.filelist.files) - - def test_sdist_with_latin1_encoded_filename(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - open(filename, 'w').close() - self.assertTrue(os.path.isfile(filename)) - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.version_info >= (3,): - #not all windows systems have a default FS encoding of cp1252 - if sys.platform == 'win32': - # Latin-1 is similar to Windows-1252 however - # on mbcs filesys it is not in latin-1 encoding - fs_enc = sys.getfilesystemencoding() - if fs_enc == 'mbcs': - filename = filename.decode('mbcs') - else: - filename = filename.decode('latin-1') - - self.assertTrue(filename in cmd.filelist.files) - else: - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - else: - # No conversion takes place under Python 2 and the file - # is included. We shall keep it that way for BBB. 
- self.assertTrue(filename in cmd.filelist.files) - - -class TestDummyOutput(environment.ZippedEnvironment): - - def setUp(self): - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', "dummy.zip") - self.dataname = "dummy" - super(TestDummyOutput, self).setUp() - - def _run(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=0) - if code: - info = "DIR: " + os.path.abspath('.') - info += "\n SDIST RETURNED: %i\n\n" % code - info += data - raise AssertionError(info) - - datalines = data.splitlines() - - possible = ( - "running sdist", - "running egg_info", - "creating dummy\.egg-info", - "writing dummy\.egg-info", - "writing top-level names to dummy\.egg-info", - "writing dependency_links to dummy\.egg-info", - "writing manifest file 'dummy\.egg-info", - "reading manifest file 'dummy\.egg-info", - "reading manifest template 'MANIFEST\.in'", - "writing manifest file 'dummy\.egg-info", - "creating dummy-0.1.1", - "making hard links in dummy-0\.1\.1", - "copying files to dummy-0\.1\.1", - "copying \S+ -> dummy-0\.1\.1", - "copying dummy", - "copying dummy\.egg-info", - "hard linking \S+ -> dummy-0\.1\.1", - "hard linking dummy", - "hard linking dummy\.egg-info", - "Writing dummy-0\.1\.1", - "creating dist", - "creating 'dist", - "Creating tar archive", - "running check", - "adding 'dummy-0\.1\.1", - "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", - "gzip .+ dist/dummy-0\.1\.1\.tar", - "removing 'dummy-0\.1\.1' \\(and everything under it\\)", - ) - - print(" DIR: " + os.path.abspath('.')) - for line in datalines: - found = False - for pattern in possible: - if re.match(pattern, line): - print(" READ: " + line) - found = True - break - if not found: - raise AssertionError("Unexpexected: %s\n-in-\n%s" - % (line, data)) - - return data - - def test_sources(self): - self._run() - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_walksvn(self): - if self.base_version >= (1, 6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - #TODO is this right - expected = set([ - os.path.join('a file'), - os.path.join(folder2, 'Changes.txt'), - os.path.join(folder2, 'MD5SUMS'), - os.path.join(folder2, 'README.txt'), - os.path.join(folder3, 'Changes.txt'), - os.path.join(folder3, 'MD5SUMS'), - os.path.join(folder3, 'README.txt'), - os.path.join(folder3, 'TODO.txt'), - os.path.join(folder3, 'fin'), - os.path.join('third_party', 'README.txt'), - os.path.join('folder', folder2, 'Changes.txt'), - os.path.join('folder', folder2, 'MD5SUMS'), - os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'Changes.txt'), - os.path.join('folder', folder3, 'fin'), - os.path.join('folder', folder3, 'MD5SUMS'), - os.path.join('folder', folder3, 'oops'), - 
os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'ZuMachen.txt'), - os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), - os.path.join('folder', 'lalala.txt'), - os.path.join('folder', 'quest.txt'), - # The example will have a deleted file - # (or should) but shouldn't return it - ]) - self.assertEqual(set(x for x in walk_revctrl()), expected) - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_svn.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_svn.py deleted file mode 100644 index afee32b..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_svn.py +++ /dev/null @@ -1,246 +0,0 @@ -# -*- coding: utf-8 -*- -"""svn tests""" - - -import os -import sys -import unittest -import codecs -import subprocess -from setuptools.tests import environment -from setuptools.compat import unicode, unichr - -from setuptools import svn_utils -from setuptools.tests.py26compat import skipIf - - -def _do_svn_check(): - try: - subprocess.check_call(["svn", "--version"], - shell=(sys.platform == 'win32')) - return True - except (OSError, subprocess.CalledProcessError): - return False -_svn_check = _do_svn_check() - - -class TestSvnVersion(unittest.TestCase): - - def test_no_svn_found(self): - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable is None: - try: - self.skipTest('Cannot figure out how to modify path') - except AttributeError: # PY26 doesn't have this - return - - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - try: - version = svn_utils.SvnInfo.get_svn_version() - self.assertEqual(version, '') - finally: - os.environ[path_variable] = old_path - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_svn_should_exist(self): - version = svn_utils.SvnInfo.get_svn_version() - self.assertNotEqual(version, '') - -def _read_utf8_file(path): - fileobj = None - try: - fileobj = codecs.open(path, 'r', 'utf-8') - data = fileobj.read() - return data - finally: - if fileobj: - fileobj.close() - - -class ParserInfoXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_info.xml') - #Remember these are pre-generated to test XML parsing - # so these paths might not valid on your system - example_base = "%s_example" % svn_name - - data = _read_utf8_file(path) - - expected = set([ - ("\\".join((example_base, 'a file')), 'file'), - ("\\".join((example_base, 'folder')), 'dir'), - ("\\".join((example_base, 'folder', 'lalala.txt')), 'file'), - ("\\".join((example_base, 'folder', 'quest.txt')), 'file'), - ]) - self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - -class ParserExternalXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.xml') - example_base = svn_name + '_example' - data = _read_utf8_file(path) - - if ext_spaces: - folder2 = 'third 
party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.sep.join((example_base, folder2)), - os.sep.join((example_base, folder3)), - # folder is third_party大介 - os.sep.join((example_base, - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - os.sep.join((example_base, 'folder', folder2)), - os.sep.join((example_base, 'folder', folder3)), - os.sep.join((example_base, 'folder', - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - ]) - - expected = set(os.path.normpath(x) for x in expected) - dir_base = os.sep.join(('C:', 'development', 'svn_example')) - self.assertEqual(set(x for x - in svn_utils.parse_externals_xml(data, dir_base)), expected) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - - -class ParseExternal(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.txt') - data = _read_utf8_file(path) - - if ext_spaces: - expected = set(['third party2', 'third party3', - 'third party3b', 'third_party']) - else: - expected = set(['third_party2', 'third_party3', 'third_party']) - - self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # empty or null - self.dataname = None - self.datafile = None - return - - self.base_version = tuple([int(x) for x in version.split('.')[:2]]) - - if self.base_version < (1,3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1,9): - #trying the latest version - self.base_version = (1,8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_revision(self): - rev = svn_utils.SvnInfo.load('.').get_revision() - self.assertEqual(rev, 6) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_entries(self): - expected = set([ - (os.path.join('a file'), 'file'), - (os.path.join('folder'), 'dir'), - (os.path.join('folder', 'lalala.txt'), 'file'), - (os.path.join('folder', 'quest.txt'), 'file'), - #The example will have a deleted file (or should) - #but shouldn't return it - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set(x for x in info.entries), expected) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_externals(self): - if self.base_version >= (1,6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.path.join(folder2), - os.path.join(folder3), - os.path.join('third_party'), - os.path.join('folder', folder2), - os.path.join('folder', folder3), - os.path.join('folder', 
'third_party'), - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set([x for x in info.externals]), expected) - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_test.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_test.py deleted file mode 100644 index 7a06a40..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_test.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: UTF-8 -*- - -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.test import test -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', -) -""" - -NS_INIT = """# -*- coding: Latin-1 -*- -# Söme Arbiträry Ãœnicode to test Issüé 310 -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) -""" -# Make sure this is Latin-1 binary, before writing: -if sys.version_info < (3,): - NS_INIT = NS_INIT.decode('UTF-8') -NS_INIT = NS_INIT.encode('Latin-1') - -TEST_PY = """import unittest - -class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used - -test_suite = unittest.makeSuite(TestTest) -""" - -class TestTestTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'name')) - os.mkdir(os.path.join(self.dir, 'name', 'space')) - os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'wt') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # name/__init__.py - init = os.path.join(self.dir, 'name', '__init__.py') - f = open(init, 'wb') - f.write(NS_INIT) - f.close() - # name/space/__init__.py - init = os.path.join(self.dir, 'name', 'space', '__init__.py') - f = open(init, 'wt') - f.write('#empty\n') - f.close() - # name/space/tests/__init__.py - init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') - f = open(init, 'wt') - f.write(TEST_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_test(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - dist = Distribution(dict( - name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', - use_2to3=True, - )) - dist.script_name = 'setup.py' - cmd = test(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - sys.stdout = StringIO() - try: - try: # 
try/except/finally doesn't work in Python 2.4, so we need nested try-statements. - cmd.run() - except SystemExit: # The test runner calls sys.exit, stop that making an error. - pass - finally: - sys.stdout = old_stdout - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py b/Darwin/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py deleted file mode 100644 index 769f16c..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) - - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') - f.write("Hello world.") - f.close() - - # An empty folder. - os.mkdir(os.path.join(self.upload_dir, 'empty')) - - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_create_zipfile(self): - # Test to make sure zipfile creation handles common cases. - # This explicitly includes a folder containing an empty folder. - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.upload_dir = self.upload_dir - cmd.target_dir = self.upload_dir - tmp_dir = tempfile.mkdtemp() - tmp_file = os.path.join(tmp_dir, 'foo.zip') - try: - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - zip_file = zipfile.ZipFile(tmp_file) # woh... - - assert zip_file.namelist() == ['index.html'] - - zip_file.close() - finally: - shutil.rmtree(tmp_dir) - diff --git a/Darwin/lib/python3.4/site-packages/setuptools/version.py b/Darwin/lib/python3.4/site-packages/setuptools/version.py deleted file mode 100644 index d980f27..0000000 --- a/Darwin/lib/python3.4/site-packages/setuptools/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '2.1' diff --git a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/PKG-INFO deleted file mode 100644 index abae236..0000000 --- a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/PKG-INFO +++ /dev/null @@ -1,54 +0,0 @@ -Metadata-Version: 1.1 -Name: simplejson -Version: 3.6.3 -Summary: Simple, fast, extensible JSON encoder/decoder for Python -Home-page: http://github.com/simplejson/simplejson -Author: Bob Ippolito -Author-email: bob@redivi.com -License: MIT License -Description: simplejson is a simple, fast, complete, correct and extensible - JSON encoder and decoder for Python 2.5+ - and Python 3.3+. It is pure Python code with no dependencies, - but includes an optional C extension for a serious speed boost. 
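For reference, a minimal sketch of the simplejson usage the PKG-INFO deleted here describes (illustrative only, not part of the patch; it assumes only the documented dumps()/loads() interface, which mirrors the stdlib json module):

import simplejson

def encode_default(obj):
    # Fallback encoder for types JSON cannot represent natively (here: sets).
    # The name encode_default is a hypothetical helper for this sketch.
    if isinstance(obj, set):
        return sorted(obj)
    raise TypeError("not JSON serializable: %r" % (obj,))

payload = simplejson.dumps({"ids": {3, 1, 2}}, default=encode_default)
data = simplejson.loads(payload)  # round-trips back to plain dicts and lists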
- - The latest documentation for simplejson can be read online here: - http://simplejson.readthedocs.org/ - - simplejson is the externally maintained development version of the - json library included with Python 2.6 and Python 3.0, but maintains - backwards compatibility with Python 2.5. - - The encoder can be specialized to provide serialization in any kind of - situation, without any special support by the objects to be serialized - (somewhat like pickle). This is best done with the ``default`` kwarg - to dumps. - - The decoder can handle incoming JSON strings of any specified encoding - (UTF-8 by default). It can also be specialized to post-process JSON - objects with the ``object_hook`` or ``object_pairs_hook`` kwargs. This - is particularly useful for implementing protocols such as JSON-RPC - that have a richer type system than JSON itself. - - For those of you that have legacy systems to maintain, there is a - very old fork of simplejson in the `python2.2`_ branch that supports - Python 2.2. This is based off of a very old version of simplejson, - is not maintained, and should only be used as a last resort. - - .. _python2.2: https://github.com/simplejson/simplejson/tree/python2.2 - -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: License :: OSI Approved :: Academic Free License (AFL) -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.5 -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Programming Language :: Python :: 3.4 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/SOURCES.txt b/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/SOURCES.txt deleted file mode 100644 index 76f095a..0000000 --- a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,48 +0,0 @@ -CHANGES.txt -LICENSE.txt -MANIFEST.in -README.rst -conf.py -index.rst -setup.cfg -setup.py -scripts/make_docs.py -simplejson/__init__.py -simplejson/_speedups.c -simplejson/compat.py -simplejson/decoder.py -simplejson/encoder.py -simplejson/ordered_dict.py -simplejson/scanner.py -simplejson/tool.py -simplejson.egg-info/PKG-INFO -simplejson.egg-info/SOURCES.txt -simplejson.egg-info/dependency_links.txt -simplejson.egg-info/top_level.txt -simplejson/tests/__init__.py -simplejson/tests/test_bigint_as_string.py -simplejson/tests/test_bitsize_int_as_string.py -simplejson/tests/test_check_circular.py -simplejson/tests/test_decimal.py -simplejson/tests/test_decode.py -simplejson/tests/test_default.py -simplejson/tests/test_dump.py -simplejson/tests/test_encode_basestring_ascii.py -simplejson/tests/test_encode_for_html.py -simplejson/tests/test_errors.py -simplejson/tests/test_fail.py -simplejson/tests/test_float.py -simplejson/tests/test_for_json.py -simplejson/tests/test_indent.py -simplejson/tests/test_item_sort_key.py -simplejson/tests/test_namedtuple.py -simplejson/tests/test_pass1.py -simplejson/tests/test_pass2.py 
-simplejson/tests/test_pass3.py -simplejson/tests/test_recursion.py -simplejson/tests/test_scanstring.py -simplejson/tests/test_separators.py -simplejson/tests/test_speedups.py -simplejson/tests/test_tool.py -simplejson/tests/test_tuple.py -simplejson/tests/test_unicode.py \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/dependency_links.txt b/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/installed-files.txt b/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/installed-files.txt deleted file mode 100644 index 51f3d8e..0000000 --- a/Darwin/lib/python3.4/site-packages/simplejson-3.6.3-py3.4.egg-info/installed-files.txt +++ /dev/null @@ -1,74 +0,0 @@ -../simplejson/__init__.py -../simplejson/compat.py -../simplejson/decoder.py -../simplejson/encoder.py -../simplejson/ordered_dict.py -../simplejson/scanner.py -../simplejson/tool.py -../simplejson/tests/__init__.py -../simplejson/tests/test_bigint_as_string.py -../simplejson/tests/test_bitsize_int_as_string.py -../simplejson/tests/test_check_circular.py -../simplejson/tests/test_decimal.py -../simplejson/tests/test_decode.py -../simplejson/tests/test_default.py -../simplejson/tests/test_dump.py -../simplejson/tests/test_encode_basestring_ascii.py -../simplejson/tests/test_encode_for_html.py -../simplejson/tests/test_errors.py -../simplejson/tests/test_fail.py -../simplejson/tests/test_float.py -../simplejson/tests/test_for_json.py -../simplejson/tests/test_indent.py -../simplejson/tests/test_item_sort_key.py -../simplejson/tests/test_namedtuple.py -../simplejson/tests/test_pass1.py -../simplejson/tests/test_pass2.py -../simplejson/tests/test_pass3.py -../simplejson/tests/test_recursion.py -../simplejson/tests/test_scanstring.py -../simplejson/tests/test_separators.py -../simplejson/tests/test_speedups.py -../simplejson/tests/test_tool.py -../simplejson/tests/test_tuple.py -../simplejson/tests/test_unicode.py -../simplejson/__init__.pyc -../simplejson/compat.pyc -../simplejson/decoder.pyc -../simplejson/encoder.pyc -../simplejson/ordered_dict.pyc -../simplejson/scanner.pyc -../simplejson/tool.pyc -../simplejson/tests/__init__.pyc -../simplejson/tests/test_bigint_as_string.pyc -../simplejson/tests/test_bitsize_int_as_string.pyc -../simplejson/tests/test_check_circular.pyc -../simplejson/tests/test_decimal.pyc -../simplejson/tests/test_decode.pyc -../simplejson/tests/test_default.pyc -../simplejson/tests/test_dump.pyc -../simplejson/tests/test_encode_basestring_ascii.pyc -../simplejson/tests/test_encode_for_html.pyc -../simplejson/tests/test_errors.pyc -../simplejson/tests/test_fail.pyc -../simplejson/tests/test_float.pyc -../simplejson/tests/test_for_json.pyc -../simplejson/tests/test_indent.pyc -../simplejson/tests/test_item_sort_key.pyc -../simplejson/tests/test_namedtuple.pyc -../simplejson/tests/test_pass1.pyc -../simplejson/tests/test_pass2.pyc -../simplejson/tests/test_pass3.pyc -../simplejson/tests/test_recursion.pyc -../simplejson/tests/test_scanstring.pyc -../simplejson/tests/test_separators.pyc -../simplejson/tests/test_speedups.pyc -../simplejson/tests/test_tool.pyc -../simplejson/tests/test_tuple.pyc -../simplejson/tests/test_unicode.pyc -../simplejson/_speedups.so -./ 
-dependency_links.txt -PKG-INFO -SOURCES.txt -top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/simplejson/_speedups.so b/Darwin/lib/python3.4/site-packages/simplejson/_speedups.so deleted file mode 100755 index e4addf5..0000000 Binary files a/Darwin/lib/python3.4/site-packages/simplejson/_speedups.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/RECORD b/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/RECORD deleted file mode 100644 index d4dd158..0000000 --- a/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/RECORD +++ /dev/null @@ -1,8 +0,0 @@ -six.py,sha256=SHMbYUbZdimEb9Q7FBGMUtwaLCRC7qYQgXXSS_PHQss,26518 -six-1.7.3.dist-info/METADATA,sha256=J5bgiEeMfq7UWjeoY8UJSndsDcCGQlaAVNrEkDHgE0g,1279 -six-1.7.3.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4 -six-1.7.3.dist-info/DESCRIPTION.rst,sha256=wDIPS0rnIMXICM3qxqUg2g5ozzQyOpCLh8oaca9UgFQ,771 -six-1.7.3.dist-info/metadata.json,sha256=BLxlZMZlGI6548ILR5YYtDZ_U35REqChoRN1SCjGy5M,621 -six-1.7.3.dist-info/RECORD,, -six-1.7.3.dist-info/WHEEL,sha256=6lxp_S3wZGmTBtGMVmNNLyvKFcp7HqQw2Wn4YYk-Suo,110 -__pycache__/six.cpython-34.pyc,, diff --git a/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/metadata.json b/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/metadata.json deleted file mode 100644 index 13595fb..0000000 --- a/Darwin/lib/python3.4/site-packages/six-1.7.3.dist-info/metadata.json +++ /dev/null @@ -1 +0,0 @@ -{"version": "1.7.3", "license": "MIT", "metadata_version": "2.0", "project_urls": {"Home": "http://pypi.python.org/pypi/six/"}, "contacts": [{"email": "benjamin@python.org", "role": "author", "name": "Benjamin Peterson"}], "generator": "bdist_wheel (0.23.0)", "name": "six", "document_names": {"description": "DESCRIPTION.rst"}, "summary": "Python 2 and 3 compatibility utilities", "classifiers": ["Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Libraries", "Topic :: Utilities"]} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/sqlalchemy/cprocessors.so b/Darwin/lib/python3.4/site-packages/sqlalchemy/cprocessors.so deleted file mode 100755 index 03455b9..0000000 Binary files a/Darwin/lib/python3.4/site-packages/sqlalchemy/cprocessors.so and /dev/null differ diff --git a/Darwin/lib/python3.4/site-packages/sqlalchemy/cresultproxy.so b/Darwin/lib/python3.4/site-packages/sqlalchemy/cresultproxy.so deleted file mode 100755 index 5436df3..0000000 Binary files a/Darwin/lib/python3.4/site-packages/sqlalchemy/cresultproxy.so and /dev/null differ diff --git a/Darwin/lib/python3.4/sre_constants.py b/Darwin/lib/python3.4/sre_constants.py deleted file mode 100644 index 23e3516..0000000 --- a/Darwin/lib/python3.4/sre_constants.py +++ /dev/null @@ -1,261 +0,0 @@ -# -# Secret Labs' Regular Expression Engine -# -# various symbols used by the regular expression engine. -# run this script to update the _sre include files! -# -# Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. -# -# See the sre.py file for information on usage and redistribution. -# - -"""Internal support module for sre""" - -# update when constants are added or removed - -MAGIC = 20031017 - -from _sre import MAXREPEAT - -# SRE standard exception (access as sre.error) -# should this really be here? 
- -class error(Exception): - pass - -# operators - -FAILURE = "failure" -SUCCESS = "success" - -ANY = "any" -ANY_ALL = "any_all" -ASSERT = "assert" -ASSERT_NOT = "assert_not" -AT = "at" -BIGCHARSET = "bigcharset" -BRANCH = "branch" -CALL = "call" -CATEGORY = "category" -CHARSET = "charset" -GROUPREF = "groupref" -GROUPREF_IGNORE = "groupref_ignore" -GROUPREF_EXISTS = "groupref_exists" -IN = "in" -IN_IGNORE = "in_ignore" -INFO = "info" -JUMP = "jump" -LITERAL = "literal" -LITERAL_IGNORE = "literal_ignore" -MARK = "mark" -MAX_REPEAT = "max_repeat" -MAX_UNTIL = "max_until" -MIN_REPEAT = "min_repeat" -MIN_UNTIL = "min_until" -NEGATE = "negate" -NOT_LITERAL = "not_literal" -NOT_LITERAL_IGNORE = "not_literal_ignore" -RANGE = "range" -REPEAT = "repeat" -REPEAT_ONE = "repeat_one" -SUBPATTERN = "subpattern" -MIN_REPEAT_ONE = "min_repeat_one" - -# positions -AT_BEGINNING = "at_beginning" -AT_BEGINNING_LINE = "at_beginning_line" -AT_BEGINNING_STRING = "at_beginning_string" -AT_BOUNDARY = "at_boundary" -AT_NON_BOUNDARY = "at_non_boundary" -AT_END = "at_end" -AT_END_LINE = "at_end_line" -AT_END_STRING = "at_end_string" -AT_LOC_BOUNDARY = "at_loc_boundary" -AT_LOC_NON_BOUNDARY = "at_loc_non_boundary" -AT_UNI_BOUNDARY = "at_uni_boundary" -AT_UNI_NON_BOUNDARY = "at_uni_non_boundary" - -# categories -CATEGORY_DIGIT = "category_digit" -CATEGORY_NOT_DIGIT = "category_not_digit" -CATEGORY_SPACE = "category_space" -CATEGORY_NOT_SPACE = "category_not_space" -CATEGORY_WORD = "category_word" -CATEGORY_NOT_WORD = "category_not_word" -CATEGORY_LINEBREAK = "category_linebreak" -CATEGORY_NOT_LINEBREAK = "category_not_linebreak" -CATEGORY_LOC_WORD = "category_loc_word" -CATEGORY_LOC_NOT_WORD = "category_loc_not_word" -CATEGORY_UNI_DIGIT = "category_uni_digit" -CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit" -CATEGORY_UNI_SPACE = "category_uni_space" -CATEGORY_UNI_NOT_SPACE = "category_uni_not_space" -CATEGORY_UNI_WORD = "category_uni_word" -CATEGORY_UNI_NOT_WORD = "category_uni_not_word" -CATEGORY_UNI_LINEBREAK = "category_uni_linebreak" -CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak" - -OPCODES = [ - - # failure=0 success=1 (just because it looks better that way :-) - FAILURE, SUCCESS, - - ANY, ANY_ALL, - ASSERT, ASSERT_NOT, - AT, - BRANCH, - CALL, - CATEGORY, - CHARSET, BIGCHARSET, - GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE, - IN, IN_IGNORE, - INFO, - JUMP, - LITERAL, LITERAL_IGNORE, - MARK, - MAX_UNTIL, - MIN_UNTIL, - NOT_LITERAL, NOT_LITERAL_IGNORE, - NEGATE, - RANGE, - REPEAT, - REPEAT_ONE, - SUBPATTERN, - MIN_REPEAT_ONE - -] - -ATCODES = [ - AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY, - AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING, - AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY, - AT_UNI_NON_BOUNDARY -] - -CHCODES = [ - CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE, - CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD, - CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD, - CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT, - CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD, - CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK, - CATEGORY_UNI_NOT_LINEBREAK -] - -def makedict(list): - d = {} - i = 0 - for item in list: - d[item] = i - i = i + 1 - return d - -OPCODES = makedict(OPCODES) -ATCODES = makedict(ATCODES) -CHCODES = makedict(CHCODES) - -# replacement operations for "ignore case" mode -OP_IGNORE = { - GROUPREF: GROUPREF_IGNORE, - IN: IN_IGNORE, - LITERAL: LITERAL_IGNORE, - NOT_LITERAL: NOT_LITERAL_IGNORE -} - 
-AT_MULTILINE = { - AT_BEGINNING: AT_BEGINNING_LINE, - AT_END: AT_END_LINE -} - -AT_LOCALE = { - AT_BOUNDARY: AT_LOC_BOUNDARY, - AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY -} - -AT_UNICODE = { - AT_BOUNDARY: AT_UNI_BOUNDARY, - AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY -} - -CH_LOCALE = { - CATEGORY_DIGIT: CATEGORY_DIGIT, - CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT, - CATEGORY_SPACE: CATEGORY_SPACE, - CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE, - CATEGORY_WORD: CATEGORY_LOC_WORD, - CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD, - CATEGORY_LINEBREAK: CATEGORY_LINEBREAK, - CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK -} - -CH_UNICODE = { - CATEGORY_DIGIT: CATEGORY_UNI_DIGIT, - CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT, - CATEGORY_SPACE: CATEGORY_UNI_SPACE, - CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE, - CATEGORY_WORD: CATEGORY_UNI_WORD, - CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD, - CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK, - CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK -} - -# flags -SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking) -SRE_FLAG_IGNORECASE = 2 # case insensitive -SRE_FLAG_LOCALE = 4 # honour system locale -SRE_FLAG_MULTILINE = 8 # treat target as multiline string -SRE_FLAG_DOTALL = 16 # treat target as a single string -SRE_FLAG_UNICODE = 32 # use unicode "locale" -SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments -SRE_FLAG_DEBUG = 128 # debugging -SRE_FLAG_ASCII = 256 # use ascii "locale" - -# flags for INFO primitive -SRE_INFO_PREFIX = 1 # has prefix -SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix) -SRE_INFO_CHARSET = 4 # pattern starts with character from given set - -if __name__ == "__main__": - def dump(f, d, prefix): - items = sorted(d.items(), key=lambda a: a[1]) - for k, v in items: - f.write("#define %s_%s %s\n" % (prefix, k.upper(), v)) - f = open("sre_constants.h", "w") - f.write("""\ -/* - * Secret Labs' Regular Expression Engine - * - * regular expression matching engine - * - * NOTE: This file is generated by sre_constants.py. If you need - * to change anything in here, edit sre_constants.py and run it. - * - * Copyright (c) 1997-2001 by Secret Labs AB. All rights reserved. - * - * See the _sre.c file for information on usage and redistribution. - */ - -""") - - f.write("#define SRE_MAGIC %d\n" % MAGIC) - - dump(f, OPCODES, "SRE_OP") - dump(f, ATCODES, "SRE") - dump(f, CHCODES, "SRE") - - f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE) - f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE) - f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE) - f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE) - f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL) - f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE) - f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE) - f.write("#define SRE_FLAG_DEBUG %d\n" % SRE_FLAG_DEBUG) - f.write("#define SRE_FLAG_ASCII %d\n" % SRE_FLAG_ASCII) - - f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX) - f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL) - f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET) - - f.close() - print("done") diff --git a/Darwin/lib/python3.4/symbol.py b/Darwin/lib/python3.4/symbol.py deleted file mode 100755 index 5cf4a65..0000000 --- a/Darwin/lib/python3.4/symbol.py +++ /dev/null @@ -1,111 +0,0 @@ -#! /usr/bin/env python3 - -"""Non-terminal symbols of Python grammar (from "graminit.h").""" - -# This file is automatically generated; please don't muck it up! 
-# -# To update the symbols in this file, 'cd' to the top directory of -# the python source tree after building the interpreter and run: -# -# ./python Lib/symbol.py - -#--start constants-- -single_input = 256 -file_input = 257 -eval_input = 258 -decorator = 259 -decorators = 260 -decorated = 261 -funcdef = 262 -parameters = 263 -typedargslist = 264 -tfpdef = 265 -varargslist = 266 -vfpdef = 267 -stmt = 268 -simple_stmt = 269 -small_stmt = 270 -expr_stmt = 271 -testlist_star_expr = 272 -augassign = 273 -del_stmt = 274 -pass_stmt = 275 -flow_stmt = 276 -break_stmt = 277 -continue_stmt = 278 -return_stmt = 279 -yield_stmt = 280 -raise_stmt = 281 -import_stmt = 282 -import_name = 283 -import_from = 284 -import_as_name = 285 -dotted_as_name = 286 -import_as_names = 287 -dotted_as_names = 288 -dotted_name = 289 -global_stmt = 290 -nonlocal_stmt = 291 -assert_stmt = 292 -compound_stmt = 293 -if_stmt = 294 -while_stmt = 295 -for_stmt = 296 -try_stmt = 297 -with_stmt = 298 -with_item = 299 -except_clause = 300 -suite = 301 -test = 302 -test_nocond = 303 -lambdef = 304 -lambdef_nocond = 305 -or_test = 306 -and_test = 307 -not_test = 308 -comparison = 309 -comp_op = 310 -star_expr = 311 -expr = 312 -xor_expr = 313 -and_expr = 314 -shift_expr = 315 -arith_expr = 316 -term = 317 -factor = 318 -power = 319 -atom = 320 -testlist_comp = 321 -trailer = 322 -subscriptlist = 323 -subscript = 324 -sliceop = 325 -exprlist = 326 -testlist = 327 -dictorsetmaker = 328 -classdef = 329 -arglist = 330 -argument = 331 -comp_iter = 332 -comp_for = 333 -comp_if = 334 -encoding_decl = 335 -yield_expr = 336 -yield_arg = 337 -#--end constants-- - -sym_name = {} -for _name, _value in list(globals().items()): - if type(_value) is type(0): - sym_name[_value] = _name - - -def _main(): - import sys - import token - if len(sys.argv) == 1: - sys.argv = sys.argv + ["Include/graminit.h", "Lib/symbol.py"] - token._main() - -if __name__ == "__main__": - _main() diff --git a/Darwin/lib/python3.4/tkinter/_fix.py b/Darwin/lib/python3.4/tkinter/_fix.py deleted file mode 100644 index 5f32d25..0000000 --- a/Darwin/lib/python3.4/tkinter/_fix.py +++ /dev/null @@ -1,78 +0,0 @@ -import sys, os - -# Delay import _tkinter until we have set TCL_LIBRARY, -# so that Tcl_FindExecutable has a chance to locate its -# encoding directory. - -# Unfortunately, we cannot know the TCL_LIBRARY directory -# if we don't know the tcl version, which we cannot find out -# without import Tcl. Fortunately, Tcl will itself look in -# \..\tcl, so anything close to -# the real Tcl library will do. - -# Expand symbolic links on Vista -try: - import ctypes - ctypes.windll.kernel32.GetFinalPathNameByHandleW -except (ImportError, AttributeError): - def convert_path(s): - return s -else: - def convert_path(s): - if isinstance(s, bytes): - s = s.decode("mbcs") - hdir = ctypes.windll.kernel32.\ - CreateFileW(s, 0x80, # FILE_READ_ATTRIBUTES - 1, # FILE_SHARE_READ - None, 3, # OPEN_EXISTING - 0x02000000, # FILE_FLAG_BACKUP_SEMANTICS - None) - if hdir == -1: - # Cannot open directory, give up - return s - buf = ctypes.create_unicode_buffer("", 32768) - res = ctypes.windll.kernel32.\ - GetFinalPathNameByHandleW(hdir, buf, len(buf), - 0) # VOLUME_NAME_DOS - ctypes.windll.kernel32.CloseHandle(hdir) - if res == 0: - # Conversion failed (e.g. 
network location) - return s - s = buf[:res] - # Ignore leading \\?\ - if s.startswith("\\\\?\\"): - s = s[4:] - if s.startswith("UNC"): - s = "\\" + s[3:] - return s - -prefix = os.path.join(sys.base_prefix,"tcl") -if not os.path.exists(prefix): - # devdir/../tcltk/lib - prefix = os.path.join(sys.base_prefix, os.path.pardir, "tcltk", "lib") - prefix = os.path.abspath(prefix) -# if this does not exist, no further search is needed -if os.path.exists(prefix): - prefix = convert_path(prefix) - if "TCL_LIBRARY" not in os.environ: - for name in os.listdir(prefix): - if name.startswith("tcl"): - tcldir = os.path.join(prefix,name) - if os.path.isdir(tcldir): - os.environ["TCL_LIBRARY"] = tcldir - # Compute TK_LIBRARY, knowing that it has the same version - # as Tcl - import _tkinter - ver = str(_tkinter.TCL_VERSION) - if "TK_LIBRARY" not in os.environ: - v = os.path.join(prefix, 'tk'+ver) - if os.path.exists(os.path.join(v, "tclIndex")): - os.environ['TK_LIBRARY'] = v - # We don't know the Tix version, so we must search the entire - # directory - if "TIX_LIBRARY" not in os.environ: - for name in os.listdir(prefix): - if name.startswith("tix"): - tixdir = os.path.join(prefix,name) - if os.path.isdir(tixdir): - os.environ["TIX_LIBRARY"] = tixdir diff --git a/Darwin/lib/python3.4/tkinter/test/test_tkinter/test_font.py b/Darwin/lib/python3.4/tkinter/test/test_tkinter/test_font.py deleted file mode 100644 index dfd630b..0000000 --- a/Darwin/lib/python3.4/tkinter/test/test_tkinter/test_font.py +++ /dev/null @@ -1,33 +0,0 @@ -import unittest -import tkinter -from tkinter import font -from test.support import requires, run_unittest -import tkinter.test.support as support - -requires('gui') - -class FontTest(unittest.TestCase): - - def setUp(self): - support.root_deiconify() - - def tearDown(self): - support.root_withdraw() - - def test_font_eq(self): - fontname = "TkDefaultFont" - try: - f = font.Font(name=fontname, exists=True) - except tkinter._tkinter.TclError: - f = font.Font(name=fontname, exists=False) - font1 = font.nametofont(fontname) - font2 = font.nametofont(fontname) - self.assertIsNot(font1, font2) - self.assertEqual(font1, font2) - self.assertNotEqual(font1, font1.copy()) - self.assertNotEqual(font1, 0) - -tests_gui = (FontTest, ) - -if __name__ == "__main__": - run_unittest(*tests_gui) diff --git a/Darwin/lib/python3.4/traceback.py b/Darwin/lib/python3.4/traceback.py deleted file mode 100644 index faf593a..0000000 --- a/Darwin/lib/python3.4/traceback.py +++ /dev/null @@ -1,313 +0,0 @@ -"""Extract, format and print information about Python stack traces.""" - -import linecache -import sys -import operator - -__all__ = ['extract_stack', 'extract_tb', 'format_exception', - 'format_exception_only', 'format_list', 'format_stack', - 'format_tb', 'print_exc', 'format_exc', 'print_exception', - 'print_last', 'print_stack', 'print_tb', - 'clear_frames'] - -# -# Formatting and printing lists of traceback lines. 
-# - -def _format_list_iter(extracted_list): - for filename, lineno, name, line in extracted_list: - item = ' File "{}", line {}, in {}\n'.format(filename, lineno, name) - if line: - item = item + ' {}\n'.format(line.strip()) - yield item - -def print_list(extracted_list, file=None): - """Print the list of tuples as returned by extract_tb() or - extract_stack() as a formatted stack trace to the given file.""" - if file is None: - file = sys.stderr - for item in _format_list_iter(extracted_list): - print(item, file=file, end="") - -def format_list(extracted_list): - """Format a list of traceback entry tuples for printing. - - Given a list of tuples as returned by extract_tb() or - extract_stack(), return a list of strings ready for printing. - Each string in the resulting list corresponds to the item with the - same index in the argument list. Each string ends in a newline; - the strings may contain internal newlines as well, for those items - whose source text line is not None. - """ - return list(_format_list_iter(extracted_list)) - -# -# Printing and Extracting Tracebacks. -# - -# extractor takes curr and needs to return a tuple of: -# - Frame object -# - Line number -# - Next item (same type as curr) -# In practice, curr is either a traceback or a frame. -def _extract_tb_or_stack_iter(curr, limit, extractor): - if limit is None: - limit = getattr(sys, 'tracebacklimit', None) - - n = 0 - while curr is not None and (limit is None or n < limit): - f, lineno, next_item = extractor(curr) - co = f.f_code - filename = co.co_filename - name = co.co_name - - linecache.checkcache(filename) - line = linecache.getline(filename, lineno, f.f_globals) - - if line: - line = line.strip() - else: - line = None - - yield (filename, lineno, name, line) - curr = next_item - n += 1 - -def _extract_tb_iter(tb, limit): - return _extract_tb_or_stack_iter( - tb, limit, - operator.attrgetter("tb_frame", "tb_lineno", "tb_next")) - -def print_tb(tb, limit=None, file=None): - """Print up to 'limit' stack trace entries from the traceback 'tb'. - - If 'limit' is omitted or None, all entries are printed. If 'file' - is omitted or None, the output goes to sys.stderr; otherwise - 'file' should be an open file or file-like object with a write() - method. - """ - print_list(extract_tb(tb, limit=limit), file=file) - -def format_tb(tb, limit=None): - """A shorthand for 'format_list(extract_tb(tb, limit))'.""" - return format_list(extract_tb(tb, limit=limit)) - -def extract_tb(tb, limit=None): - """Return list of up to limit pre-processed entries from traceback. - - This is useful for alternate formatting of stack traces. If - 'limit' is omitted or None, all entries are extracted. A - pre-processed stack trace entry is a quadruple (filename, line - number, function name, text) representing the information that is - usually printed for a stack trace. The text is a string with - leading and trailing whitespace stripped; if the source is not - available it is None. - """ - return list(_extract_tb_iter(tb, limit=limit)) - -# -# Exception formatting and output. 
-# - -_cause_message = ( - "\nThe above exception was the direct cause " - "of the following exception:\n") - -_context_message = ( - "\nDuring handling of the above exception, " - "another exception occurred:\n") - -def _iter_chain(exc, custom_tb=None, seen=None): - if seen is None: - seen = set() - seen.add(exc) - its = [] - context = exc.__context__ - cause = exc.__cause__ - if cause is not None and cause not in seen: - its.append(_iter_chain(cause, False, seen)) - its.append([(_cause_message, None)]) - elif (context is not None and - not exc.__suppress_context__ and - context not in seen): - its.append(_iter_chain(context, None, seen)) - its.append([(_context_message, None)]) - its.append([(exc, custom_tb or exc.__traceback__)]) - # itertools.chain is in an extension module and may be unavailable - for it in its: - yield from it - -def _format_exception_iter(etype, value, tb, limit, chain): - if chain: - values = _iter_chain(value, tb) - else: - values = [(value, tb)] - - for value, tb in values: - if isinstance(value, str): - # This is a cause/context message line - yield value + '\n' - continue - if tb: - yield 'Traceback (most recent call last):\n' - yield from _format_list_iter(_extract_tb_iter(tb, limit=limit)) - yield from _format_exception_only_iter(type(value), value) - -def print_exception(etype, value, tb, limit=None, file=None, chain=True): - """Print exception up to 'limit' stack trace entries from 'tb' to 'file'. - - This differs from print_tb() in the following ways: (1) if - traceback is not None, it prints a header "Traceback (most recent - call last):"; (2) it prints the exception type and value after the - stack trace; (3) if type is SyntaxError and value has the - appropriate format, it prints the line where the syntax error - occurred with a caret on the next line indicating the approximate - position of the error. - """ - if file is None: - file = sys.stderr - for line in _format_exception_iter(etype, value, tb, limit, chain): - print(line, file=file, end="") - -def format_exception(etype, value, tb, limit=None, chain=True): - """Format a stack trace and the exception information. - - The arguments have the same meaning as the corresponding arguments - to print_exception(). The return value is a list of strings, each - ending in a newline and some containing internal newlines. When - these lines are concatenated and printed, exactly the same text is - printed as does print_exception(). - """ - return list(_format_exception_iter(etype, value, tb, limit, chain)) - -def format_exception_only(etype, value): - """Format the exception part of a traceback. - - The arguments are the exception type and value such as given by - sys.last_type and sys.last_value. The return value is a list of - strings, each ending in a newline. - - Normally, the list contains a single string; however, for - SyntaxError exceptions, it contains several lines that (when - printed) display detailed information about where the syntax - error occurred. - - The message indicating which exception occurred is always the last - string in the list. - - """ - return list(_format_exception_only_iter(etype, value)) - -def _format_exception_only_iter(etype, value): - # Gracefully handle (the way Python 2.4 and earlier did) the case of - # being called with (None, None). - if etype is None: - yield _format_final_exc_line(etype, value) - return - - stype = etype.__name__ - smod = etype.__module__ - if smod not in ("__main__", "builtins"): - stype = smod + '.' 
+ stype - - if not issubclass(etype, SyntaxError): - yield _format_final_exc_line(stype, value) - return - - # It was a syntax error; show exactly where the problem was found. - filename = value.filename or "" - lineno = str(value.lineno) or '?' - yield ' File "{}", line {}\n'.format(filename, lineno) - - badline = value.text - offset = value.offset - if badline is not None: - yield ' {}\n'.format(badline.strip()) - if offset is not None: - caretspace = badline.rstrip('\n') - offset = min(len(caretspace), offset) - 1 - caretspace = caretspace[:offset].lstrip() - # non-space whitespace (likes tabs) must be kept for alignment - caretspace = ((c.isspace() and c or ' ') for c in caretspace) - yield ' {}^\n'.format(''.join(caretspace)) - msg = value.msg or "" - yield "{}: {}\n".format(stype, msg) - -def _format_final_exc_line(etype, value): - valuestr = _some_str(value) - if value is None or not valuestr: - line = "%s\n" % etype - else: - line = "%s: %s\n" % (etype, valuestr) - return line - -def _some_str(value): - try: - return str(value) - except: - return '' % type(value).__name__ - -def print_exc(limit=None, file=None, chain=True): - """Shorthand for 'print_exception(*sys.exc_info(), limit, file)'.""" - print_exception(*sys.exc_info(), limit=limit, file=file, chain=chain) - -def format_exc(limit=None, chain=True): - """Like print_exc() but return a string.""" - return "".join(format_exception(*sys.exc_info(), limit=limit, chain=chain)) - -def print_last(limit=None, file=None, chain=True): - """This is a shorthand for 'print_exception(sys.last_type, - sys.last_value, sys.last_traceback, limit, file)'.""" - if not hasattr(sys, "last_type"): - raise ValueError("no last exception") - print_exception(sys.last_type, sys.last_value, sys.last_traceback, - limit, file, chain) - -# -# Printing and Extracting Stacks. -# - -def _extract_stack_iter(f, limit=None): - return _extract_tb_or_stack_iter( - f, limit, lambda f: (f, f.f_lineno, f.f_back)) - -def _get_stack(f): - if f is None: - f = sys._getframe().f_back.f_back - return f - -def print_stack(f=None, limit=None, file=None): - """Print a stack trace from its invocation point. - - The optional 'f' argument can be used to specify an alternate - stack frame at which to start. The optional 'limit' and 'file' - arguments have the same meaning as for print_exception(). - """ - print_list(extract_stack(_get_stack(f), limit=limit), file=file) - -def format_stack(f=None, limit=None): - """Shorthand for 'format_list(extract_stack(f, limit))'.""" - return format_list(extract_stack(_get_stack(f), limit=limit)) - -def extract_stack(f=None, limit=None): - """Extract the raw traceback from the current stack frame. - - The return value has the same format as for extract_tb(). The - optional 'f' and 'limit' arguments have the same meaning as for - print_stack(). Each item in the list is a quadruple (filename, - line number, function name, text), and the entries are in order - from oldest to newest stack frame. - """ - stack = list(_extract_stack_iter(_get_stack(f), limit=limit)) - stack.reverse() - return stack - -def clear_frames(tb): - "Clear all references to local variables in the frames of a traceback." - while tb is not None: - try: - tb.tb_frame.clear() - except RuntimeError: - # Ignore the exception raised if the frame is still executing. 
- pass - tb = tb.tb_next diff --git a/Darwin/lib/python3.4/turtledemo/__main__.py b/Darwin/lib/python3.4/turtledemo/__main__.py deleted file mode 100755 index a14684c..0000000 --- a/Darwin/lib/python3.4/turtledemo/__main__.py +++ /dev/null @@ -1,266 +0,0 @@ -#!/usr/bin/env python3 -import sys -import os - -from tkinter import * -from idlelib.Percolator import Percolator -from idlelib.ColorDelegator import ColorDelegator -from idlelib.textView import view_file # TextViewer -from importlib import reload - -import turtle -import time - -demo_dir = os.path.dirname(os.path.abspath(__file__)) - -STARTUP = 1 -READY = 2 -RUNNING = 3 -DONE = 4 -EVENTDRIVEN = 5 - -menufont = ("Arial", 12, NORMAL) -btnfont = ("Arial", 12, 'bold') -txtfont = ('Lucida Console', 8, 'normal') - -def getExampleEntries(): - return [entry[:-3] for entry in os.listdir(demo_dir) if - entry.endswith(".py") and entry[0] != '_'] - -def showDemoHelp(): - view_file(demo.root, "Help on turtleDemo", - os.path.join(demo_dir, "demohelp.txt")) - -def showAboutDemo(): - view_file(demo.root, "About turtleDemo", - os.path.join(demo_dir, "about_turtledemo.txt")) - -def showAboutTurtle(): - view_file(demo.root, "About the new turtle module.", - os.path.join(demo_dir, "about_turtle.txt")) - -class DemoWindow(object): - - def __init__(self, filename=None): #, root=None): - self.root = root = turtle._root = Tk() - root.wm_protocol("WM_DELETE_WINDOW", self._destroy) - - ################# - self.mBar = Frame(root, relief=RAISED, borderwidth=2) - self.mBar.pack(fill=X) - - self.ExamplesBtn = self.makeLoadDemoMenu() - self.OptionsBtn = self.makeHelpMenu() - self.mBar.tk_menuBar(self.ExamplesBtn, self.OptionsBtn) #, QuitBtn) - - root.title('Python turtle-graphics examples') - ################# - self.left_frame = left_frame = Frame(root) - self.text_frame = text_frame = Frame(left_frame) - self.vbar = vbar =Scrollbar(text_frame, name='vbar') - self.text = text = Text(text_frame, - name='text', padx=5, wrap='none', - width=45) - vbar['command'] = text.yview - vbar.pack(side=LEFT, fill=Y) - ##################### - self.hbar = hbar =Scrollbar(text_frame, name='hbar', orient=HORIZONTAL) - hbar['command'] = text.xview - hbar.pack(side=BOTTOM, fill=X) - ##################### - text['yscrollcommand'] = vbar.set - text.config(font=txtfont) - text.config(xscrollcommand=hbar.set) - text.pack(side=LEFT, fill=Y, expand=1) - ##################### - self.output_lbl = Label(left_frame, height= 1,text=" --- ", bg = "#ddf", - font = ("Arial", 16, 'normal')) - self.output_lbl.pack(side=BOTTOM, expand=0, fill=X) - ##################### - text_frame.pack(side=LEFT, fill=BOTH, expand=0) - left_frame.pack(side=LEFT, fill=BOTH, expand=0) - self.graph_frame = g_frame = Frame(root) - - turtle._Screen._root = g_frame - turtle._Screen._canvas = turtle.ScrolledCanvas(g_frame, 800, 600, 1000, 800) - #xturtle.Screen._canvas.pack(expand=1, fill="both") - self.screen = _s_ = turtle.Screen() -##### - turtle.TurtleScreen.__init__(_s_, _s_._canvas) -##### - self.scanvas = _s_._canvas - #xturtle.RawTurtle.canvases = [self.scanvas] - turtle.RawTurtle.screens = [_s_] - - self.scanvas.pack(side=TOP, fill=BOTH, expand=1) - - self.btn_frame = btn_frame = Frame(g_frame, height=100) - self.start_btn = Button(btn_frame, text=" START ", font=btnfont, fg = "white", - disabledforeground = "#fed", command=self.startDemo) - self.start_btn.pack(side=LEFT, fill=X, expand=1) - self.stop_btn = Button(btn_frame, text=" STOP ", font=btnfont, fg = "white", - disabledforeground = "#fed", command = 
self.stopIt) - self.stop_btn.pack(side=LEFT, fill=X, expand=1) - self.clear_btn = Button(btn_frame, text=" CLEAR ", font=btnfont, fg = "white", - disabledforeground = "#fed", command = self.clearCanvas) - self.clear_btn.pack(side=LEFT, fill=X, expand=1) - - self.btn_frame.pack(side=TOP, fill=BOTH, expand=0) - self.graph_frame.pack(side=TOP, fill=BOTH, expand=1) - - Percolator(text).insertfilter(ColorDelegator()) - self.dirty = False - self.exitflag = False - if filename: - self.loadfile(filename) - self.configGUI(NORMAL, DISABLED, DISABLED, DISABLED, - "Choose example from menu", "black") - self.state = STARTUP - - def _destroy(self): - self.root.destroy() - sys.exit() - - def configGUI(self, menu, start, stop, clear, txt="", color="blue"): - self.ExamplesBtn.config(state=menu) - - self.start_btn.config(state=start) - if start == NORMAL: - self.start_btn.config(bg="#d00") - else: - self.start_btn.config(bg="#fca") - - self.stop_btn.config(state=stop) - if stop == NORMAL: - self.stop_btn.config(bg="#d00") - else: - self.stop_btn.config(bg="#fca") - self.clear_btn.config(state=clear) - - self.clear_btn.config(state=clear) - if clear == NORMAL: - self.clear_btn.config(bg="#d00") - else: - self.clear_btn.config(bg="#fca") - - self.output_lbl.config(text=txt, fg=color) - - - def makeLoadDemoMenu(self): - CmdBtn = Menubutton(self.mBar, text='Examples', underline=0, font=menufont) - CmdBtn.pack(side=LEFT, padx="2m") - CmdBtn.menu = Menu(CmdBtn) - - for entry in getExampleEntries(): - def loadexample(x): - def emit(): - self.loadfile(x) - return emit - CmdBtn.menu.add_command(label=entry, underline=0, - font=menufont, command=loadexample(entry)) - - CmdBtn['menu'] = CmdBtn.menu - return CmdBtn - - def makeHelpMenu(self): - CmdBtn = Menubutton(self.mBar, text='Help', underline=0, font=menufont) - CmdBtn.pack(side=LEFT, padx='2m') - CmdBtn.menu = Menu(CmdBtn) - - CmdBtn.menu.add_command(label='About turtle.py', font=menufont, - command=showAboutTurtle) - CmdBtn.menu.add_command(label='turtleDemo - Help', font=menufont, - command=showDemoHelp) - CmdBtn.menu.add_command(label='About turtleDemo', font=menufont, - command=showAboutDemo) - - CmdBtn['menu'] = CmdBtn.menu - return CmdBtn - - def refreshCanvas(self): - if not self.dirty: return - self.screen.clear() - #self.screen.mode("standard") - self.dirty=False - - def loadfile(self, filename): - self.refreshCanvas() - modname = 'turtledemo.' + filename - __import__(modname) - self.module = sys.modules[modname] - with open(self.module.__file__, 'r') as f: - chars = f.read() - self.text.delete("1.0", "end") - self.text.insert("1.0", chars) - self.root.title(filename + " - a Python turtle graphics example") - reload(self.module) - self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED, - "Press start button", "red") - self.state = READY - - def startDemo(self): - self.refreshCanvas() - self.dirty = True - turtle.TurtleScreen._RUNNING = True - self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED, - "demo running...", "black") - self.screen.clear() - self.screen.mode("standard") - self.state = RUNNING - - try: - result = self.module.main() - if result == "EVENTLOOP": - self.state = EVENTDRIVEN - else: - self.state = DONE - except turtle.Terminator: - self.state = DONE - result = "stopped!" 
- if self.state == DONE: - self.configGUI(NORMAL, NORMAL, DISABLED, NORMAL, - result) - elif self.state == EVENTDRIVEN: - self.exitflag = True - self.configGUI(DISABLED, DISABLED, NORMAL, DISABLED, - "use mouse/keys or STOP", "red") - - def clearCanvas(self): - self.refreshCanvas() - self.screen._delete("all") - self.scanvas.config(cursor="") - self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED) - - def stopIt(self): - if self.exitflag: - self.clearCanvas() - self.exitflag = False - self.configGUI(NORMAL, NORMAL, DISABLED, DISABLED, - "STOPPED!", "red") - turtle.TurtleScreen._RUNNING = False - #print "stopIT: exitflag = True" - else: - turtle.TurtleScreen._RUNNING = False - #print "stopIt: exitflag = False" - -if __name__ == '__main__': - demo = DemoWindow() - RUN = True - while RUN: - try: - #print("ENTERING mainloop") - demo.root.mainloop() - except AttributeError: - #print("AttributeError!- WAIT A MOMENT!") - time.sleep(0.3) - print("GOING ON ..") - demo.ckearCanvas() - except TypeError: - demo.screen._delete("all") - #print("CRASH!!!- WAIT A MOMENT!") - time.sleep(0.3) - #print("GOING ON ..") - demo.clearCanvas() - except: - print("BYE!") - RUN = False diff --git a/Darwin/lib/python3.4/turtledemo/about_turtle.txt b/Darwin/lib/python3.4/turtledemo/about_turtle.txt deleted file mode 100644 index d02c7b3..0000000 --- a/Darwin/lib/python3.4/turtledemo/about_turtle.txt +++ /dev/null @@ -1,76 +0,0 @@ - -======================================================== - A new turtle module for Python -======================================================== - -Turtle graphics is a popular way for introducing programming to -kids. It was part of the original Logo programming language developed -by Wally Feurzig and Seymour Papert in 1966. - -Imagine a robotic turtle starting at (0, 0) in the x-y plane. After an ``import turtle``, give it -the command turtle.forward(15), and it moves (on-screen!) 15 pixels in -the direction it is facing, drawing a line as it moves. Give it the -command turtle.right(25), and it rotates in-place 25 degrees clockwise. - -By combining together these and similar commands, intricate shapes and -pictures can easily be drawn. - ------ turtle.py - -This module is an extended reimplementation of turtle.py from the -Python standard distribution up to Python 2.5. (See: http:\\www.python.org) - -It tries to keep the merits of turtle.py and to be (nearly) 100% -compatible with it. This means in the first place to enable the -learning programmer to use all the commands, classes and methods -interactively when using the module from within IDLE run with -the -n switch. - -Roughly it has the following features added: - -- Better animation of the turtle movements, especially of turning the - turtle. So the turtles can more easily be used as a visual feedback - instrument by the (beginning) programmer. - -- Different turtle shapes, gif-images as turtle shapes, user defined - and user controllable turtle shapes, among them compound - (multicolored) shapes. Turtle shapes can be stgretched and tilted, which - makes turtles zu very versatile geometrical objects. - -- Fine control over turtle movement and screen updates via delay(), - and enhanced tracer() and speed() methods. - -- Aliases for the most commonly used commands, like fd for forward etc., - following the early Logo traditions. This reduces the boring work of - typing long sequences of commands, which often occur in a natural way - when kids try to program fancy pictures on their first encounter with - turtle graphcis. 
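The feature list above mentions the Logo-style aliases such as fd and lt. A tiny sketch of what they look like in use; it assumes a Tk display is available, and the pentagon is an arbitrary choice.

import turtle

t = turtle.Turtle()
t.shape("turtle")
for _ in range(5):    # draw a pentagon with the short aliases
    t.fd(80)          # alias for forward(80)
    t.lt(72)          # alias for left(72)
turtle.done()         # enter the Tk main loop until the window is closed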
- -- Turtles now have an undo()-method with configurable undo-buffer. - -- Some simple commands/methods for creating event driven programs - (mouse-, key-, timer-events). Especially useful for programming games. - -- A scrollable Canvas class. The default scrollable Canvas can be - extended interactively as needed while playing around with the turtle(s). - -- A TurtleScreen class with methods controlling background color or - background image, window and canvas size and other properties of the - TurtleScreen. - -- There is a method, setworldcoordinates(), to install a user defined - coordinate-system for the TurtleScreen. - -- The implementation uses a 2-vector class named Vec2D, derived from tuple. - This class is public, so it can be imported by the application programmer, - which makes certain types of computations very natural and compact. - -- Appearance of the TurtleScreen and the Turtles at startup/import can be - configured by means of a turtle.cfg configuration file. - The default configuration mimics the appearance of the old turtle module. - -- If configured appropriately the module reads in docstrings from a docstring - dictionary in some different language, supplied separately and replaces - the english ones by those read in. There is a utility function - write_docstringdict() to write a dictionary with the original (english) - docstrings to disc, so it can serve as a template for translations. diff --git a/Darwin/lib/python3.4/turtledemo/demohelp.txt b/Darwin/lib/python3.4/turtledemo/demohelp.txt deleted file mode 100644 index fe83bc7..0000000 --- a/Darwin/lib/python3.4/turtledemo/demohelp.txt +++ /dev/null @@ -1,70 +0,0 @@ - - - ---------------------------------------------- - - turtleDemo - Help - - ---------------------------------------------- - - This document has two sections: - - (1) How to use the demo viewer - (2) How to add your own demos to the demo repository - - - (1) How to use the demo viewer. - - Select a demoscript from the example menu. - The (syntax coloured) source code appears in the left - source code window. IT CANNOT BE EDITED, but ONLY VIEWED! - - - Press START button to start the demo. - - Stop execution by pressing the STOP button. - - Clear screen by pressing the CLEAR button. - - Restart by pressing the START button again. - - SPECIAL demos are those which run EVENTDRIVEN. - (For example clock.py - or oldTurtleDemo.py which - in the end expects a mouse click.): - - Press START button to start the demo. - - - Until the EVENTLOOP is entered everything works - as in an ordinary demo script. - - - When the EVENTLOOP is entered, you control the - application by using the mouse and/or keys (or it's - controlled by some timer events) - To stop it you can and must press the STOP button. - - While the EVENTLOOP is running, the examples menu is disabled. - - - Only after having pressed the STOP button, you may - restart it or choose another example script. - - * * * * * * * * - In some rare situations there may occur interferences/conflicts - between events concerning the demo script and those concerning the - demo-viewer. (They run in the same process.) Strange behaviour may be - the consequence and in the worst case you must close and restart the - viewer. 
- * * * * * * * * - - - (2) How to add your own demos to the demo repository - - - place: same directory as turtledemo/__main__.py - - - requirements on source code: - code must contain a main() function which will - be executed by the viewer (see provided example scripts) - main() may return a string which will be displayed - in the Label below the source code window (when execution - has finished.) - - !! For programs, which are EVENT DRIVEN, main must return - !! the string "EVENTLOOP". This informs the viewer, that the - !! script is still running and must be stopped by the user! - - - diff --git a/Darwin/lib/python3.4/turtledemo/two_canvases.py b/Darwin/lib/python3.4/turtledemo/two_canvases.py deleted file mode 100755 index 02d89db..0000000 --- a/Darwin/lib/python3.4/turtledemo/two_canvases.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/env python3 -## DEMONSTRATES USE OF 2 CANVASES, SO CANNOT BE RUN IN DEMOVIEWER! -"""turtle example: Using TurtleScreen and RawTurtle -for drawing on two distinct canvases. -""" -from turtle import TurtleScreen, RawTurtle, TK - -root = TK.Tk() -cv1 = TK.Canvas(root, width=300, height=200, bg="#ddffff") -cv2 = TK.Canvas(root, width=300, height=200, bg="#ffeeee") -cv1.pack() -cv2.pack() - -s1 = TurtleScreen(cv1) -s1.bgcolor(0.85, 0.85, 1) -s2 = TurtleScreen(cv2) -s2.bgcolor(1, 0.85, 0.85) - -p = RawTurtle(s1) -q = RawTurtle(s2) - -p.color("red", (1, 0.85, 0.85)) -p.width(3) -q.color("blue", (0.85, 0.85, 1)) -q.width(3) - -for t in p,q: - t.shape("turtle") - t.lt(36) - -q.lt(180) - -for t in p, q: - t.begin_fill() -for i in range(5): - for t in p, q: - t.fd(50) - t.lt(72) -for t in p,q: - t.end_fill() - t.lt(54) - t.pu() - t.bk(50) - -## Want to get some info? - -#print(s1, s2) -#print(p, q) -#print(s1.turtles()) -#print(s2.turtles()) - -TK.mainloop() diff --git a/Darwin/lib/python3.4/xmlrpc/__init__.py b/Darwin/lib/python3.4/xmlrpc/__init__.py deleted file mode 100644 index 196d378..0000000 --- a/Darwin/lib/python3.4/xmlrpc/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# This directory is a Python package. diff --git a/Darwin/lib/python3.4/LICENSE.txt b/Darwin/lib/python3.5/LICENSE.txt similarity index 98% rename from Darwin/lib/python3.4/LICENSE.txt rename to Darwin/lib/python3.5/LICENSE.txt index 583f9f6..88251f5 100644 --- a/Darwin/lib/python3.4/LICENSE.txt +++ b/Darwin/lib/python3.5/LICENSE.txt @@ -74,8 +74,8 @@ analyze, test, perform and/or display publicly, prepare derivative works, distribute, and otherwise use Python alone or in any derivative version, provided, however, that PSF's License Agreement and PSF's notice of copyright, i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are -retained in Python alone or in any derivative version prepared by Licensee. +2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. 3. 
In the event Licensee prepares a derivative work that is based on or incorporates Python or any part thereof, and wants to make diff --git a/Darwin/lib/python3.4/__future__.py b/Darwin/lib/python3.5/__future__.py similarity index 94% rename from Darwin/lib/python3.4/__future__.py rename to Darwin/lib/python3.5/__future__.py index 3b2d5ec..63b2be3 100644 --- a/Darwin/lib/python3.4/__future__.py +++ b/Darwin/lib/python3.5/__future__.py @@ -56,6 +56,7 @@ all_feature_names = [ "print_function", "unicode_literals", "barry_as_FLUFL", + "generator_stop", ] __all__ = ["all_feature_names"] + all_feature_names @@ -72,6 +73,7 @@ CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement CO_FUTURE_PRINT_FUNCTION = 0x10000 # print function CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals CO_FUTURE_BARRY_AS_BDFL = 0x40000 +CO_FUTURE_GENERATOR_STOP = 0x80000 # StopIteration becomes RuntimeError in generators class _Feature: def __init__(self, optionalRelease, mandatoryRelease, compiler_flag): @@ -132,3 +134,7 @@ unicode_literals = _Feature((2, 6, 0, "alpha", 2), barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2), (3, 9, 0, "alpha", 0), CO_FUTURE_BARRY_AS_BDFL) + +generator_stop = _Feature((3, 5, 0, "beta", 1), + (3, 7, 0, "alpha", 0), + CO_FUTURE_GENERATOR_STOP) diff --git a/Darwin/lib/python3.4/__phello__.foo.py b/Darwin/lib/python3.5/__phello__.foo.py similarity index 100% rename from Darwin/lib/python3.4/__phello__.foo.py rename to Darwin/lib/python3.5/__phello__.foo.py diff --git a/Darwin/lib/python3.4/_bootlocale.py b/Darwin/lib/python3.5/_bootlocale.py similarity index 100% rename from Darwin/lib/python3.4/_bootlocale.py rename to Darwin/lib/python3.5/_bootlocale.py diff --git a/Darwin/lib/python3.4/_collections_abc.py b/Darwin/lib/python3.5/_collections_abc.py similarity index 74% rename from Darwin/lib/python3.4/_collections_abc.py rename to Darwin/lib/python3.5/_collections_abc.py index faa1ff2..f89bb6f 100644 --- a/Darwin/lib/python3.4/_collections_abc.py +++ b/Darwin/lib/python3.5/_collections_abc.py @@ -9,7 +9,8 @@ Unit tests are in test_collections. from abc import ABCMeta, abstractmethod import sys -__all__ = ["Hashable", "Iterable", "Iterator", +__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", + "Hashable", "Iterable", "Iterator", "Generator", "Sized", "Container", "Callable", "Set", "MutableSet", "Mapping", "MutableMapping", @@ -50,6 +51,13 @@ dict_values = type({}.values()) dict_items = type({}.items()) ## misc ## mappingproxy = type(type.__dict__) +generator = type((lambda: (yield))()) +## coroutine ## +async def _coro(): pass +_coro = _coro() +coroutine = type(_coro) +_coro.close() # Prevent ResourceWarning +del _coro ### ONE-TRICK PONIES ### @@ -73,6 +81,113 @@ class Hashable(metaclass=ABCMeta): return NotImplemented +class Awaitable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + def __await__(self): + yield + + @classmethod + def __subclasshook__(cls, C): + if cls is Awaitable: + for B in C.__mro__: + if "__await__" in B.__dict__: + if B.__dict__["__await__"]: + return True + break + return NotImplemented + + +class Coroutine(Awaitable): + + __slots__ = () + + @abstractmethod + def send(self, value): + """Send a value into the coroutine. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the coroutine. + Return next yielded value or raise StopIteration. 
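Among the renamed files above, __future__.py gains the generator_stop feature: with it enabled, a StopIteration that escapes a generator body is turned into a RuntimeError instead of silently ending iteration (PEP 479). A minimal sketch of the difference:

from __future__ import generator_stop

def gen():
    yield 1
    raise StopIteration       # without generator_stop this would simply end the generator

g = gen()
print(next(g))                # 1
try:
    next(g)
except RuntimeError as exc:   # StopIteration was converted, as the new flag promises
    print("converted:", exc)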
+ """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside coroutine. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("coroutine ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Coroutine: + mro = C.__mro__ + for method in ('__await__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Coroutine.register(coroutine) + + +class AsyncIterable(metaclass=ABCMeta): + + __slots__ = () + + @abstractmethod + async def __aiter__(self): + return AsyncIterator() + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterable: + if any("__aiter__" in B.__dict__ for B in C.__mro__): + return True + return NotImplemented + + +class AsyncIterator(AsyncIterable): + + __slots__ = () + + @abstractmethod + async def __anext__(self): + """Return the next item or raise StopAsyncIteration when exhausted.""" + raise StopAsyncIteration + + async def __aiter__(self): + return self + + @classmethod + def __subclasshook__(cls, C): + if cls is AsyncIterator: + if (any("__anext__" in B.__dict__ for B in C.__mro__) and + any("__aiter__" in B.__dict__ for B in C.__mro__)): + return True + return NotImplemented + + class Iterable(metaclass=ABCMeta): __slots__ = () @@ -124,6 +239,64 @@ Iterator.register(str_iterator) Iterator.register(tuple_iterator) Iterator.register(zip_iterator) + +class Generator(Iterator): + + __slots__ = () + + def __next__(self): + """Return the next item from the generator. + When exhausted, raise StopIteration. + """ + return self.send(None) + + @abstractmethod + def send(self, value): + """Send a value into the generator. + Return next yielded value or raise StopIteration. + """ + raise StopIteration + + @abstractmethod + def throw(self, typ, val=None, tb=None): + """Raise an exception in the generator. + Return next yielded value or raise StopIteration. + """ + if val is None: + if tb is None: + raise typ + val = typ() + if tb is not None: + val = val.with_traceback(tb) + raise val + + def close(self): + """Raise GeneratorExit inside generator. + """ + try: + self.throw(GeneratorExit) + except (GeneratorExit, StopIteration): + pass + else: + raise RuntimeError("generator ignored GeneratorExit") + + @classmethod + def __subclasshook__(cls, C): + if cls is Generator: + mro = C.__mro__ + for method in ('__iter__', '__next__', 'send', 'throw', 'close'): + for base in mro: + if method in base.__dict__: + break + else: + return NotImplemented + return True + return NotImplemented + + +Generator.register(generator) + + class Sized(metaclass=ABCMeta): __slots__ = () @@ -183,7 +356,7 @@ class Set(Sized, Iterable, Container): methods except for __contains__, __iter__ and __len__. To override the comparisons (presumably for speed, as the - semantics are fixed), all you have to do is redefine __le__ and + semantics are fixed), redefine __le__ and __ge__, then the other operations will automatically follow suit. 
""" @@ -207,21 +380,23 @@ class Set(Sized, Iterable, Container): def __gt__(self, other): if not isinstance(other, Set): return NotImplemented - return other.__lt__(self) + return len(self) > len(other) and self.__ge__(other) def __ge__(self, other): if not isinstance(other, Set): return NotImplemented - return other.__le__(self) + if len(self) < len(other): + return False + for elem in other: + if elem not in self: + return False + return True def __eq__(self, other): if not isinstance(other, Set): return NotImplemented return len(self) == len(other) and self.__le__(other) - def __ne__(self, other): - return not (self == other) - @classmethod def _from_iterable(cls, it): '''Construct an instance of the class from any iterable input. @@ -236,6 +411,8 @@ class Set(Sized, Iterable, Container): return NotImplemented return self._from_iterable(value for value in other if value in self) + __rand__ = __and__ + def isdisjoint(self, other): 'Return True if two sets have a null intersection.' for value in other: @@ -249,6 +426,8 @@ class Set(Sized, Iterable, Container): chain = (e for s in (self, other) for e in s) return self._from_iterable(chain) + __ror__ = __or__ + def __sub__(self, other): if not isinstance(other, Set): if not isinstance(other, Iterable): @@ -257,6 +436,14 @@ class Set(Sized, Iterable, Container): return self._from_iterable(value for value in self if value not in other) + def __rsub__(self, other): + if not isinstance(other, Set): + if not isinstance(other, Iterable): + return NotImplemented + other = self._from_iterable(other) + return self._from_iterable(value for value in other + if value not in self) + def __xor__(self, other): if not isinstance(other, Set): if not isinstance(other, Iterable): @@ -264,6 +451,8 @@ class Set(Sized, Iterable, Container): other = self._from_iterable(other) return (self - other) | (other - self) + __rxor__ = __xor__ + def _hash(self): """Compute the hash value of a set. 
@@ -432,14 +621,13 @@ class Mapping(Sized, Iterable, Container): return NotImplemented return dict(self.items()) == dict(other.items()) - def __ne__(self, other): - return not (self == other) - Mapping.register(mappingproxy) class MappingView(Sized): + __slots__ = '_mapping', + def __init__(self, mapping): self._mapping = mapping @@ -452,6 +640,8 @@ class MappingView(Sized): class KeysView(MappingView, Set): + __slots__ = () + @classmethod def _from_iterable(self, it): return set(it) @@ -467,6 +657,8 @@ KeysView.register(dict_keys) class ItemsView(MappingView, Set): + __slots__ = () + @classmethod def _from_iterable(self, it): return set(it) @@ -489,6 +681,8 @@ ItemsView.register(dict_items) class ValuesView(MappingView): + __slots__ = () + def __contains__(self, value): for key in self._mapping: if value == self._mapping[key]: @@ -565,23 +759,24 @@ class MutableMapping(Mapping): If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v In either case, this is followed by: for k, v in F.items(): D[k] = v ''' - if len(args) > 2: - raise TypeError("update() takes at most 2 positional " - "arguments ({} given)".format(len(args))) - elif not args: - raise TypeError("update() takes at least 1 argument (0 given)") - self = args[0] - other = args[1] if len(args) >= 2 else () - - if isinstance(other, Mapping): - for key in other: - self[key] = other[key] - elif hasattr(other, "keys"): - for key in other.keys(): - self[key] = other[key] - else: - for key, value in other: - self[key] = value + if not args: + raise TypeError("descriptor 'update' of 'MutableMapping' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('update expected at most 1 arguments, got %d' % + len(args)) + if args: + other = args[0] + if isinstance(other, Mapping): + for key in other: + self[key] = other[key] + elif hasattr(other, "keys"): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value for key, value in kwds.items(): self[key] = value @@ -633,13 +828,23 @@ class Sequence(Sized, Iterable, Container): for i in reversed(range(len(self))): yield self[i] - def index(self, value): - '''S.index(value) -> integer -- return first index of value. + def index(self, value, start=0, stop=None): + '''S.index(value, [start, [stop]]) -> integer -- return first index of value. Raises ValueError if the value is not present. ''' - for i, v in enumerate(self): - if v == value: - return i + if start is not None and start < 0: + start = max(len(self) + start, 0) + if stop is not None and stop < 0: + stop += len(self) + + i = start + while stop is None or i < stop: + try: + if self[i] == value: + return i + except IndexError: + break + i += 1 raise ValueError def count(self, value): diff --git a/Darwin/lib/python3.5/_compat_pickle.py b/Darwin/lib/python3.5/_compat_pickle.py new file mode 100644 index 0000000..6e39d4a --- /dev/null +++ b/Darwin/lib/python3.5/_compat_pickle.py @@ -0,0 +1,237 @@ +# This module is used to map the old Python 2 names to the new names used in +# Python 3 for the pickle module. This needed to make pickle streams +# generated with Python 2 loadable by Python 3. + +# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import +# lib2to3 and use the mapping defined there, because lib2to3 uses pickle. +# Thus, this could cause the module to be imported recursively. 
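Sequence.index() above grows optional start and stop arguments (mirroring list.index), implemented by probing __getitem__ until IndexError. A small sketch with a home-grown Sequence; the Ring name is invented for the example.

from collections.abc import Sequence

class Ring(Sequence):
    def __init__(self, data):
        self._data = tuple(data)
    def __getitem__(self, index):
        return self._data[index]
    def __len__(self):
        return len(self._data)

r = Ring("abcabc")
print(r.index("b"))         # 1
print(r.index("b", 2))      # 4, searching only from position 2 onwards
print(r.index("c", 2, 4))   # 2, stop bounds the search window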
+IMPORT_MAPPING = { + '__builtin__' : 'builtins', + 'copy_reg': 'copyreg', + 'Queue': 'queue', + 'SocketServer': 'socketserver', + 'ConfigParser': 'configparser', + 'repr': 'reprlib', + 'tkFileDialog': 'tkinter.filedialog', + 'tkSimpleDialog': 'tkinter.simpledialog', + 'tkColorChooser': 'tkinter.colorchooser', + 'tkCommonDialog': 'tkinter.commondialog', + 'Dialog': 'tkinter.dialog', + 'Tkdnd': 'tkinter.dnd', + 'tkFont': 'tkinter.font', + 'tkMessageBox': 'tkinter.messagebox', + 'ScrolledText': 'tkinter.scrolledtext', + 'Tkconstants': 'tkinter.constants', + 'Tix': 'tkinter.tix', + 'ttk': 'tkinter.ttk', + 'Tkinter': 'tkinter', + 'markupbase': '_markupbase', + '_winreg': 'winreg', + 'thread': '_thread', + 'dummy_thread': '_dummy_thread', + 'dbhash': 'dbm.bsd', + 'dumbdbm': 'dbm.dumb', + 'dbm': 'dbm.ndbm', + 'gdbm': 'dbm.gnu', + 'xmlrpclib': 'xmlrpc.client', + 'SimpleXMLRPCServer': 'xmlrpc.server', + 'httplib': 'http.client', + 'htmlentitydefs' : 'html.entities', + 'HTMLParser' : 'html.parser', + 'Cookie': 'http.cookies', + 'cookielib': 'http.cookiejar', + 'BaseHTTPServer': 'http.server', + 'test.test_support': 'test.support', + 'commands': 'subprocess', + 'urlparse' : 'urllib.parse', + 'robotparser' : 'urllib.robotparser', + 'urllib2': 'urllib.request', + 'anydbm': 'dbm', + '_abcoll' : 'collections.abc', +} + + +# This contains rename rules that are easy to handle. We ignore the more +# complex stuff (e.g. mapping the names in the urllib and types modules). +# These rules should be run before import names are fixed. +NAME_MAPPING = { + ('__builtin__', 'xrange'): ('builtins', 'range'), + ('__builtin__', 'reduce'): ('functools', 'reduce'), + ('__builtin__', 'intern'): ('sys', 'intern'), + ('__builtin__', 'unichr'): ('builtins', 'chr'), + ('__builtin__', 'unicode'): ('builtins', 'str'), + ('__builtin__', 'long'): ('builtins', 'int'), + ('itertools', 'izip'): ('builtins', 'zip'), + ('itertools', 'imap'): ('builtins', 'map'), + ('itertools', 'ifilter'): ('builtins', 'filter'), + ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'), + ('itertools', 'izip_longest'): ('itertools', 'zip_longest'), + ('UserDict', 'IterableUserDict'): ('collections', 'UserDict'), + ('UserList', 'UserList'): ('collections', 'UserList'), + ('UserString', 'UserString'): ('collections', 'UserString'), + ('whichdb', 'whichdb'): ('dbm', 'whichdb'), + ('_socket', 'fromfd'): ('socket', 'fromfd'), + ('_multiprocessing', 'Connection'): ('multiprocessing.connection', 'Connection'), + ('multiprocessing.process', 'Process'): ('multiprocessing.context', 'Process'), + ('multiprocessing.forking', 'Popen'): ('multiprocessing.popen_fork', 'Popen'), + ('urllib', 'ContentTooShortError'): ('urllib.error', 'ContentTooShortError'), + ('urllib', 'getproxies'): ('urllib.request', 'getproxies'), + ('urllib', 'pathname2url'): ('urllib.request', 'pathname2url'), + ('urllib', 'quote_plus'): ('urllib.parse', 'quote_plus'), + ('urllib', 'quote'): ('urllib.parse', 'quote'), + ('urllib', 'unquote_plus'): ('urllib.parse', 'unquote_plus'), + ('urllib', 'unquote'): ('urllib.parse', 'unquote'), + ('urllib', 'url2pathname'): ('urllib.request', 'url2pathname'), + ('urllib', 'urlcleanup'): ('urllib.request', 'urlcleanup'), + ('urllib', 'urlencode'): ('urllib.parse', 'urlencode'), + ('urllib', 'urlopen'): ('urllib.request', 'urlopen'), + ('urllib', 'urlretrieve'): ('urllib.request', 'urlretrieve'), + ('urllib2', 'HTTPError'): ('urllib.error', 'HTTPError'), + ('urllib2', 'URLError'): ('urllib.error', 'URLError'), +} + +PYTHON2_EXCEPTIONS = ( + 
"ArithmeticError", + "AssertionError", + "AttributeError", + "BaseException", + "BufferError", + "BytesWarning", + "DeprecationWarning", + "EOFError", + "EnvironmentError", + "Exception", + "FloatingPointError", + "FutureWarning", + "GeneratorExit", + "IOError", + "ImportError", + "ImportWarning", + "IndentationError", + "IndexError", + "KeyError", + "KeyboardInterrupt", + "LookupError", + "MemoryError", + "NameError", + "NotImplementedError", + "OSError", + "OverflowError", + "PendingDeprecationWarning", + "ReferenceError", + "RuntimeError", + "RuntimeWarning", + # StandardError is gone in Python 3, so we map it to Exception + "StopIteration", + "SyntaxError", + "SyntaxWarning", + "SystemError", + "SystemExit", + "TabError", + "TypeError", + "UnboundLocalError", + "UnicodeDecodeError", + "UnicodeEncodeError", + "UnicodeError", + "UnicodeTranslateError", + "UnicodeWarning", + "UserWarning", + "ValueError", + "Warning", + "ZeroDivisionError", +) + +try: + WindowsError +except NameError: + pass +else: + PYTHON2_EXCEPTIONS += ("WindowsError",) + +for excname in PYTHON2_EXCEPTIONS: + NAME_MAPPING[("exceptions", excname)] = ("builtins", excname) + +MULTIPROCESSING_EXCEPTIONS = ( + 'AuthenticationError', + 'BufferTooShort', + 'ProcessError', + 'TimeoutError', +) + +for excname in MULTIPROCESSING_EXCEPTIONS: + NAME_MAPPING[("multiprocessing", excname)] = ("multiprocessing.context", excname) + +# Same, but for 3.x to 2.x +REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items()) +assert len(REVERSE_IMPORT_MAPPING) == len(IMPORT_MAPPING) +REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items()) +assert len(REVERSE_NAME_MAPPING) == len(NAME_MAPPING) + +# Non-mutual mappings. + +IMPORT_MAPPING.update({ + 'cPickle': 'pickle', + '_elementtree': 'xml.etree.ElementTree', + 'FileDialog': 'tkinter.filedialog', + 'SimpleDialog': 'tkinter.simpledialog', + 'DocXMLRPCServer': 'xmlrpc.server', + 'SimpleHTTPServer': 'http.server', + 'CGIHTTPServer': 'http.server', +}) + +REVERSE_IMPORT_MAPPING.update({ + '_bz2': 'bz2', + '_dbm': 'dbm', + '_functools': 'functools', + '_gdbm': 'gdbm', + '_pickle': 'pickle', +}) + +NAME_MAPPING.update({ + ('__builtin__', 'basestring'): ('builtins', 'str'), + ('exceptions', 'StandardError'): ('builtins', 'Exception'), + ('UserDict', 'UserDict'): ('collections', 'UserDict'), + ('socket', '_socketobject'): ('socket', 'SocketType'), +}) + +REVERSE_NAME_MAPPING.update({ + ('_functools', 'reduce'): ('__builtin__', 'reduce'), + ('tkinter.filedialog', 'FileDialog'): ('FileDialog', 'FileDialog'), + ('tkinter.filedialog', 'LoadFileDialog'): ('FileDialog', 'LoadFileDialog'), + ('tkinter.filedialog', 'SaveFileDialog'): ('FileDialog', 'SaveFileDialog'), + ('tkinter.simpledialog', 'SimpleDialog'): ('SimpleDialog', 'SimpleDialog'), + ('xmlrpc.server', 'ServerHTMLDoc'): ('DocXMLRPCServer', 'ServerHTMLDoc'), + ('xmlrpc.server', 'XMLRPCDocGenerator'): + ('DocXMLRPCServer', 'XMLRPCDocGenerator'), + ('xmlrpc.server', 'DocXMLRPCRequestHandler'): + ('DocXMLRPCServer', 'DocXMLRPCRequestHandler'), + ('xmlrpc.server', 'DocXMLRPCServer'): + ('DocXMLRPCServer', 'DocXMLRPCServer'), + ('xmlrpc.server', 'DocCGIXMLRPCRequestHandler'): + ('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'), + ('http.server', 'SimpleHTTPRequestHandler'): + ('SimpleHTTPServer', 'SimpleHTTPRequestHandler'), + ('http.server', 'CGIHTTPRequestHandler'): + ('CGIHTTPServer', 'CGIHTTPRequestHandler'), + ('_socket', 'socket'): ('socket', '_socketobject'), +}) + +PYTHON3_OSERROR_EXCEPTIONS = ( + 
'BrokenPipeError', + 'ChildProcessError', + 'ConnectionAbortedError', + 'ConnectionError', + 'ConnectionRefusedError', + 'ConnectionResetError', + 'FileExistsError', + 'FileNotFoundError', + 'InterruptedError', + 'IsADirectoryError', + 'NotADirectoryError', + 'PermissionError', + 'ProcessLookupError', + 'TimeoutError', +) + +for excname in PYTHON3_OSERROR_EXCEPTIONS: + REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'OSError') diff --git a/Darwin/lib/python3.5/_compression.py b/Darwin/lib/python3.5/_compression.py new file mode 100644 index 0000000..b00f31b --- /dev/null +++ b/Darwin/lib/python3.5/_compression.py @@ -0,0 +1,152 @@ +"""Internal classes used by the gzip, lzma and bz2 modules""" + +import io + + +BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE # Compressed data read chunk size + + +class BaseStream(io.BufferedIOBase): + """Mode-checking helper functions.""" + + def _check_not_closed(self): + if self.closed: + raise ValueError("I/O operation on closed file") + + def _check_can_read(self): + if not self.readable(): + raise io.UnsupportedOperation("File not open for reading") + + def _check_can_write(self): + if not self.writable(): + raise io.UnsupportedOperation("File not open for writing") + + def _check_can_seek(self): + if not self.readable(): + raise io.UnsupportedOperation("Seeking is only supported " + "on files open for reading") + if not self.seekable(): + raise io.UnsupportedOperation("The underlying file object " + "does not support seeking") + + +class DecompressReader(io.RawIOBase): + """Adapts the decompressor API to a RawIOBase reader API""" + + def readable(self): + return True + + def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args): + self._fp = fp + self._eof = False + self._pos = 0 # Current offset in decompressed stream + + # Set to size of decompressed stream once it is known, for SEEK_END + self._size = -1 + + # Save the decompressor factory and arguments. + # If the file contains multiple compressed streams, each + # stream will need a separate decompressor object. A new decompressor + # object is also needed when implementing a backwards seek(). + self._decomp_factory = decomp_factory + self._decomp_args = decomp_args + self._decompressor = self._decomp_factory(**self._decomp_args) + + # Exception class to catch from decompressor signifying invalid + # trailing data to ignore + self._trailing_error = trailing_error + + def close(self): + self._decompressor = None + return super().close() + + def seekable(self): + return self._fp.seekable() + + def readinto(self, b): + with memoryview(b) as view, view.cast("B") as byte_view: + data = self.read(len(byte_view)) + byte_view[:len(data)] = data + return len(data) + + def read(self, size=-1): + if size < 0: + return self.readall() + + if not size or self._eof: + return b"" + data = None # Default if EOF is encountered + # Depending on the input data, our call to the decompressor may not + # return any data. In this case, try again after reading another block. + while True: + if self._decompressor.eof: + rawblock = (self._decompressor.unused_data or + self._fp.read(BUFFER_SIZE)) + if not rawblock: + break + # Continue to next stream. + self._decompressor = self._decomp_factory( + **self._decomp_args) + try: + data = self._decompressor.decompress(rawblock, size) + except self._trailing_error: + # Trailing data isn't a valid compressed stream; ignore it. 
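DecompressReader above is the shared engine that the gzip, bz2 and lzma file objects build on in 3.5: it chains multiple compressed members in one file and emulates backwards seeks by rewinding and re-reading. A quick sketch with bz2; the two-member payload is constructed just for the demonstration.

import bz2
import io

payload = bz2.compress(b"first-") + bz2.compress(b"second")
with bz2.open(io.BytesIO(payload), "rb") as f:
    print(f.read())    # b'first-second', both members are decoded
    f.seek(2)          # backwards seek: rewind to the start, then skip forward 2 bytes
    print(f.read(6))   # b'rst-se'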
+ break + else: + if self._decompressor.needs_input: + rawblock = self._fp.read(BUFFER_SIZE) + if not rawblock: + raise EOFError("Compressed file ended before the " + "end-of-stream marker was reached") + else: + rawblock = b"" + data = self._decompressor.decompress(rawblock, size) + if data: + break + if not data: + self._eof = True + self._size = self._pos + return b"" + self._pos += len(data) + return data + + # Rewind the file to the beginning of the data stream. + def _rewind(self): + self._fp.seek(0) + self._eof = False + self._pos = 0 + self._decompressor = self._decomp_factory(**self._decomp_args) + + def seek(self, offset, whence=io.SEEK_SET): + # Recalculate offset as an absolute file position. + if whence == io.SEEK_SET: + pass + elif whence == io.SEEK_CUR: + offset = self._pos + offset + elif whence == io.SEEK_END: + # Seeking relative to EOF - we need to know the file's size. + if self._size < 0: + while self.read(io.DEFAULT_BUFFER_SIZE): + pass + offset = self._size + offset + else: + raise ValueError("Invalid value for whence: {}".format(whence)) + + # Make it so that offset is the number of bytes to skip forward. + if offset < self._pos: + self._rewind() + else: + offset -= self._pos + + # Read and discard data until we reach the desired position. + while offset > 0: + data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset)) + if not data: + break + offset -= len(data) + + return self._pos + + def tell(self): + """Return the current file position.""" + return self._pos diff --git a/Darwin/lib/python3.4/_dummy_thread.py b/Darwin/lib/python3.5/_dummy_thread.py similarity index 95% rename from Darwin/lib/python3.4/_dummy_thread.py rename to Darwin/lib/python3.5/_dummy_thread.py index b67cfb9..36e5f38 100644 --- a/Darwin/lib/python3.4/_dummy_thread.py +++ b/Darwin/lib/python3.5/_dummy_thread.py @@ -140,6 +140,14 @@ class LockType(object): def locked(self): return self.locked_status + def __repr__(self): + return "<%s %s.%s object at %s>" % ( + "locked" if self.locked_status else "unlocked", + self.__class__.__module__, + self.__class__.__qualname__, + hex(id(self)) + ) + # Used to signal that interrupt_main was called in a "thread" _interrupt = False # True when not executing in a "thread" diff --git a/Darwin/lib/python3.4/_markupbase.py b/Darwin/lib/python3.5/_markupbase.py similarity index 100% rename from Darwin/lib/python3.4/_markupbase.py rename to Darwin/lib/python3.5/_markupbase.py diff --git a/Darwin/lib/python3.4/_osx_support.py b/Darwin/lib/python3.5/_osx_support.py similarity index 98% rename from Darwin/lib/python3.4/_osx_support.py rename to Darwin/lib/python3.5/_osx_support.py index b6eac5f..b07e75d 100644 --- a/Darwin/lib/python3.4/_osx_support.py +++ b/Darwin/lib/python3.5/_osx_support.py @@ -450,8 +450,16 @@ def get_platform_osx(_config_vars, osname, release, machine): # case and disallow installs. cflags = _config_vars.get(_INITPRE+'CFLAGS', _config_vars.get('CFLAGS', '')) - if ((macrelease + '.') >= '10.4.' 
and - '-arch' in cflags.strip()): + if macrelease: + try: + macrelease = tuple(int(i) for i in macrelease.split('.')[0:2]) + except ValueError: + macrelease = (10, 0) + else: + # assume no universal support + macrelease = (10, 0) + + if (macrelease >= (10, 4)) and '-arch' in cflags.strip(): # The universal build will build fat binaries, but not on # systems before 10.4 diff --git a/Darwin/lib/python3.4/decimal.py b/Darwin/lib/python3.5/_pydecimal.py similarity index 99% rename from Darwin/lib/python3.4/decimal.py rename to Darwin/lib/python3.5/_pydecimal.py index 5b98473..05ba4ee 100644 --- a/Darwin/lib/python3.4/decimal.py +++ b/Darwin/lib/python3.5/_pydecimal.py @@ -116,6 +116,9 @@ __all__ = [ # Two major classes 'Decimal', 'Context', + # Named tuple representation + 'DecimalTuple', + # Contexts 'DefaultContext', 'BasicContext', 'ExtendedContext', @@ -124,6 +127,9 @@ __all__ = [ 'Inexact', 'Rounded', 'Subnormal', 'Overflow', 'Underflow', 'FloatOperation', + # Exceptional conditions that trigger InvalidOperation + 'DivisionImpossible', 'InvalidContext', 'ConversionSyntax', 'DivisionUndefined', + # Constants for use in setting up contexts 'ROUND_DOWN', 'ROUND_HALF_UP', 'ROUND_HALF_EVEN', 'ROUND_CEILING', 'ROUND_FLOOR', 'ROUND_UP', 'ROUND_HALF_DOWN', 'ROUND_05UP', @@ -138,9 +144,11 @@ __all__ = [ 'HAVE_THREADS' ] +__xname__ = __name__ # sys.modules lookup (--without-threads) +__name__ = 'decimal' # For pickling __version__ = '1.70' # Highest version of the spec this complies with # See http://speleotrove.com/decimal/ -__libmpdec_version__ = "2.4.0" # compatible libmpdec version +__libmpdec_version__ = "2.4.1" # compatible libmpdec version import math as _math import numbers as _numbers @@ -434,7 +442,7 @@ except ImportError: # Python was compiled without threads; create a mock object instead class MockThreading(object): def local(self, sys=sys): - return sys.modules[__name__] + return sys.modules[__xname__] threading = MockThreading() del MockThreading @@ -915,15 +923,6 @@ class Decimal(object): return False return self._cmp(other) == 0 - def __ne__(self, other, context=None): - self, other = _convert_for_comparison(self, other, equality_op=True) - if other is NotImplemented: - return other - if self._check_nans(other, context): - return True - return self._cmp(other) != 0 - - def __lt__(self, other, context=None): self, other = _convert_for_comparison(self, other) if other is NotImplemented: @@ -961,13 +960,12 @@ class Decimal(object): return self._cmp(other) >= 0 def compare(self, other, context=None): - """Compares one to another. + """Compare self to other. Return a decimal value: - -1 => a < b - 0 => a = b - 1 => a > b - NaN => one is NaN - Like __cmp__, but returns Decimal instances. + a or b is a NaN ==> Decimal('NaN') + a < b ==> Decimal('-1') + a == b ==> Decimal('0') + a > b ==> Decimal('1') """ other = _convert_other(other, raiseit=True) @@ -2523,7 +2521,7 @@ class Decimal(object): end -= 1 return _dec_from_triple(dup._sign, dup._int[:end], exp) - def quantize(self, exp, rounding=None, context=None, watchexp=True): + def quantize(self, exp, rounding=None, context=None): """Quantize self so its exponent is the same as that of exp. Similar to self._rescale(exp._exp) but with error checking. 
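The decimal rename above (decimal.py becoming the pure-Python fallback _pydecimal) also trims the API: compare() now documents its Decimal-valued results and quantize() loses the old watchexp parameter. A short reminder of both methods; the numbers are arbitrary.

from decimal import Decimal

print(Decimal("2.675").quantize(Decimal("0.01")))   # Decimal('2.68') under the default ROUND_HALF_EVEN
print(Decimal("1.1").compare(Decimal("2")))         # Decimal('-1')
print(Decimal("NaN").compare(Decimal("2")))         # Decimal('NaN')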
@@ -2546,16 +2544,6 @@ class Decimal(object): return context._raise_error(InvalidOperation, 'quantize with one INF') - # if we're not watching exponents, do a simple rescale - if not watchexp: - ans = self._rescale(exp._exp, rounding) - # raise Inexact and Rounded where appropriate - if ans._exp > self._exp: - context._raise_error(Rounded) - if ans != self: - context._raise_error(Inexact) - return ans - # exp._exp should be between Etiny and Emax if not (context.Etiny() <= exp._exp <= context.Emax): return context._raise_error(InvalidOperation, @@ -3769,6 +3757,8 @@ class Decimal(object): if self._is_special: sign = _format_sign(self._sign, spec) body = str(self.copy_abs()) + if spec['type'] == '%': + body += '%' return _format_align(sign, body, spec) # a type of None defaults to 'g' or 'G', depending on context @@ -4117,7 +4107,7 @@ class Context(object): >>> context.create_decimal_from_float(3.1415926535897932) Traceback (most recent call last): ... - decimal.Inexact: None + decimal.Inexact """ d = Decimal.from_float(f) # An exact conversion @@ -6388,19 +6378,3 @@ _PyHASH_NAN = sys.hash_info.nan # _PyHASH_10INV is the inverse of 10 modulo the prime _PyHASH_MODULUS _PyHASH_10INV = pow(10, _PyHASH_MODULUS - 2, _PyHASH_MODULUS) del sys - -try: - import _decimal -except ImportError: - pass -else: - s1 = set(dir()) - s2 = set(dir(_decimal)) - for name in s1 - s2: - del globals()[name] - del s1, s2, name - from _decimal import * - -if __name__ == '__main__': - import doctest, decimal - doctest.testmod(decimal) diff --git a/Darwin/lib/python3.4/_pyio.py b/Darwin/lib/python3.5/_pyio.py similarity index 80% rename from Darwin/lib/python3.4/_pyio.py rename to Darwin/lib/python3.5/_pyio.py index b04d23a..50ad9ff 100644 --- a/Darwin/lib/python3.4/_pyio.py +++ b/Darwin/lib/python3.5/_pyio.py @@ -6,11 +6,17 @@ import os import abc import codecs import errno +import array +import stat # Import _thread instead of threading to reduce startup cost try: from _thread import allocate_lock as Lock except ImportError: from _dummy_thread import allocate_lock as Lock +if os.name == 'win32': + from msvcrt import setmode as _setmode +else: + _setmode = None import io from io import (__all__, SEEK_SET, SEEK_CUR, SEEK_END) @@ -24,8 +30,8 @@ if hasattr(os, 'SEEK_HOLE') : DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes # NOTE: Base classes defined here are registered with the "official" ABCs -# defined in io.py. We don't use real inheritance though, because we don't -# want to inherit the C implementations. +# defined in io.py. We don't use real inheritance though, because we don't want +# to inherit the C implementations. 
# Rebind for compatibility BlockingIOError = BlockingIOError @@ -200,38 +206,45 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None, (appending and "a" or "") + (updating and "+" or ""), closefd, opener=opener) - line_buffering = False - if buffering == 1 or buffering < 0 and raw.isatty(): - buffering = -1 - line_buffering = True - if buffering < 0: - buffering = DEFAULT_BUFFER_SIZE - try: - bs = os.fstat(raw.fileno()).st_blksize - except (OSError, AttributeError): - pass + result = raw + try: + line_buffering = False + if buffering == 1 or buffering < 0 and raw.isatty(): + buffering = -1 + line_buffering = True + if buffering < 0: + buffering = DEFAULT_BUFFER_SIZE + try: + bs = os.fstat(raw.fileno()).st_blksize + except (OSError, AttributeError): + pass + else: + if bs > 1: + buffering = bs + if buffering < 0: + raise ValueError("invalid buffering size") + if buffering == 0: + if binary: + return result + raise ValueError("can't have unbuffered text I/O") + if updating: + buffer = BufferedRandom(raw, buffering) + elif creating or writing or appending: + buffer = BufferedWriter(raw, buffering) + elif reading: + buffer = BufferedReader(raw, buffering) else: - if bs > 1: - buffering = bs - if buffering < 0: - raise ValueError("invalid buffering size") - if buffering == 0: + raise ValueError("unknown mode: %r" % mode) + result = buffer if binary: - return raw - raise ValueError("can't have unbuffered text I/O") - if updating: - buffer = BufferedRandom(raw, buffering) - elif creating or writing or appending: - buffer = BufferedWriter(raw, buffering) - elif reading: - buffer = BufferedReader(raw, buffering) - else: - raise ValueError("unknown mode: %r" % mode) - if binary: - return buffer - text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering) - text.mode = mode - return text + return result + text = TextIOWrapper(buffer, encoding, errors, newline, line_buffering) + result = text + text.mode = mode + return result + except: + result.close() + raise class DocDescriptor: @@ -249,7 +262,7 @@ class OpenWrapper: Trick so that open won't become a bound method when stored as a class variable (as dbm.dumb does). - See initstdio() in Python/pythonrun.c. + See initstdio() in Python/pylifecycle.c. """ __doc__ = DocDescriptor() @@ -655,16 +668,33 @@ class BufferedIOBase(IOBase): Raises BlockingIOError if the underlying raw stream has no data at the moment. """ - # XXX This ought to work with anything that supports the buffer API - data = self.read(len(b)) + + return self._readinto(b, read1=False) + + def readinto1(self, b): + """Read up to len(b) bytes into *b*, using at most one system call + + Returns an int representing the number of bytes read (0 for EOF). + + Raises BlockingIOError if the underlying raw stream has no + data at the moment. 
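The reworked open() above keeps the long-standing buffering rules (buffering=0 is only valid for binary files, buffering=1 selects line buffering for text) but now closes the partially built raw/buffered/text stack if a later wrapping step fails. A small sketch of the user-visible rule; the temporary file is only scratch space.

import os
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)

with open(path, "wb", buffering=0) as f:   # unbuffered I/O is fine for binary files
    f.write(b"raw bytes")

try:
    open(path, "r", buffering=0)           # ...but not for text files
except ValueError as exc:
    print(exc)                             # "can't have unbuffered text I/O"

os.remove(path)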
+ """ + + return self._readinto(b, read1=True) + + def _readinto(self, b, read1): + if not isinstance(b, memoryview): + b = memoryview(b) + b = b.cast('B') + + if read1: + data = self.read1(len(b)) + else: + data = self.read(len(b)) n = len(data) - try: - b[:n] = data - except TypeError as err: - import array - if not isinstance(b, array.array): - raise err - b[:n] = array.array('b', data) + + b[:n] = data + return n def write(self, b): @@ -783,13 +813,14 @@ class _BufferedIOMixin(BufferedIOBase): .format(self.__class__.__name__)) def __repr__(self): - clsname = self.__class__.__name__ + modname = self.__class__.__module__ + clsname = self.__class__.__qualname__ try: name = self.name - except AttributeError: - return "<_pyio.{0}>".format(clsname) + except Exception: + return "<{}.{}>".format(modname, clsname) else: - return "<_pyio.{0} name={1!r}>".format(clsname, name) + return "<{}.{} name={!r}>".format(modname, clsname, name) ### Lower-level APIs ### @@ -826,8 +857,14 @@ class BytesIO(BufferedIOBase): def getbuffer(self): """Return a readable and writable view of the buffer. """ + if self.closed: + raise ValueError("getbuffer on closed file") return memoryview(self._buffer) + def close(self): + self._buffer.clear() + super().close() + def read(self, size=None): if self.closed: raise ValueError("read from closed file") @@ -980,10 +1017,7 @@ class BufferedReader(_BufferedIOMixin): current_size = 0 while True: # Read until EOF or until read() would block. - try: - chunk = self.raw.read() - except InterruptedError: - continue + chunk = self.raw.read() if chunk in empty_values: nodata_val = chunk break @@ -1002,10 +1036,7 @@ class BufferedReader(_BufferedIOMixin): chunks = [buf[pos:]] wanted = max(self.buffer_size, n) while avail < n: - try: - chunk = self.raw.read(wanted) - except InterruptedError: - continue + chunk = self.raw.read(wanted) if chunk in empty_values: nodata_val = chunk break @@ -1034,12 +1065,7 @@ class BufferedReader(_BufferedIOMixin): have = len(self._read_buf) - self._read_pos if have < want or have <= 0: to_read = self.buffer_size - have - while True: - try: - current = self.raw.read(to_read) - except InterruptedError: - continue - break + current = self.raw.read(to_read) if current: self._read_buf = self._read_buf[self._read_pos:] + current self._read_pos = 0 @@ -1058,6 +1084,58 @@ class BufferedReader(_BufferedIOMixin): return self._read_unlocked( min(size, len(self._read_buf) - self._read_pos)) + # Implementing readinto() and readinto1() is not strictly necessary (we + # could rely on the base class that provides an implementation in terms of + # read() and read1()). We do it anyway to keep the _pyio implementation + # similar to the io implementation (which implements the methods for + # performance reasons). + def _readinto(self, buf, read1): + """Read data into *buf* with at most one system call.""" + + if len(buf) == 0: + return 0 + + # Need to create a memoryview object of type 'b', otherwise + # we may not be able to assign bytes to it, and slicing it + # would create a new object. 
+ if not isinstance(buf, memoryview): + buf = memoryview(buf) + buf = buf.cast('B') + + written = 0 + with self._read_lock: + while written < len(buf): + + # First try to read from internal buffer + avail = min(len(self._read_buf) - self._read_pos, len(buf)) + if avail: + buf[written:written+avail] = \ + self._read_buf[self._read_pos:self._read_pos+avail] + self._read_pos += avail + written += avail + if written == len(buf): + break + + # If remaining space in callers buffer is larger than + # internal buffer, read directly into callers buffer + if len(buf) - written > self.buffer_size: + n = self.raw.readinto(buf[written:]) + if not n: + break # eof + written += n + + # Otherwise refill internal buffer - unless we're + # in read1 mode and already got some data + elif not (read1 and written): + if not self._peek_unlocked(1): + break # eof + + # In readinto1 mode, return as soon as we have some data + if read1 and written: + break + + return written + def tell(self): return _BufferedIOMixin.tell(self) - len(self._read_buf) + self._read_pos @@ -1136,8 +1214,6 @@ class BufferedWriter(_BufferedIOMixin): while self._write_buf: try: n = self.raw.write(self._write_buf) - except InterruptedError: - continue except BlockingIOError: raise RuntimeError("self.raw should implement RawIOBase: it " "should not raise BlockingIOError") @@ -1207,6 +1283,9 @@ class BufferedRWPair(BufferedIOBase): def read1(self, size): return self.reader.read1(size) + def readinto1(self, b): + return self.reader.readinto1(b) + def readable(self): return self.reader.readable() @@ -1217,8 +1296,10 @@ class BufferedRWPair(BufferedIOBase): return self.writer.flush() def close(self): - self.writer.close() - self.reader.close() + try: + self.writer.close() + finally: + self.reader.close() def isatty(self): return self.reader.isatty() or self.writer.isatty() @@ -1289,6 +1370,10 @@ class BufferedRandom(BufferedWriter, BufferedReader): self.flush() return BufferedReader.read1(self, size) + def readinto1(self, b): + self.flush() + return BufferedReader.readinto1(self, b) + def write(self, b): if self._read_buf: # Undo readahead @@ -1298,6 +1383,345 @@ class BufferedRandom(BufferedWriter, BufferedReader): return BufferedWriter.write(self, b) +class FileIO(RawIOBase): + _fd = -1 + _created = False + _readable = False + _writable = False + _appending = False + _seekable = None + _closefd = True + + def __init__(self, file, mode='r', closefd=True, opener=None): + """Open a file. The mode can be 'r' (default), 'w', 'x' or 'a' for reading, + writing, exclusive creation or appending. The file will be created if it + doesn't exist when opened for writing or appending; it will be truncated + when opened for writing. A FileExistsError will be raised if it already + exists when opened for creating. Opening a file for creating implies + writing so this mode behaves in a similar way to 'w'. Add a '+' to the mode + to allow simultaneous reading and writing. A custom opener can be used by + passing a callable as *opener*. The underlying file descriptor for the file + object is then obtained by calling opener with (*name*, *flags*). + *opener* must return an open file descriptor (passing os.open as *opener* + results in functionality similar to passing None). + """ + if self._fd >= 0: + # Have to close the existing file first. 
+ try: + if self._closefd: + os.close(self._fd) + finally: + self._fd = -1 + + if isinstance(file, float): + raise TypeError('integer argument expected, got float') + if isinstance(file, int): + fd = file + if fd < 0: + raise ValueError('negative file descriptor') + else: + fd = -1 + + if not isinstance(mode, str): + raise TypeError('invalid mode: %s' % (mode,)) + if not set(mode) <= set('xrwab+'): + raise ValueError('invalid mode: %s' % (mode,)) + if sum(c in 'rwax' for c in mode) != 1 or mode.count('+') > 1: + raise ValueError('Must have exactly one of create/read/write/append ' + 'mode and at most one plus') + + if 'x' in mode: + self._created = True + self._writable = True + flags = os.O_EXCL | os.O_CREAT + elif 'r' in mode: + self._readable = True + flags = 0 + elif 'w' in mode: + self._writable = True + flags = os.O_CREAT | os.O_TRUNC + elif 'a' in mode: + self._writable = True + self._appending = True + flags = os.O_APPEND | os.O_CREAT + + if '+' in mode: + self._readable = True + self._writable = True + + if self._readable and self._writable: + flags |= os.O_RDWR + elif self._readable: + flags |= os.O_RDONLY + else: + flags |= os.O_WRONLY + + flags |= getattr(os, 'O_BINARY', 0) + + noinherit_flag = (getattr(os, 'O_NOINHERIT', 0) or + getattr(os, 'O_CLOEXEC', 0)) + flags |= noinherit_flag + + owned_fd = None + try: + if fd < 0: + if not closefd: + raise ValueError('Cannot use closefd=False with file name') + if opener is None: + fd = os.open(file, flags, 0o666) + else: + fd = opener(file, flags) + if not isinstance(fd, int): + raise TypeError('expected integer from opener') + if fd < 0: + raise OSError('Negative file descriptor') + owned_fd = fd + if not noinherit_flag: + os.set_inheritable(fd, False) + + self._closefd = closefd + fdfstat = os.fstat(fd) + try: + if stat.S_ISDIR(fdfstat.st_mode): + raise IsADirectoryError(errno.EISDIR, + os.strerror(errno.EISDIR), file) + except AttributeError: + # Ignore the AttribueError if stat.S_ISDIR or errno.EISDIR + # don't exist. + pass + self._blksize = getattr(fdfstat, 'st_blksize', 0) + if self._blksize <= 1: + self._blksize = DEFAULT_BUFFER_SIZE + + if _setmode: + # don't translate newlines (\r\n <=> \n) + _setmode(fd, os.O_BINARY) + + self.name = file + if self._appending: + # For consistent behaviour, we explicitly seek to the + # end of file (otherwise, it might be done only on the + # first write()). + os.lseek(fd, 0, SEEK_END) + except: + if owned_fd is not None: + os.close(owned_fd) + raise + self._fd = fd + + def __del__(self): + if self._fd >= 0 and self._closefd and not self.closed: + import warnings + warnings.warn('unclosed file %r' % (self,), ResourceWarning, + stacklevel=2) + self.close() + + def __getstate__(self): + raise TypeError("cannot serialize '%s' object", self.__class__.__name__) + + def __repr__(self): + class_name = '%s.%s' % (self.__class__.__module__, + self.__class__.__qualname__) + if self.closed: + return '<%s [closed]>' % class_name + try: + name = self.name + except AttributeError: + return ('<%s fd=%d mode=%r closefd=%r>' % + (class_name, self._fd, self.mode, self._closefd)) + else: + return ('<%s name=%r mode=%r closefd=%r>' % + (class_name, name, self.mode, self._closefd)) + + def _checkReadable(self): + if not self._readable: + raise UnsupportedOperation('File not open for reading') + + def _checkWritable(self, msg=None): + if not self._writable: + raise UnsupportedOperation('File not open for writing') + + def read(self, size=None): + """Read at most size bytes, returned as bytes. 
+ + Only makes one system call, so less data may be returned than requested + In non-blocking mode, returns None if no data is available. + Return an empty bytes object at EOF. + """ + self._checkClosed() + self._checkReadable() + if size is None or size < 0: + return self.readall() + try: + return os.read(self._fd, size) + except BlockingIOError: + return None + + def readall(self): + """Read all data from the file, returned as bytes. + + In non-blocking mode, returns as much as is immediately available, + or None if no data is available. Return an empty bytes object at EOF. + """ + self._checkClosed() + self._checkReadable() + bufsize = DEFAULT_BUFFER_SIZE + try: + pos = os.lseek(self._fd, 0, SEEK_CUR) + end = os.fstat(self._fd).st_size + if end >= pos: + bufsize = end - pos + 1 + except OSError: + pass + + result = bytearray() + while True: + if len(result) >= bufsize: + bufsize = len(result) + bufsize += max(bufsize, DEFAULT_BUFFER_SIZE) + n = bufsize - len(result) + try: + chunk = os.read(self._fd, n) + except BlockingIOError: + if result: + break + return None + if not chunk: # reached the end of the file + break + result += chunk + + return bytes(result) + + def readinto(self, b): + """Same as RawIOBase.readinto().""" + m = memoryview(b).cast('B') + data = self.read(len(m)) + n = len(data) + m[:n] = data + return n + + def write(self, b): + """Write bytes b to file, return number written. + + Only makes one system call, so not all of the data may be written. + The number of bytes actually written is returned. In non-blocking mode, + returns None if the write would block. + """ + self._checkClosed() + self._checkWritable() + try: + return os.write(self._fd, b) + except BlockingIOError: + return None + + def seek(self, pos, whence=SEEK_SET): + """Move to new file position. + + Argument offset is a byte count. Optional argument whence defaults to + SEEK_SET or 0 (offset from start of file, offset should be >= 0); other values + are SEEK_CUR or 1 (move relative to current position, positive or negative), + and SEEK_END or 2 (move relative to end of file, usually negative, although + many platforms allow seeking beyond the end of a file). + + Note that not all file objects are seekable. + """ + if isinstance(pos, float): + raise TypeError('an integer is required') + self._checkClosed() + return os.lseek(self._fd, pos, whence) + + def tell(self): + """tell() -> int. Current file position. + + Can raise OSError for non seekable files.""" + self._checkClosed() + return os.lseek(self._fd, 0, SEEK_CUR) + + def truncate(self, size=None): + """Truncate the file to at most size bytes. + + Size defaults to the current file position, as returned by tell(). + The current file position is changed to the value of size. + """ + self._checkClosed() + self._checkWritable() + if size is None: + size = self.tell() + os.ftruncate(self._fd, size) + return size + + def close(self): + """Close the file. + + A closed file cannot be used for further I/O operations. close() may be + called more than once without error. 
+ """ + if not self.closed: + try: + if self._closefd: + os.close(self._fd) + finally: + super().close() + + def seekable(self): + """True if file supports random-access.""" + self._checkClosed() + if self._seekable is None: + try: + self.tell() + except OSError: + self._seekable = False + else: + self._seekable = True + return self._seekable + + def readable(self): + """True if file was opened in a read mode.""" + self._checkClosed() + return self._readable + + def writable(self): + """True if file was opened in a write mode.""" + self._checkClosed() + return self._writable + + def fileno(self): + """Return the underlying file descriptor (an integer).""" + self._checkClosed() + return self._fd + + def isatty(self): + """True if the file is connected to a TTY device.""" + self._checkClosed() + return os.isatty(self._fd) + + @property + def closefd(self): + """True if the file descriptor will be closed by close().""" + return self._closefd + + @property + def mode(self): + """String giving the file mode""" + if self._created: + if self._readable: + return 'xb+' + else: + return 'xb' + elif self._appending: + if self._readable: + return 'ab+' + else: + return 'ab' + elif self._readable: + if self._writable: + return 'rb+' + else: + return 'rb' + else: + return 'wb' + + class TextIOBase(IOBase): """Base class for text I/O. @@ -1551,16 +1975,17 @@ class TextIOWrapper(TextIOBase): # - "chars_..." for integer variables that count decoded characters def __repr__(self): - result = "<_pyio.TextIOWrapper" + result = "<{}.{}".format(self.__class__.__module__, + self.__class__.__qualname__) try: name = self.name - except AttributeError: + except Exception: pass else: result += " name={0!r}".format(name) try: mode = self.mode - except AttributeError: + except Exception: pass else: result += " mode={0!r}".format(mode) @@ -1850,6 +2275,19 @@ class TextIOWrapper(TextIOBase): return buffer def seek(self, cookie, whence=0): + def _reset_encoder(position): + """Reset the encoder (merely useful for proper BOM handling)""" + try: + encoder = self._encoder or self._get_encoder() + except LookupError: + # Sometimes the encoder doesn't exist + pass + else: + if position != 0: + encoder.setstate(0) + else: + encoder.reset() + if self.closed: raise ValueError("tell on closed file") if not self._seekable: @@ -1870,6 +2308,7 @@ class TextIOWrapper(TextIOBase): self._snapshot = None if self._decoder: self._decoder.reset() + _reset_encoder(position) return position if whence != 0: raise ValueError("unsupported whence (%r)" % (whence,)) @@ -1907,17 +2346,7 @@ class TextIOWrapper(TextIOBase): raise OSError("can't restore logical file position") self._decoded_chars_used = chars_to_skip - # Finally, reset the encoder (merely useful for proper BOM handling) - try: - encoder = self._encoder or self._get_encoder() - except LookupError: - # Sometimes the encoder doesn't exist - pass - else: - if cookie != 0: - encoder.setstate(0) - else: - encoder.reset() + _reset_encoder(cookie) return cookie def read(self, size=None): diff --git a/Darwin/lib/python3.4/_sitebuiltins.py b/Darwin/lib/python3.5/_sitebuiltins.py similarity index 100% rename from Darwin/lib/python3.4/_sitebuiltins.py rename to Darwin/lib/python3.5/_sitebuiltins.py diff --git a/Darwin/lib/python3.4/_strptime.py b/Darwin/lib/python3.5/_strptime.py similarity index 97% rename from Darwin/lib/python3.4/_strptime.py rename to Darwin/lib/python3.5/_strptime.py index 53bd34b..374923d 100644 --- a/Darwin/lib/python3.4/_strptime.py +++ b/Darwin/lib/python3.5/_strptime.py 
@@ -167,9 +167,9 @@ class LocaleTime(object): time.tzset() except AttributeError: pass - no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()]) + no_saving = frozenset({"utc", "gmt", time.tzname[0].lower()}) if time.daylight: - has_saving = frozenset([time.tzname[1].lower()]) + has_saving = frozenset({time.tzname[1].lower()}) else: has_saving = frozenset() self.timezone = (no_saving, has_saving) @@ -253,8 +253,8 @@ class TimeRE(dict): # format directives (%m, etc.). regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])") format = regex_chars.sub(r"\\\1", format) - whitespace_replacement = re_compile('\s+') - format = whitespace_replacement.sub('\s+', format) + whitespace_replacement = re_compile(r'\s+') + format = whitespace_replacement.sub(r'\\s+', format) while '%' in format: directive_index = format.index('%')+1 processed_format = "%s%s%s" % (processed_format, @@ -348,9 +348,9 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): # though week_of_year = -1 week_of_year_start = -1 - # weekday and julian defaulted to -1 so as to signal need to calculate + # weekday and julian defaulted to None so as to signal need to calculate # values - weekday = julian = -1 + weekday = julian = None found_dict = found.groupdict() for group_key in found_dict.keys(): # Directives not explicitly handled below: @@ -452,14 +452,14 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): year = 1900 # If we know the week of the year and what day of that week, we can figure # out the Julian day of the year. - if julian == -1 and week_of_year != -1 and weekday != -1: + if julian is None and week_of_year != -1 and weekday is not None: week_starts_Mon = True if week_of_year_start == 0 else False julian = _calc_julian_from_U_or_W(year, week_of_year, weekday, week_starts_Mon) # Cannot pre-calculate datetime_date() since can change in Julian # calculation and thus could have different value for the day of the week # calculation. - if julian == -1: + if julian is None: # Need to add 1 to result since first day of the year is 1, not 0. 
julian = datetime_date(year, month, day).toordinal() - \ datetime_date(year, 1, 1).toordinal() + 1 @@ -469,7 +469,7 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"): year = datetime_result.year month = datetime_result.month day = datetime_result.day - if weekday == -1: + if weekday is None: weekday = datetime_date(year, month, day).weekday() # Add timezone info tzname = found_dict.get("Z") diff --git a/Darwin/lib/python3.4/_sysconfigdata.py b/Darwin/lib/python3.5/_sysconfigdata.py similarity index 62% rename from Darwin/lib/python3.4/_sysconfigdata.py rename to Darwin/lib/python3.5/_sysconfigdata.py index 4a83651..e7d4617 100644 --- a/Darwin/lib/python3.4/_sysconfigdata.py +++ b/Darwin/lib/python3.5/_sysconfigdata.py @@ -4,53 +4,62 @@ build_time_vars = {'ABIFLAGS': 'm', 'AIX_GENUINE_CPLUSPLUS': 0, 'AR': 'ar', 'ARFLAGS': 'rc', - 'ASDLGEN': 'python ' - '/Users/build/platform_darwin/src/Python-3.4.1/Parser/asdl_c.py', - 'ASDLGEN_FILES': '/Users/build/platform_darwin/src/Python-3.4.1/Parser/asdl.py ' - '/Users/build/platform_darwin/src/Python-3.4.1/Parser/asdl_c.py', - 'AST_ASDL': '/Users/build/platform_darwin/src/Python-3.4.1/Parser/Python.asdl', + 'ASDLGEN': 'python ./Parser/asdl_c.py', + 'ASDLGEN_FILES': './Parser/asdl.py ./Parser/asdl_c.py', + 'AST_ASDL': './Parser/Python.asdl', 'AST_C': 'Python/Python-ast.c', 'AST_C_DIR': 'Python', 'AST_H': 'Include/Python-ast.h', 'AST_H_DIR': 'Include', - 'BASECFLAGS': '-fno-strict-aliasing -Werror=declaration-after-statement', + 'BASECFLAGS': '-Wno-unused-result -Wsign-compare -Wunreachable-code ' + '-fno-common -dynamic', 'BASECPPFLAGS': '', 'BASEMODLIBS': '', - 'BINDIR': '/Users/build/platform_darwin/bin', - 'BINLIBDEST': '/Users/build/platform_darwin/lib/python3.4', - 'BLDLIBRARY': 'libpython3.4m.a', - 'BLDSHARED': 'gcc -bundle -undefined dynamic_lookup', + 'BINDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/bin', + 'BINLIBDEST': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5', + 'BLDLIBRARY': '', + 'BLDSHARED': 'clang -bundle -undefined dynamic_lookup', 'BUILDEXE': '.exe', 'BUILDPYTHON': 'python.exe', - 'BUILD_GNU_TYPE': 'i386-apple-darwin10.8.0', + 'BUILD_GNU_TYPE': 'x86_64-apple-darwin15.2.0', 'BYTESTR_DEPS': '\\', - 'CC': 'gcc', + 'CC': 'clang', 'CCSHARED': '', - 'CFLAGS': '-fno-strict-aliasing -Werror=declaration-after-statement ' - '-DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes', + 'CFLAGS': '-Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common ' + '-dynamic -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes', 'CFLAGSFORSHARED': '', + 'CFLAGS_NODIST': '', 'CONFIGFILES': 'configure configure.ac acconfig.h pyconfig.h.in ' 'Makefile.pre.in', 'CONFIGURE_CFLAGS': '', + 'CONFIGURE_CFLAGS_NODIST': '-Werror=declaration-after-statement', 'CONFIGURE_CPPFLAGS': '', 'CONFIGURE_LDFLAGS': '', - 'CONFIG_ARGS': "'--prefix=/Users/build/platform_darwin'", - 'CONFINCLUDEDIR': '/Users/build/platform_darwin/include', - 'CONFINCLUDEPY': '/Users/build/platform_darwin/include/python3.4m', + 'CONFIG_ARGS': "'--prefix=/Users/build/.local/Cellar/python3/3.5.0' " + "'--enable-ipv6' " + "'--datarootdir=/Users/build/.local/Cellar/python3/3.5.0/share' " + "'--datadir=/Users/build/.local/Cellar/python3/3.5.0/share' " + "'--enable-framework=/Users/build/.local/Cellar/python3/3.5.0/Frameworks' " + "'--without-ensurepip' '--without-gcc' " + "'MACOSX_DEPLOYMENT_TARGET=10.11' 'CC=clang' " + 
"'PKG_CONFIG_PATH=/Users/build/.local/opt/xz/lib/pkgconfig:/Users/build/.local/opt/sqlite/lib/pkgconfig:/Users/build/.local/opt/openssl/lib/pkgconfig' " + "'PKG_CONFIG_LIBDIR=/usr/lib/pkgconfig:/Users/build/.local/Library/ENV/pkgconfig/10.11'", + 'CONFINCLUDEDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include', + 'CONFINCLUDEPY': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include/python3.5m', 'COREPYTHONPATH': ':plat-darwin', - 'COVERAGE_INFO': '/Users/build/platform_darwin/build/Python/Python-3.4.1/coverage.info', - 'COVERAGE_REPORT': '/Users/build/platform_darwin/build/Python/Python-3.4.1/lcov-report', + 'COVERAGE_INFO': '/private/tmp/python320151125-76692-lzmenz/Python-3.5.0/coverage.info', + 'COVERAGE_REPORT': '/private/tmp/python320151125-76692-lzmenz/Python-3.5.0/lcov-report', 'COVERAGE_REPORT_OPTIONS': '--no-branch-coverage --title "CPython lcov ' 'report"', - 'CPPFLAGS': '-I. -IInclude ' - '-I/Users/build/platform_darwin/src/Python-3.4.1/Include', - 'CXX': 'g++', - 'DESTDIRS': '/Users/build/platform_darwin /Users/build/platform_darwin/lib ' - '/Users/build/platform_darwin/lib/python3.4 ' - '/Users/build/platform_darwin/lib/python3.4/lib-dynload', - 'DESTLIB': '/Users/build/platform_darwin/lib/python3.4', + 'CPPFLAGS': '-I. -IInclude -I./Include', + 'CXX': 'clang++', + 'DESTDIRS': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5 ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5 ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5/lib-dynload', + 'DESTLIB': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5', 'DESTPATH': '', - 'DESTSHARED': '/Users/build/platform_darwin/lib/python3.4/lib-dynload', + 'DESTSHARED': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5/lib-dynload', 'DIRMODE': 755, 'DIST': 'README ChangeLog configure configure.ac acconfig.h pyconfig.h.in ' 'Makefile.pre.in Include Lib Misc Ext-dummy', @@ -64,14 +73,13 @@ build_time_vars = {'ABIFLAGS': 'm', 'DOUBLE_IS_LITTLE_ENDIAN_IEEE754': 1, 'DYNLOADFILE': 'dynload_shlib.o', 'ENABLE_IPV6': 1, - 'ENSUREPIP': 'upgrade', + 'ENSUREPIP': 'no', 'EXE': '', 'EXEMODE': 755, 'EXTRAMACHDEPPATH': '', - 'EXTRAPLATDIR': '@EXTRAPLATDIR@', 'EXTRATESTOPTS': '', 'EXTRA_CFLAGS': '', - 'EXT_SUFFIX': '.so', + 'EXT_SUFFIX': '.cpython-35m-darwin.so', 'FILEMODE': 644, 'FLOCK_NEEDS_LIBBSD': 0, 'GETPGRP_HAVE_ARG': 0, @@ -79,7 +87,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'GNULD': 'no', 'GRAMMAR_C': 'Python/graminit.c', 'GRAMMAR_H': 'Include/graminit.h', - 'GRAMMAR_INPUT': '/Users/build/platform_darwin/src/Python-3.4.1/Grammar/Grammar', + 'GRAMMAR_INPUT': './Grammar/Grammar', 'HAVE_ACCEPT4': 0, 'HAVE_ACOSH': 1, 'HAVE_ADDRINFO': 1, @@ -101,6 +109,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_BROKEN_PTHREAD_SIGMASK': 0, 'HAVE_BROKEN_SEM_GETVALUE': 1, 'HAVE_BROKEN_UNSETENV': 0, + 'HAVE_BUILTIN_ATOMIC': 1, 'HAVE_C99_BOOL': 1, 'HAVE_CHFLAGS': 1, 'HAVE_CHOWN': 1, @@ -126,6 +135,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_DEV_PTC': 0, 'HAVE_DEV_PTMX': 1, 'HAVE_DIRECT_H': 0, + 'HAVE_DIRENT_D_TYPE': 1, 'HAVE_DIRENT_H': 1, 'HAVE_DIRFD': 1, 'HAVE_DLFCN_H': 1, @@ -141,15 +151,15 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_ERRNO_H': 1, 'HAVE_EXECV': 1, 
'HAVE_EXPM1': 1, - 'HAVE_FACCESSAT': 0, + 'HAVE_FACCESSAT': 1, 'HAVE_FCHDIR': 1, 'HAVE_FCHMOD': 1, - 'HAVE_FCHMODAT': 0, + 'HAVE_FCHMODAT': 1, 'HAVE_FCHOWN': 1, - 'HAVE_FCHOWNAT': 0, + 'HAVE_FCHOWNAT': 1, 'HAVE_FCNTL_H': 1, 'HAVE_FDATASYNC': 0, - 'HAVE_FDOPENDIR': 0, + 'HAVE_FDOPENDIR': 1, 'HAVE_FEXECVE': 0, 'HAVE_FINITE': 1, 'HAVE_FLOCK': 1, @@ -158,7 +168,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_FPATHCONF': 1, 'HAVE_FSEEK64': 0, 'HAVE_FSEEKO': 1, - 'HAVE_FSTATAT': 0, + 'HAVE_FSTATAT': 1, 'HAVE_FSTATVFS': 1, 'HAVE_FSYNC': 1, 'HAVE_FTELL64': 0, @@ -170,11 +180,13 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_FUTIMESAT': 0, 'HAVE_GAI_STRERROR': 1, 'HAVE_GAMMA': 1, - 'HAVE_GCC_ASM_FOR_X64': 0, + 'HAVE_GCC_ASM_FOR_MC68881': 0, + 'HAVE_GCC_ASM_FOR_X64': 1, 'HAVE_GCC_ASM_FOR_X87': 1, - 'HAVE_GCC_UINT128_T': 0, + 'HAVE_GCC_UINT128_T': 1, 'HAVE_GETADDRINFO': 1, 'HAVE_GETC_UNLOCKED': 1, + 'HAVE_GETENTROPY': 0, 'HAVE_GETGROUPLIST': 1, 'HAVE_GETGROUPS': 1, 'HAVE_GETHOSTBYNAME': 1, @@ -193,6 +205,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_GETPID': 1, 'HAVE_GETPRIORITY': 1, 'HAVE_GETPWENT': 1, + 'HAVE_GETRANDOM_SYSCALL': 0, 'HAVE_GETRESGID': 0, 'HAVE_GETRESUID': 0, 'HAVE_GETSID': 1, @@ -219,7 +232,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_KILLPG': 1, 'HAVE_KQUEUE': 1, 'HAVE_LANGINFO_H': 1, - 'HAVE_LARGEFILE_SUPPORT': 1, + 'HAVE_LARGEFILE_SUPPORT': 0, 'HAVE_LCHFLAGS': 1, 'HAVE_LCHMOD': 1, 'HAVE_LCHOWN': 1, @@ -233,9 +246,10 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_LIBSENDFILE': 0, 'HAVE_LIBUTIL_H': 0, 'HAVE_LINK': 1, - 'HAVE_LINKAT': 0, + 'HAVE_LINKAT': 1, 'HAVE_LINUX_CAN_BCM_H': 0, 'HAVE_LINUX_CAN_H': 0, + 'HAVE_LINUX_CAN_RAW_FD_FRAMES': 0, 'HAVE_LINUX_CAN_RAW_H': 0, 'HAVE_LINUX_NETLINK_H': 0, 'HAVE_LINUX_TIPC_H': 0, @@ -251,7 +265,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_MEMMOVE': 1, 'HAVE_MEMORY_H': 1, 'HAVE_MEMRCHR': 0, - 'HAVE_MKDIRAT': 0, + 'HAVE_MKDIRAT': 1, 'HAVE_MKFIFO': 1, 'HAVE_MKFIFOAT': 0, 'HAVE_MKNOD': 1, @@ -264,7 +278,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_NETPACKET_PACKET_H': 0, 'HAVE_NET_IF_H': 1, 'HAVE_NICE': 1, - 'HAVE_OPENAT': 0, + 'HAVE_OPENAT': 1, 'HAVE_OPENPTY': 1, 'HAVE_OSX105_SDK': 1, 'HAVE_PATHCONF': 1, @@ -288,17 +302,19 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_PTY_H': 0, 'HAVE_PUTENV': 1, 'HAVE_PWRITE': 1, + 'HAVE_RAND_EGD': 1, 'HAVE_READLINK': 1, - 'HAVE_READLINKAT': 0, + 'HAVE_READLINKAT': 1, 'HAVE_READV': 1, 'HAVE_REALPATH': 1, - 'HAVE_RENAMEAT': 0, + 'HAVE_RENAMEAT': 1, + 'HAVE_RL_APPEND_HISTORY': 1, 'HAVE_RL_CALLBACK': 1, - 'HAVE_RL_CATCH_SIGNAL': 0, + 'HAVE_RL_CATCH_SIGNAL': 1, 'HAVE_RL_COMPLETION_APPEND_CHARACTER': 1, 'HAVE_RL_COMPLETION_DISPLAY_MATCHES_HOOK': 1, 'HAVE_RL_COMPLETION_MATCHES': 1, - 'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 0, + 'HAVE_RL_COMPLETION_SUPPRESS_APPEND': 1, 'HAVE_RL_PRE_INPUT_HOOK': 1, 'HAVE_ROUND': 1, 'HAVE_SCHED_GET_PRIORITY_MAX': 1, @@ -352,6 +368,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_STDARG_PROTOTYPES': 1, 'HAVE_STDINT_H': 1, 'HAVE_STDLIB_H': 1, + 'HAVE_STD_ATOMIC': 0, 'HAVE_STRDUP': 1, 'HAVE_STRFTIME': 1, 'HAVE_STRINGS_H': 1, @@ -367,7 +384,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_STRUCT_TM_TM_ZONE': 1, 'HAVE_ST_BLOCKS': 1, 'HAVE_SYMLINK': 1, - 'HAVE_SYMLINKAT': 0, + 'HAVE_SYMLINKAT': 1, 'HAVE_SYNC': 1, 'HAVE_SYSCONF': 1, 'HAVE_SYSEXITS_H': 1, @@ -425,7 +442,7 @@ build_time_vars = {'ABIFLAGS': 'm', 'HAVE_UINTPTR_T': 1, 'HAVE_UNAME': 1, 'HAVE_UNISTD_H': 1, - 'HAVE_UNLINKAT': 0, + 'HAVE_UNLINKAT': 1, 'HAVE_UNSETENV': 1, 'HAVE_USABLE_WCHAR_T': 0, 'HAVE_UTIL_H': 1, @@ -448,60 +465,61 @@ 
build_time_vars = {'ABIFLAGS': 'm', 'HGBRANCH': '', 'HGTAG': '', 'HGVERSION': '', - 'HOST_GNU_TYPE': 'i386-apple-darwin10.8.0', - 'INCLDIRSTOMAKE': '/Users/build/platform_darwin/include ' - '/Users/build/platform_darwin/include ' - '/Users/build/platform_darwin/include/python3.4m ' - '/Users/build/platform_darwin/include/python3.4m', - 'INCLUDEDIR': '/Users/build/platform_darwin/include', - 'INCLUDEPY': '/Users/build/platform_darwin/include/python3.4m', + 'HOST_GNU_TYPE': 'x86_64-apple-darwin15.2.0', + 'INCLDIRSTOMAKE': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include/python3.5m ' + '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include/python3.5m', + 'INCLUDEDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include', + 'INCLUDEPY': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/include/python3.5m', 'INSTALL': '/usr/bin/install -c', 'INSTALL_DATA': '/usr/bin/install -c -m 644', 'INSTALL_PROGRAM': '/usr/bin/install -c', 'INSTALL_SCRIPT': '/usr/bin/install -c', 'INSTALL_SHARED': '/usr/bin/install -c -m 555', - 'INSTSONAME': 'libpython3.4m.a', + 'INSTSONAME': 'Python.framework/Versions/3.5/Python', 'IO_H': 'Modules/_io/_iomodule.h', 'IO_OBJS': '\\', - 'LDCXXSHARED': 'g++ -bundle -undefined dynamic_lookup', + 'LDCXXSHARED': 'clang++ -bundle -undefined dynamic_lookup', 'LDFLAGS': '', 'LDLAST': '', - 'LDLIBRARY': 'libpython3.4m.a', + 'LDLIBRARY': 'Python.framework/Versions/3.5/Python', 'LDLIBRARYDIR': '', - 'LDSHARED': 'gcc -bundle -undefined dynamic_lookup', - 'LDVERSION': '3.4m', + 'LDSHARED': 'clang -bundle -undefined dynamic_lookup', + 'LDVERSION': '3.5m', 'LIBC': '', - 'LIBDEST': '/Users/build/platform_darwin/lib/python3.4', - 'LIBDIR': '/Users/build/platform_darwin/lib', + 'LIBDEST': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5', + 'LIBDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib', 'LIBFFI_INCLUDEDIR': '', 'LIBM': '', 'LIBOBJDIR': 'Python/', 'LIBOBJS': '', - 'LIBPC': '/Users/build/platform_darwin/lib/pkgconfig', - 'LIBPL': '/Users/build/platform_darwin/lib/python3.4/config-3.4m', - 'LIBRARY': 'libpython3.4m.a', + 'LIBPC': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/pkgconfig', + 'LIBPL': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5/config-3.5m', + 'LIBRARY': 'libpython3.5m.a', 'LIBRARY_OBJS': '\\', 'LIBRARY_OBJS_OMIT_FROZEN': '\\', 'LIBS': '-ldl -framework CoreFoundation', 'LIBSUBDIRS': 'tkinter tkinter/test tkinter/test/test_tkinter \\', - 'LINKCC': 'gcc', - 'LINKFORSHARED': '-Wl,-stack_size,1000000 -framework CoreFoundation', + 'LINKCC': 'clang', + 'LINKFORSHARED': '-Wl,-stack_size,1000000 -framework CoreFoundation ' + 'Python.framework/Versions/3.5/Python', 'LIPO_32BIT_FLAGS': '', 'LN': 'ln', 'LOCALMODLIBS': '', 'LOG1P_DROPS_ZERO_SIGN': 0, 'MACHDEP': 'darwin', 'MACHDEPPATH': ':plat-darwin', - 'MACHDEPS': 'plat-darwin @EXTRAPLATDIR@', + 'MACHDEPS': 'plat-darwin', 'MACHDEP_OBJS': '', - 'MACHDESTLIB': '/Users/build/platform_darwin/lib/python3.4', - 'MACOSX_DEPLOYMENT_TARGET': '10.6', - 'MAINCC': 'gcc', + 'MACHDESTLIB': 
'/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib/python3.5', + 'MACOSX_DEPLOYMENT_TARGET': '10.11', + 'MAINCC': 'clang', 'MAJOR_IN_MKDEV': 0, 'MAJOR_IN_SYSMACROS': 0, - 'MAKESETUP': '/Users/build/platform_darwin/src/Python-3.4.1/Modules/makesetup', - 'MANDIR': '/Users/build/platform_darwin/share/man', - 'MKDIR_P': '/Users/build/platform_darwin/src/Python-3.4.1/install-sh -c -d', + 'MAKESETUP': './Modules/makesetup', + 'MANDIR': '/Users/build/.local/Cellar/python3/3.5.0/share/man', + 'MKDIR_P': './install-sh -c -d', 'MODLIBS': '', 'MODOBJS': 'Modules/_threadmodule.o Modules/signalmodule.o ' 'Modules/posixmodule.o Modules/errnomodule.o ' @@ -509,10 +527,10 @@ build_time_vars = {'ABIFLAGS': 'm', 'Modules/_weakref.o Modules/_functoolsmodule.o ' 'Modules/_operator.o Modules/_collectionsmodule.o ' 'Modules/itertoolsmodule.o Modules/atexitmodule.o ' - 'Modules/_stat.o Modules/_localemodule.o Modules/_iomodule.o ' - 'Modules/iobase.o Modules/fileio.o Modules/bytesio.o ' - 'Modules/bufferedio.o Modules/textio.o Modules/stringio.o ' - 'Modules/zipimport.o Modules/faulthandler.o ' + 'Modules/_stat.o Modules/timemodule.o Modules/_localemodule.o ' + 'Modules/_iomodule.o Modules/iobase.o Modules/fileio.o ' + 'Modules/bytesio.o Modules/bufferedio.o Modules/textio.o ' + 'Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o ' 'Modules/_tracemalloc.o Modules/hashtable.o ' 'Modules/symtablemodule.o Modules/xxsubtype.o', 'MODULE_OBJS': '\\', @@ -523,6 +541,11 @@ build_time_vars = {'ABIFLAGS': 'm', 'OPCODETARGETGEN': '\\', 'OPCODETARGETGEN_FILES': '\\', 'OPCODETARGETS_H': '\\', + 'OPCODE_H': './Include/opcode.h', + 'OPCODE_H_DIR': './Include', + 'OPCODE_H_GEN': 'python ./Tools/scripts/generate_opcode_h.py ./Lib/opcode.py ' + './Include/opcode.h', + 'OPCODE_H_SCRIPT': './Tools/scripts/generate_opcode_h.py', 'OPT': '-DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes', 'OTHER_LIBTOOL_OPT': '', 'PACKAGE_BUGREPORT': 0, @@ -541,31 +564,30 @@ build_time_vars = {'ABIFLAGS': 'm', 'PLATDIR': 'plat-darwin', 'POBJS': '\\', 'POSIX_SEMAPHORES_NOT_ENABLED': 0, - 'PROFILE_TASK': '/Users/build/platform_darwin/src/Python-3.4.1/Tools/pybench/pybench.py ' - '-n 2 --with-gc --with-syscheck', + 'PROFILE_TASK': './Tools/pybench/pybench.py -n 2 --with-gc --with-syscheck', 'PSRCS': '\\', 'PTHREAD_SYSTEM_SCHED_SUPPORTED': 1, 'PURIFY': '', 'PY3LIBRARY': '', 'PYLONG_BITS_IN_DIGIT': 0, 'PYTHON': 'python', - 'PYTHONFRAMEWORK': '', - 'PYTHONFRAMEWORKDIR': 'no-framework', - 'PYTHONFRAMEWORKINSTALLDIR': '', - 'PYTHONFRAMEWORKPREFIX': '', + 'PYTHONFRAMEWORK': 'Python', + 'PYTHONFRAMEWORKDIR': 'Python.framework', + 'PYTHONFRAMEWORKINSTALLDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework', + 'PYTHONFRAMEWORKPREFIX': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks', 'PYTHONPATH': ':plat-darwin', 'PYTHON_FOR_BUILD': './python.exe -E', 'PYTHON_HEADERS': '\\', 'PYTHON_OBJS': '\\', - 'PY_CFLAGS': '-fno-strict-aliasing -Werror=declaration-after-statement ' - '-DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes', - 'PY_CORE_CFLAGS': '-fno-strict-aliasing -Werror=declaration-after-statement ' - '-DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -I. ' - '-IInclude ' - '-I/Users/build/platform_darwin/src/Python-3.4.1/Include ' - '-DPy_BUILD_CORE', - 'PY_CPPFLAGS': '-I. 
-IInclude ' - '-I/Users/build/platform_darwin/src/Python-3.4.1/Include', + 'PY_CFLAGS': '-Wno-unused-result -Wsign-compare -Wunreachable-code ' + '-fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall ' + '-Wstrict-prototypes', + 'PY_CFLAGS_NODIST': '-Werror=declaration-after-statement', + 'PY_CORE_CFLAGS': '-Wno-unused-result -Wsign-compare -Wunreachable-code ' + '-fno-common -dynamic -DNDEBUG -g -fwrapv -O3 -Wall ' + '-Wstrict-prototypes -Werror=declaration-after-statement ' + '-I. -IInclude -I./Include -DPy_BUILD_CORE', + 'PY_CPPFLAGS': '-I. -IInclude -I./Include', 'PY_FORMAT_LONG_LONG': '"ll"', 'PY_FORMAT_SIZE_T': '"z"', 'PY_LDFLAGS': '', @@ -577,8 +599,8 @@ build_time_vars = {'ABIFLAGS': 'm', 'READELF': ':', 'RESSRCDIR': 'Mac/Resources/framework', 'RETSIGTYPE': 'void', - 'RUNSHARED': '', - 'SCRIPTDIR': '/Users/build/platform_darwin/lib', + 'RUNSHARED': 'DYLD_FRAMEWORK_PATH=/private/tmp/python320151125-76692-lzmenz/Python-3.5.0', + 'SCRIPTDIR': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/lib', 'SETPGRP_HAVE_ARG': 0, 'SGI_ABI': '', 'SHELL': '/bin/sh', @@ -591,22 +613,22 @@ build_time_vars = {'ABIFLAGS': 'm', 'SIZEOF_FLOAT': 4, 'SIZEOF_FPOS_T': 8, 'SIZEOF_INT': 4, - 'SIZEOF_LONG': 4, + 'SIZEOF_LONG': 8, 'SIZEOF_LONG_DOUBLE': 16, 'SIZEOF_LONG_LONG': 8, 'SIZEOF_OFF_T': 8, 'SIZEOF_PID_T': 4, - 'SIZEOF_PTHREAD_T': 4, + 'SIZEOF_PTHREAD_T': 8, 'SIZEOF_SHORT': 2, - 'SIZEOF_SIZE_T': 4, - 'SIZEOF_TIME_T': 4, - 'SIZEOF_UINTPTR_T': 4, - 'SIZEOF_VOID_P': 4, + 'SIZEOF_SIZE_T': 8, + 'SIZEOF_TIME_T': 8, + 'SIZEOF_UINTPTR_T': 8, + 'SIZEOF_VOID_P': 8, 'SIZEOF_WCHAR_T': 4, 'SIZEOF__BOOL': 1, - 'SOABI': 'cpython-34m', - 'SRCDIRS': 'Parser Grammar Objects Python Modules Mac', - 'SRC_GDB_HOOKS': '/Users/build/platform_darwin/src/Python-3.4.1/Tools/gdb/libpython.py', + 'SOABI': 'cpython-35m-darwin', + 'SRCDIRS': 'Parser Grammar Objects Python Modules Mac Programs', + 'SRC_GDB_HOOKS': './Tools/gdb/libpython.py', 'STDC_HEADERS': 1, 'STRICT_SYSV_CURSES': "/* Don't use ncurses extensions */", 'STRIPFLAG': '-s', @@ -619,10 +641,11 @@ build_time_vars = {'ABIFLAGS': 'm', 'TCLTK_LIBS': '', 'TESTOPTS': '', 'TESTPATH': '', - 'TESTPYTHON': './python.exe', + 'TESTPYTHON': 'DYLD_FRAMEWORK_PATH=/private/tmp/python320151125-76692-lzmenz/Python-3.5.0 ' + './python.exe', 'TESTPYTHONOPTS': '', - 'TESTRUNNER': './python.exe ' - '/Users/build/platform_darwin/src/Python-3.4.1/Tools/scripts/run_tests.py', + 'TESTRUNNER': 'DYLD_FRAMEWORK_PATH=/private/tmp/python320151125-76692-lzmenz/Python-3.5.0 ' + './python.exe ./Tools/scripts/run_tests.py', 'TESTTIMEOUT': 3600, 'THREADOBJ': 'Python/thread.o', 'TIMEMODULE_LIB': 0, @@ -632,24 +655,23 @@ build_time_vars = {'ABIFLAGS': 'm', 'UNIVERSALSDK': '', 'USE_COMPUTED_GOTOS': 0, 'USE_INLINE': 1, - 'VA_LIST_IS_ARRAY': 0, - 'VERSION': '3.4', - 'VPATH': '/Users/build/platform_darwin/src/Python-3.4.1', + 'VA_LIST_IS_ARRAY': 1, + 'VERSION': '3.5', 'WANT_SIGFPE_HANDLER': 0, 'WINDOW_HAS_FLAGS': 0, 'WITH_DOC_STRINGS': 1, 'WITH_DYLD': 1, 'WITH_LIBINTL': 0, - 'WITH_NEXT_FRAMEWORK': 0, + 'WITH_NEXT_FRAMEWORK': 1, 'WITH_PYMALLOC': 1, 'WITH_THREAD': 1, 'WITH_TSC': 0, 'WITH_VALGRIND': 0, 'X87_DOUBLE_ROUNDING': 0, 'XMLLIBSUBDIRS': 'xml xml/dom xml/etree xml/parsers xml/sax', - 'abs_builddir': '/Users/build/platform_darwin/build/Python/Python-3.4.1', - 'abs_srcdir': '/Users/build/platform_darwin/src/Python-3.4.1', - 'datarootdir': '/Users/build/platform_darwin/share', - 'exec_prefix': '/Users/build/platform_darwin', - 'prefix': '/Users/build/platform_darwin', - 'srcdir': 
'/Users/build/platform_darwin/src/Python-3.4.1'} + 'abs_builddir': '/private/tmp/python320151125-76692-lzmenz/Python-3.5.0', + 'abs_srcdir': '/private/tmp/python320151125-76692-lzmenz/Python-3.5.0', + 'datarootdir': '/Users/build/.local/Cellar/python3/3.5.0/share', + 'exec_prefix': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5', + 'prefix': '/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5', + 'srcdir': '.'} diff --git a/Darwin/lib/python3.4/_threading_local.py b/Darwin/lib/python3.5/_threading_local.py similarity index 100% rename from Darwin/lib/python3.4/_threading_local.py rename to Darwin/lib/python3.5/_threading_local.py diff --git a/Darwin/lib/python3.4/_weakrefset.py b/Darwin/lib/python3.5/_weakrefset.py similarity index 100% rename from Darwin/lib/python3.4/_weakrefset.py rename to Darwin/lib/python3.5/_weakrefset.py diff --git a/Darwin/lib/python3.4/abc.py b/Darwin/lib/python3.5/abc.py similarity index 99% rename from Darwin/lib/python3.4/abc.py rename to Darwin/lib/python3.5/abc.py index 0358a46..1cbf96a 100644 --- a/Darwin/lib/python3.4/abc.py +++ b/Darwin/lib/python3.5/abc.py @@ -168,7 +168,7 @@ class ABCMeta(type): def _dump_registry(cls, file=None): """Debug helper to print the ABC registry.""" - print("Class: %s.%s" % (cls.__module__, cls.__name__), file=file) + print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file) print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file) for name in sorted(cls.__dict__.keys()): if name.startswith("_abc_"): diff --git a/Darwin/lib/python3.4/aifc.py b/Darwin/lib/python3.5/aifc.py similarity index 99% rename from Darwin/lib/python3.4/aifc.py rename to Darwin/lib/python3.5/aifc.py index 9e64de9..7ebdbeb 100644 --- a/Darwin/lib/python3.4/aifc.py +++ b/Darwin/lib/python3.5/aifc.py @@ -121,7 +121,7 @@ but when it is set to the correct value, the header does not have to be patched up. It is best to first set all parameters, perhaps possibly the compression type, and then write audio frames using writeframesraw. -When all frames have been written, either call writeframes('') or +When all frames have been written, either call writeframes(b'') or close() to patch up the sizes in the header. Marks can be added anytime. If there are any marks, you must call close() after all frames have been written. 
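The regenerated _sysconfigdata.py above is what the sysconfig module reads at runtime, so the switch from the static 3.4 build to the clang/framework 3.5 build is visible through ordinary queries. A small check, using only keys that appear in the hunk (printed values are the ones this particular build records; other builds will differ):

import sysconfig

# Each key below maps directly to an entry in build_time_vars.
for key in ('EXT_SUFFIX', 'SOABI', 'SIZEOF_VOID_P', 'MACOSX_DEPLOYMENT_TARGET'):
    print(key, '=', sysconfig.get_config_var(key))

# Expected on the build described above:
#   EXT_SUFFIX = .cpython-35m-darwin.so
#   SOABI = cpython-35m-darwin
#   SIZEOF_VOID_P = 8
#   MACOSX_DEPLOYMENT_TARGET = 10.11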
@@ -356,7 +356,10 @@ class Aifc_read: self._soundpos = 0 def close(self): - self._file.close() + file = self._file + if file is not None: + self._file = None + file.close() def tell(self): return self._soundpos diff --git a/Darwin/lib/python3.4/antigravity.py b/Darwin/lib/python3.5/antigravity.py similarity index 100% rename from Darwin/lib/python3.4/antigravity.py rename to Darwin/lib/python3.5/antigravity.py diff --git a/Darwin/lib/python3.4/argparse.py b/Darwin/lib/python3.5/argparse.py similarity index 98% rename from Darwin/lib/python3.4/argparse.py rename to Darwin/lib/python3.5/argparse.py index 5ad7e13..9a06719 100644 --- a/Darwin/lib/python3.4/argparse.py +++ b/Darwin/lib/python3.5/argparse.py @@ -490,7 +490,7 @@ class HelpFormatter(object): action_width = help_position - self._current_indent - 2 action_header = self._format_action_invocation(action) - # ho nelp; start on same line and add a final newline + # no help; start on same line and add a final newline if not action.help: tup = self._current_indent, '', action_header action_header = '%*s%s\n' % tup @@ -1122,7 +1122,14 @@ class _SubParsersAction(Action): # parse all the remaining options into the namespace # store any unrecognized options on the object, so that the top # level parser can decide what to do with them - namespace, arg_strings = parser.parse_known_args(arg_strings, namespace) + + # In case this subparser defines new defaults, we parse them + # in a new namespace object and then update the original + # namespace for the relevant parts. + subnamespace, arg_strings = parser.parse_known_args(arg_strings, None) + for key, value in vars(subnamespace).items(): + setattr(namespace, key, value) + if arg_strings: vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, []) getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings) @@ -1198,11 +1205,10 @@ class Namespace(_AttributeHolder): setattr(self, name, kwargs[name]) def __eq__(self, other): + if not isinstance(other, Namespace): + return NotImplemented return vars(self) == vars(other) - def __ne__(self, other): - return not (self == other) - def __contains__(self, key): return key in self.__dict__ @@ -1584,6 +1590,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): - argument_default -- The default value for all arguments - conflict_handler -- String indicating how to handle conflicts - add_help -- Add a -h/-help option + - allow_abbrev -- Allow long options to be abbreviated unambiguously """ def __init__(self, @@ -1597,7 +1604,8 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): fromfile_prefix_chars=None, argument_default=None, conflict_handler='error', - add_help=True): + add_help=True, + allow_abbrev=True): superinit = super(ArgumentParser, self).__init__ superinit(description=description, @@ -1615,6 +1623,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): self.formatter_class = formatter_class self.fromfile_prefix_chars = fromfile_prefix_chars self.add_help = add_help + self.allow_abbrev = allow_abbrev add_group = self.add_argument_group self._positionals = add_group(_('positional arguments')) @@ -2092,23 +2101,24 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): action = self._option_string_actions[option_string] return action, option_string, explicit_arg - # search through all possible prefixes of the option string - # and all actions in the parser for possible interpretations - option_tuples = self._get_option_tuples(arg_string) + if self.allow_abbrev: + # search through all possible prefixes of the option 
string + # and all actions in the parser for possible interpretations + option_tuples = self._get_option_tuples(arg_string) - # if multiple actions match, the option string was ambiguous - if len(option_tuples) > 1: - options = ', '.join([option_string - for action, option_string, explicit_arg in option_tuples]) - args = {'option': arg_string, 'matches': options} - msg = _('ambiguous option: %(option)s could match %(matches)s') - self.error(msg % args) + # if multiple actions match, the option string was ambiguous + if len(option_tuples) > 1: + options = ', '.join([option_string + for action, option_string, explicit_arg in option_tuples]) + args = {'option': arg_string, 'matches': options} + msg = _('ambiguous option: %(option)s could match %(matches)s') + self.error(msg % args) - # if exactly one action matched, this segmentation is good, - # so return the parsed action - elif len(option_tuples) == 1: - option_tuple, = option_tuples - return option_tuple + # if exactly one action matched, this segmentation is good, + # so return the parsed action + elif len(option_tuples) == 1: + option_tuple, = option_tuples + return option_tuple # if it was not found as an option, but it looks like a negative # number, it was meant to be positional diff --git a/Darwin/lib/python3.4/ast.py b/Darwin/lib/python3.5/ast.py similarity index 99% rename from Darwin/lib/python3.4/ast.py rename to Darwin/lib/python3.5/ast.py index 02c3b28..03c30f6 100644 --- a/Darwin/lib/python3.4/ast.py +++ b/Darwin/lib/python3.5/ast.py @@ -194,7 +194,7 @@ def get_docstring(node, clean=True): be found. If the node provided does not have docstrings a TypeError will be raised. """ - if not isinstance(node, (FunctionDef, ClassDef, Module)): + if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)): raise TypeError("%r can't have docstrings" % node.__class__.__name__) if node.body and isinstance(node.body[0], Expr) and \ isinstance(node.body[0].value, Str): diff --git a/Darwin/lib/python3.4/asynchat.py b/Darwin/lib/python3.5/asynchat.py similarity index 81% rename from Darwin/lib/python3.4/asynchat.py rename to Darwin/lib/python3.5/asynchat.py index f1a5731..f728d1b 100644 --- a/Darwin/lib/python3.4/asynchat.py +++ b/Darwin/lib/python3.5/asynchat.py @@ -49,22 +49,22 @@ import asyncore from collections import deque -class async_chat (asyncore.dispatcher): +class async_chat(asyncore.dispatcher): """This is an abstract class. 
You must derive from this class, and add the two methods collect_incoming_data() and found_terminator()""" # these are overridable defaults - ac_in_buffer_size = 65536 - ac_out_buffer_size = 65536 + ac_in_buffer_size = 65536 + ac_out_buffer_size = 65536 # we don't want to enable the use of encoding by default, because that is a # sign of an application bug that we don't want to pass silently - use_encoding = 0 - encoding = 'latin-1' + use_encoding = 0 + encoding = 'latin-1' - def __init__ (self, sock=None, map=None): + def __init__(self, sock=None, map=None): # for string terminator matching self.ac_in_buffer = b'' @@ -76,7 +76,7 @@ class async_chat (asyncore.dispatcher): # we toss the use of the "simple producer" and replace it with # a pure deque, which the original fifo was a wrapping of self.producer_fifo = deque() - asyncore.dispatcher.__init__ (self, sock, map) + asyncore.dispatcher.__init__(self, sock, map) def collect_incoming_data(self, data): raise NotImplementedError("must be implemented in subclass") @@ -92,13 +92,18 @@ class async_chat (asyncore.dispatcher): def found_terminator(self): raise NotImplementedError("must be implemented in subclass") - def set_terminator (self, term): - "Set the input delimiter. Can be a fixed string of any length, an integer, or None" + def set_terminator(self, term): + """Set the input delimiter. + + Can be a fixed string of any length, an integer, or None. + """ if isinstance(term, str) and self.use_encoding: term = bytes(term, self.encoding) + elif isinstance(term, int) and term < 0: + raise ValueError('the number of received bytes must be positive') self.terminator = term - def get_terminator (self): + def get_terminator(self): return self.terminator # grab some more data from the socket, @@ -106,10 +111,12 @@ class async_chat (asyncore.dispatcher): # check for the terminator, # if found, transition to the next state. - def handle_read (self): + def handle_read(self): try: - data = self.recv (self.ac_in_buffer_size) + data = self.recv(self.ac_in_buffer_size) + except BlockingIOError: + return except OSError as why: self.handle_error() return @@ -128,17 +135,17 @@ class async_chat (asyncore.dispatcher): terminator = self.get_terminator() if not terminator: # no terminator, collect it all - self.collect_incoming_data (self.ac_in_buffer) + self.collect_incoming_data(self.ac_in_buffer) self.ac_in_buffer = b'' elif isinstance(terminator, int): # numeric terminator n = terminator if lb < n: - self.collect_incoming_data (self.ac_in_buffer) + self.collect_incoming_data(self.ac_in_buffer) self.ac_in_buffer = b'' self.terminator = self.terminator - lb else: - self.collect_incoming_data (self.ac_in_buffer[:n]) + self.collect_incoming_data(self.ac_in_buffer[:n]) self.ac_in_buffer = self.ac_in_buffer[n:] self.terminator = 0 self.found_terminator() @@ -155,32 +162,37 @@ class async_chat (asyncore.dispatcher): if index != -1: # we found the terminator if index > 0: - # don't bother reporting the empty string (source of subtle bugs) - self.collect_incoming_data (self.ac_in_buffer[:index]) + # don't bother reporting the empty string + # (source of subtle bugs) + self.collect_incoming_data(self.ac_in_buffer[:index]) self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:] - # This does the Right Thing if the terminator is changed here. + # This does the Right Thing if the terminator + # is changed here. 
self.found_terminator() else: # check for a prefix of the terminator - index = find_prefix_at_end (self.ac_in_buffer, terminator) + index = find_prefix_at_end(self.ac_in_buffer, terminator) if index: if index != lb: # we found a prefix, collect up to the prefix - self.collect_incoming_data (self.ac_in_buffer[:-index]) + self.collect_incoming_data(self.ac_in_buffer[:-index]) self.ac_in_buffer = self.ac_in_buffer[-index:] break else: # no prefix, collect it all - self.collect_incoming_data (self.ac_in_buffer) + self.collect_incoming_data(self.ac_in_buffer) self.ac_in_buffer = b'' - def handle_write (self): + def handle_write(self): self.initiate_send() - def handle_close (self): + def handle_close(self): self.close() - def push (self, data): + def push(self, data): + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError('data argument must be byte-ish (%r)', + type(data)) sabs = self.ac_out_buffer_size if len(data) > sabs: for i in range(0, len(data), sabs): @@ -189,11 +201,11 @@ class async_chat (asyncore.dispatcher): self.producer_fifo.append(data) self.initiate_send() - def push_with_producer (self, producer): + def push_with_producer(self, producer): self.producer_fifo.append(producer) self.initiate_send() - def readable (self): + def readable(self): "predicate for inclusion in the readable for select()" # cannot use the old predicate, it violates the claim of the # set_terminator method. @@ -201,11 +213,11 @@ class async_chat (asyncore.dispatcher): # return (len(self.ac_in_buffer) <= self.ac_in_buffer_size) return 1 - def writable (self): + def writable(self): "predicate for inclusion in the writable for select()" return self.producer_fifo or (not self.connected) - def close_when_done (self): + def close_when_done(self): "automatically close this channel once the outgoing queue is empty" self.producer_fifo.append(None) @@ -216,10 +228,8 @@ class async_chat (asyncore.dispatcher): if not first: del self.producer_fifo[0] if first is None: - ## print("first is None") self.handle_close() return - ## print("first is not None") # handle classic producer behavior obs = self.ac_out_buffer_size @@ -251,20 +261,21 @@ class async_chat (asyncore.dispatcher): # we tried to send some actual data return - def discard_buffers (self): + def discard_buffers(self): # Emergencies only! self.ac_in_buffer = b'' del self.incoming[:] self.producer_fifo.clear() + class simple_producer: - def __init__ (self, data, buffer_size=512): + def __init__(self, data, buffer_size=512): self.data = data self.buffer_size = buffer_size - def more (self): - if len (self.data) > self.buffer_size: + def more(self): + if len(self.data) > self.buffer_size: result = self.data[:self.buffer_size] self.data = self.data[self.buffer_size:] return result @@ -273,38 +284,43 @@ class simple_producer: self.data = b'' return result + class fifo: - def __init__ (self, list=None): + def __init__(self, list=None): + import warnings + warnings.warn('fifo class will be removed in Python 3.6', + DeprecationWarning, stacklevel=2) if not list: self.list = deque() else: self.list = deque(list) - def __len__ (self): + def __len__(self): return len(self.list) - def is_empty (self): + def is_empty(self): return not self.list - def first (self): + def first(self): return self.list[0] - def push (self, data): + def push(self, data): self.list.append(data) - def pop (self): + def pop(self): if self.list: return (1, self.list.popleft()) else: return (0, None) + # Given 'haystack', see if any prefix of 'needle' is at its end. 
This # assumes an exact match has already been checked. Return the number of # characters matched. # for example: -# f_p_a_e ("qwerty\r", "\r\n") => 1 -# f_p_a_e ("qwertydkjf", "\r\n") => 0 -# f_p_a_e ("qwerty\r\n", "\r\n") => +# f_p_a_e("qwerty\r", "\r\n") => 1 +# f_p_a_e("qwertydkjf", "\r\n") => 0 +# f_p_a_e("qwerty\r\n", "\r\n") => # this could maybe be made faster with a computed regex? # [answer: no; circa Python-2.0, Jan 2001] @@ -313,7 +329,7 @@ class fifo: # re: 12820/s # regex: 14035/s -def find_prefix_at_end (haystack, needle): +def find_prefix_at_end(haystack, needle): l = len(needle) - 1 while l and not haystack.endswith(needle[:l]): l -= 1 diff --git a/Darwin/lib/python3.4/asyncio/__init__.py b/Darwin/lib/python3.5/asyncio/__init__.py similarity index 84% rename from Darwin/lib/python3.4/asyncio/__init__.py rename to Darwin/lib/python3.5/asyncio/__init__.py index 3df2f80..011466b 100644 --- a/Darwin/lib/python3.4/asyncio/__init__.py +++ b/Darwin/lib/python3.5/asyncio/__init__.py @@ -18,6 +18,8 @@ if sys.platform == 'win32': import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. +from .base_events import * +from .coroutines import * from .events import * from .futures import * from .locks import * @@ -28,13 +30,9 @@ from .subprocess import * from .tasks import * from .transports import * -if sys.platform == 'win32': # pragma: no cover - from .windows_events import * -else: - from .unix_events import * # pragma: no cover - - -__all__ = (events.__all__ + +__all__ = (base_events.__all__ + + coroutines.__all__ + + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + @@ -43,3 +41,10 @@ __all__ = (events.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) + +if sys.platform == 'win32': # pragma: no cover + from .windows_events import * + __all__ += windows_events.__all__ +else: + from .unix_events import * # pragma: no cover + __all__ += unix_events.__all__ diff --git a/Darwin/lib/python3.4/asyncio/base_events.py b/Darwin/lib/python3.5/asyncio/base_events.py similarity index 55% rename from Darwin/lib/python3.4/asyncio/base_events.py rename to Darwin/lib/python3.5/asyncio/base_events.py index 3d4a87a..c205445 100644 --- a/Darwin/lib/python3.4/asyncio/base_events.py +++ b/Darwin/lib/python3.5/asyncio/base_events.py @@ -1,7 +1,7 @@ """Base implementation of event loop. The event loop can be broken up into a multiplexer (the part -responsible for notifying us of IO events) and the event loop proper, +responsible for notifying us of I/O events) and the event loop proper, which wraps a multiplexer with functionality for scheduling callbacks, immediately or at a given time in the future. @@ -17,25 +17,57 @@ to modify the meaning of the API call itself. import collections import concurrent.futures import heapq +import inspect import logging +import os import socket import subprocess +import threading import time -import os +import traceback import sys +import warnings +from . import compat +from . import coroutines from . import events from . import futures from . import tasks +from .coroutines import coroutine from .log import logger -__all__ = ['BaseEventLoop', 'Server'] +__all__ = ['BaseEventLoop'] # Argument for default thread pool executor creation. _MAX_WORKERS = 5 +# Minimum number of _scheduled timer handles before cleanup of +# cancelled handles is performed. 
+_MIN_SCHEDULED_TIMER_HANDLES = 100 + +# Minimum fraction of _scheduled timer handles that are cancelled +# before cleanup of cancelled handles is performed. +_MIN_CANCELLED_TIMER_HANDLES_FRACTION = 0.5 + +def _format_handle(handle): + cb = handle._callback + if inspect.ismethod(cb) and isinstance(cb.__self__, tasks.Task): + # format the task + return repr(cb.__self__) + else: + return str(handle) + + +def _format_pipe(fd): + if fd == subprocess.PIPE: + return '' + elif fd == subprocess.STDOUT: + return '' + else: + return repr(fd) + class _StopError(BaseException): """Raised to stop the event loop.""" @@ -44,7 +76,11 @@ class _StopError(BaseException): def _check_resolved_address(sock, address): # Ensure that the address is already resolved to avoid the trap of hanging # the entire event loop when the address requires doing a DNS lookup. + # + # getaddrinfo() is slow (around 10 us per call): this function should only + # be called in debug mode family = sock.family + if family == socket.AF_INET: host, port = address elif family == socket.AF_INET6: @@ -52,95 +88,169 @@ def _check_resolved_address(sock, address): else: return - type_mask = 0 - if hasattr(socket, 'SOCK_NONBLOCK'): - type_mask |= socket.SOCK_NONBLOCK - if hasattr(socket, 'SOCK_CLOEXEC'): - type_mask |= socket.SOCK_CLOEXEC - # Use getaddrinfo(AI_NUMERICHOST) to ensure that the address is - # already resolved. - try: - socket.getaddrinfo(host, port, - family=family, - type=(sock.type & ~type_mask), - proto=sock.proto, - flags=socket.AI_NUMERICHOST) - except socket.gaierror as err: - raise ValueError("address must be resolved (IP address), got %r: %s" - % (address, err)) + # On Windows, socket.inet_pton() is only available since Python 3.4 + if hasattr(socket, 'inet_pton'): + # getaddrinfo() is slow and has known issue: prefer inet_pton() + # if available + try: + socket.inet_pton(family, host) + except OSError as exc: + raise ValueError("address must be resolved (IP address), " + "got host %r: %s" + % (host, exc)) + else: + # Use getaddrinfo(flags=AI_NUMERICHOST) to ensure that the address is + # already resolved. + type_mask = 0 + if hasattr(socket, 'SOCK_NONBLOCK'): + type_mask |= socket.SOCK_NONBLOCK + if hasattr(socket, 'SOCK_CLOEXEC'): + type_mask |= socket.SOCK_CLOEXEC + try: + socket.getaddrinfo(host, port, + family=family, + type=(sock.type & ~type_mask), + proto=sock.proto, + flags=socket.AI_NUMERICHOST) + except socket.gaierror as err: + raise ValueError("address must be resolved (IP address), " + "got host %r: %s" + % (host, err)) def _raise_stop_error(*args): raise _StopError +def _run_until_complete_cb(fut): + exc = fut._exception + if (isinstance(exc, BaseException) + and not isinstance(exc, Exception)): + # Issue #22429: run_forever() already finished, no need to + # stop it. 
+ return + _raise_stop_error() + + class Server(events.AbstractServer): def __init__(self, loop, sockets): - self.loop = loop + self._loop = loop self.sockets = sockets - self.active_count = 0 - self.waiters = [] + self._active_count = 0 + self._waiters = [] - def attach(self, transport): + def __repr__(self): + return '<%s sockets=%r>' % (self.__class__.__name__, self.sockets) + + def _attach(self): assert self.sockets is not None - self.active_count += 1 + self._active_count += 1 - def detach(self, transport): - assert self.active_count > 0 - self.active_count -= 1 - if self.active_count == 0 and self.sockets is None: + def _detach(self): + assert self._active_count > 0 + self._active_count -= 1 + if self._active_count == 0 and self.sockets is None: self._wakeup() def close(self): sockets = self.sockets - if sockets is not None: - self.sockets = None - for sock in sockets: - self.loop._stop_serving(sock) - if self.active_count == 0: - self._wakeup() + if sockets is None: + return + self.sockets = None + for sock in sockets: + self._loop._stop_serving(sock) + if self._active_count == 0: + self._wakeup() def _wakeup(self): - waiters = self.waiters - self.waiters = None + waiters = self._waiters + self._waiters = None for waiter in waiters: if not waiter.done(): waiter.set_result(waiter) - @tasks.coroutine + @coroutine def wait_closed(self): - if self.sockets is None or self.waiters is None: + if self.sockets is None or self._waiters is None: return - waiter = futures.Future(loop=self.loop) - self.waiters.append(waiter) + waiter = futures.Future(loop=self._loop) + self._waiters.append(waiter) yield from waiter class BaseEventLoop(events.AbstractEventLoop): def __init__(self): + self._timer_cancelled_count = 0 + self._closed = False self._ready = collections.deque() self._scheduled = [] self._default_executor = None self._internal_fds = 0 - self._running = False + # Identifier of the thread running the event loop, or None if the + # event loop is not running + self._thread_id = None self._clock_resolution = time.get_clock_info('monotonic').resolution self._exception_handler = None - self._debug = False + self.set_debug((not sys.flags.ignore_environment + and bool(os.environ.get('PYTHONASYNCIODEBUG')))) + # In debug mode, if the execution of a callback or a step of a task + # exceed this duration in seconds, the slow callback/task is logged. + self.slow_callback_duration = 0.1 + self._current_handle = None + self._task_factory = None + self._coroutine_wrapper_set = False + + def __repr__(self): + return ('<%s running=%s closed=%s debug=%s>' + % (self.__class__.__name__, self.is_running(), + self.is_closed(), self.get_debug())) + + def create_task(self, coro): + """Schedule a coroutine object. + + Return a task object. + """ + self._check_closed() + if self._task_factory is None: + task = tasks.Task(coro, loop=self) + if task._source_traceback: + del task._source_traceback[-1] + else: + task = self._task_factory(self, coro) + return task + + def set_task_factory(self, factory): + """Set a task factory that will be used by loop.create_task(). + + If factory is None the default task factory will be set. + + If factory is a callable, it should have a signature matching + '(loop, coro)', where 'loop' will be a reference to the active + event loop, 'coro' will be a coroutine object. The callable + must return a Future. 
+ """ + if factory is not None and not callable(factory): + raise TypeError('task factory must be a callable or None') + self._task_factory = factory + + def get_task_factory(self): + """Return a task factory, or None if the default one is in use.""" + return self._task_factory def _make_socket_transport(self, sock, protocol, waiter=None, *, extra=None, server=None): """Create socket transport.""" raise NotImplementedError - def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter, *, - server_side=False, server_hostname=None, + def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter=None, + *, server_side=False, server_hostname=None, extra=None, server=None): """Create SSL transport.""" raise NotImplementedError def _make_datagram_transport(self, sock, protocol, - address=None, extra=None): + address=None, waiter=None, extra=None): """Create datagram transport.""" raise NotImplementedError @@ -154,30 +264,37 @@ class BaseEventLoop(events.AbstractEventLoop): """Create write pipe transport.""" raise NotImplementedError - @tasks.coroutine + @coroutine def _make_subprocess_transport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, extra=None, **kwargs): """Create subprocess transport.""" raise NotImplementedError - def _read_from_self(self): - """XXX""" - raise NotImplementedError - def _write_to_self(self): - """XXX""" + """Write a byte to self-pipe, to wake up the event loop. + + This may be called from a different thread. + + The subclass is responsible for implementing the self-pipe. + """ raise NotImplementedError def _process_events(self, event_list): """Process selector events.""" raise NotImplementedError + def _check_closed(self): + if self._closed: + raise RuntimeError('Event loop is closed') + def run_forever(self): """Run until stop() is called.""" - if self._running: + self._check_closed() + if self.is_running(): raise RuntimeError('Event loop is running.') - self._running = True + self._set_coroutine_wrapper(self._debug) + self._thread_id = threading.get_ident() try: while True: try: @@ -185,23 +302,40 @@ class BaseEventLoop(events.AbstractEventLoop): except _StopError: break finally: - self._running = False + self._thread_id = None + self._set_coroutine_wrapper(False) def run_until_complete(self, future): """Run until the Future is done. If the argument is a coroutine, it is wrapped in a Task. - XXX TBD: It would be disastrous to call run_until_complete() + WARNING: It would be disastrous to call run_until_complete() with the same coroutine twice -- it would wrap it in two different Tasks and that can't be good. Return the Future's result, or raise its exception. """ - future = tasks.async(future, loop=self) - future.add_done_callback(_raise_stop_error) - self.run_forever() - future.remove_done_callback(_raise_stop_error) + self._check_closed() + + new_task = not isinstance(future, futures.Future) + future = tasks.ensure_future(future, loop=self) + if new_task: + # An exception is raised if the future didn't complete, so there + # is no need to log the "destroy pending task" message + future._log_destroy_pending = False + + future.add_done_callback(_run_until_complete_cb) + try: + self.run_forever() + except: + if new_task and future.done() and not future.cancelled(): + # The coroutine raised a BaseException. Consume the exception + # to not log a warning, the caller doesn't have access to the + # local task. 
+ future.exception() + raise + future.remove_done_callback(_run_until_complete_cb) if not future.done(): raise RuntimeError('Event loop stopped before Future completed.') @@ -210,9 +344,9 @@ class BaseEventLoop(events.AbstractEventLoop): def stop(self): """Stop running the event loop. - Every callback scheduled before stop() is called will run. - Callback scheduled after stop() is called won't. However, - those callbacks will run if run() is called again later. + Every callback scheduled before stop() is called will run. Callbacks + scheduled after stop() is called will not run. However, those callbacks + will run if run_forever is called again later. """ self.call_soon(_raise_stop_error) @@ -221,7 +355,16 @@ class BaseEventLoop(events.AbstractEventLoop): This clears the queues and shuts down the executor, but does not wait for the executor to finish. + + The event loop must not be running. """ + if self.is_running(): + raise RuntimeError("Cannot close a running event loop") + if self._closed: + return + if self._debug: + logger.debug("Close %r", self) + self._closed = True self._ready.clear() self._scheduled.clear() executor = self._default_executor @@ -229,12 +372,31 @@ class BaseEventLoop(events.AbstractEventLoop): self._default_executor = None executor.shutdown(wait=False) + def is_closed(self): + """Returns True if the event loop was closed.""" + return self._closed + + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if not self.is_closed(): + warnings.warn("unclosed event loop %r" % self, ResourceWarning) + if not self.is_running(): + self.close() + def is_running(self): - """Returns running status of event loop.""" - return self._running + """Returns True if the event loop is running.""" + return (self._thread_id is not None) def time(self): - """Return the time according to the event loop's clock.""" + """Return the time according to the event loop's clock. + + This is a float expressed in seconds since an epoch, but the + epoch, precision, accuracy and drift are unspecified and may + differ per event loop. + """ return time.monotonic() def call_later(self, delay, callback, *args): @@ -244,7 +406,7 @@ class BaseEventLoop(events.AbstractEventLoop): can be used to cancel the call. The delay can be an int or float, expressed in seconds. It is - always a relative time. + always relative to the current time. Each callback will be called exactly once. If two callbacks are scheduled for exactly the same time, it undefined which @@ -253,93 +415,157 @@ class BaseEventLoop(events.AbstractEventLoop): Any positional arguments after the callback will be passed to the callback when it is called. """ - return self.call_at(self.time() + delay, callback, *args) + timer = self.call_at(self.time() + delay, callback, *args) + if timer._source_traceback: + del timer._source_traceback[-1] + return timer def call_at(self, when, callback, *args): - """Like call_later(), but uses an absolute time.""" - if tasks.iscoroutinefunction(callback): + """Like call_later(), but uses an absolute time. + + Absolute time corresponds to the event loop's time() method. 
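The relationship between call_later() (relative delay) and call_at() (absolute deadline on the loop's clock) can be illustrated with a small, self-contained sketch; the delays are arbitrary:

import asyncio

loop = asyncio.get_event_loop()

def tick(label):
    print(label, 'at', loop.time())

loop.call_later(0.1, tick, 'relative delay')                 # 0.1 s from now
loop.call_at(loop.time() + 0.2, tick, 'absolute deadline')   # same clock as loop.time()
loop.call_later(0.3, loop.stop)
loop.run_forever()
loop.close()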
+ """ + if (coroutines.iscoroutine(callback) + or coroutines.iscoroutinefunction(callback)): raise TypeError("coroutines cannot be used with call_at()") + self._check_closed() if self._debug: - self._assert_is_current_event_loop() + self._check_thread() timer = events.TimerHandle(when, callback, args, self) + if timer._source_traceback: + del timer._source_traceback[-1] heapq.heappush(self._scheduled, timer) + timer._scheduled = True return timer def call_soon(self, callback, *args): """Arrange for a callback to be called as soon as possible. - This operates as a FIFO queue, callbacks are called in the + This operates as a FIFO queue: callbacks are called in the order in which they are registered. Each callback will be called exactly once. Any positional arguments after the callback will be passed to the callback when it is called. """ - return self._call_soon(callback, args, check_loop=True) + if self._debug: + self._check_thread() + handle = self._call_soon(callback, args) + if handle._source_traceback: + del handle._source_traceback[-1] + return handle - def _call_soon(self, callback, args, check_loop): - if tasks.iscoroutinefunction(callback): + def _call_soon(self, callback, args): + if (coroutines.iscoroutine(callback) + or coroutines.iscoroutinefunction(callback)): raise TypeError("coroutines cannot be used with call_soon()") - if self._debug and check_loop: - self._assert_is_current_event_loop() + self._check_closed() handle = events.Handle(callback, args, self) + if handle._source_traceback: + del handle._source_traceback[-1] self._ready.append(handle) return handle - def _assert_is_current_event_loop(self): - """Asserts that this event loop is the current event loop. + def _check_thread(self): + """Check that the current thread is the thread running the event loop. - Non-threadsafe methods of this class make this assumption and will + Non-thread-safe methods of this class make this assumption and will likely behave incorrectly when the assumption is violated. - Should only be called when (self._debug == True). The caller is + Should only be called when (self._debug == True). The caller is responsible for checking this condition for performance reasons. 
""" - if events.get_event_loop() is not self: + if self._thread_id is None: + return + thread_id = threading.get_ident() + if thread_id != self._thread_id: raise RuntimeError( - "non-threadsafe operation invoked on an event loop other " + "Non-thread-safe operation invoked on an event loop other " "than the current one") def call_soon_threadsafe(self, callback, *args): - """XXX""" - handle = self._call_soon(callback, args, check_loop=False) + """Like call_soon(), but thread-safe.""" + handle = self._call_soon(callback, args) + if handle._source_traceback: + del handle._source_traceback[-1] self._write_to_self() return handle - def run_in_executor(self, executor, callback, *args): - if tasks.iscoroutinefunction(callback): + def run_in_executor(self, executor, func, *args): + if (coroutines.iscoroutine(func) + or coroutines.iscoroutinefunction(func)): raise TypeError("coroutines cannot be used with run_in_executor()") - if isinstance(callback, events.Handle): + self._check_closed() + if isinstance(func, events.Handle): assert not args - assert not isinstance(callback, events.TimerHandle) - if callback._cancelled: + assert not isinstance(func, events.TimerHandle) + if func._cancelled: f = futures.Future(loop=self) f.set_result(None) return f - callback, args = callback._callback, callback._args + func, args = func._callback, func._args if executor is None: executor = self._default_executor if executor is None: executor = concurrent.futures.ThreadPoolExecutor(_MAX_WORKERS) self._default_executor = executor - return futures.wrap_future(executor.submit(callback, *args), loop=self) + return futures.wrap_future(executor.submit(func, *args), loop=self) def set_default_executor(self, executor): self._default_executor = executor + def _getaddrinfo_debug(self, host, port, family, type, proto, flags): + msg = ["%s:%r" % (host, port)] + if family: + msg.append('family=%r' % family) + if type: + msg.append('type=%r' % type) + if proto: + msg.append('proto=%r' % proto) + if flags: + msg.append('flags=%r' % flags) + msg = ', '.join(msg) + logger.debug('Get address info %s', msg) + + t0 = self.time() + addrinfo = socket.getaddrinfo(host, port, family, type, proto, flags) + dt = self.time() - t0 + + msg = ('Getting address info %s took %.3f ms: %r' + % (msg, dt * 1e3, addrinfo)) + if dt >= self.slow_callback_duration: + logger.info(msg) + else: + logger.debug(msg) + return addrinfo + def getaddrinfo(self, host, port, *, family=0, type=0, proto=0, flags=0): - return self.run_in_executor(None, socket.getaddrinfo, - host, port, family, type, proto, flags) + if self._debug: + return self.run_in_executor(None, self._getaddrinfo_debug, + host, port, family, type, proto, flags) + else: + return self.run_in_executor(None, socket.getaddrinfo, + host, port, family, type, proto, flags) def getnameinfo(self, sockaddr, flags=0): return self.run_in_executor(None, socket.getnameinfo, sockaddr, flags) - @tasks.coroutine + @coroutine def create_connection(self, protocol_factory, host=None, port=None, *, ssl=None, family=0, proto=0, flags=0, sock=None, local_addr=None, server_hostname=None): - """XXX""" + """Connect to a TCP server. + + Create a streaming transport connection to a given Internet host and + port: socket family AF_INET or socket.AF_INET6 depending on host (or + family if specified), socket type SOCK_STREAM. protocol_factory must be + a callable returning a protocol instance. + + This method is a coroutine which will try to establish the connection + in the background. 
When successful, the coroutine returns a + (transport, protocol) pair. + """ if server_hostname is not None and not ssl: raise ValueError('server_hostname is only meaningful with ssl') @@ -407,11 +633,17 @@ class BaseEventLoop(events.AbstractEventLoop): sock.close() sock = None continue + if self._debug: + logger.debug("connect %r to %r", sock, address) yield from self.sock_connect(sock, address) except OSError as exc: if sock is not None: sock.close() exceptions.append(exc) + except: + if sock is not None: + sock.close() + raise else: break else: @@ -435,9 +667,15 @@ class BaseEventLoop(events.AbstractEventLoop): transport, protocol = yield from self._create_connection_transport( sock, protocol_factory, ssl, server_hostname) + if self._debug: + # Get the socket from the transport because SSL transport closes + # the old socket and creates a new SSL socket + sock = transport.get_extra_info('socket') + logger.debug("%r connected to %s:%r: (%r, %r)", + sock, host, port, transport, protocol) return transport, protocol - @tasks.coroutine + @coroutine def _create_connection_transport(self, sock, protocol_factory, ssl, server_hostname): protocol = protocol_factory() @@ -450,10 +688,15 @@ class BaseEventLoop(events.AbstractEventLoop): else: transport = self._make_socket_transport(sock, protocol, waiter) - yield from waiter + try: + yield from waiter + except: + transport.close() + raise + return transport, protocol - @tasks.coroutine + @coroutine def create_datagram_endpoint(self, protocol_factory, local_addr=None, remote_addr=None, *, family=0, proto=0, flags=0): @@ -463,7 +706,7 @@ class BaseEventLoop(events.AbstractEventLoop): raise ValueError('unexpected address family') addr_pairs_info = (((family, proto), (None, None)),) else: - # join addresss by (family, protocol) + # join address by (family, protocol) addr_infos = collections.OrderedDict() for idx, addr in ((0, local_addr), (1, remote_addr)): if addr is not None: @@ -512,16 +755,38 @@ class BaseEventLoop(events.AbstractEventLoop): if sock is not None: sock.close() exceptions.append(exc) + except: + if sock is not None: + sock.close() + raise else: break else: raise exceptions[0] protocol = protocol_factory() - transport = self._make_datagram_transport(sock, protocol, r_addr) + waiter = futures.Future(loop=self) + transport = self._make_datagram_transport(sock, protocol, r_addr, + waiter) + if self._debug: + if local_addr: + logger.info("Datagram endpoint local_addr=%r remote_addr=%r " + "created: (%r, %r)", + local_addr, remote_addr, transport, protocol) + else: + logger.debug("Datagram endpoint remote_addr=%r created: " + "(%r, %r)", + remote_addr, transport, protocol) + + try: + yield from waiter + except: + transport.close() + raise + return transport, protocol - @tasks.coroutine + @coroutine def create_server(self, protocol_factory, host=None, port=None, *, family=socket.AF_UNSPEC, @@ -530,7 +795,12 @@ class BaseEventLoop(events.AbstractEventLoop): backlog=100, ssl=None, reuse_address=None): - """XXX""" + """Create a TCP server bound to host and port. + + Return a Server object which can be used to stop the service. + + This method is a coroutine. + """ if isinstance(ssl, bool): raise TypeError('ssl argument must be an SSLContext or None') if host is not None or port is not None: @@ -559,6 +829,10 @@ class BaseEventLoop(events.AbstractEventLoop): sock = socket.socket(af, socktype, proto) except socket.error: # Assume it's a bad family/type/protocol combination. 
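Putting the create_server() coroutine and the Server object described above together, a bare-bones listener can be sketched as follows; the protocol class is illustrative and port 0 simply asks the OS for an ephemeral port:

import asyncio

class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport):
        self.transport = transport

    def data_received(self, data):
        self.transport.write(data)   # echo back

loop = asyncio.get_event_loop()
server = loop.run_until_complete(
    loop.create_server(EchoProtocol, '127.0.0.1', 0))
print('serving on', server.sockets[0].getsockname())
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()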
+ if self._debug: + logger.warning('create_server() failed to create ' + 'socket.socket(%r, %r, %r)', + af, socktype, proto, exc_info=True) continue sockets.append(sock) if reuse_address: @@ -584,8 +858,7 @@ class BaseEventLoop(events.AbstractEventLoop): sock.close() else: if sock is None: - raise ValueError( - 'host and port was not specified and no sock specified') + raise ValueError('Neither host/port nor sock were specified') sockets = [sock] server = Server(self, sockets) @@ -593,25 +866,58 @@ class BaseEventLoop(events.AbstractEventLoop): sock.listen(backlog) sock.setblocking(False) self._start_serving(protocol_factory, sock, ssl, server) + if self._debug: + logger.info("%r is serving", server) return server - @tasks.coroutine + @coroutine def connect_read_pipe(self, protocol_factory, pipe): protocol = protocol_factory() waiter = futures.Future(loop=self) transport = self._make_read_pipe_transport(pipe, protocol, waiter) - yield from waiter + + try: + yield from waiter + except: + transport.close() + raise + + if self._debug: + logger.debug('Read pipe %r connected: (%r, %r)', + pipe.fileno(), transport, protocol) return transport, protocol - @tasks.coroutine + @coroutine def connect_write_pipe(self, protocol_factory, pipe): protocol = protocol_factory() waiter = futures.Future(loop=self) transport = self._make_write_pipe_transport(pipe, protocol, waiter) - yield from waiter + + try: + yield from waiter + except: + transport.close() + raise + + if self._debug: + logger.debug('Write pipe %r connected: (%r, %r)', + pipe.fileno(), transport, protocol) return transport, protocol - @tasks.coroutine + def _log_subprocess(self, msg, stdin, stdout, stderr): + info = [msg] + if stdin is not None: + info.append('stdin=%s' % _format_pipe(stdin)) + if stdout is not None and stderr == subprocess.STDOUT: + info.append('stdout=stderr=%s' % _format_pipe(stdout)) + else: + if stdout is not None: + info.append('stdout=%s' % _format_pipe(stdout)) + if stderr is not None: + info.append('stderr=%s' % _format_pipe(stderr)) + logger.debug(' '.join(info)) + + @coroutine def subprocess_shell(self, protocol_factory, cmd, *, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, shell=True, bufsize=0, @@ -625,11 +931,18 @@ class BaseEventLoop(events.AbstractEventLoop): if bufsize != 0: raise ValueError("bufsize must be 0") protocol = protocol_factory() + if self._debug: + # don't log parameters: they may contain sensitive information + # (password) and may be too long + debug_log = 'run shell command %r' % cmd + self._log_subprocess(debug_log, stdin, stdout, stderr) transport = yield from self._make_subprocess_transport( protocol, cmd, True, stdin, stdout, stderr, bufsize, **kwargs) + if self._debug: + logger.info('%s: %r' % (debug_log, transport)) return transport, protocol - @tasks.coroutine + @coroutine def subprocess_exec(self, protocol_factory, program, *args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=False, @@ -647,9 +960,16 @@ class BaseEventLoop(events.AbstractEventLoop): "a bytes or text string, not %s" % type(arg).__name__) protocol = protocol_factory() + if self._debug: + # don't log parameters: they may contain sensitive information + # (password) and may be too long + debug_log = 'execute program %r' % program + self._log_subprocess(debug_log, stdin, stdout, stderr) transport = yield from self._make_subprocess_transport( protocol, popen_args, False, stdin, stdout, stderr, bufsize, **kwargs) + if self._debug: + 
logger.info('%s: %r' % (debug_log, transport)) return transport, protocol def set_exception_handler(self, handler): @@ -659,7 +979,7 @@ class BaseEventLoop(events.AbstractEventLoop): be set. If handler is a callable object, it should have a - matching signature to '(loop, context)', where 'loop' + signature matching '(loop, context)', where 'loop' will be a reference to the active event loop, 'context' will be a dict object (see `call_exception_handler()` documentation for details about context). @@ -676,7 +996,7 @@ class BaseEventLoop(events.AbstractEventLoop): handler is set, and can be called by a custom exception handler that wants to defer to the default behavior. - context parameter has the same meaning as in + The context parameter has the same meaning as in `call_exception_handler()`. """ message = context.get('message') @@ -689,19 +1009,35 @@ class BaseEventLoop(events.AbstractEventLoop): else: exc_info = False + if ('source_traceback' not in context + and self._current_handle is not None + and self._current_handle._source_traceback): + context['handle_traceback'] = self._current_handle._source_traceback + log_lines = [message] for key in sorted(context): if key in {'message', 'exception'}: continue - log_lines.append('{}: {!r}'.format(key, context[key])) + value = context[key] + if key == 'source_traceback': + tb = ''.join(traceback.format_list(value)) + value = 'Object created at (most recent call last):\n' + value += tb.rstrip() + elif key == 'handle_traceback': + tb = ''.join(traceback.format_list(value)) + value = 'Handle created at (most recent call last):\n' + value += tb.rstrip() + else: + value = repr(value) + log_lines.append('{}: {}'.format(key, value)) logger.error('\n'.join(log_lines), exc_info=exc_info) def call_exception_handler(self, context): - """Call the current event loop exception handler. + """Call the current event loop's exception handler. + + The context argument is a dict containing the following keys: - context is a dict object containing the following keys - (new keys maybe introduced later): - 'message': Error message; - 'exception' (optional): Exception object; - 'future' (optional): Future instance; @@ -710,8 +1046,10 @@ class BaseEventLoop(events.AbstractEventLoop): - 'transport' (optional): Transport instance; - 'socket' (optional): Socket instance. - Note: this method should not be overloaded in subclassed - event loops. For any custom exception handling, use + New keys maybe introduced in the future. + + Note: do not overload this method in an event loop subclass. + For custom exception handling, use the `set_exception_handler()` method. """ if self._exception_handler is None: @@ -736,7 +1074,7 @@ class BaseEventLoop(events.AbstractEventLoop): 'context': context, }) except Exception: - # Guard 'default_exception_handler' in case it's + # Guard 'default_exception_handler' in case it is # overloaded. 
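The context dict documented above is what a custom handler receives; a sketch of installing one (the handler name is illustrative, not part of the patch):

import asyncio

def report_failure(loop, context):
    # 'message' is always present; 'exception', 'future', 'handle', etc. are optional.
    print('asyncio error:', context['message'], context.get('exception'))

loop = asyncio.get_event_loop()
loop.set_exception_handler(report_failure)

loop.call_soon(lambda: 1 / 0)   # the raised exception is routed to report_failure()
loop.call_soon(loop.stop)
loop.run_forever()
loop.close()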
logger.error('Exception in default exception handler ' 'while handling an unexpected error ' @@ -744,20 +1082,23 @@ class BaseEventLoop(events.AbstractEventLoop): exc_info=True) def _add_callback(self, handle): - """Add a Handle to ready or scheduled.""" + """Add a Handle to _scheduled (TimerHandle) or _ready.""" assert isinstance(handle, events.Handle), 'A Handle is required here' if handle._cancelled: return - if isinstance(handle, events.TimerHandle): - heapq.heappush(self._scheduled, handle) - else: - self._ready.append(handle) + assert not isinstance(handle, events.TimerHandle) + self._ready.append(handle) def _add_callback_signalsafe(self, handle): """Like _add_callback() but called from a signal handler.""" self._add_callback(handle) self._write_to_self() + def _timer_handle_cancelled(self, handle): + """Notification that a TimerHandle has been cancelled.""" + if handle._scheduled: + self._timer_cancelled_count += 1 + def _run_once(self): """Run one full iteration of the event loop. @@ -765,9 +1106,29 @@ class BaseEventLoop(events.AbstractEventLoop): schedules the resulting callbacks, and finally schedules 'call_later' callbacks. """ - # Remove delayed calls that were cancelled from head of queue. - while self._scheduled and self._scheduled[0]._cancelled: - heapq.heappop(self._scheduled) + + sched_count = len(self._scheduled) + if (sched_count > _MIN_SCHEDULED_TIMER_HANDLES and + self._timer_cancelled_count / sched_count > + _MIN_CANCELLED_TIMER_HANDLES_FRACTION): + # Remove delayed calls that were cancelled if their number + # is too high + new_scheduled = [] + for handle in self._scheduled: + if handle._cancelled: + handle._scheduled = False + else: + new_scheduled.append(handle) + + heapq.heapify(new_scheduled) + self._scheduled = new_scheduled + self._timer_cancelled_count = 0 + else: + # Remove delayed calls that were cancelled from head of queue. + while self._scheduled and self._scheduled[0]._cancelled: + self._timer_cancelled_count -= 1 + handle = heapq.heappop(self._scheduled) + handle._scheduled = False timeout = None if self._ready: @@ -777,20 +1138,26 @@ class BaseEventLoop(events.AbstractEventLoop): when = self._scheduled[0]._when timeout = max(0, when - self.time()) - # TODO: Instrumentation only in debug mode? - if logger.isEnabledFor(logging.INFO): + if self._debug and timeout != 0: t0 = self.time() event_list = self._selector.select(timeout) - t1 = self.time() - if t1-t0 >= 1: + dt = self.time() - t0 + if dt >= 1.0: level = logging.INFO else: level = logging.DEBUG - if timeout is not None: - logger.log(level, 'poll %.3f took %.3f seconds', - timeout, t1-t0) - else: - logger.log(level, 'poll took %.3f seconds', t1-t0) + nevent = len(event_list) + if timeout is None: + logger.log(level, 'poll took %.3f ms: %s events', + dt * 1e3, nevent) + elif nevent: + logger.log(level, + 'poll %.3f ms took %.3f ms: %s events', + timeout * 1e3, dt * 1e3, nevent) + elif dt >= 1.0: + logger.log(level, + 'poll %.3f ms took %.3f ms: timeout', + timeout * 1e3, dt * 1e3) else: event_list = self._selector.select(timeout) self._process_events(event_list) @@ -802,6 +1169,7 @@ class BaseEventLoop(events.AbstractEventLoop): if handle._when >= end_time: break handle = heapq.heappop(self._scheduled) + handle._scheduled = False self._ready.append(handle) # This is the only place where callbacks are actually *called*. 
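The lazy purge of cancelled timers above is driven by TimerHandle.cancel(); from user code the pattern is simply to keep the handle returned by call_later() and cancel it, roughly:

import asyncio

loop = asyncio.get_event_loop()
handle = loop.call_later(10.0, print, 'this never runs')
handle.cancel()   # bumps _timer_cancelled_count; the heap is compacted lazily
loop.call_soon(loop.stop)
loop.run_forever()
loop.close()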
@@ -809,16 +1177,65 @@ class BaseEventLoop(events.AbstractEventLoop): # Note: We run all currently scheduled callbacks, but not any # callbacks scheduled by callbacks run this time around -- # they will be run the next time (after another I/O poll). - # Use an idiom that is threadsafe without using locks. + # Use an idiom that is thread-safe without using locks. ntodo = len(self._ready) for i in range(ntodo): handle = self._ready.popleft() - if not handle._cancelled: + if handle._cancelled: + continue + if self._debug: + try: + self._current_handle = handle + t0 = self.time() + handle._run() + dt = self.time() - t0 + if dt >= self.slow_callback_duration: + logger.warning('Executing %s took %.3f seconds', + _format_handle(handle), dt) + finally: + self._current_handle = None + else: handle._run() handle = None # Needed to break cycles when an exception occurs. + def _set_coroutine_wrapper(self, enabled): + try: + set_wrapper = sys.set_coroutine_wrapper + get_wrapper = sys.get_coroutine_wrapper + except AttributeError: + return + + enabled = bool(enabled) + if self._coroutine_wrapper_set == enabled: + return + + wrapper = coroutines.debug_wrapper + current_wrapper = get_wrapper() + + if enabled: + if current_wrapper not in (None, wrapper): + warnings.warn( + "loop.set_debug(True): cannot set debug coroutine " + "wrapper; another wrapper is already set %r" % + current_wrapper, RuntimeWarning) + else: + set_wrapper(wrapper) + self._coroutine_wrapper_set = True + else: + if current_wrapper not in (None, wrapper): + warnings.warn( + "loop.set_debug(False): cannot unset debug coroutine " + "wrapper; another wrapper was set %r" % + current_wrapper, RuntimeWarning) + else: + set_wrapper(None) + self._coroutine_wrapper_set = False + def get_debug(self): return self._debug def set_debug(self, enabled): self._debug = enabled + + if self.is_running(): + self._set_coroutine_wrapper(enabled) diff --git a/Darwin/lib/python3.5/asyncio/base_subprocess.py b/Darwin/lib/python3.5/asyncio/base_subprocess.py new file mode 100644 index 0000000..6851cd2 --- /dev/null +++ b/Darwin/lib/python3.5/asyncio/base_subprocess.py @@ -0,0 +1,280 @@ +import collections +import subprocess +import warnings + +from . import compat +from . import futures +from . import protocols +from . 
import transports +from .coroutines import coroutine +from .log import logger + + +class BaseSubprocessTransport(transports.SubprocessTransport): + + def __init__(self, loop, protocol, args, shell, + stdin, stdout, stderr, bufsize, + waiter=None, extra=None, **kwargs): + super().__init__(extra) + self._closed = False + self._protocol = protocol + self._loop = loop + self._proc = None + self._pid = None + self._returncode = None + self._exit_waiters = [] + self._pending_calls = collections.deque() + self._pipes = {} + self._finished = False + + if stdin == subprocess.PIPE: + self._pipes[0] = None + if stdout == subprocess.PIPE: + self._pipes[1] = None + if stderr == subprocess.PIPE: + self._pipes[2] = None + + # Create the child process: set the _proc attribute + try: + self._start(args=args, shell=shell, stdin=stdin, stdout=stdout, + stderr=stderr, bufsize=bufsize, **kwargs) + except: + self.close() + raise + + self._pid = self._proc.pid + self._extra['subprocess'] = self._proc + + if self._loop.get_debug(): + if isinstance(args, (bytes, str)): + program = args + else: + program = args[0] + logger.debug('process %r created: pid %s', + program, self._pid) + + self._loop.create_task(self._connect_pipes(waiter)) + + def __repr__(self): + info = [self.__class__.__name__] + if self._closed: + info.append('closed') + if self._pid is not None: + info.append('pid=%s' % self._pid) + if self._returncode is not None: + info.append('returncode=%s' % self._returncode) + elif self._pid is not None: + info.append('running') + else: + info.append('not started') + + stdin = self._pipes.get(0) + if stdin is not None: + info.append('stdin=%s' % stdin.pipe) + + stdout = self._pipes.get(1) + stderr = self._pipes.get(2) + if stdout is not None and stderr is stdout: + info.append('stdout=stderr=%s' % stdout.pipe) + else: + if stdout is not None: + info.append('stdout=%s' % stdout.pipe) + if stderr is not None: + info.append('stderr=%s' % stderr.pipe) + + return '<%s>' % ' '.join(info) + + def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs): + raise NotImplementedError + + def close(self): + if self._closed: + return + self._closed = True + + for proto in self._pipes.values(): + if proto is None: + continue + proto.pipe.close() + + if (self._proc is not None + # the child process finished? + and self._returncode is None + # the child process finished but the transport was not notified yet? + and self._proc.poll() is None + ): + if self._loop.get_debug(): + logger.warning('Close running child process: kill %r', self) + + try: + self._proc.kill() + except ProcessLookupError: + pass + + # Don't clear the _proc reference yet: _post_init() may still run + + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. 
+ if compat.PY34: + def __del__(self): + if not self._closed: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self.close() + + def get_pid(self): + return self._pid + + def get_returncode(self): + return self._returncode + + def get_pipe_transport(self, fd): + if fd in self._pipes: + return self._pipes[fd].pipe + else: + return None + + def _check_proc(self): + if self._proc is None: + raise ProcessLookupError() + + def send_signal(self, signal): + self._check_proc() + self._proc.send_signal(signal) + + def terminate(self): + self._check_proc() + self._proc.terminate() + + def kill(self): + self._check_proc() + self._proc.kill() + + @coroutine + def _connect_pipes(self, waiter): + try: + proc = self._proc + loop = self._loop + + if proc.stdin is not None: + _, pipe = yield from loop.connect_write_pipe( + lambda: WriteSubprocessPipeProto(self, 0), + proc.stdin) + self._pipes[0] = pipe + + if proc.stdout is not None: + _, pipe = yield from loop.connect_read_pipe( + lambda: ReadSubprocessPipeProto(self, 1), + proc.stdout) + self._pipes[1] = pipe + + if proc.stderr is not None: + _, pipe = yield from loop.connect_read_pipe( + lambda: ReadSubprocessPipeProto(self, 2), + proc.stderr) + self._pipes[2] = pipe + + assert self._pending_calls is not None + + loop.call_soon(self._protocol.connection_made, self) + for callback, data in self._pending_calls: + loop.call_soon(callback, *data) + self._pending_calls = None + except Exception as exc: + if waiter is not None and not waiter.cancelled(): + waiter.set_exception(exc) + else: + if waiter is not None and not waiter.cancelled(): + waiter.set_result(None) + + def _call(self, cb, *data): + if self._pending_calls is not None: + self._pending_calls.append((cb, data)) + else: + self._loop.call_soon(cb, *data) + + def _pipe_connection_lost(self, fd, exc): + self._call(self._protocol.pipe_connection_lost, fd, exc) + self._try_finish() + + def _pipe_data_received(self, fd, data): + self._call(self._protocol.pipe_data_received, fd, data) + + def _process_exited(self, returncode): + assert returncode is not None, returncode + assert self._returncode is None, self._returncode + if self._loop.get_debug(): + logger.info('%r exited with return code %r', + self, returncode) + self._returncode = returncode + self._call(self._protocol.process_exited) + self._try_finish() + + # wake up futures waiting for wait() + for waiter in self._exit_waiters: + if not waiter.cancelled(): + waiter.set_result(returncode) + self._exit_waiters = None + + @coroutine + def _wait(self): + """Wait until the process exit and return the process return code. 
+ + This method is a coroutine.""" + if self._returncode is not None: + return self._returncode + + waiter = futures.Future(loop=self._loop) + self._exit_waiters.append(waiter) + return (yield from waiter) + + def _try_finish(self): + assert not self._finished + if self._returncode is None: + return + if all(p is not None and p.disconnected + for p in self._pipes.values()): + self._finished = True + self._call(self._call_connection_lost, None) + + def _call_connection_lost(self, exc): + try: + self._protocol.connection_lost(exc) + finally: + self._loop = None + self._proc = None + self._protocol = None + + +class WriteSubprocessPipeProto(protocols.BaseProtocol): + + def __init__(self, proc, fd): + self.proc = proc + self.fd = fd + self.pipe = None + self.disconnected = False + + def connection_made(self, transport): + self.pipe = transport + + def __repr__(self): + return ('<%s fd=%s pipe=%r>' + % (self.__class__.__name__, self.fd, self.pipe)) + + def connection_lost(self, exc): + self.disconnected = True + self.proc._pipe_connection_lost(self.fd, exc) + self.proc = None + + def pause_writing(self): + self.proc._protocol.pause_writing() + + def resume_writing(self): + self.proc._protocol.resume_writing() + + +class ReadSubprocessPipeProto(WriteSubprocessPipeProto, + protocols.Protocol): + + def data_received(self, data): + self.proc._pipe_data_received(self.fd, data) diff --git a/Darwin/lib/python3.5/asyncio/compat.py b/Darwin/lib/python3.5/asyncio/compat.py new file mode 100644 index 0000000..660b7e7 --- /dev/null +++ b/Darwin/lib/python3.5/asyncio/compat.py @@ -0,0 +1,17 @@ +"""Compatibility helpers for the different Python versions.""" + +import sys + +PY34 = sys.version_info >= (3, 4) +PY35 = sys.version_info >= (3, 5) + + +def flatten_list_bytes(list_of_data): + """Concatenate a sequence of bytes-like objects.""" + if not PY34: + # On Python 3.3 and older, bytes.join() doesn't handle + # memoryview. + list_of_data = ( + bytes(data) if isinstance(data, memoryview) else data + for data in list_of_data) + return b''.join(list_of_data) diff --git a/Darwin/lib/python3.4/asyncio/constants.py b/Darwin/lib/python3.5/asyncio/constants.py similarity index 100% rename from Darwin/lib/python3.4/asyncio/constants.py rename to Darwin/lib/python3.5/asyncio/constants.py diff --git a/Darwin/lib/python3.5/asyncio/coroutines.py b/Darwin/lib/python3.5/asyncio/coroutines.py new file mode 100644 index 0000000..e11b21b --- /dev/null +++ b/Darwin/lib/python3.5/asyncio/coroutines.py @@ -0,0 +1,299 @@ +__all__ = ['coroutine', + 'iscoroutinefunction', 'iscoroutine'] + +import functools +import inspect +import opcode +import os +import sys +import traceback +import types + +from . import compat +from . import events +from . import futures +from .log import logger + + +# Opcode of "yield from" instruction +_YIELD_FROM = opcode.opmap['YIELD_FROM'] + +# If you set _DEBUG to true, @coroutine will wrap the resulting +# generator objects in a CoroWrapper instance (defined below). That +# instance will log a message when the generator is never iterated +# over, which may happen when you forget to use "yield from" with a +# coroutine call. Note that the value of the _DEBUG flag is taken +# when the decorator is used, so to be of any use it must be set +# before you define your coroutines. A downside of using this feature +# is that tracebacks show entries for the CoroWrapper.__next__ method +# when _DEBUG is true. 
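In practice the _DEBUG flag comes from the PYTHONASYNCIODEBUG environment variable, which must be set before the coroutines are defined, while loop.set_debug(True) covers the event-loop side (slow-callback logging and, on 3.5, the coroutine wrapper). A small sketch, assuming the variable is set externally:

# Run as:  PYTHONASYNCIODEBUG=1 python3 example.py
import asyncio

@asyncio.coroutine
def forgotten():
    yield from asyncio.sleep(0)

loop = asyncio.get_event_loop()
loop.set_debug(True)   # callbacks slower than slow_callback_duration (0.1 s) get logged
forgotten()            # created but never yielded from: reported when collected
loop.close()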
+_DEBUG = (not sys.flags.ignore_environment + and bool(os.environ.get('PYTHONASYNCIODEBUG'))) + + +try: + _types_coroutine = types.coroutine +except AttributeError: + _types_coroutine = None + +try: + _inspect_iscoroutinefunction = inspect.iscoroutinefunction +except AttributeError: + _inspect_iscoroutinefunction = lambda func: False + +try: + from collections.abc import Coroutine as _CoroutineABC, \ + Awaitable as _AwaitableABC +except ImportError: + _CoroutineABC = _AwaitableABC = None + + +# Check for CPython issue #21209 +def has_yield_from_bug(): + class MyGen: + def __init__(self): + self.send_args = None + def __iter__(self): + return self + def __next__(self): + return 42 + def send(self, *what): + self.send_args = what + return None + def yield_from_gen(gen): + yield from gen + value = (1, 2, 3) + gen = MyGen() + coro = yield_from_gen(gen) + next(coro) + coro.send(value) + return gen.send_args != (value,) +_YIELD_FROM_BUG = has_yield_from_bug() +del has_yield_from_bug + + +def debug_wrapper(gen): + # This function is called from 'sys.set_coroutine_wrapper'. + # We only wrap here coroutines defined via 'async def' syntax. + # Generator-based coroutines are wrapped in @coroutine + # decorator. + return CoroWrapper(gen, None) + + +class CoroWrapper: + # Wrapper for coroutine object in _DEBUG mode. + + def __init__(self, gen, func=None): + assert inspect.isgenerator(gen) or inspect.iscoroutine(gen), gen + self.gen = gen + self.func = func # Used to unwrap @coroutine decorator + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + self.__name__ = getattr(gen, '__name__', None) + self.__qualname__ = getattr(gen, '__qualname__', None) + + def __repr__(self): + coro_repr = _format_coroutine(self) + if self._source_traceback: + frame = self._source_traceback[-1] + coro_repr += ', created at %s:%s' % (frame[0], frame[1]) + return '<%s %s>' % (self.__class__.__name__, coro_repr) + + def __iter__(self): + return self + + def __next__(self): + return self.gen.send(None) + + if _YIELD_FROM_BUG: + # For for CPython issue #21209: using "yield from" and a custom + # generator, generator.send(tuple) unpacks the tuple instead of passing + # the tuple unchanged. Check if the caller is a generator using "yield + # from" to decide if the parameter should be unpacked or not. + def send(self, *value): + frame = sys._getframe() + caller = frame.f_back + assert caller.f_lasti >= 0 + if caller.f_code.co_code[caller.f_lasti] != _YIELD_FROM: + value = value[0] + return self.gen.send(value) + else: + def send(self, value): + return self.gen.send(value) + + def throw(self, exc): + return self.gen.throw(exc) + + def close(self): + return self.gen.close() + + @property + def gi_frame(self): + return self.gen.gi_frame + + @property + def gi_running(self): + return self.gen.gi_running + + @property + def gi_code(self): + return self.gen.gi_code + + if compat.PY35: + + __await__ = __iter__ # make compatible with 'await' expression + + @property + def gi_yieldfrom(self): + return self.gen.gi_yieldfrom + + @property + def cr_await(self): + return self.gen.cr_await + + @property + def cr_running(self): + return self.gen.cr_running + + @property + def cr_code(self): + return self.gen.cr_code + + @property + def cr_frame(self): + return self.gen.cr_frame + + def __del__(self): + # Be careful accessing self.gen.frame -- self.gen might not exist. 
+ gen = getattr(self, 'gen', None) + frame = getattr(gen, 'gi_frame', None) + if frame is None: + frame = getattr(gen, 'cr_frame', None) + if frame is not None and frame.f_lasti == -1: + msg = '%r was never yielded from' % self + tb = getattr(self, '_source_traceback', ()) + if tb: + tb = ''.join(traceback.format_list(tb)) + msg += ('\nCoroutine object created at ' + '(most recent call last):\n') + msg += tb.rstrip() + logger.error(msg) + + +def coroutine(func): + """Decorator to mark coroutines. + + If the coroutine is not yielded from before it is destroyed, + an error message is logged. + """ + if _inspect_iscoroutinefunction(func): + # In Python 3.5 that's all we need to do for coroutines + # defiend with "async def". + # Wrapping in CoroWrapper will happen via + # 'sys.set_coroutine_wrapper' function. + return func + + if inspect.isgeneratorfunction(func): + coro = func + else: + @functools.wraps(func) + def coro(*args, **kw): + res = func(*args, **kw) + if isinstance(res, futures.Future) or inspect.isgenerator(res): + res = yield from res + elif _AwaitableABC is not None: + # If 'func' returns an Awaitable (new in 3.5) we + # want to run it. + try: + await_meth = res.__await__ + except AttributeError: + pass + else: + if isinstance(res, _AwaitableABC): + res = yield from await_meth() + return res + + if not _DEBUG: + if _types_coroutine is None: + wrapper = coro + else: + wrapper = _types_coroutine(coro) + else: + @functools.wraps(func) + def wrapper(*args, **kwds): + w = CoroWrapper(coro(*args, **kwds), func=func) + if w._source_traceback: + del w._source_traceback[-1] + # Python < 3.5 does not implement __qualname__ + # on generator objects, so we set it manually. + # We use getattr as some callables (such as + # functools.partial may lack __qualname__). + w.__name__ = getattr(func, '__name__', None) + w.__qualname__ = getattr(func, '__qualname__', None) + return w + + wrapper._is_coroutine = True # For iscoroutinefunction(). 
+ return wrapper + + +def iscoroutinefunction(func): + """Return True if func is a decorated coroutine function.""" + return (getattr(func, '_is_coroutine', False) or + _inspect_iscoroutinefunction(func)) + + +_COROUTINE_TYPES = (types.GeneratorType, CoroWrapper) +if _CoroutineABC is not None: + _COROUTINE_TYPES += (_CoroutineABC,) + + +def iscoroutine(obj): + """Return True if obj is a coroutine object.""" + return isinstance(obj, _COROUTINE_TYPES) + + +def _format_coroutine(coro): + assert iscoroutine(coro) + + coro_name = None + if isinstance(coro, CoroWrapper): + func = coro.func + coro_name = coro.__qualname__ + if coro_name is not None: + coro_name = '{}()'.format(coro_name) + else: + func = coro + + if coro_name is None: + coro_name = events._format_callback(func, ()) + + try: + coro_code = coro.gi_code + except AttributeError: + coro_code = coro.cr_code + + try: + coro_frame = coro.gi_frame + except AttributeError: + coro_frame = coro.cr_frame + + filename = coro_code.co_filename + if (isinstance(coro, CoroWrapper) + and not inspect.isgeneratorfunction(coro.func) + and coro.func is not None): + filename, lineno = events._get_function_source(coro.func) + if coro_frame is None: + coro_repr = ('%s done, defined at %s:%s' + % (coro_name, filename, lineno)) + else: + coro_repr = ('%s running, defined at %s:%s' + % (coro_name, filename, lineno)) + elif coro_frame is not None: + lineno = coro_frame.f_lineno + coro_repr = ('%s running at %s:%s' + % (coro_name, filename, lineno)) + else: + lineno = coro_code.co_firstlineno + coro_repr = ('%s done, defined at %s:%s' + % (coro_name, filename, lineno)) + + return coro_repr diff --git a/Darwin/lib/python3.4/asyncio/events.py b/Darwin/lib/python3.5/asyncio/events.py similarity index 69% rename from Darwin/lib/python3.4/asyncio/events.py rename to Darwin/lib/python3.5/asyncio/events.py index 31592d1..d5f0d45 100644 --- a/Darwin/lib/python3.4/asyncio/events.py +++ b/Darwin/lib/python3.5/asyncio/events.py @@ -8,15 +8,77 @@ __all__ = ['AbstractEventLoopPolicy', 'get_child_watcher', 'set_child_watcher', ] -import subprocess -import threading +import functools +import inspect +import reprlib import socket +import subprocess +import sys +import threading +import traceback + +from asyncio import compat + + +def _get_function_source(func): + if compat.PY34: + func = inspect.unwrap(func) + elif hasattr(func, '__wrapped__'): + func = func.__wrapped__ + if inspect.isfunction(func): + code = func.__code__ + return (code.co_filename, code.co_firstlineno) + if isinstance(func, functools.partial): + return _get_function_source(func.func) + if compat.PY34 and isinstance(func, functools.partialmethod): + return _get_function_source(func.func) + return None + + +def _format_args(args): + """Format function arguments. + + Special case for a single parameter: ('hello',) is formatted as ('hello'). 
+ """ + # use reprlib to limit the length of the output + args_repr = reprlib.repr(args) + if len(args) == 1 and args_repr.endswith(',)'): + args_repr = args_repr[:-2] + ')' + return args_repr + + +def _format_callback(func, args, suffix=''): + if isinstance(func, functools.partial): + if args is not None: + suffix = _format_args(args) + suffix + return _format_callback(func.func, func.args, suffix) + + if hasattr(func, '__qualname__'): + func_repr = getattr(func, '__qualname__') + elif hasattr(func, '__name__'): + func_repr = getattr(func, '__name__') + else: + func_repr = repr(func) + + if args is not None: + func_repr += _format_args(args) + if suffix: + func_repr += suffix + return func_repr + +def _format_callback_source(func, args): + func_repr = _format_callback(func, args) + source = _get_function_source(func) + if source: + func_repr += ' at %s:%s' % source + return func_repr class Handle: """Object returned by callback registration methods.""" - __slots__ = ['_callback', '_args', '_cancelled', '_loop', '__weakref__'] + __slots__ = ('_callback', '_args', '_cancelled', '_loop', + '_source_traceback', '_repr', '__weakref__') def __init__(self, callback, args, loop): assert not isinstance(callback, Handle), 'A Handle is not a callback' @@ -24,49 +86,75 @@ class Handle: self._callback = callback self._args = args self._cancelled = False + self._repr = None + if self._loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + else: + self._source_traceback = None + + def _repr_info(self): + info = [self.__class__.__name__] + if self._cancelled: + info.append('cancelled') + if self._callback is not None: + info.append(_format_callback_source(self._callback, self._args)) + if self._source_traceback: + frame = self._source_traceback[-1] + info.append('created at %s:%s' % (frame[0], frame[1])) + return info def __repr__(self): - res = 'Handle({}, {})'.format(self._callback, self._args) - if self._cancelled: - res += '' - return res + if self._repr is not None: + return self._repr + info = self._repr_info() + return '<%s>' % ' '.join(info) def cancel(self): - self._cancelled = True + if not self._cancelled: + self._cancelled = True + if self._loop.get_debug(): + # Keep a representation in debug mode to keep callback and + # parameters. For example, to log the warning + # "Executing took 2.5 second" + self._repr = repr(self) + self._callback = None + self._args = None def _run(self): try: self._callback(*self._args) except Exception as exc: - msg = 'Exception in callback {}{!r}'.format(self._callback, - self._args) - self._loop.call_exception_handler({ + cb = _format_callback_source(self._callback, self._args) + msg = 'Exception in callback {}'.format(cb) + context = { 'message': msg, 'exception': exc, 'handle': self, - }) + } + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) self = None # Needed to break cycles when an exception occurs. 
class TimerHandle(Handle): """Object returned by timed callback registration methods.""" - __slots__ = ['_when'] + __slots__ = ['_scheduled', '_when'] def __init__(self, when, callback, args, loop): assert when is not None super().__init__(callback, args, loop) - + if self._source_traceback: + del self._source_traceback[-1] self._when = when + self._scheduled = False - def __repr__(self): - res = 'TimerHandle({}, {}, {})'.format(self._when, - self._callback, - self._args) - if self._cancelled: - res += '' - - return res + def _repr_info(self): + info = super()._repr_info() + pos = 2 if self._cancelled else 1 + info.insert(pos, 'when=%s' % self._when) + return info def __hash__(self): return hash(self._when) @@ -99,6 +187,11 @@ class TimerHandle(Handle): equal = self.__eq__(other) return NotImplemented if equal is NotImplemented else not equal + def cancel(self): + if not self._cancelled: + self._loop._timer_handle_cancelled(self) + super().cancel() + class AbstractServer: """Abstract server returned by create_server().""" @@ -140,6 +233,10 @@ class AbstractEventLoop: """Return whether the event loop is currently running.""" raise NotImplementedError + def is_closed(self): + """Returns True if the event loop was closed.""" + raise NotImplementedError + def close(self): """Close the loop. @@ -153,6 +250,10 @@ class AbstractEventLoop: # Methods scheduling callbacks. All these return Handles. + def _timer_handle_cancelled(self, handle): + """Notification that a TimerHandle has been cancelled.""" + raise NotImplementedError + def call_soon(self, callback, *args): return self.call_later(0, callback, *args) @@ -165,12 +266,17 @@ class AbstractEventLoop: def time(self): raise NotImplementedError + # Method scheduling a coroutine object: create a task. + + def create_task(self, coro): + raise NotImplementedError + # Methods for interacting with threads. def call_soon_threadsafe(self, callback, *args): raise NotImplementedError - def run_in_executor(self, executor, callback, *args): + def run_in_executor(self, executor, func, *args): raise NotImplementedError def set_default_executor(self, executor): @@ -257,11 +363,11 @@ class AbstractEventLoop: # Pipes and subprocesses. def connect_read_pipe(self, protocol_factory, pipe): - """Register read pipe in event loop. + """Register read pipe in event loop. Set the pipe to non-blocking mode. protocol_factory should instantiate object with Protocol interface. - pipe is file-like object already switched to nonblocking. - Return pair (transport, protocol), where transport support + pipe is a file-like object. + Return pair (transport, protocol), where transport supports the ReadTransport interface.""" # The reason to accept file-like object instead of just file descriptor # is: we need to own pipe and close it at transport finishing @@ -331,6 +437,14 @@ class AbstractEventLoop: def remove_signal_handler(self, sig): raise NotImplementedError + # Task factory. + + def set_task_factory(self, factory): + raise NotImplementedError + + def get_task_factory(self): + raise NotImplementedError + # Error handlers. def set_exception_handler(self, handler): @@ -355,25 +469,33 @@ class AbstractEventLoopPolicy: """Abstract policy for accessing the event loop.""" def get_event_loop(self): - """XXX""" + """Get the event loop for the current context. + + Returns an event loop object implementing the BaseEventLoop interface, + or raises an exception in case no event loop has been set for the + current context and the current policy does not specify to create one. 
+ + It should never return None.""" raise NotImplementedError def set_event_loop(self, loop): - """XXX""" + """Set the event loop for the current context to loop.""" raise NotImplementedError def new_event_loop(self): - """XXX""" + """Create and return a new event loop object according to this + policy's rules. If there's need to set this loop as the event loop for + the current context, set_event_loop must be called explicitly.""" raise NotImplementedError # Child processes handling (Unix only). def get_child_watcher(self): - """XXX""" + "Get the watcher for child processes." raise NotImplementedError def set_child_watcher(self, watcher): - """XXX""" + """Set the watcher for child processes.""" raise NotImplementedError @@ -408,9 +530,9 @@ class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy): not self._local._set_called and isinstance(threading.current_thread(), threading._MainThread)): self.set_event_loop(self.new_event_loop()) - assert self._local._loop is not None, \ - ('There is no current event loop in thread %r.' % - threading.current_thread().name) + if self._local._loop is None: + raise RuntimeError('There is no current event loop in thread %r.' + % threading.current_thread().name) return self._local._loop def set_event_loop(self, loop): @@ -447,39 +569,42 @@ def _init_event_loop_policy(): def get_event_loop_policy(): - """XXX""" + """Get the current event loop policy.""" if _event_loop_policy is None: _init_event_loop_policy() return _event_loop_policy def set_event_loop_policy(policy): - """XXX""" + """Set the current event loop policy. + + If policy is None, the default policy is restored.""" global _event_loop_policy assert policy is None or isinstance(policy, AbstractEventLoopPolicy) _event_loop_policy = policy def get_event_loop(): - """XXX""" + """Equivalent to calling get_event_loop_policy().get_event_loop().""" return get_event_loop_policy().get_event_loop() def set_event_loop(loop): - """XXX""" + """Equivalent to calling get_event_loop_policy().set_event_loop(loop).""" get_event_loop_policy().set_event_loop(loop) def new_event_loop(): - """XXX""" + """Equivalent to calling get_event_loop_policy().new_event_loop().""" return get_event_loop_policy().new_event_loop() def get_child_watcher(): - """XXX""" + """Equivalent to calling get_event_loop_policy().get_child_watcher().""" return get_event_loop_policy().get_child_watcher() def set_child_watcher(watcher): - """XXX""" + """Equivalent to calling + get_event_loop_policy().set_child_watcher(watcher).""" return get_event_loop_policy().set_child_watcher(watcher) diff --git a/Darwin/lib/python3.4/asyncio/futures.py b/Darwin/lib/python3.5/asyncio/futures.py similarity index 79% rename from Darwin/lib/python3.4/asyncio/futures.py rename to Darwin/lib/python3.5/asyncio/futures.py index 91ea170..dbe06c4 100644 --- a/Darwin/lib/python3.4/asyncio/futures.py +++ b/Darwin/lib/python3.5/asyncio/futures.py @@ -7,9 +7,11 @@ __all__ = ['CancelledError', 'TimeoutError', import concurrent.futures._base import logging +import reprlib import sys import traceback +from . import compat from . import events # States for Future. @@ -17,9 +19,6 @@ _PENDING = 'PENDING' _CANCELLED = 'CANCELLED' _FINISHED = 'FINISHED' -_PY34 = sys.version_info >= (3, 4) - -# TODO: Do we really want to depend on concurrent.futures internals? 
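Per the policy behaviour above (get_event_loop() now raises RuntimeError in a non-main thread that never set a loop), a worker thread creates and registers its own loop explicitly; a minimal sketch:

import asyncio
import threading

@asyncio.coroutine
def greet():
    yield from asyncio.sleep(0)
    print('loop running in', threading.current_thread().name)

def worker():
    loop = asyncio.new_event_loop()   # policy-created loop for this thread
    asyncio.set_event_loop(loop)      # so get_event_loop() works here too
    try:
        loop.run_until_complete(greet())
    finally:
        loop.close()

t = threading.Thread(target=worker)
t.start()
t.join()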
Error = concurrent.futures._base.Error CancelledError = concurrent.futures.CancelledError TimeoutError = concurrent.futures.TimeoutError @@ -29,7 +28,6 @@ STACK_DEBUG = logging.DEBUG - 1 # heavy-duty debugging class InvalidStateError(Error): """The operation is not allowed in this state.""" - # TODO: Show the future, its state, the method, and the required state. class _TracebackLogger: @@ -60,7 +58,7 @@ class _TracebackLogger: the Future is collected, and the helper is present, the helper object is also collected, and its __del__() method will log the traceback. When the Future's result() or exception() method is - called (and a helper object is present), it removes the the helper + called (and a helper object is present), it removes the helper object, after calling its clear() method to prevent it from logging. @@ -82,10 +80,11 @@ class _TracebackLogger: in a discussion about closing files when they are collected. """ - __slots__ = ['exc', 'tb', 'loop'] + __slots__ = ('loop', 'source_traceback', 'exc', 'tb') - def __init__(self, exc, loop): - self.loop = loop + def __init__(self, future, exc): + self.loop = future._loop + self.source_traceback = future._source_traceback self.exc = exc self.tb = None @@ -102,11 +101,13 @@ class _TracebackLogger: def __del__(self): if self.tb: - msg = 'Future/Task exception was never retrieved:\n{tb}' - context = { - 'message': msg.format(tb=''.join(self.tb)), - } - self.loop.call_exception_handler(context) + msg = 'Future/Task exception was never retrieved\n' + if self.source_traceback: + src = ''.join(traceback.format_list(self.source_traceback)) + msg += 'Future/Task created at (most recent call last):\n' + msg += '%s\n' % src.rstrip() + msg += ''.join(self.tb).rstrip() + self.loop.call_exception_handler({'message': msg}) class Future: @@ -131,6 +132,7 @@ class Future: _result = None _exception = None _loop = None + _source_traceback = None _blocking = False # proper use of future (yield vs yield from) @@ -149,27 +151,53 @@ class Future: else: self._loop = loop self._callbacks = [] + if self._loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) - def __repr__(self): - res = self.__class__.__name__ + def _format_callbacks(self): + cb = self._callbacks + size = len(cb) + if not size: + cb = '' + + def format_cb(callback): + return events._format_callback_source(callback, ()) + + if size == 1: + cb = format_cb(cb[0]) + elif size == 2: + cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1])) + elif size > 2: + cb = '{}, <{} more>, {}'.format(format_cb(cb[0]), + size-2, + format_cb(cb[-1])) + return 'cb=[%s]' % cb + + def _repr_info(self): + info = [self._state.lower()] if self._state == _FINISHED: if self._exception is not None: - res += ''.format(self._exception) + info.append('exception={!r}'.format(self._exception)) else: - res += ''.format(self._result) - elif self._callbacks: - size = len(self._callbacks) - if size > 2: - res += '<{}, [{}, <{} more>, {}]>'.format( - self._state, self._callbacks[0], - size-2, self._callbacks[-1]) - else: - res += '<{}, {}>'.format(self._state, self._callbacks) - else: - res += '<{}>'.format(self._state) - return res + # use reprlib to limit the length of the output, especially + # for very long strings + result = reprlib.repr(self._result) + info.append('result={}'.format(result)) + if self._callbacks: + info.append(self._format_callbacks()) + if self._source_traceback: + frame = self._source_traceback[-1] + info.append('created at %s:%s' % (frame[0], frame[1])) + return info - if 
_PY34: + def __repr__(self): + info = self._repr_info() + return '<%s %s>' % (self.__class__.__name__, ' '.join(info)) + + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: def __del__(self): if not self._log_traceback: # set_exception() was not called, or result() or exception() @@ -177,10 +205,13 @@ class Future: return exc = self._exception context = { - 'message': 'Future/Task exception was never retrieved', + 'message': ('%s exception was never retrieved' + % self.__class__.__name__), 'exception': exc, 'future': self, } + if self._source_traceback: + context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) def cancel(self): @@ -288,6 +319,12 @@ class Future: # So-called internal methods (note: no set_running_or_notify_cancel()). + def _set_result_unless_cancelled(self, result): + """Helper setting the result only if the future was not cancelled.""" + if self.cancelled(): + return + self.set_result(result) + def set_result(self, result): """Mark the future done and set its result. @@ -313,10 +350,10 @@ class Future: self._exception = exception self._state = _FINISHED self._schedule_callbacks() - if _PY34: + if compat.PY34: self._log_traceback = True else: - self._tb_logger = _TracebackLogger(exception, self._loop) + self._tb_logger = _TracebackLogger(self, exception) # Arrange for the logger to be activated after all callbacks # have had a chance to call result() or exception(). self._loop.call_soon(self._tb_logger.activate) @@ -349,6 +386,9 @@ class Future: assert self.done(), "yield from wasn't used with future" return self.result() # May raise too. + if compat.PY35: + __await__ = __iter__ # make compatible with 'await' expression + def wrap_future(fut, *, loop=None): """Wrap concurrent.futures.Future object.""" @@ -367,5 +407,5 @@ def wrap_future(fut, *, loop=None): new_future.add_done_callback(_check_cancel_other) fut.add_done_callback( lambda future: loop.call_soon_threadsafe( - new_future._copy_state, fut)) + new_future._copy_state, future)) return new_future diff --git a/Darwin/lib/python3.4/asyncio/locks.py b/Darwin/lib/python3.5/asyncio/locks.py similarity index 92% rename from Darwin/lib/python3.4/asyncio/locks.py rename to Darwin/lib/python3.5/asyncio/locks.py index 29c4434..7a13279 100644 --- a/Darwin/lib/python3.4/asyncio/locks.py +++ b/Darwin/lib/python3.5/asyncio/locks.py @@ -4,9 +4,10 @@ __all__ = ['Lock', 'Event', 'Condition', 'Semaphore', 'BoundedSemaphore'] import collections +from . import compat from . import events from . import futures -from . import tasks +from .coroutines import coroutine class _ContextManager: @@ -39,7 +40,53 @@ class _ContextManager: self._lock = None # Crudely prevent reuse. -class Lock: +class _ContextManagerMixin: + def __enter__(self): + raise RuntimeError( + '"yield from" should be used as context manager expression') + + def __exit__(self, *args): + # This must exist because __enter__ exists, even though that + # always raises; that's how the with-statement works. + pass + + @coroutine + def __iter__(self): + # This is not a coroutine. It is meant to enable the idiom: + # + # with (yield from lock): + # + # + # as an alternative to: + # + # yield from lock.acquire() + # try: + # + # finally: + # lock.release() + yield from self.acquire() + return _ContextManager(self) + + if compat.PY35: + + def __await__(self): + # To make "with await lock" work. 
+ yield from self.acquire() + return _ContextManager(self) + + @coroutine + def __aenter__(self): + yield from self.acquire() + # We have no use for the "as ..." clause in the with + # statement for locks. + return None + + @coroutine + def __aexit__(self, exc_type, exc, tb): + self.release() + + +class Lock(_ContextManagerMixin): """Primitive lock objects. A primitive lock is a synchronization primitive that is not owned @@ -63,7 +110,7 @@ class Lock: acquire() is a coroutine and should be called with 'yield from'. - Locks also support the context manager protocol. '(yield from lock)' + Locks also support the context management protocol. '(yield from lock)' should be used as context manager expression. Usage: @@ -112,7 +159,7 @@ class Lock: """Return True if lock is acquired.""" return self._locked - @tasks.coroutine + @coroutine def acquire(self): """Acquire a lock. @@ -153,31 +200,6 @@ class Lock: else: raise RuntimeError('Lock is not acquired.') - def __enter__(self): - raise RuntimeError( - '"yield from" should be used as context manager expression') - - def __exit__(self, *args): - # This must exist because __enter__ exists, even though that - # always raises; that's how the with-statement works. - pass - - def __iter__(self): - # This is not a coroutine. It is meant to enable the idiom: - # - # with (yield from lock): - # - # - # as an alternative to: - # - # yield from lock.acquire() - # try: - # - # finally: - # lock.release() - yield from self.acquire() - return _ContextManager(self) - class Event: """Asynchronous equivalent to threading.Event. @@ -225,7 +247,7 @@ class Event: to true again.""" self._value = False - @tasks.coroutine + @coroutine def wait(self): """Block until the internal flag is true. @@ -245,7 +267,7 @@ class Event: self._waiters.remove(fut) -class Condition: +class Condition(_ContextManagerMixin): """Asynchronous equivalent to threading.Condition. This class implements condition variable objects. A condition variable @@ -255,14 +277,17 @@ class Condition: A new Lock object is created and used as the underlying lock. """ - def __init__(self, *, loop=None): + def __init__(self, lock=None, *, loop=None): if loop is not None: self._loop = loop else: self._loop = events.get_event_loop() - # Lock as an attribute as in threading.Condition. - lock = Lock(loop=self._loop) + if lock is None: + lock = Lock(loop=self._loop) + elif lock._loop is not self._loop: + raise ValueError("loop argument must agree with lock") + self._lock = lock # Export the lock's locked(), acquire() and release() methods. self.locked = lock.locked @@ -278,7 +303,7 @@ class Condition: extra = '{},waiters:{}'.format(extra, len(self._waiters)) return '<{} [{}]>'.format(res[1:-1], extra) - @tasks.coroutine + @coroutine def wait(self): """Wait until notified. @@ -306,7 +331,7 @@ class Condition: finally: yield from self.acquire() - @tasks.coroutine + @coroutine def wait_for(self, predicate): """Wait until a predicate becomes true. @@ -352,20 +377,8 @@ class Condition: """ self.notify(len(self._waiters)) - def __enter__(self): - raise RuntimeError( - '"yield from" should be used as context manager expression') - def __exit__(self, *args): - pass - - def __iter__(self): - # See comment in Lock.__iter__(). - yield from self.acquire() - return _ContextManager(self) - - -class Semaphore: +class Semaphore(_ContextManagerMixin): """A Semaphore implementation. 
A semaphore manages an internal counter which is decremented by each @@ -373,7 +386,7 @@ class Semaphore: can never go below zero; when acquire() finds that it is zero, it blocks, waiting until some other thread calls release(). - Semaphores also support the context manager protocol. + Semaphores also support the context management protocol. The optional argument gives the initial value for the internal counter; it defaults to 1. If the value given is less than 0, @@ -402,7 +415,7 @@ class Semaphore: """Returns True if semaphore can not be acquired immediately.""" return self._value == 0 - @tasks.coroutine + @coroutine def acquire(self): """Acquire a semaphore. @@ -436,18 +449,6 @@ class Semaphore: waiter.set_result(True) break - def __enter__(self): - raise RuntimeError( - '"yield from" should be used as context manager expression') - - def __exit__(self, *args): - pass - - def __iter__(self): - # See comment in Lock.__iter__(). - yield from self.acquire() - return _ContextManager(self) - class BoundedSemaphore(Semaphore): """A bounded semaphore implementation. diff --git a/Darwin/lib/python3.4/asyncio/log.py b/Darwin/lib/python3.5/asyncio/log.py similarity index 100% rename from Darwin/lib/python3.4/asyncio/log.py rename to Darwin/lib/python3.5/asyncio/log.py diff --git a/Darwin/lib/python3.4/asyncio/proactor_events.py b/Darwin/lib/python3.5/asyncio/proactor_events.py similarity index 72% rename from Darwin/lib/python3.4/asyncio/proactor_events.py rename to Darwin/lib/python3.5/asyncio/proactor_events.py index d99e8ce..abe4c12 100644 --- a/Darwin/lib/python3.4/asyncio/proactor_events.py +++ b/Darwin/lib/python3.5/asyncio/proactor_events.py @@ -7,10 +7,13 @@ proactor is only implemented on Windows with IOCP. __all__ = ['BaseProactorEventLoop'] import socket +import warnings from . import base_events +from . import compat from . import constants from . import futures +from . import sslproto from . import transports from .log import logger @@ -21,9 +24,8 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, def __init__(self, loop, sock, protocol, waiter=None, extra=None, server=None): - super().__init__(extra) + super().__init__(extra, loop) self._set_extra(sock) - self._loop = loop self._sock = sock self._protocol = protocol self._server = server @@ -35,10 +37,30 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, self._closing = False # Set when close() called. 
self._eof_written = False if self._server is not None: - self._server.attach(self) + self._server._attach() self._loop.call_soon(self._protocol.connection_made, self) if waiter is not None: - self._loop.call_soon(waiter.set_result, None) + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(waiter._set_result_unless_cancelled, None) + + def __repr__(self): + info = [self.__class__.__name__] + if self._sock is None: + info.append('closed') + elif self._closing: + info.append('closing') + if self._sock is not None: + info.append('fd=%s' % self._sock.fileno()) + if self._read_fut is not None: + info.append('read=%s' % self._read_fut) + if self._write_fut is not None: + info.append("write=%r" % self._write_fut) + if self._buffer: + bufsize = len(self._buffer) + info.append('write_bufsize=%s' % bufsize) + if self._eof_written: + info.append('EOF written') + return '<%s>' % ' '.join(info) def _set_extra(self, sock): self._extra['pipe'] = sock @@ -52,9 +74,22 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, self._loop.call_soon(self._call_connection_lost, None) if self._read_fut is not None: self._read_fut.cancel() + self._read_fut = None + + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if self._sock is not None: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self.close() def _fatal_error(self, exc, message='Fatal error on pipe transport'): - if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if self._loop.get_debug(): + logger.debug("%r: %s", self, message, exc_info=True) + else: self._loop.call_exception_handler({ 'message': message, 'exception': exc, @@ -70,9 +105,10 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, self._conn_lost += 1 if self._write_fut: self._write_fut.cancel() + self._write_fut = None if self._read_fut: self._read_fut.cancel() - self._write_fut = self._read_fut = None + self._read_fut = None self._pending_write = 0 self._buffer = None self._loop.call_soon(self._call_connection_lost, exc) @@ -88,9 +124,10 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, if hasattr(self._sock, 'shutdown'): self._sock.shutdown(socket.SHUT_RDWR) self._sock.close() + self._sock = None server = self._server if server is not None: - server.detach(self) + server._detach() self._server = None def get_write_buffer_size(self): @@ -107,7 +144,6 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, def __init__(self, loop, sock, protocol, waiter=None, extra=None, server=None): super().__init__(loop, sock, protocol, waiter, extra, server) - self._read_fut = None self._paused = False self._loop.call_soon(self._loop_reading) @@ -117,6 +153,8 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, if self._paused: raise RuntimeError('Already paused') self._paused = True + if self._loop.get_debug(): + logger.debug("%r pauses reading", self) def resume_reading(self): if not self._paused: @@ -125,6 +163,8 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, if self._closing: return self._loop.call_soon(self._loop_reading, self._read_fut) + if self._loop.get_debug(): + logger.debug("%r resumes reading", self) def _loop_reading(self, fut=None): if self._paused: @@ -152,6 +192,9 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, 
except ConnectionAbortedError as exc: if not self._closing: self._fatal_error(exc, 'Fatal read error on pipe transport') + elif self._loop.get_debug(): + logger.debug("Read error on pipe transport while closing", + exc_info=True) except ConnectionResetError as exc: self._force_close(exc) except OSError as exc: @@ -165,6 +208,8 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport, if data: self._protocol.data_received(data) elif data is not None: + if self._loop.get_debug(): + logger.debug("%r received EOF", self) keep_open = self._protocol.eof_received() if not keep_open: self.close() @@ -200,10 +245,6 @@ class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, assert self._buffer is None # Pass a copy, except if it's already immutable. self._loop_writing(data=bytes(data)) - # XXX Should we pause the protocol at this point - # if len(data) > self._high_water? (That would - # require keeping track of the number of bytes passed - # to a send() that hasn't finished yet.) elif not self._buffer: # WRITING -> BACKED UP # Make a mutable copy which we can extend. self._buffer = bytearray(data) @@ -302,12 +343,16 @@ class _ProactorSocketTransport(_ProactorReadPipeTransport, try: self._extra['sockname'] = sock.getsockname() except (socket.error, AttributeError): - pass + if self._loop.get_debug(): + logger.warning("getsockname() failed on %r", + sock, exc_info=True) if 'peername' not in self._extra: try: self._extra['peername'] = sock.getpeername() except (socket.error, AttributeError): - pass + if self._loop.get_debug(): + logger.warning("getpeername() failed on %r", + sock, exc_info=True) def can_write_eof(self): return True @@ -337,6 +382,20 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): return _ProactorSocketTransport(self, sock, protocol, waiter, extra, server) + def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter=None, + *, server_side=False, server_hostname=None, + extra=None, server=None): + if not sslproto._is_sslproto_available(): + raise NotImplementedError("Proactor event loop requires Python 3.5" + " or newer (ssl.MemoryBIO) to support " + "SSL") + + ssl_protocol = sslproto.SSLProtocol(self, protocol, sslcontext, waiter, + server_side, server_hostname) + _ProactorSocketTransport(self, rawsock, ssl_protocol, + extra=extra, server=server) + return ssl_protocol._app_transport + def _make_duplex_pipe_transport(self, sock, protocol, waiter=None, extra=None): return _ProactorDuplexPipeTransport(self, @@ -353,13 +412,22 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): sock, protocol, waiter, extra) def close(self): - if self._proactor is not None: - self._close_self_pipe() - self._proactor.close() - self._proactor = None - self._selector = None - super().close() - self._accept_futures.clear() + if self.is_running(): + raise RuntimeError("Cannot close a running event loop") + if self.is_closed(): + return + + # Call these methods before closing the event loop (before calling + # BaseEventLoop.close), because they can schedule callbacks with + # call_soon(), which is forbidden when the event loop is closed. 
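
# A small illustrative sketch (not from the patch) of the calling contract the
# guard above enforces: close() is only legal once the loop has stopped running.
import asyncio

loop = asyncio.new_event_loop()
loop.run_until_complete(asyncio.sleep(0))
loop.close()                  # ok: the loop is no longer running
assert loop.is_closed()
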
+ self._stop_accept_futures() + self._close_self_pipe() + self._proactor.close() + self._proactor = None + self._selector = None + + # Close the event loop + super().close() def sock_recv(self, sock, n): return self._proactor.recv(sock, n) @@ -369,7 +437,8 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): def sock_connect(self, sock, address): try: - base_events._check_resolved_address(sock, address) + if self._debug: + base_events._check_resolved_address(sock, address) except ValueError as err: fut = futures.Future(loop=self) fut.set_exception(err) @@ -406,37 +475,55 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): if f is not None: f.result() # may raise f = self._proactor.recv(self._ssock, 4096) - except: - self.close() - raise + except futures.CancelledError: + # _close_self_pipe() has been called, stop waiting for data + return + except Exception as exc: + self.call_exception_handler({ + 'message': 'Error on reading from the event loop self pipe', + 'exception': exc, + 'loop': self, + }) else: self._self_reading_future = f f.add_done_callback(self._loop_self_reading) def _write_to_self(self): - self._csock.send(b'x') + self._csock.send(b'\0') - def _start_serving(self, protocol_factory, sock, ssl=None, server=None): - if ssl: - raise ValueError('IocpEventLoop is incompatible with SSL.') + def _start_serving(self, protocol_factory, sock, + sslcontext=None, server=None): def loop(f=None): try: if f is not None: conn, addr = f.result() + if self._debug: + logger.debug("%r got a new connection from %r: %r", + server, addr, conn) protocol = protocol_factory() - self._make_socket_transport( - conn, protocol, - extra={'peername': addr}, server=server) + if sslcontext is not None: + self._make_ssl_transport( + conn, protocol, sslcontext, server_side=True, + extra={'peername': addr}, server=server) + else: + self._make_socket_transport( + conn, protocol, + extra={'peername': addr}, server=server) + if self.is_closed(): + return f = self._proactor.accept(sock) except OSError as exc: if sock.fileno() != -1: self.call_exception_handler({ - 'message': 'Accept failed', + 'message': 'Accept failed on a socket', 'exception': exc, 'socket': sock, }) sock.close() + elif self._debug: + logger.debug("Accept failed on socket %r", + sock, exc_info=True) except futures.CancelledError: sock.close() else: @@ -446,10 +533,15 @@ class BaseProactorEventLoop(base_events.BaseEventLoop): self.call_soon(loop) def _process_events(self, event_list): - pass # XXX hard work currently done in poll + # Events are processed in the IocpProactor._poll() method + pass - def _stop_serving(self, sock): + def _stop_accept_futures(self): for future in self._accept_futures.values(): future.cancel() + self._accept_futures.clear() + + def _stop_serving(self, sock): + self._stop_accept_futures() self._proactor._stop_serving(sock) sock.close() diff --git a/Darwin/lib/python3.4/asyncio/protocols.py b/Darwin/lib/python3.5/asyncio/protocols.py similarity index 97% rename from Darwin/lib/python3.4/asyncio/protocols.py rename to Darwin/lib/python3.5/asyncio/protocols.py index 52fc25c..80fcac9 100644 --- a/Darwin/lib/python3.4/asyncio/protocols.py +++ b/Darwin/lib/python3.5/asyncio/protocols.py @@ -78,6 +78,11 @@ class Protocol(BaseProtocol): State machine of calls: start -> CM [-> DR*] [-> ER?] 
-> CL -> end + + * CM: connection_made() + * DR: data_received() + * ER: eof_received() + * CL: connection_lost() """ def data_received(self, data): diff --git a/Darwin/lib/python3.4/asyncio/queues.py b/Darwin/lib/python3.5/asyncio/queues.py similarity index 68% rename from Darwin/lib/python3.4/asyncio/queues.py rename to Darwin/lib/python3.5/asyncio/queues.py index 6283db3..021043d 100644 --- a/Darwin/lib/python3.4/asyncio/queues.py +++ b/Darwin/lib/python3.5/asyncio/queues.py @@ -1,24 +1,28 @@ """Queues""" -__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'JoinableQueue', - 'QueueFull', 'QueueEmpty'] +__all__ = ['Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty'] import collections import heapq +from . import compat from . import events from . import futures from . import locks -from .tasks import coroutine +from .coroutines import coroutine class QueueEmpty(Exception): - 'Exception raised by Queue.get(block=0)/get_nowait().' + """Exception raised when Queue.get_nowait() is called on a Queue object + which is empty. + """ pass class QueueFull(Exception): - 'Exception raised by Queue.put(block=0)/put_nowait().' + """Exception raised when the Queue.put_nowait() method is called on a Queue + object which is full. + """ pass @@ -43,10 +47,15 @@ class Queue: # Futures. self._getters = collections.deque() - # Pairs of (item, Future). + # Futures self._putters = collections.deque() + self._unfinished_tasks = 0 + self._finished = locks.Event(loop=self._loop) + self._finished.set() self._init(maxsize) + # These three are overridable in subclasses. + def _init(self, maxsize): self._queue = collections.deque() @@ -56,6 +65,13 @@ class Queue: def _put(self, item): self._queue.append(item) + # End of the overridable methods. + + def __put_internal(self, item): + self._put(item) + self._unfinished_tasks += 1 + self._finished.clear() + def __repr__(self): return '<{} at {:#x} {}>'.format( type(self).__name__, id(self), self._format()) @@ -71,6 +87,8 @@ class Queue: result += ' _getters[{}]'.format(len(self._getters)) if self._putters: result += ' _putters[{}]'.format(len(self._putters)) + if self._unfinished_tasks: + result += ' tasks={}'.format(self._unfinished_tasks) return result def _consume_done_getters(self): @@ -80,7 +98,7 @@ class Queue: def _consume_done_putters(self): # Delete waiters at the head of the put() queue who've timed out. - while self._putters and self._putters[0][1].done(): + while self._putters and self._putters[0].done(): self._putters.popleft() def qsize(self): @@ -105,14 +123,16 @@ class Queue: if self._maxsize <= 0: return False else: - return self.qsize() == self._maxsize + return self.qsize() >= self._maxsize @coroutine def put(self, item): """Put an item into the queue. - If you yield from put(), wait until a free slot is available - before adding item. + Put an item into the queue. If the queue is full, wait until a free + slot is available before adding item. + + This method is a coroutine. """ self._consume_done_getters() if self._getters: @@ -120,20 +140,20 @@ class Queue: 'queue non-empty, why are getters waiting?') getter = self._getters.popleft() + self.__put_internal(item) - # Use _put and _get instead of passing item straight to getter, in - # case a subclass has logic that must run (e.g. JoinableQueue). 
- self._put(item) + # getter cannot be cancelled, we just removed done getters getter.set_result(self._get()) - elif self._maxsize > 0 and self._maxsize == self.qsize(): + elif self._maxsize > 0 and self._maxsize <= self.qsize(): waiter = futures.Future(loop=self._loop) - self._putters.append((item, waiter)) + self._putters.append(waiter) yield from waiter + self._put(item) else: - self._put(item) + self.__put_internal(item) def put_nowait(self, item): """Put an item into the queue without blocking. @@ -146,34 +166,34 @@ class Queue: 'queue non-empty, why are getters waiting?') getter = self._getters.popleft() + self.__put_internal(item) - # Use _put and _get instead of passing item straight to getter, in - # case a subclass has logic that must run (e.g. JoinableQueue). - self._put(item) + # getter cannot be cancelled, we just removed done getters getter.set_result(self._get()) - elif self._maxsize > 0 and self._maxsize == self.qsize(): + elif self._maxsize > 0 and self._maxsize <= self.qsize(): raise QueueFull else: - self._put(item) + self.__put_internal(item) @coroutine def get(self): """Remove and return an item from the queue. - If you yield from get(), wait until a item is available. + If queue is empty, wait until an item is available. + + This method is a coroutine. """ self._consume_done_putters() if self._putters: assert self.full(), 'queue not full, why are putters waiting?' - item, putter = self._putters.popleft() - self._put(item) + putter = self._putters.popleft() # When a getter runs and frees up a slot so this putter can # run, we need to defer the put for a tick to ensure that # getters and putters alternate perfectly. See # ChannelTest.test_wait. - self._loop.call_soon(putter.set_result, None) + self._loop.call_soon(putter._set_result_unless_cancelled, None) return self._get() @@ -181,9 +201,39 @@ class Queue: return self._get() else: waiter = futures.Future(loop=self._loop) - self._getters.append(waiter) - return (yield from waiter) + try: + return (yield from waiter) + except futures.CancelledError: + # if we get CancelledError, it means someone cancelled this + # get() coroutine. But there is a chance that the waiter + # already is ready and contains an item that has just been + # removed from the queue. In this case, we need to put the item + # back into the front of the queue. This get() must either + # succeed without fault or, if it gets cancelled, it must be as + # if it never happened. + if waiter.done(): + self._put_it_back(waiter.result()) + raise + + def _put_it_back(self, item): + """ + This is called when we have a waiter to get() an item and this waiter + gets cancelled. In this case, we put the item back: wake up another + waiter or put it in the _queue. + """ + self._consume_done_getters() + if self._getters: + assert not self._queue, ( + 'queue non-empty, why are getters waiting?') + + getter = self._getters.popleft() + self.__put_internal(item) + + # getter cannot be cancelled, we just removed done getters + getter.set_result(item) + else: + self._queue.appendleft(item) def get_nowait(self): """Remove and return an item from the queue. @@ -193,9 +243,10 @@ class Queue: self._consume_done_putters() if self._putters: assert self.full(), 'queue not full, why are putters waiting?' - item, putter = self._putters.popleft() - self._put(item) + putter = self._putters.popleft() # Wake putter on next tick. 
+ + # getter cannot be cancelled, we just removed done putters putter.set_result(None) return self._get() @@ -205,6 +256,38 @@ class Queue: else: raise QueueEmpty + def task_done(self): + """Indicate that a formerly enqueued task is complete. + + Used by queue consumers. For each get() used to fetch a task, + a subsequent call to task_done() tells the queue that the processing + on the task is complete. + + If a join() is currently blocking, it will resume when all items have + been processed (meaning that a task_done() call was received for every + item that had been put() into the queue). + + Raises ValueError if called more times than there were items placed in + the queue. + """ + if self._unfinished_tasks <= 0: + raise ValueError('task_done() called too many times') + self._unfinished_tasks -= 1 + if self._unfinished_tasks == 0: + self._finished.set() + + @coroutine + def join(self): + """Block until all items in the queue have been gotten and processed. + + The count of unfinished tasks goes up whenever an item is added to the + queue. The count goes down whenever a consumer calls task_done() to + indicate that the item was retrieved and all work on it is complete. + When the count of unfinished tasks drops to zero, join() unblocks. + """ + if self._unfinished_tasks > 0: + yield from self._finished.wait() + class PriorityQueue(Queue): """A subclass of Queue; retrieves entries in priority order (lowest first). @@ -235,54 +318,7 @@ class LifoQueue(Queue): return self._queue.pop() -class JoinableQueue(Queue): - """A subclass of Queue with task_done() and join() methods.""" - - def __init__(self, maxsize=0, *, loop=None): - super().__init__(maxsize=maxsize, loop=loop) - self._unfinished_tasks = 0 - self._finished = locks.Event(loop=self._loop) - self._finished.set() - - def _format(self): - result = Queue._format(self) - if self._unfinished_tasks: - result += ' tasks={}'.format(self._unfinished_tasks) - return result - - def _put(self, item): - super()._put(item) - self._unfinished_tasks += 1 - self._finished.clear() - - def task_done(self): - """Indicate that a formerly enqueued task is complete. - - Used by queue consumers. For each get() used to fetch a task, - a subsequent call to task_done() tells the queue that the processing - on the task is complete. - - If a join() is currently blocking, it will resume when all items have - been processed (meaning that a task_done() call was received for every - item that had been put() into the queue). - - Raises ValueError if called more times than there were items placed in - the queue. - """ - if self._unfinished_tasks <= 0: - raise ValueError('task_done() called too many times') - self._unfinished_tasks -= 1 - if self._unfinished_tasks == 0: - self._finished.set() - - @coroutine - def join(self): - """Block until all items in the queue have been gotten and processed. - - The count of unfinished tasks goes up whenever an item is added to the - queue. The count goes down whenever a consumer thread calls task_done() - to indicate that the item was retrieved and all work on it is complete. - When the count of unfinished tasks drops to zero, join() unblocks. 
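
# A minimal usage sketch of the task_done()/join() protocol documented above
# (the worker and queue setup are illustrative, assuming the 3.4/3.5-era API
# shown in this patch):
import asyncio

@asyncio.coroutine
def worker(queue):
    while True:
        item = yield from queue.get()
        # ... process item here ...
        queue.task_done()              # one task_done() per successful get()

loop = asyncio.get_event_loop()
queue = asyncio.Queue(loop=loop)
for i in range(3):
    queue.put_nowait(i)
task = loop.create_task(worker(queue))
loop.run_until_complete(queue.join())  # unblocks once all 3 items are done
task.cancel()
try:
    loop.run_until_complete(task)
except asyncio.CancelledError:
    pass
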
- """ - if self._unfinished_tasks > 0: - yield from self._finished.wait() +if not compat.PY35: + JoinableQueue = Queue + """Deprecated alias for Queue.""" + __all__.append('JoinableQueue') diff --git a/Darwin/lib/python3.4/asyncio/selector_events.py b/Darwin/lib/python3.5/asyncio/selector_events.py similarity index 67% rename from Darwin/lib/python3.4/asyncio/selector_events.py rename to Darwin/lib/python3.5/asyncio/selector_events.py index c7df8d8..4a99658 100644 --- a/Darwin/lib/python3.4/asyncio/selector_events.py +++ b/Darwin/lib/python3.5/asyncio/selector_events.py @@ -8,21 +8,37 @@ __all__ = ['BaseSelectorEventLoop'] import collections import errno +import functools import socket +import warnings try: import ssl except ImportError: # pragma: no cover ssl = None from . import base_events +from . import compat from . import constants from . import events from . import futures from . import selectors from . import transports +from . import sslproto +from .coroutines import coroutine from .log import logger +def _test_selector_event(selector, fd, event): + # Test if the selector is monitoring 'event' events + # for the file descriptor 'fd'. + try: + key = selector.get_key(fd) + except KeyError: + return False + else: + return bool(key.events & event) + + class BaseSelectorEventLoop(base_events.BaseEventLoop): """Selector event loop. @@ -43,23 +59,46 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): return _SelectorSocketTransport(self, sock, protocol, waiter, extra, server) - def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter, *, - server_side=False, server_hostname=None, + def _make_ssl_transport(self, rawsock, protocol, sslcontext, waiter=None, + *, server_side=False, server_hostname=None, extra=None, server=None): + if not sslproto._is_sslproto_available(): + return self._make_legacy_ssl_transport( + rawsock, protocol, sslcontext, waiter, + server_side=server_side, server_hostname=server_hostname, + extra=extra, server=server) + + ssl_protocol = sslproto.SSLProtocol(self, protocol, sslcontext, waiter, + server_side, server_hostname) + _SelectorSocketTransport(self, rawsock, ssl_protocol, + extra=extra, server=server) + return ssl_protocol._app_transport + + def _make_legacy_ssl_transport(self, rawsock, protocol, sslcontext, + waiter, *, + server_side=False, server_hostname=None, + extra=None, server=None): + # Use the legacy API: SSL_write, SSL_read, etc. The legacy API is used + # on Python 3.4 and older, when ssl.MemoryBIO is not available. 
return _SelectorSslTransport( self, rawsock, protocol, sslcontext, waiter, server_side, server_hostname, extra, server) def _make_datagram_transport(self, sock, protocol, - address=None, extra=None): - return _SelectorDatagramTransport(self, sock, protocol, address, extra) + address=None, waiter=None, extra=None): + return _SelectorDatagramTransport(self, sock, protocol, + address, waiter, extra) def close(self): + if self.is_running(): + raise RuntimeError("Cannot close a running event loop") + if self.is_closed(): + return + self._close_self_pipe() + super().close() if self._selector is not None: - self._close_self_pipe() self._selector.close() self._selector = None - super().close() def _socketpair(self): raise NotImplementedError @@ -80,11 +119,20 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): self._internal_fds += 1 self.add_reader(self._ssock.fileno(), self._read_from_self) + def _process_self_data(self, data): + pass + def _read_from_self(self): - try: - self._ssock.recv(1) - except (BlockingIOError, InterruptedError): - pass + while True: + try: + data = self._ssock.recv(4096) + if not data: + break + self._process_self_data(data) + except InterruptedError: + continue + except BlockingIOError: + break def _write_to_self(self): # This may be called from a different thread, possibly after @@ -95,9 +143,12 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): csock = self._csock if csock is not None: try: - csock.send(b'x') + csock.send(b'\0') except OSError: - pass + if self._debug: + logger.debug("Fail to write a null byte into the " + "self-pipe socket", + exc_info=True) def _start_serving(self, protocol_factory, sock, sslcontext=None, server=None): @@ -108,12 +159,14 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): sslcontext=None, server=None): try: conn, addr = sock.accept() + if self._debug: + logger.debug("%r got a new connection from %r: %r", + server, addr, conn) conn.setblocking(False) except (BlockingIOError, InterruptedError, ConnectionAbortedError): pass # False alarm. except OSError as exc: # There's nowhere to send the error, so just log it. - # TODO: Someone will want an error handler for this. if exc.errno in (errno.EMFILE, errno.ENFILE, errno.ENOBUFS, errno.ENOMEM): # Some platforms (e.g. Linux keep reporting the FD as @@ -131,20 +184,51 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): else: raise # The event loop will catch, log and ignore it. else: + extra = {'peername': addr} + accept = self._accept_connection2(protocol_factory, conn, extra, + sslcontext, server) + self.create_task(accept) + + @coroutine + def _accept_connection2(self, protocol_factory, conn, extra, + sslcontext=None, server=None): + protocol = None + transport = None + try: + protocol = protocol_factory() + waiter = futures.Future(loop=self) if sslcontext: - self._make_ssl_transport( - conn, protocol_factory(), sslcontext, None, - server_side=True, extra={'peername': addr}, server=server) + transport = self._make_ssl_transport( + conn, protocol, sslcontext, waiter=waiter, + server_side=True, extra=extra, server=server) else: - self._make_socket_transport( - conn, protocol_factory(), extra={'peername': addr}, + transport = self._make_socket_transport( + conn, protocol, waiter=waiter, extra=extra, server=server) - # It's now up to the protocol to handle the connection. + + try: + yield from waiter + except: + transport.close() + raise + + # It's now up to the protocol to handle the connection. 
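
# A sketch of what a protocol_factory handed to this accept path can look like
# (the Echo class and port 0 are illustrative only); the comments mirror the
# CM/DR/CL legend from the Protocol docstring earlier in this patch:
import asyncio

class Echo(asyncio.Protocol):
    def connection_made(self, transport):   # CM
        self.transport = transport
    def data_received(self, data):          # DR*
        self.transport.write(data)
    def connection_lost(self, exc):         # CL
        self.transport = None

loop = asyncio.get_event_loop()
server = loop.run_until_complete(loop.create_server(Echo, '127.0.0.1', 0))
server.close()
loop.run_until_complete(server.wait_closed())
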
+ except Exception as exc: + if self._debug: + context = { + 'message': ('Error on transport creation ' + 'for incoming connection'), + 'exception': exc, + } + if protocol is not None: + context['protocol'] = protocol + if transport is not None: + context['transport'] = transport + self.call_exception_handler(context) def add_reader(self, fd, callback, *args): """Add a reader callback.""" - if self._selector is None: - raise RuntimeError('Event loop is closed') + self._check_closed() handle = events.Handle(callback, args, self) try: key = self._selector.get_key(fd) @@ -160,7 +244,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def remove_reader(self, fd): """Remove a reader callback.""" - if self._selector is None: + if self.is_closed(): return False try: key = self._selector.get_key(fd) @@ -182,8 +266,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def add_writer(self, fd, callback, *args): """Add a writer callback..""" - if self._selector is None: - raise RuntimeError('Event loop is closed') + self._check_closed() handle = events.Handle(callback, args, self) try: key = self._selector.get_key(fd) @@ -199,7 +282,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): def remove_writer(self, fd): """Remove a writer callback.""" - if self._selector is None: + if self.is_closed(): return False try: key = self._selector.get_key(fd) @@ -221,7 +304,16 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): return False def sock_recv(self, sock, n): - """XXX""" + """Receive data from the socket. + + The return value is a bytes object representing the data received. + The maximum amount of data to be received at once is specified by + nbytes. + + This method is a coroutine. + """ + if self._debug and sock.gettimeout() != 0: + raise ValueError("the socket must be non-blocking") fut = futures.Future(loop=self) self._sock_recv(fut, False, sock, n) return fut @@ -248,7 +340,18 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): fut.set_result(data) def sock_sendall(self, sock, data): - """XXX""" + """Send data to the socket. + + The socket must be connected to a remote socket. This method continues + to send data from data until either all data has been sent or an + error occurs. None is returned on success. On error, an exception is + raised, and there is no way to determine how much data, if any, was + successfully processed by the receiving end of the connection. + + This method is a coroutine. + """ + if self._debug and sock.gettimeout() != 0: + raise ValueError("the socket must be non-blocking") fut = futures.Future(loop=self) if data: self._sock_sendall(fut, False, sock, data) @@ -280,40 +383,77 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop): self.add_writer(fd, self._sock_sendall, fut, True, sock, data) def sock_connect(self, sock, address): - """XXX""" + """Connect to a remote socket at address. + + The address must be already resolved to avoid the trap of hanging the + entire event loop when the address requires doing a DNS lookup. For + example, it must be an IP address, not an hostname, for AF_INET and + AF_INET6 address families. Use getaddrinfo() to resolve the hostname + asynchronously. + + This method is a coroutine. 
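
# A sketch of the contract documented above: resolve the address first with
# getaddrinfo(), then hand sock_connect() an already-resolved address (the
# connect() helper name is illustrative):
import asyncio
import socket

@asyncio.coroutine
def connect(loop, host, port):
    infos = yield from loop.getaddrinfo(host, port, type=socket.SOCK_STREAM)
    family, socktype, proto, _, address = infos[0]
    sock = socket.socket(family, socktype, proto)
    sock.setblocking(False)                 # the socket must be non-blocking
    yield from loop.sock_connect(sock, address)
    return sock
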
+ """ + if self._debug and sock.gettimeout() != 0: + raise ValueError("the socket must be non-blocking") fut = futures.Future(loop=self) try: - base_events._check_resolved_address(sock, address) + if self._debug: + base_events._check_resolved_address(sock, address) except ValueError as err: fut.set_exception(err) else: - self._sock_connect(fut, False, sock, address) + self._sock_connect(fut, sock, address) return fut - def _sock_connect(self, fut, registered, sock, address): + def _sock_connect(self, fut, sock, address): fd = sock.fileno() - if registered: - self.remove_writer(fd) + try: + sock.connect(address) + except (BlockingIOError, InterruptedError): + # Issue #23618: When the C function connect() fails with EINTR, the + # connection runs in background. We have to wait until the socket + # becomes writable to be notified when the connection succeed or + # fails. + fut.add_done_callback(functools.partial(self._sock_connect_done, + fd)) + self.add_writer(fd, self._sock_connect_cb, fut, sock, address) + except Exception as exc: + fut.set_exception(exc) + else: + fut.set_result(None) + + def _sock_connect_done(self, fd, fut): + self.remove_writer(fd) + + def _sock_connect_cb(self, fut, sock, address): if fut.cancelled(): return + try: - if not registered: - # First time around. - sock.connect(address) - else: - err = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) - if err != 0: - # Jump to the except clause below. - raise OSError(err, 'Connect call failed %s' % (address,)) + err = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) + if err != 0: + # Jump to any except clause below. + raise OSError(err, 'Connect call failed %s' % (address,)) except (BlockingIOError, InterruptedError): - self.add_writer(fd, self._sock_connect, fut, True, sock, address) + # socket is still registered, the callback will be retried later + pass except Exception as exc: fut.set_exception(exc) else: fut.set_result(None) def sock_accept(self, sock): - """XXX""" + """Accept a connection. + + The socket must be bound to an address and listening for connections. + The return value is a pair (conn, address) where conn is a new socket + object usable to send and receive data on the connection, and address + is the address bound to the socket on the other end of the connection. + + This method is a coroutine. + """ + if self._debug and sock.gettimeout() != 0: + raise ValueError("the socket must be non-blocking") fut = futures.Future(loop=self) self._sock_accept(fut, False, sock) return fut @@ -360,8 +500,13 @@ class _SelectorTransport(transports._FlowControlMixin, _buffer_factory = bytearray # Constructs initial value for self._buffer. 
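
# A sketch tying together the coroutine socket methods documented above
# (handle_clients is an illustrative name; server_sock is assumed to be a
# non-blocking, already-listening socket):
import asyncio

@asyncio.coroutine
def handle_clients(loop, server_sock):
    while True:
        conn, addr = yield from loop.sock_accept(server_sock)
        data = yield from loop.sock_recv(conn, 4096)
        if data:
            yield from loop.sock_sendall(conn, data)   # echo it back
        conn.close()
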
- def __init__(self, loop, sock, protocol, extra, server=None): - super().__init__(extra) + # Attribute used in the destructor: it must be set even if the constructor + # is not called (see _SelectorSslTransport which may start by raising an + # exception) + _sock = None + + def __init__(self, loop, sock, protocol, extra=None, server=None): + super().__init__(extra, loop) self._extra['socket'] = sock self._extra['sockname'] = sock.getsockname() if 'peername' not in self._extra: @@ -369,16 +514,44 @@ class _SelectorTransport(transports._FlowControlMixin, self._extra['peername'] = sock.getpeername() except socket.error: self._extra['peername'] = None - self._loop = loop self._sock = sock self._sock_fd = sock.fileno() self._protocol = protocol + self._protocol_connected = True self._server = server self._buffer = self._buffer_factory() self._conn_lost = 0 # Set when call to connection_lost scheduled. self._closing = False # Set when close() called. if self._server is not None: - self._server.attach(self) + self._server._attach() + + def __repr__(self): + info = [self.__class__.__name__] + if self._sock is None: + info.append('closed') + elif self._closing: + info.append('closing') + info.append('fd=%s' % self._sock_fd) + # test if the transport was closed + if self._loop is not None and not self._loop.is_closed(): + polling = _test_selector_event(self._loop._selector, + self._sock_fd, selectors.EVENT_READ) + if polling: + info.append('read=polling') + else: + info.append('read=idle') + + polling = _test_selector_event(self._loop._selector, + self._sock_fd, + selectors.EVENT_WRITE) + if polling: + state = 'polling' + else: + state = 'idle' + + bufsize = self.get_write_buffer_size() + info.append('write=<%s, bufsize=%s>' % (state, bufsize)) + return '<%s>' % ' '.join(info) def abort(self): self._force_close(None) @@ -392,9 +565,22 @@ class _SelectorTransport(transports._FlowControlMixin, self._conn_lost += 1 self._loop.call_soon(self._call_connection_lost, None) + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if self._sock is not None: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self._sock.close() + def _fatal_error(self, exc, message='Fatal error on transport'): # Should be called from exception handler only. 
- if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if isinstance(exc, (BrokenPipeError, + ConnectionResetError, ConnectionAbortedError)): + if self._loop.get_debug(): + logger.debug("%r: %s", self, message, exc_info=True) + else: self._loop.call_exception_handler({ 'message': message, 'exception': exc, @@ -417,7 +603,8 @@ class _SelectorTransport(transports._FlowControlMixin, def _call_connection_lost(self, exc): try: - self._protocol.connection_lost(exc) + if self._protocol_connected: + self._protocol.connection_lost(exc) finally: self._sock.close() self._sock = None @@ -425,7 +612,7 @@ class _SelectorTransport(transports._FlowControlMixin, self._loop = None server = self._server if server is not None: - server.detach(self) + server._detach() self._server = None def get_write_buffer_size(self): @@ -440,10 +627,13 @@ class _SelectorSocketTransport(_SelectorTransport): self._eof = False self._paused = False - self._loop.add_reader(self._sock_fd, self._read_ready) self._loop.call_soon(self._protocol.connection_made, self) + # only start reading when connection_made() has been called + self._loop.call_soon(self._loop.add_reader, + self._sock_fd, self._read_ready) if waiter is not None: - self._loop.call_soon(waiter.set_result, None) + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(waiter._set_result_unless_cancelled, None) def pause_reading(self): if self._closing: @@ -452,6 +642,8 @@ class _SelectorSocketTransport(_SelectorTransport): raise RuntimeError('Already paused') self._paused = True self._loop.remove_reader(self._sock_fd) + if self._loop.get_debug(): + logger.debug("%r pauses reading", self) def resume_reading(self): if not self._paused: @@ -460,6 +652,8 @@ class _SelectorSocketTransport(_SelectorTransport): if self._closing: return self._loop.add_reader(self._sock_fd, self._read_ready) + if self._loop.get_debug(): + logger.debug("%r resumes reading", self) def _read_ready(self): try: @@ -472,6 +666,8 @@ class _SelectorSocketTransport(_SelectorTransport): if data: self._protocol.data_received(data) else: + if self._loop.get_debug(): + logger.debug("%r received EOF", self) keep_open = self._protocol.eof_received() if keep_open: # We're keeping the connection open so the @@ -559,70 +755,69 @@ class _SelectorSslTransport(_SelectorTransport): if ssl is None: raise RuntimeError('stdlib ssl module not available') - if server_side: - if not sslcontext: - raise ValueError('Server side ssl needs a valid SSLContext') - else: - if not sslcontext: - # Client side may pass ssl=True to use a default - # context; in that case the sslcontext passed is None. - # The default is the same as used by urllib with - # cadefault=True. - if hasattr(ssl, '_create_stdlib_context'): - sslcontext = ssl._create_stdlib_context( - cert_reqs=ssl.CERT_REQUIRED, - check_hostname=bool(server_hostname)) - else: - # Fallback for Python 3.3. 
- sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - sslcontext.options |= ssl.OP_NO_SSLv2 - sslcontext.set_default_verify_paths() - sslcontext.verify_mode = ssl.CERT_REQUIRED + if not sslcontext: + sslcontext = sslproto._create_transport_context(server_side, server_hostname) wrap_kwargs = { 'server_side': server_side, 'do_handshake_on_connect': False, } - if server_hostname and not server_side and ssl.HAS_SNI: + if server_hostname and not server_side: wrap_kwargs['server_hostname'] = server_hostname sslsock = sslcontext.wrap_socket(rawsock, **wrap_kwargs) super().__init__(loop, sslsock, protocol, extra, server) + # the protocol connection is only made after the SSL handshake + self._protocol_connected = False self._server_hostname = server_hostname self._waiter = waiter - self._rawsock = rawsock self._sslcontext = sslcontext self._paused = False # SSL-specific extra info. (peercert is set later) self._extra.update(sslcontext=sslcontext) - self._on_handshake() + if self._loop.get_debug(): + logger.debug("%r starts SSL handshake", self) + start_time = self._loop.time() + else: + start_time = None + self._on_handshake(start_time) - def _on_handshake(self): + def _wakeup_waiter(self, exc=None): + if self._waiter is None: + return + if not self._waiter.cancelled(): + if exc is not None: + self._waiter.set_exception(exc) + else: + self._waiter.set_result(None) + self._waiter = None + + def _on_handshake(self, start_time): try: self._sock.do_handshake() except ssl.SSLWantReadError: - self._loop.add_reader(self._sock_fd, self._on_handshake) + self._loop.add_reader(self._sock_fd, + self._on_handshake, start_time) return except ssl.SSLWantWriteError: - self._loop.add_writer(self._sock_fd, self._on_handshake) - return - except Exception as exc: - self._loop.remove_reader(self._sock_fd) - self._loop.remove_writer(self._sock_fd) - self._sock.close() - if self._waiter is not None: - self._waiter.set_exception(exc) + self._loop.add_writer(self._sock_fd, + self._on_handshake, start_time) return except BaseException as exc: + if self._loop.get_debug(): + logger.warning("%r: SSL handshake failed", + self, exc_info=True) self._loop.remove_reader(self._sock_fd) self._loop.remove_writer(self._sock_fd) self._sock.close() - if self._waiter is not None: - self._waiter.set_exception(exc) - raise + self._wakeup_waiter(exc) + if isinstance(exc, Exception): + return + else: + raise self._loop.remove_reader(self._sock_fd) self._loop.remove_writer(self._sock_fd) @@ -636,9 +831,12 @@ class _SelectorSslTransport(_SelectorTransport): try: ssl.match_hostname(peercert, self._server_hostname) except Exception as exc: + if self._loop.get_debug(): + logger.warning("%r: SSL handshake failed " + "on matching the hostname", + self, exc_info=True) self._sock.close() - if self._waiter is not None: - self._waiter.set_exception(exc) + self._wakeup_waiter(exc) return # Add extra info that becomes available after handshake. 
@@ -650,9 +848,14 @@ class _SelectorSslTransport(_SelectorTransport): self._read_wants_write = False self._write_wants_read = False self._loop.add_reader(self._sock_fd, self._read_ready) + self._protocol_connected = True self._loop.call_soon(self._protocol.connection_made, self) - if self._waiter is not None: - self._loop.call_soon(self._waiter.set_result, None) + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(self._wakeup_waiter) + + if self._loop.get_debug(): + dt = self._loop.time() - start_time + logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3) def pause_reading(self): # XXX This is a bit icky, given the comment at the top of @@ -667,14 +870,18 @@ class _SelectorSslTransport(_SelectorTransport): raise RuntimeError('Already paused') self._paused = True self._loop.remove_reader(self._sock_fd) + if self._loop.get_debug(): + logger.debug("%r pauses reading", self) def resume_reading(self): if not self._paused: - raise ('Not paused') + raise RuntimeError('Not paused') self._paused = False if self._closing: return self._loop.add_reader(self._sock_fd, self._read_ready) + if self._loop.get_debug(): + logger.debug("%r resumes reading", self) def _read_ready(self): if self._write_wants_read: @@ -699,6 +906,8 @@ class _SelectorSslTransport(_SelectorTransport): self._protocol.data_received(data) else: try: + if self._loop.get_debug(): + logger.debug("%r received EOF", self) keep_open = self._protocol.eof_received() if keep_open: logger.warning('returning true from eof_received() ' @@ -767,11 +976,17 @@ class _SelectorDatagramTransport(_SelectorTransport): _buffer_factory = collections.deque - def __init__(self, loop, sock, protocol, address=None, extra=None): + def __init__(self, loop, sock, protocol, address=None, + waiter=None, extra=None): super().__init__(loop, sock, protocol, extra) self._address = address - self._loop.add_reader(self._sock_fd, self._read_ready) self._loop.call_soon(self._protocol.connection_made, self) + # only start reading when connection_made() has been called + self._loop.call_soon(self._loop.add_reader, + self._sock_fd, self._read_ready) + if waiter is not None: + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(waiter._set_result_unless_cancelled, None) def get_write_buffer_size(self): return sum(len(data) for data, _ in self._buffer) diff --git a/Darwin/lib/python3.5/asyncio/sslproto.py b/Darwin/lib/python3.5/asyncio/sslproto.py new file mode 100644 index 0000000..e5ae49a --- /dev/null +++ b/Darwin/lib/python3.5/asyncio/sslproto.py @@ -0,0 +1,673 @@ +import collections +import warnings +try: + import ssl +except ImportError: # pragma: no cover + ssl = None + +from . import compat +from . import protocols +from . import transports +from .log import logger + + +def _create_transport_context(server_side, server_hostname): + if server_side: + raise ValueError('Server side SSL needs a valid SSLContext') + + # Client side may pass ssl=True to use a default + # context; in that case the sslcontext passed is None. + # The default is secure for client connections. + if hasattr(ssl, 'create_default_context'): + # Python 3.4+: use up-to-date strong settings. + sslcontext = ssl.create_default_context() + if not server_hostname: + sslcontext.check_hostname = False + else: + # Fallback for Python 3.3. 
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.set_default_verify_paths() + sslcontext.verify_mode = ssl.CERT_REQUIRED + return sslcontext + + +def _is_sslproto_available(): + return hasattr(ssl, "MemoryBIO") + + +# States of an _SSLPipe. +_UNWRAPPED = "UNWRAPPED" +_DO_HANDSHAKE = "DO_HANDSHAKE" +_WRAPPED = "WRAPPED" +_SHUTDOWN = "SHUTDOWN" + + +class _SSLPipe(object): + """An SSL "Pipe". + + An SSL pipe allows you to communicate with an SSL/TLS protocol instance + through memory buffers. It can be used to implement a security layer for an + existing connection where you don't have access to the connection's file + descriptor, or for some reason you don't want to use it. + + An SSL pipe can be in "wrapped" and "unwrapped" mode. In unwrapped mode, + data is passed through untransformed. In wrapped mode, application level + data is encrypted to SSL record level data and vice versa. The SSL record + level is the lowest level in the SSL protocol suite and is what travels + as-is over the wire. + + An SslPipe initially is in "unwrapped" mode. To start SSL, call + do_handshake(). To shutdown SSL again, call unwrap(). + """ + + max_size = 256 * 1024 # Buffer size passed to read() + + def __init__(self, context, server_side, server_hostname=None): + """ + The *context* argument specifies the ssl.SSLContext to use. + + The *server_side* argument indicates whether this is a server side or + client side transport. + + The optional *server_hostname* argument can be used to specify the + hostname you are connecting to. You may only specify this parameter if + the _ssl module supports Server Name Indication (SNI). + """ + self._context = context + self._server_side = server_side + self._server_hostname = server_hostname + self._state = _UNWRAPPED + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + self._sslobj = None + self._need_ssldata = False + self._handshake_cb = None + self._shutdown_cb = None + + @property + def context(self): + """The SSL context passed to the constructor.""" + return self._context + + @property + def ssl_object(self): + """The internal ssl.SSLObject instance. + + Return None if the pipe is not wrapped. + """ + return self._sslobj + + @property + def need_ssldata(self): + """Whether more record level data is needed to complete a handshake + that is currently in progress.""" + return self._need_ssldata + + @property + def wrapped(self): + """ + Whether a security layer is currently in effect. + + Return False during handshake. + """ + return self._state == _WRAPPED + + def do_handshake(self, callback=None): + """Start the SSL handshake. + + Return a list of ssldata. A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the handshake is complete. The callback will be + called with None if successful, else an exception instance. + """ + if self._state != _UNWRAPPED: + raise RuntimeError('handshake in progress or completed') + self._sslobj = self._context.wrap_bio( + self._incoming, self._outgoing, + server_side=self._server_side, + server_hostname=self._server_hostname) + self._state = _DO_HANDSHAKE + self._handshake_cb = callback + ssldata, appdata = self.feed_ssldata(b'', only_handshake=True) + assert len(appdata) == 0 + return ssldata + + def shutdown(self, callback=None): + """Start the SSL shutdown sequence. + + Return a list of ssldata. 
A ssldata element is a list of buffers + + The optional *callback* argument can be used to install a callback that + will be called when the shutdown is complete. The callback will be + called without arguments. + """ + if self._state == _UNWRAPPED: + raise RuntimeError('no security layer present') + if self._state == _SHUTDOWN: + raise RuntimeError('shutdown in progress') + assert self._state in (_WRAPPED, _DO_HANDSHAKE) + self._state = _SHUTDOWN + self._shutdown_cb = callback + ssldata, appdata = self.feed_ssldata(b'') + assert appdata == [] or appdata == [b''] + return ssldata + + def feed_eof(self): + """Send a potentially "ragged" EOF. + + This method will raise an SSL_ERROR_EOF exception if the EOF is + unexpected. + """ + self._incoming.write_eof() + ssldata, appdata = self.feed_ssldata(b'') + assert appdata == [] or appdata == [b''] + + def feed_ssldata(self, data, only_handshake=False): + """Feed SSL record level data into the pipe. + + The data must be a bytes instance. It is OK to send an empty bytes + instance. This can be used to get ssldata for a handshake initiated by + this endpoint. + + Return a (ssldata, appdata) tuple. The ssldata element is a list of + buffers containing SSL data that needs to be sent to the remote SSL. + + The appdata element is a list of buffers containing plaintext data that + needs to be forwarded to the application. The appdata list may contain + an empty buffer indicating an SSL "close_notify" alert. This alert must + be acknowledged by calling shutdown(). + """ + if self._state == _UNWRAPPED: + # If unwrapped, pass plaintext data straight through. + if data: + appdata = [data] + else: + appdata = [] + return ([], appdata) + + self._need_ssldata = False + if data: + self._incoming.write(data) + + ssldata = [] + appdata = [] + try: + if self._state == _DO_HANDSHAKE: + # Call do_handshake() until it doesn't raise anymore. + self._sslobj.do_handshake() + self._state = _WRAPPED + if self._handshake_cb: + self._handshake_cb(None) + if only_handshake: + return (ssldata, appdata) + # Handshake done: execute the wrapped block + + if self._state == _WRAPPED: + # Main state: read data from SSL until close_notify + while True: + chunk = self._sslobj.read(self.max_size) + appdata.append(chunk) + if not chunk: # close_notify + break + + elif self._state == _SHUTDOWN: + # Call shutdown() until it doesn't raise anymore. + self._sslobj.unwrap() + self._sslobj = None + self._state = _UNWRAPPED + if self._shutdown_cb: + self._shutdown_cb() + + elif self._state == _UNWRAPPED: + # Drain possible plaintext data after close_notify. + appdata.append(self._incoming.read()) + except (ssl.SSLError, ssl.CertificateError) as exc: + if getattr(exc, 'errno', None) not in ( + ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE, + ssl.SSL_ERROR_SYSCALL): + if self._state == _DO_HANDSHAKE and self._handshake_cb: + self._handshake_cb(exc) + raise + self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ) + + # Check for record level data that needs to be sent back. + # Happens for the initial handshake and renegotiations. + if self._outgoing.pending: + ssldata.append(self._outgoing.read()) + return (ssldata, appdata) + + def feed_appdata(self, data, offset=0): + """Feed plaintext data into the pipe. + + Return an (ssldata, offset) tuple. The ssldata element is a list of + buffers containing record level data that needs to be sent to the + remote SSL instance. The offset is the number of plaintext bytes that + were processed, which may be less than the length of data. 
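
# A standalone sketch (not from the patch) of the ssl.MemoryBIO machinery this
# pipe is built on: the client-side SSLObject keeps raising SSLWantReadError
# (the need_ssldata condition) until handshake records from the peer are fed
# into the incoming BIO, while the records it produced wait in the outgoing BIO.
import ssl

ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

incoming = ssl.MemoryBIO()       # records received from the peer
outgoing = ssl.MemoryBIO()       # records to be sent to the peer
sslobj = ctx.wrap_bio(incoming, outgoing, server_side=False)

try:
    sslobj.do_handshake()
except ssl.SSLWantReadError:
    # The ClientHello is now pending in `outgoing`; a real transport would
    # send it and feed the peer's reply back into `incoming`.
    print('handshake needs peer data; %d bytes queued' % outgoing.pending)
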
+ + NOTE: In case of short writes, this call MUST be retried with the SAME + buffer passed into the *data* argument (i.e. the id() must be the + same). This is an OpenSSL requirement. A further particularity is that + a short write will always have offset == 0, because the _ssl module + does not enable partial writes. And even though the offset is zero, + there will still be encrypted data in ssldata. + """ + assert 0 <= offset <= len(data) + if self._state == _UNWRAPPED: + # pass through data in unwrapped mode + if offset < len(data): + ssldata = [data[offset:]] + else: + ssldata = [] + return (ssldata, len(data)) + + ssldata = [] + view = memoryview(data) + while True: + self._need_ssldata = False + try: + if offset < len(view): + offset += self._sslobj.write(view[offset:]) + except ssl.SSLError as exc: + # It is not allowed to call write() after unwrap() until the + # close_notify is acknowledged. We return the condition to the + # caller as a short write. + if exc.reason == 'PROTOCOL_IS_SHUTDOWN': + exc.errno = ssl.SSL_ERROR_WANT_READ + if exc.errno not in (ssl.SSL_ERROR_WANT_READ, + ssl.SSL_ERROR_WANT_WRITE, + ssl.SSL_ERROR_SYSCALL): + raise + self._need_ssldata = (exc.errno == ssl.SSL_ERROR_WANT_READ) + + # See if there's any record level data back for us. + if self._outgoing.pending: + ssldata.append(self._outgoing.read()) + if offset == len(view) or self._need_ssldata: + break + return (ssldata, offset) + + +class _SSLProtocolTransport(transports._FlowControlMixin, + transports.Transport): + + def __init__(self, loop, ssl_protocol, app_protocol): + self._loop = loop + self._ssl_protocol = ssl_protocol + self._app_protocol = app_protocol + self._closed = False + + def get_extra_info(self, name, default=None): + """Get optional transport information.""" + return self._ssl_protocol._get_extra_info(name, default) + + def close(self): + """Close the transport. + + Buffered data will be flushed asynchronously. No more data + will be received. After all buffered data is flushed, the + protocol's connection_lost() method will (eventually) called + with None as its argument. + """ + self._closed = True + self._ssl_protocol._start_shutdown() + + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if not self._closed: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self.close() + + def pause_reading(self): + """Pause the receiving end. + + No data will be passed to the protocol's data_received() + method until resume_reading() is called. + """ + self._ssl_protocol._transport.pause_reading() + + def resume_reading(self): + """Resume the receiving end. + + Data received will once again be passed to the protocol's + data_received() method. + """ + self._ssl_protocol._transport.resume_reading() + + def set_write_buffer_limits(self, high=None, low=None): + """Set the high- and low-water limits for write flow control. + + These two values control when to call the protocol's + pause_writing() and resume_writing() methods. If specified, + the low-water limit must be less than or equal to the + high-water limit. Neither value can be negative. + + The defaults are implementation-specific. If only the + high-water limit is given, the low-water limit defaults to a + implementation-specific value less than or equal to the + high-water limit. 
Setting high to zero forces low to zero as + well, and causes pause_writing() to be called whenever the + buffer becomes non-empty. Setting low to zero causes + resume_writing() to be called only once the buffer is empty. + Use of zero for either limit is generally sub-optimal as it + reduces opportunities for doing I/O and computation + concurrently. + """ + self._ssl_protocol._transport.set_write_buffer_limits(high, low) + + def get_write_buffer_size(self): + """Return the current size of the write buffer.""" + return self._ssl_protocol._transport.get_write_buffer_size() + + def write(self, data): + """Write some data bytes to the transport. + + This does not block; it buffers the data and arranges for it + to be sent out asynchronously. + """ + if not isinstance(data, (bytes, bytearray, memoryview)): + raise TypeError("data: expecting a bytes-like instance, got {!r}" + .format(type(data).__name__)) + if not data: + return + self._ssl_protocol._write_appdata(data) + + def can_write_eof(self): + """Return True if this transport supports write_eof(), False if not.""" + return False + + def abort(self): + """Close the transport immediately. + + Buffered data will be lost. No more data will be received. + The protocol's connection_lost() method will (eventually) be + called with None as its argument. + """ + self._ssl_protocol._abort() + + +class SSLProtocol(protocols.Protocol): + """SSL protocol. + + Implementation of SSL on top of a socket using incoming and outgoing + buffers which are ssl.MemoryBIO objects. + """ + + def __init__(self, loop, app_protocol, sslcontext, waiter, + server_side=False, server_hostname=None): + if ssl is None: + raise RuntimeError('stdlib ssl module not available') + + if not sslcontext: + sslcontext = _create_transport_context(server_side, server_hostname) + + self._server_side = server_side + if server_hostname and not server_side: + self._server_hostname = server_hostname + else: + self._server_hostname = None + self._sslcontext = sslcontext + # SSL-specific extra info. More info are set when the handshake + # completes. + self._extra = dict(sslcontext=sslcontext) + + # App data write buffering + self._write_backlog = collections.deque() + self._write_buffer_size = 0 + + self._waiter = waiter + self._loop = loop + self._app_protocol = app_protocol + self._app_transport = _SSLProtocolTransport(self._loop, + self, self._app_protocol) + self._sslpipe = None + self._session_established = False + self._in_handshake = False + self._in_shutdown = False + self._transport = None + + def _wakeup_waiter(self, exc=None): + if self._waiter is None: + return + if not self._waiter.cancelled(): + if exc is not None: + self._waiter.set_exception(exc) + else: + self._waiter.set_result(None) + self._waiter = None + + def connection_made(self, transport): + """Called when the low-level connection is made. + + Start the SSL handshake. + """ + self._transport = transport + self._sslpipe = _SSLPipe(self._sslcontext, + self._server_side, + self._server_hostname) + self._start_handshake() + + def connection_lost(self, exc): + """Called when the low-level connection is lost or closed. + + The argument is an exception object or None (the latter + meaning a regular EOF is received or the connection was + aborted or closed). 
+ """ + if self._session_established: + self._session_established = False + self._loop.call_soon(self._app_protocol.connection_lost, exc) + self._transport = None + self._app_transport = None + + def pause_writing(self): + """Called when the low-level transport's buffer goes over + the high-water mark. + """ + self._app_protocol.pause_writing() + + def resume_writing(self): + """Called when the low-level transport's buffer drains below + the low-water mark. + """ + self._app_protocol.resume_writing() + + def data_received(self, data): + """Called when some SSL data is received. + + The argument is a bytes object. + """ + try: + ssldata, appdata = self._sslpipe.feed_ssldata(data) + except ssl.SSLError as e: + if self._loop.get_debug(): + logger.warning('%r: SSL error %s (reason %s)', + self, e.errno, e.reason) + self._abort() + return + + for chunk in ssldata: + self._transport.write(chunk) + + for chunk in appdata: + if chunk: + self._app_protocol.data_received(chunk) + else: + self._start_shutdown() + break + + def eof_received(self): + """Called when the other end of the low-level stream + is half-closed. + + If this returns a false value (including None), the transport + will close itself. If it returns a true value, closing the + transport is up to the protocol. + """ + try: + if self._loop.get_debug(): + logger.debug("%r received EOF", self) + + self._wakeup_waiter(ConnectionResetError) + + if not self._in_handshake: + keep_open = self._app_protocol.eof_received() + if keep_open: + logger.warning('returning true from eof_received() ' + 'has no effect when using ssl') + finally: + self._transport.close() + + def _get_extra_info(self, name, default=None): + if name in self._extra: + return self._extra[name] + else: + return self._transport.get_extra_info(name, default) + + def _start_shutdown(self): + if self._in_shutdown: + return + self._in_shutdown = True + self._write_appdata(b'') + + def _write_appdata(self, data): + self._write_backlog.append((data, 0)) + self._write_buffer_size += len(data) + self._process_write_backlog() + + def _start_handshake(self): + if self._loop.get_debug(): + logger.debug("%r starts SSL handshake", self) + self._handshake_start_time = self._loop.time() + else: + self._handshake_start_time = None + self._in_handshake = True + # (b'', 1) is a special value in _process_write_backlog() to do + # the SSL handshake + self._write_backlog.append((b'', 1)) + self._loop.call_soon(self._process_write_backlog) + + def _on_handshake_complete(self, handshake_exc): + self._in_handshake = False + + sslobj = self._sslpipe.ssl_object + try: + if handshake_exc is not None: + raise handshake_exc + + peercert = sslobj.getpeercert() + if not hasattr(self._sslcontext, 'check_hostname'): + # Verify hostname if requested, Python 3.4+ uses check_hostname + # and checks the hostname in do_handshake() + if (self._server_hostname + and self._sslcontext.verify_mode != ssl.CERT_NONE): + ssl.match_hostname(peercert, self._server_hostname) + except BaseException as exc: + if self._loop.get_debug(): + if isinstance(exc, ssl.CertificateError): + logger.warning("%r: SSL handshake failed " + "on verifying the certificate", + self, exc_info=True) + else: + logger.warning("%r: SSL handshake failed", + self, exc_info=True) + self._transport.close() + if isinstance(exc, Exception): + self._wakeup_waiter(exc) + return + else: + raise + + if self._loop.get_debug(): + dt = self._loop.time() - self._handshake_start_time + logger.debug("%r: SSL handshake took %.1f ms", self, dt * 1e3) + + # Add extra 
info that becomes available after handshake. + self._extra.update(peercert=peercert, + cipher=sslobj.cipher(), + compression=sslobj.compression(), + ) + self._app_protocol.connection_made(self._app_transport) + self._wakeup_waiter() + self._session_established = True + # In case transport.write() was already called. Don't call + # immediatly _process_write_backlog(), but schedule it: + # _on_handshake_complete() can be called indirectly from + # _process_write_backlog(), and _process_write_backlog() is not + # reentrant. + self._loop.call_soon(self._process_write_backlog) + + def _process_write_backlog(self): + # Try to make progress on the write backlog. + if self._transport is None: + return + + try: + for i in range(len(self._write_backlog)): + data, offset = self._write_backlog[0] + if data: + ssldata, offset = self._sslpipe.feed_appdata(data, offset) + elif offset: + ssldata = self._sslpipe.do_handshake( + self._on_handshake_complete) + offset = 1 + else: + ssldata = self._sslpipe.shutdown(self._finalize) + offset = 1 + + for chunk in ssldata: + self._transport.write(chunk) + + if offset < len(data): + self._write_backlog[0] = (data, offset) + # A short write means that a write is blocked on a read + # We need to enable reading if it is paused! + assert self._sslpipe.need_ssldata + if self._transport._paused: + self._transport.resume_reading() + break + + # An entire chunk from the backlog was processed. We can + # delete it and reduce the outstanding buffer size. + del self._write_backlog[0] + self._write_buffer_size -= len(data) + except BaseException as exc: + if self._in_handshake: + # BaseExceptions will be re-raised in _on_handshake_complete. + self._on_handshake_complete(exc) + else: + self._fatal_error(exc, 'Fatal error on SSL transport') + if not isinstance(exc, Exception): + # BaseException + raise + + def _fatal_error(self, exc, message='Fatal error on transport'): + # Should be called from exception handler only. + if isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if self._loop.get_debug(): + logger.debug("%r: %s", self, message, exc_info=True) + else: + self._loop.call_exception_handler({ + 'message': message, + 'exception': exc, + 'transport': self._transport, + 'protocol': self, + }) + if self._transport: + self._transport._force_close(exc) + + def _finalize(self): + if self._transport is not None: + self._transport.close() + + def _abort(self): + if self._transport is not None: + try: + self._transport.abort() + finally: + self._finalize() diff --git a/Darwin/lib/python3.4/asyncio/streams.py b/Darwin/lib/python3.5/asyncio/streams.py similarity index 81% rename from Darwin/lib/python3.4/asyncio/streams.py rename to Darwin/lib/python3.5/asyncio/streams.py index e239248..6484c43 100644 --- a/Darwin/lib/python3.4/asyncio/streams.py +++ b/Darwin/lib/python3.5/asyncio/streams.py @@ -10,10 +10,13 @@ import socket if hasattr(socket, 'AF_UNIX'): __all__.extend(['open_unix_connection', 'start_unix_server']) +from . import coroutines +from . import compat from . import events from . import futures from . import protocols -from . import tasks +from .coroutines import coroutine +from .log import logger _DEFAULT_LIMIT = 2**16 @@ -33,7 +36,7 @@ class IncompleteReadError(EOFError): self.expected = expected -@tasks.coroutine +@coroutine def open_connection(host=None, port=None, *, loop=None, limit=_DEFAULT_LIMIT, **kwds): """A wrapper for create_connection() returning a (reader, writer) pair. 
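# ---------------------------------------------------------------------------
# Editor's note -- illustrative sketch, not part of the diff above or below.
# The _SSLPipe code above drives an ssl.SSLObject over a pair of
# ssl.MemoryBIO buffers: do_handshake()/read()/write() raise SSLWantReadError
# until the peer's records are fed into the incoming BIO, while the outgoing
# BIO accumulates the record-level bytes that must be shipped over the raw
# transport. The minimal standalone sketch below (stdlib only, Python 3.5+
# syntax assumed) shows that mechanism for the client ClientHello; the
# hostname is an arbitrary placeholder and no network connection is made.
import ssl

incoming = ssl.MemoryBIO()   # records received from the peer go here
outgoing = ssl.MemoryBIO()   # records to send to the peer appear here
ctx = ssl.create_default_context()
sslobj = ctx.wrap_bio(incoming, outgoing, server_hostname='example.com')

try:
    sslobj.do_handshake()        # no peer data yet, so this cannot complete
except ssl.SSLWantReadError:
    pass                         # the condition _SSLPipe maps to need_ssldata

client_hello = outgoing.read()   # the ssldata that feed_ssldata() would
print(len(client_hello), 'bytes of handshake data to send')   # hand back
# ---------------------------------------------------------------------------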
@@ -63,7 +66,7 @@ def open_connection(host=None, port=None, *, return reader, writer -@tasks.coroutine +@coroutine def start_server(client_connected_cb, host=None, port=None, *, loop=None, limit=_DEFAULT_LIMIT, **kwds): """Start a socket server, call back for each client connected. @@ -102,7 +105,7 @@ def start_server(client_connected_cb, host=None, port=None, *, if hasattr(socket, 'AF_UNIX'): # UNIX Domain Sockets are supported on this platform - @tasks.coroutine + @coroutine def open_unix_connection(path=None, *, loop=None, limit=_DEFAULT_LIMIT, **kwds): """Similar to `open_connection` but works with UNIX Domain Sockets.""" @@ -116,7 +119,7 @@ if hasattr(socket, 'AF_UNIX'): return reader, writer - @tasks.coroutine + @coroutine def start_unix_server(client_connected_cb, path=None, *, loop=None, limit=_DEFAULT_LIMIT, **kwds): """Similar to `start_server` but works with UNIX Domain Sockets.""" @@ -139,23 +142,30 @@ class FlowControlMixin(protocols.Protocol): resume_reading() and connection_lost(). If the subclass overrides these it must call the super methods. - StreamWriter.drain() must check for error conditions and then call - _make_drain_waiter(), which will return either () or a Future - depending on the paused state. + StreamWriter.drain() must wait for _drain_helper() coroutine. """ def __init__(self, loop=None): - self._loop = loop # May be None; we may never need it. + if loop is None: + self._loop = events.get_event_loop() + else: + self._loop = loop self._paused = False self._drain_waiter = None + self._connection_lost = False def pause_writing(self): assert not self._paused self._paused = True + if self._loop.get_debug(): + logger.debug("%r pauses writing", self) def resume_writing(self): assert self._paused self._paused = False + if self._loop.get_debug(): + logger.debug("%r resumes writing", self) + waiter = self._drain_waiter if waiter is not None: self._drain_waiter = None @@ -163,6 +173,7 @@ class FlowControlMixin(protocols.Protocol): waiter.set_result(None) def connection_lost(self, exc): + self._connection_lost = True # Wake up the writer if currently paused. 
if not self._paused: return @@ -177,14 +188,17 @@ class FlowControlMixin(protocols.Protocol): else: waiter.set_exception(exc) - def _make_drain_waiter(self): + @coroutine + def _drain_helper(self): + if self._connection_lost: + raise ConnectionResetError('Connection lost') if not self._paused: - return () + return waiter = self._drain_waiter assert waiter is None or waiter.cancelled() waiter = futures.Future(loop=self._loop) self._drain_waiter = waiter - return waiter + yield from waiter class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): @@ -210,8 +224,8 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): self._loop) res = self._client_connected_cb(self._stream_reader, self._stream_writer) - if tasks.iscoroutine(res): - tasks.Task(res, loop=self._loop) + if coroutines.iscoroutine(res): + self._loop.create_task(res) def connection_lost(self, exc): if exc is None: @@ -225,6 +239,7 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): def eof_received(self): self._stream_reader.feed_eof() + return True class StreamWriter: @@ -240,9 +255,17 @@ class StreamWriter: def __init__(self, transport, protocol, reader, loop): self._transport = transport self._protocol = protocol + # drain() expects that the reader has a exception() method + assert reader is None or isinstance(reader, StreamReader) self._reader = reader self._loop = loop + def __repr__(self): + info = [self.__class__.__name__, 'transport=%r' % self._transport] + if self._reader is not None: + info.append('reader=%r' % self._reader) + return '<%s>' % ' '.join(info) + @property def transport(self): return self._transport @@ -265,26 +288,20 @@ class StreamWriter: def get_extra_info(self, name, default=None): return self._transport.get_extra_info(name, default) + @coroutine def drain(self): - """This method has an unusual return value. + """Flush the write buffer. The intended use is to write w.write(data) yield from w.drain() - - When there's nothing to wait for, drain() returns (), and the - yield-from continues immediately. When the transport buffer - is full (the protocol is paused), drain() creates and returns - a Future and the yield-from will block until that Future is - completed, which will happen when the buffer is (partially) - drained and the protocol is resumed. """ - if self._reader is not None and self._reader._exception is not None: - raise self._reader._exception - if self._transport._conn_lost: # Uses private variable. - raise ConnectionResetError('Connection lost') - return self._protocol._make_drain_waiter() + if self._reader is not None: + exc = self._reader.exception() + if exc is not None: + raise exc + yield from self._protocol._drain_helper() class StreamReader: @@ -294,15 +311,34 @@ class StreamReader: # it also doubles as half the buffer limit. self._limit = limit if loop is None: - loop = events.get_event_loop() - self._loop = loop + self._loop = events.get_event_loop() + else: + self._loop = loop self._buffer = bytearray() - self._eof = False # Whether we're done. - self._waiter = None # A future. + self._eof = False # Whether we're done. 
+ self._waiter = None # A future used by _wait_for_data() self._exception = None self._transport = None self._paused = False + def __repr__(self): + info = ['StreamReader'] + if self._buffer: + info.append('%d bytes' % len(info)) + if self._eof: + info.append('eof') + if self._limit != _DEFAULT_LIMIT: + info.append('l=%d' % self._limit) + if self._waiter: + info.append('w=%r' % self._waiter) + if self._exception: + info.append('e=%r' % self._exception) + if self._transport: + info.append('t=%r' % self._transport) + if self._paused: + info.append('paused') + return '<%s>' % ' '.join(info) + def exception(self): return self._exception @@ -315,6 +351,14 @@ class StreamReader: if not waiter.cancelled(): waiter.set_exception(exc) + def _wakeup_waiter(self): + """Wakeup read() or readline() function waiting for data or EOF.""" + waiter = self._waiter + if waiter is not None: + self._waiter = None + if not waiter.cancelled(): + waiter.set_result(None) + def set_transport(self, transport): assert self._transport is None, 'Transport already set' self._transport = transport @@ -326,11 +370,7 @@ class StreamReader: def feed_eof(self): self._eof = True - waiter = self._waiter - if waiter is not None: - self._waiter = None - if not waiter.cancelled(): - waiter.set_result(True) + self._wakeup_waiter() def at_eof(self): """Return True if the buffer is empty and 'feed_eof' was called.""" @@ -343,12 +383,7 @@ class StreamReader: return self._buffer.extend(data) - - waiter = self._waiter - if waiter is not None: - self._waiter = None - if not waiter.cancelled(): - waiter.set_result(False) + self._wakeup_waiter() if (self._transport is not None and not self._paused and @@ -363,7 +398,9 @@ class StreamReader: else: self._paused = True - def _create_waiter(self, func_name): + @coroutine + def _wait_for_data(self, func_name): + """Wait until feed_data() or feed_eof() is called.""" # StreamReader uses a future to link the protocol feed_data() method # to a read coroutine. Running two read coroutines at the same time # would have an unexpected behaviour. 
It would not possible to know @@ -371,9 +408,14 @@ class StreamReader: if self._waiter is not None: raise RuntimeError('%s() called while another coroutine is ' 'already waiting for incoming data' % func_name) - return futures.Future(loop=self._loop) - @tasks.coroutine + self._waiter = futures.Future(loop=self._loop) + try: + yield from self._waiter + finally: + self._waiter = None + + @coroutine def readline(self): if self._exception is not None: raise self._exception @@ -401,16 +443,12 @@ class StreamReader: break if not_enough: - self._waiter = self._create_waiter('readline') - try: - yield from self._waiter - finally: - self._waiter = None + yield from self._wait_for_data('readline') self._maybe_resume_transport() return bytes(line) - @tasks.coroutine + @coroutine def read(self, n=-1): if self._exception is not None: raise self._exception @@ -432,11 +470,7 @@ class StreamReader: return b''.join(blocks) else: if not self._buffer and not self._eof: - self._waiter = self._create_waiter('read') - try: - yield from self._waiter - finally: - self._waiter = None + yield from self._wait_for_data('read') if n < 0 or len(self._buffer) <= n: data = bytes(self._buffer) @@ -449,7 +483,7 @@ class StreamReader: self._maybe_resume_transport() return data - @tasks.coroutine + @coroutine def readexactly(self, n): if self._exception is not None: raise self._exception @@ -471,3 +505,15 @@ class StreamReader: n -= len(block) return b''.join(blocks) + + if compat.PY35: + @coroutine + def __aiter__(self): + return self + + @coroutine + def __anext__(self): + val = yield from self.readline() + if val == b'': + raise StopAsyncIteration + return val diff --git a/Darwin/lib/python3.4/asyncio/subprocess.py b/Darwin/lib/python3.5/asyncio/subprocess.py similarity index 70% rename from Darwin/lib/python3.4/asyncio/subprocess.py rename to Darwin/lib/python3.5/asyncio/subprocess.py index 414e023..ead4039 100644 --- a/Darwin/lib/python3.4/asyncio/subprocess.py +++ b/Darwin/lib/python3.5/asyncio/subprocess.py @@ -1,13 +1,13 @@ __all__ = ['create_subprocess_exec', 'create_subprocess_shell'] -import collections import subprocess from . import events -from . import futures from . import protocols from . import streams from . 
import tasks +from .coroutines import coroutine +from .log import logger PIPE = subprocess.PIPE @@ -23,25 +23,39 @@ class SubprocessStreamProtocol(streams.FlowControlMixin, super().__init__(loop=loop) self._limit = limit self.stdin = self.stdout = self.stderr = None - self.waiter = futures.Future(loop=loop) - self._waiters = collections.deque() self._transport = None + def __repr__(self): + info = [self.__class__.__name__] + if self.stdin is not None: + info.append('stdin=%r' % self.stdin) + if self.stdout is not None: + info.append('stdout=%r' % self.stdout) + if self.stderr is not None: + info.append('stderr=%r' % self.stderr) + return '<%s>' % ' '.join(info) + def connection_made(self, transport): self._transport = transport - if transport.get_pipe_transport(1): + + stdout_transport = transport.get_pipe_transport(1) + if stdout_transport is not None: self.stdout = streams.StreamReader(limit=self._limit, loop=self._loop) - if transport.get_pipe_transport(2): + self.stdout.set_transport(stdout_transport) + + stderr_transport = transport.get_pipe_transport(2) + if stderr_transport is not None: self.stderr = streams.StreamReader(limit=self._limit, loop=self._loop) - stdin = transport.get_pipe_transport(0) - if stdin is not None: - self.stdin = streams.StreamWriter(stdin, + self.stderr.set_transport(stderr_transport) + + stdin_transport = transport.get_pipe_transport(0) + if stdin_transport is not None: + self.stdin = streams.StreamWriter(stdin_transport, protocol=self, reader=None, loop=self._loop) - self.waiter.set_result(None) def pipe_data_received(self, fd, data): if fd == 1: @@ -73,11 +87,8 @@ class SubprocessStreamProtocol(streams.FlowControlMixin, reader.set_exception(exc) def process_exited(self): - # wake up futures waiting for wait() - returncode = self._transport.get_returncode() - while self._waiters: - waiter = self._waiters.popleft() - waiter.set_result(returncode) + self._transport.close() + self._transport = None class Process: @@ -90,49 +101,52 @@ class Process: self.stderr = protocol.stderr self.pid = transport.get_pid() + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, self.pid) + @property def returncode(self): return self._transport.get_returncode() - @tasks.coroutine + @coroutine def wait(self): - """Wait until the process exit and return the process return code.""" - returncode = self._transport.get_returncode() - if returncode is not None: - return returncode + """Wait until the process exit and return the process return code. 
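# ---------------------------------------------------------------------------
# Editor's note -- illustrative sketch, not part of the diff (placed inside
# the Process.wait() hunk because it exercises wait()/communicate() as seen
# by callers). It uses only the public helpers defined in this file and
# assumes a Unix-like platform (the default Windows loop of this era cannot
# spawn subprocesses) plus Python 3.5+ syntax.
import asyncio
import sys

async def demo():
    proc = await asyncio.create_subprocess_exec(
        sys.executable, '-c', 'print("hello from child")',
        stdout=asyncio.subprocess.PIPE)
    out, _ = await proc.communicate()   # reads stdout, then waits for exit
    return out, proc.returncode

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
print(loop.run_until_complete(demo()))  # (b'hello from child\n', 0)
loop.close()
# ---------------------------------------------------------------------------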
- waiter = futures.Future(loop=self._loop) - self._protocol._waiters.append(waiter) - yield from waiter - return waiter.result() - - def _check_alive(self): - if self._transport.get_returncode() is not None: - raise ProcessLookupError() + This method is a coroutine.""" + return (yield from self._transport._wait()) def send_signal(self, signal): - self._check_alive() self._transport.send_signal(signal) def terminate(self): - self._check_alive() self._transport.terminate() def kill(self): - self._check_alive() self._transport.kill() - @tasks.coroutine + @coroutine def _feed_stdin(self, input): + debug = self._loop.get_debug() self.stdin.write(input) - yield from self.stdin.drain() + if debug: + logger.debug('%r communicate: feed stdin (%s bytes)', + self, len(input)) + try: + yield from self.stdin.drain() + except (BrokenPipeError, ConnectionResetError) as exc: + # communicate() ignores BrokenPipeError and ConnectionResetError + if debug: + logger.debug('%r communicate: stdin got %r', self, exc) + + if debug: + logger.debug('%r communicate: close stdin', self) self.stdin.close() - @tasks.coroutine + @coroutine def _noop(self): return None - @tasks.coroutine + @coroutine def _read_stream(self, fd): transport = self._transport.get_pipe_transport(fd) if fd == 2: @@ -140,11 +154,17 @@ class Process: else: assert fd == 1 stream = self.stdout + if self._loop.get_debug(): + name = 'stdout' if fd == 1 else 'stderr' + logger.debug('%r communicate: read %s', self, name) output = yield from stream.read() + if self._loop.get_debug(): + name = 'stdout' if fd == 1 else 'stderr' + logger.debug('%r communicate: close %s', self, name) transport.close() return output - @tasks.coroutine + @coroutine def communicate(self, input=None): if input: stdin = self._feed_stdin(input) @@ -164,7 +184,7 @@ class Process: return (stdout, stderr) -@tasks.coroutine +@coroutine def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, loop=None, limit=streams._DEFAULT_LIMIT, **kwds): if loop is None: @@ -175,10 +195,9 @@ def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None, protocol_factory, cmd, stdin=stdin, stdout=stdout, stderr=stderr, **kwds) - yield from protocol.waiter return Process(transport, protocol, loop) -@tasks.coroutine +@coroutine def create_subprocess_exec(program, *args, stdin=None, stdout=None, stderr=None, loop=None, limit=streams._DEFAULT_LIMIT, **kwds): @@ -191,5 +210,4 @@ def create_subprocess_exec(program, *args, stdin=None, stdout=None, program, *args, stdin=stdin, stdout=stdout, stderr=stderr, **kwds) - yield from protocol.waiter return Process(transport, protocol, loop) diff --git a/Darwin/lib/python3.4/asyncio/tasks.py b/Darwin/lib/python3.5/asyncio/tasks.py similarity index 76% rename from Darwin/lib/python3.4/asyncio/tasks.py rename to Darwin/lib/python3.5/asyncio/tasks.py index 45a6342..a235e74 100644 --- a/Darwin/lib/python3.4/asyncio/tasks.py +++ b/Darwin/lib/python3.5/asyncio/tasks.py @@ -1,132 +1,24 @@ """Support for tasks, coroutines and the scheduler.""" -__all__ = ['coroutine', 'Task', - 'iscoroutinefunction', 'iscoroutine', +__all__ = ['Task', 'FIRST_COMPLETED', 'FIRST_EXCEPTION', 'ALL_COMPLETED', 'wait', 'wait_for', 'as_completed', 'sleep', 'async', - 'gather', 'shield', + 'gather', 'shield', 'ensure_future', ] import concurrent.futures import functools import inspect import linecache -import os -import sys import traceback +import warnings import weakref +from . import compat +from . import coroutines from . import events from . 
import futures -from .log import logger - -# If you set _DEBUG to true, @coroutine will wrap the resulting -# generator objects in a CoroWrapper instance (defined below). That -# instance will log a message when the generator is never iterated -# over, which may happen when you forget to use "yield from" with a -# coroutine call. Note that the value of the _DEBUG flag is taken -# when the decorator is used, so to be of any use it must be set -# before you define your coroutines. A downside of using this feature -# is that tracebacks show entries for the CoroWrapper.__next__ method -# when _DEBUG is true. -_DEBUG = (not sys.flags.ignore_environment - and bool(os.environ.get('PYTHONASYNCIODEBUG'))) - - -class CoroWrapper: - # Wrapper for coroutine in _DEBUG mode. - - __slots__ = ['gen', 'func', '__name__', '__doc__', '__weakref__'] - - def __init__(self, gen, func): - assert inspect.isgenerator(gen), gen - self.gen = gen - self.func = func - - def __iter__(self): - return self - - def __next__(self): - return next(self.gen) - - def send(self, *value): - # We use `*value` because of a bug in CPythons prior - # to 3.4.1. See issue #21209 and test_yield_from_corowrapper - # for details. This workaround should be removed in 3.5.0. - if len(value) == 1: - value = value[0] - return self.gen.send(value) - - def throw(self, exc): - return self.gen.throw(exc) - - def close(self): - return self.gen.close() - - @property - def gi_frame(self): - return self.gen.gi_frame - - @property - def gi_running(self): - return self.gen.gi_running - - @property - def gi_code(self): - return self.gen.gi_code - - def __del__(self): - # Be careful accessing self.gen.frame -- self.gen might not exist. - gen = getattr(self, 'gen', None) - frame = getattr(gen, 'gi_frame', None) - if frame is not None and frame.f_lasti == -1: - func = self.func - code = func.__code__ - filename = code.co_filename - lineno = code.co_firstlineno - logger.error( - 'Coroutine %r defined at %s:%s was never yielded from', - func.__name__, filename, lineno) - - -def coroutine(func): - """Decorator to mark coroutines. - - If the coroutine is not yielded from before it is destroyed, - an error message is logged. - """ - if inspect.isgeneratorfunction(func): - coro = func - else: - @functools.wraps(func) - def coro(*args, **kw): - res = func(*args, **kw) - if isinstance(res, futures.Future) or inspect.isgenerator(res): - res = yield from res - return res - - if not _DEBUG: - wrapper = coro - else: - @functools.wraps(func) - def wrapper(*args, **kwds): - w = CoroWrapper(coro(*args, **kwds), func) - w.__name__ = coro.__name__ - w.__doc__ = coro.__doc__ - return w - - wrapper._is_coroutine = True # For iscoroutinefunction(). - return wrapper - - -def iscoroutinefunction(func): - """Return True if func is a decorated coroutine function.""" - return getattr(func, '_is_coroutine', False) - - -def iscoroutine(obj): - """Return True if obj is a coroutine object.""" - return isinstance(obj, CoroWrapper) or inspect.isgenerator(obj) +from .coroutines import coroutine class Task(futures.Future): @@ -148,6 +40,10 @@ class Task(futures.Future): # all running event loops. {EventLoop: Task} _current_tasks = {} + # If False, don't log a message if the task is destroyed whereas its + # status is still pending + _log_destroy_pending = True + @classmethod def current_task(cls, loop=None): """Return the currently running task in an event loop or None. 
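# ---------------------------------------------------------------------------
# Editor's note -- illustrative sketch, not part of the diff. The @coroutine /
# CoroWrapper machinery removed above now lives in coroutines.py, and the
# hunks below deprecate asyncio.async() in favour of ensure_future() and
# loop.create_task(). A minimal sketch of that task-creation API, assuming
# Python 3.5+ syntax:
import asyncio

async def work(n):
    await asyncio.sleep(0)
    return n * 2

async def main(loop):
    t1 = asyncio.ensure_future(work(1))   # wraps the coroutine in a Task
    t2 = loop.create_task(work(2))        # equivalent, via the event loop
    return await asyncio.gather(t1, t2)   # -> [2, 4]

loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
print(loop.run_until_complete(main(loop)))
loop.close()
# ---------------------------------------------------------------------------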
@@ -171,30 +67,49 @@ class Task(futures.Future): return {t for t in cls._all_tasks if t._loop is loop} def __init__(self, coro, *, loop=None): - assert iscoroutine(coro), repr(coro) # Not a coroutine function! + assert coroutines.iscoroutine(coro), repr(coro) super().__init__(loop=loop) - self._coro = iter(coro) # Use the iterator just in case. + if self._source_traceback: + del self._source_traceback[-1] + self._coro = coro self._fut_waiter = None self._must_cancel = False self._loop.call_soon(self._step) self.__class__._all_tasks.add(self) - def __repr__(self): - res = super().__repr__() - if (self._must_cancel and - self._state == futures._PENDING and - ')'.format(self._coro.__name__) + res[i:] - return res + # On Python 3.3 or older, objects with a destructor that are part of a + # reference cycle are never destroyed. That's not the case any more on + # Python 3.4 thanks to the PEP 442. + if compat.PY34: + def __del__(self): + if self._state == futures._PENDING and self._log_destroy_pending: + context = { + 'task': self, + 'message': 'Task was destroyed but it is pending!', + } + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + futures.Future.__del__(self) + + def _repr_info(self): + info = super()._repr_info() + + if self._must_cancel: + # replace status + info[0] = 'cancelling' + + coro = coroutines._format_coroutine(self._coro) + info.insert(1, 'coro=<%s>' % coro) + + if self._fut_waiter is not None: + info.insert(2, 'wait_for=%r' % self._fut_waiter) + return info def get_stack(self, *, limit=None): """Return the list of stack frames for this task's coroutine. - If the coroutine is active, this returns the stack where it is + If the coroutine is not done, this returns the stack where it is suspended. If the coroutine has completed successfully or was cancelled, this returns an empty list. If the coroutine was terminated by an exception, this returns the list of traceback @@ -213,7 +128,11 @@ class Task(futures.Future): returned for a suspended coroutine. """ frames = [] - f = self._coro.gi_frame + try: + # 'async def' coroutines + f = self._coro.cr_frame + except AttributeError: + f = self._coro.gi_frame if f is not None: while f is not None: if limit is not None: @@ -240,7 +159,8 @@ class Task(futures.Future): This produces output similar to that of the traceback module, for the frames retrieved by get_stack(). The limit argument is passed to get_stack(). The file argument is an I/O stream - to which the output goes; by default it goes to sys.stderr. + to which the output is written; by default output is written + to sys.stderr. """ extracted_list = [] checked = set() @@ -269,18 +189,18 @@ class Task(futures.Future): print(line, file=file, end='') def cancel(self): - """Request that a task to cancel itself. + """Request that this task cancel itself. - This arranges for a CancellationError to be thrown into the + This arranges for a CancelledError to be thrown into the wrapped coroutine on the next cycle through the event loop. The coroutine then has a chance to clean up or even deny the request using try/except/finally. - Contrary to Future.cancel(), this does not guarantee that the + Unlike Future.cancel, this does not guarantee that the task will be cancelled: the exception might be caught and - acted upon, delaying cancellation of the task or preventing it - completely. The task may also return a value or raise a - different exception. 
+ acted upon, delaying cancellation of the task or preventing + cancellation completely. The task may also return a value or + raise a different exception. Immediately after this method is called, Task.cancelled() will not return True (unless the task was already cancelled). A @@ -315,10 +235,8 @@ class Task(futures.Future): try: if exc is not None: result = coro.throw(exc) - elif value is not None: - result = coro.send(value) else: - result = next(coro) + result = coro.send(value) except StopIteration as exc: self.set_result(exc.value) except futures.CancelledError as exc: @@ -387,6 +305,8 @@ ALL_COMPLETED = concurrent.futures.ALL_COMPLETED def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED): """Wait for the Futures and coroutines given by fs to complete. + The sequence futures must not be empty. + Coroutines will be wrapped in Tasks. Returns two sets of Future: (done, pending). @@ -398,24 +318,24 @@ def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED): Note: This does not raise TimeoutError! Futures that aren't done when the timeout occurs are returned in the second set. """ - if isinstance(fs, futures.Future) or iscoroutine(fs): + if isinstance(fs, futures.Future) or coroutines.iscoroutine(fs): raise TypeError("expect a list of futures, not %s" % type(fs).__name__) if not fs: raise ValueError('Set of coroutines/Futures is empty.') + if return_when not in (FIRST_COMPLETED, FIRST_EXCEPTION, ALL_COMPLETED): + raise ValueError('Invalid return_when value: {}'.format(return_when)) if loop is None: loop = events.get_event_loop() - fs = {async(f, loop=loop) for f in set(fs)} + fs = {ensure_future(f, loop=loop) for f in set(fs)} - if return_when not in (FIRST_COMPLETED, FIRST_EXCEPTION, ALL_COMPLETED): - raise ValueError('Invalid return_when value: {}'.format(return_when)) return (yield from _wait(fs, timeout, return_when, loop)) -def _release_waiter(waiter, value=True, *args): +def _release_waiter(waiter, *args): if not waiter.done(): - waiter.set_result(value) + waiter.set_result(None) @coroutine @@ -428,10 +348,9 @@ def wait_for(fut, timeout, *, loop=None): it cancels the task and raises TimeoutError. To avoid the task cancellation, wrap it in shield(). - Usage: - - result = yield from asyncio.wait_for(fut, 10.0) + If the wait is cancelled, the task is also cancelled. + This function is a coroutine. """ if loop is None: loop = events.get_event_loop() @@ -440,14 +359,22 @@ def wait_for(fut, timeout, *, loop=None): return (yield from fut) waiter = futures.Future(loop=loop) - timeout_handle = loop.call_later(timeout, _release_waiter, waiter, False) - cb = functools.partial(_release_waiter, waiter, True) + timeout_handle = loop.call_later(timeout, _release_waiter, waiter) + cb = functools.partial(_release_waiter, waiter) - fut = async(fut, loop=loop) + fut = ensure_future(fut, loop=loop) fut.add_done_callback(cb) try: - if (yield from waiter): + # wait until the future completes or the timeout + try: + yield from waiter + except futures.CancelledError: + fut.remove_done_callback(cb) + fut.cancel() + raise + + if fut.done(): return fut.result() else: fut.remove_done_callback(cb) @@ -480,7 +407,7 @@ def _wait(fs, timeout, return_when, loop): if timeout_handle is not None: timeout_handle.cancel() if not waiter.done(): - waiter.set_result(False) + waiter.set_result(None) for f in fs: f.add_done_callback(_on_completion) @@ -520,10 +447,10 @@ def as_completed(fs, *, loop=None, timeout=None): Note: The futures 'f' are not necessarily members of fs. 
""" - if isinstance(fs, futures.Future) or iscoroutine(fs): + if isinstance(fs, futures.Future) or coroutines.iscoroutine(fs): raise TypeError("expect a list of futures, not %s" % type(fs).__name__) loop = loop if loop is not None else events.get_event_loop() - todo = {async(f, loop=loop) for f in set(fs)} + todo = {ensure_future(f, loop=loop) for f in set(fs)} from .queues import Queue # Import here to avoid circular import problem. done = Queue(loop=loop) timeout_handle = None @@ -562,7 +489,8 @@ def as_completed(fs, *, loop=None, timeout=None): def sleep(delay, result=None, *, loop=None): """Coroutine that completes after a given time (in seconds).""" future = futures.Future(loop=loop) - h = future._loop.call_later(delay, future.set_result, result) + h = future._loop.call_later(delay, + future._set_result_unless_cancelled, result) try: return (yield from future) finally: @@ -572,14 +500,33 @@ def sleep(delay, result=None, *, loop=None): def async(coro_or_future, *, loop=None): """Wrap a coroutine in a future. + If the argument is a Future, it is returned directly. + + This function is deprecated in 3.5. Use asyncio.ensure_future() instead. + """ + + warnings.warn("asyncio.async() function is deprecated, use ensure_future()", + DeprecationWarning) + + return ensure_future(coro_or_future, loop=loop) + + +def ensure_future(coro_or_future, *, loop=None): + """Wrap a coroutine in a future. + If the argument is a Future, it is returned directly. """ if isinstance(coro_or_future, futures.Future): if loop is not None and loop is not coro_or_future._loop: raise ValueError('loop argument must agree with Future') return coro_or_future - elif iscoroutine(coro_or_future): - return Task(coro_or_future, loop=loop) + elif coroutines.iscoroutine(coro_or_future): + if loop is None: + loop = events.get_event_loop() + task = loop.create_task(coro_or_future) + if task._source_traceback: + del task._source_traceback[-1] + return task else: raise TypeError('A Future or coroutine is required') @@ -624,30 +571,43 @@ def gather(*coros_or_futures, loop=None, return_exceptions=False): prevent the cancellation of one child to cause other children to be cancelled.) """ - arg_to_fut = {arg: async(arg, loop=loop) for arg in set(coros_or_futures)} - children = [arg_to_fut[arg] for arg in coros_or_futures] - n = len(children) - if n == 0: + if not coros_or_futures: outer = futures.Future(loop=loop) outer.set_result([]) return outer - if loop is None: - loop = children[0]._loop - for fut in children: - if fut._loop is not loop: - raise ValueError("futures are tied to different event loops") + + arg_to_fut = {} + for arg in set(coros_or_futures): + if not isinstance(arg, futures.Future): + fut = ensure_future(arg, loop=loop) + if loop is None: + loop = fut._loop + # The caller cannot control this future, the "destroy pending task" + # warning should not be emitted. + fut._log_destroy_pending = False + else: + fut = arg + if loop is None: + loop = fut._loop + elif fut._loop is not loop: + raise ValueError("futures are tied to different event loops") + arg_to_fut[arg] = fut + + children = [arg_to_fut[arg] for arg in coros_or_futures] + nchildren = len(children) outer = _GatheringFuture(children, loop=loop) nfinished = 0 - results = [None] * n + results = [None] * nchildren def _done_callback(i, fut): nonlocal nfinished - if outer._state != futures._PENDING: - if fut._exception is not None: + if outer.done(): + if not fut.cancelled(): # Mark exception retrieved. 
fut.exception() return - if fut._state == futures._CANCELLED: + + if fut.cancelled(): res = futures.CancelledError() if not return_exceptions: outer.set_exception(res) @@ -661,7 +621,7 @@ def gather(*coros_or_futures, loop=None, return_exceptions=False): res = fut._result results[i] = res nfinished += 1 - if nfinished == n: + if nfinished == nchildren: outer.set_result(results) for i, fut in enumerate(children): @@ -695,7 +655,7 @@ def shield(arg, *, loop=None): except CancelledError: res = None """ - inner = async(arg, loop=loop) + inner = ensure_future(arg, loop=loop) if inner.done(): # Shortcut. return inner @@ -704,9 +664,11 @@ def shield(arg, *, loop=None): def _done_callback(inner): if outer.cancelled(): - # Mark inner's result as retrieved. - inner.cancelled() or inner.exception() + if not inner.cancelled(): + # Mark inner's result as retrieved. + inner.exception() return + if inner.cancelled(): outer.cancel() else: diff --git a/Darwin/lib/python3.4/asyncio/test_utils.py b/Darwin/lib/python3.5/asyncio/test_utils.py similarity index 82% rename from Darwin/lib/python3.4/asyncio/test_utils.py rename to Darwin/lib/python3.5/asyncio/test_utils.py index 9c3656a..8cee95b 100644 --- a/Darwin/lib/python3.4/asyncio/test_utils.py +++ b/Darwin/lib/python3.5/asyncio/test_utils.py @@ -3,6 +3,7 @@ import collections import contextlib import io +import logging import os import re import socket @@ -11,6 +12,7 @@ import sys import tempfile import threading import time +import unittest from unittest import mock from http.server import HTTPServer @@ -26,6 +28,8 @@ from . import events from . import futures from . import selectors from . import tasks +from .coroutines import coroutine +from .log import logger if sys.platform == 'win32': # pragma: no cover @@ -42,11 +46,14 @@ def dummy_ssl_context(): def run_briefly(loop): - @tasks.coroutine + @coroutine def once(): pass gen = once() - t = tasks.Task(gen, loop=loop) + t = loop.create_task(gen) + # Don't log a warning if the task is not done after run_until_complete(). + # It occurs if the loop is stopped or if a task raises a BaseException. + t._log_destroy_pending = False try: loop.run_until_complete(t) finally: @@ -84,6 +91,13 @@ class SilentWSGIRequestHandler(WSGIRequestHandler): class SilentWSGIServer(WSGIServer): + request_timeout = 2 + + def get_request(self): + request, client_addr = super().get_request() + request.settimeout(self.request_timeout) + return request, client_addr + def handle_error(self, request, client_address): pass @@ -131,7 +145,8 @@ def _run_test_server(*, address, use_ssl=False, server_cls, server_ssl_cls): httpd = server_class(address, SilentWSGIRequestHandler) httpd.set_app(app) httpd.address = httpd.server_address - server_thread = threading.Thread(target=httpd.serve_forever) + server_thread = threading.Thread( + target=lambda: httpd.serve_forever(poll_interval=0.05)) server_thread.start() try: yield httpd @@ -153,12 +168,15 @@ if hasattr(socket, 'AF_UNIX'): class UnixWSGIServer(UnixHTTPServer, WSGIServer): + request_timeout = 2 + def server_bind(self): UnixHTTPServer.server_bind(self) self.setup_environ() def get_request(self): request, client_addr = super().get_request() + request.settimeout(self.request_timeout) # Code in the stdlib expects that get_request # will return a socket and a tuple (host, port). 
# However, this isn't true for UNIX sockets, @@ -289,6 +307,7 @@ class TestLoop(base_events.BaseEventLoop): self._time += advance def close(self): + super().close() if self._check_on_close: try: self._gen.send(0) @@ -372,3 +391,56 @@ class MockPattern(str): """ def __eq__(self, other): return bool(re.search(str(self), other, re.S)) + + +def get_function_source(func): + source = events._get_function_source(func) + if source is None: + raise ValueError("unable to get the source of %r" % (func,)) + return source + + +class TestCase(unittest.TestCase): + def set_event_loop(self, loop, *, cleanup=True): + assert loop is not None + # ensure that the event loop is passed explicitly in asyncio + events.set_event_loop(None) + if cleanup: + self.addCleanup(loop.close) + + def new_test_loop(self, gen=None): + loop = TestLoop(gen) + self.set_event_loop(loop) + return loop + + def tearDown(self): + events.set_event_loop(None) + + # Detect CPython bug #23353: ensure that yield/yield-from is not used + # in an except block of a generator + self.assertEqual(sys.exc_info(), (None, None, None)) + + +@contextlib.contextmanager +def disable_logger(): + """Context manager to disable asyncio logger. + + For example, it can be used to ignore warnings in debug mode. + """ + old_level = logger.level + try: + logger.setLevel(logging.CRITICAL+1) + yield + finally: + logger.setLevel(old_level) + +def mock_nonblocking_socket(): + """Create a mock of a non-blocking socket.""" + sock = mock.Mock(socket.socket) + sock.gettimeout.return_value = 0.0 + return sock + + +def force_legacy_ssl_support(): + return mock.patch('asyncio.sslproto._is_sslproto_available', + return_value=False) diff --git a/Darwin/lib/python3.4/asyncio/transports.py b/Darwin/lib/python3.5/asyncio/transports.py similarity index 96% rename from Darwin/lib/python3.4/asyncio/transports.py rename to Darwin/lib/python3.5/asyncio/transports.py index 5f674f9..70b323f 100644 --- a/Darwin/lib/python3.4/asyncio/transports.py +++ b/Darwin/lib/python3.5/asyncio/transports.py @@ -1,8 +1,6 @@ """Abstract Transport class.""" -import sys - -_PY34 = sys.version_info >= (3, 4) +from asyncio import compat __all__ = ['BaseTransport', 'ReadTransport', 'WriteTransport', 'Transport', 'DatagramTransport', 'SubprocessTransport', @@ -94,12 +92,8 @@ class WriteTransport(BaseTransport): The default implementation concatenates the arguments and calls write() on the result. """ - if not _PY34: - # In Python 3.3, bytes.join() doesn't handle memoryview. - list_of_data = ( - bytes(data) if isinstance(data, memoryview) else data - for data in list_of_data) - self.write(b''.join(list_of_data)) + data = compat.flatten_list_bytes(list_of_data) + self.write(data) def write_eof(self): """Close the write end after flushing buffered data. @@ -238,8 +232,10 @@ class _FlowControlMixin(Transport): resume_writing() may be called. 
""" - def __init__(self, extra=None): + def __init__(self, extra=None, loop=None): super().__init__(extra) + assert loop is not None + self._loop = loop self._protocol_paused = False self._set_write_buffer_limits() @@ -273,6 +269,9 @@ class _FlowControlMixin(Transport): 'protocol': self._protocol, }) + def get_write_buffer_limits(self): + return (self._low_water, self._high_water) + def _set_write_buffer_limits(self, high=None, low=None): if high is None: if low is None: diff --git a/Darwin/lib/python3.4/asyncio/unix_events.py b/Darwin/lib/python3.5/asyncio/unix_events.py similarity index 77% rename from Darwin/lib/python3.4/asyncio/unix_events.py rename to Darwin/lib/python3.5/asyncio/unix_events.py index 1fbdd31..bf3b084 100644 --- a/Darwin/lib/python3.4/asyncio/unix_events.py +++ b/Darwin/lib/python3.5/asyncio/unix_events.py @@ -1,7 +1,6 @@ """Selector event loop for Unix with signal handling.""" import errno -import fcntl import os import signal import socket @@ -9,15 +8,20 @@ import stat import subprocess import sys import threading +import warnings from . import base_events from . import base_subprocess +from . import compat from . import constants +from . import coroutines from . import events +from . import futures from . import selector_events -from . import tasks +from . import selectors from . import transports +from .coroutines import coroutine from .log import logger @@ -30,6 +34,11 @@ if sys.platform == 'win32': # pragma: no cover raise ImportError('Signals are not really supported on Windows') +def _sighandler_noop(signum, frame): + """Dummy signal handler.""" + pass + + class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): """Unix event loop. @@ -44,9 +53,16 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): return socket.socketpair() def close(self): + super().close() for sig in list(self._signal_handlers): self.remove_signal_handler(sig) - super().close() + + def _process_self_data(self, data): + for signum in data: + if not signum: + # ignore null bytes written by _write_to_self() + continue + self._handle_signal(signum) def add_signal_handler(self, sig, callback, *args): """Add a handler for a signal. UNIX only. @@ -54,21 +70,30 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): Raise ValueError if the signal number is invalid or uncatchable. Raise RuntimeError if there is a problem setting up the handler. """ + if (coroutines.iscoroutine(callback) + or coroutines.iscoroutinefunction(callback)): + raise TypeError("coroutines cannot be used " + "with add_signal_handler()") self._check_signal(sig) + self._check_closed() try: # set_wakeup_fd() raises ValueError if this is not the # main thread. By calling it early we ensure that an # event loop running in another thread cannot add a signal # handler. signal.set_wakeup_fd(self._csock.fileno()) - except ValueError as exc: + except (ValueError, OSError) as exc: raise RuntimeError(str(exc)) handle = events.Handle(callback, args, self) self._signal_handlers[sig] = handle try: - signal.signal(sig, self._handle_signal) + # Register a dummy signal handler to ask Python to write the signal + # number in the wakup file descriptor. _process_self_data() will + # read signal numbers from this file descriptor to handle signals. + signal.signal(sig, _sighandler_noop) + # Set SA_RESTART to limit EINTR occurrences. 
signal.siginterrupt(sig, False) except OSError as exc: @@ -76,7 +101,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): if not self._signal_handlers: try: signal.set_wakeup_fd(-1) - except ValueError as nexc: + except (ValueError, OSError) as nexc: logger.info('set_wakeup_fd(-1) failed: %s', nexc) if exc.errno == errno.EINVAL: @@ -84,7 +109,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): else: raise - def _handle_signal(self, sig, arg): + def _handle_signal(self, sig): """Internal helper that is the actual signal handler.""" handle = self._signal_handlers.get(sig) if handle is None: @@ -121,7 +146,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): if not self._signal_handlers: try: signal.set_wakeup_fd(-1) - except ValueError as exc: + except (ValueError, OSError) as exc: logger.info('set_wakeup_fd(-1) failed: %s', exc) return True @@ -147,24 +172,40 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): extra=None): return _UnixWritePipeTransport(self, pipe, protocol, waiter, extra) - @tasks.coroutine + @coroutine def _make_subprocess_transport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, extra=None, **kwargs): with events.get_child_watcher() as watcher: + waiter = futures.Future(loop=self) transp = _UnixSubprocessTransport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, - extra=extra, **kwargs) - yield from transp._post_init() + waiter=waiter, extra=extra, + **kwargs) + watcher.add_child_handler(transp.get_pid(), self._child_watcher_callback, transp) + try: + yield from waiter + except Exception as exc: + # Workaround CPython bug #23353: using yield/yield-from in an + # except block of a generator doesn't clear properly + # sys.exc_info() + err = exc + else: + err = None + + if err is not None: + transp.close() + yield from transp._wait() + raise err return transp def _child_watcher_callback(self, pid, returncode, transp): self.call_soon_threadsafe(transp._process_exited, returncode) - @tasks.coroutine + @coroutine def create_unix_connection(self, protocol_factory, path, *, ssl=None, sock=None, server_hostname=None): @@ -199,7 +240,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): sock, protocol_factory, ssl, server_hostname) return transport, protocol - @tasks.coroutine + @coroutine def create_unix_server(self, protocol_factory, path=None, *, sock=None, backlog=100, ssl=None): if isinstance(ssl, bool): @@ -223,6 +264,9 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): raise OSError(errno.EADDRINUSE, msg) from None else: raise + except: + sock.close() + raise else: if sock is None: raise ValueError( @@ -239,10 +283,16 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop): return server -def _set_nonblocking(fd): - flags = fcntl.fcntl(fd, fcntl.F_GETFL) - flags = flags | os.O_NONBLOCK - fcntl.fcntl(fd, fcntl.F_SETFL, flags) +if hasattr(os, 'set_blocking'): + def _set_nonblocking(fd): + os.set_blocking(fd, False) +else: + import fcntl + + def _set_nonblocking(fd): + flags = fcntl.fcntl(fd, fcntl.F_GETFL) + flags = flags | os.O_NONBLOCK + fcntl.fcntl(fd, fcntl.F_SETFL, flags) class _UnixReadPipeTransport(transports.ReadTransport): @@ -263,10 +313,32 @@ class _UnixReadPipeTransport(transports.ReadTransport): _set_nonblocking(self._fileno) self._protocol = protocol self._closing = False - self._loop.add_reader(self._fileno, self._read_ready) self._loop.call_soon(self._protocol.connection_made, self) + # only start 
reading when connection_made() has been called + self._loop.call_soon(self._loop.add_reader, + self._fileno, self._read_ready) if waiter is not None: - self._loop.call_soon(waiter.set_result, None) + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(waiter._set_result_unless_cancelled, None) + + def __repr__(self): + info = [self.__class__.__name__] + if self._pipe is None: + info.append('closed') + elif self._closing: + info.append('closing') + info.append('fd=%s' % self._fileno) + if self._pipe is not None: + polling = selector_events._test_selector_event( + self._loop._selector, + self._fileno, selectors.EVENT_READ) + if polling: + info.append('polling') + else: + info.append('idle') + else: + info.append('closed') + return '<%s>' % ' '.join(info) def _read_ready(self): try: @@ -279,6 +351,8 @@ class _UnixReadPipeTransport(transports.ReadTransport): if data: self._protocol.data_received(data) else: + if self._loop.get_debug(): + logger.info("%r was closed by peer", self) self._closing = True self._loop.remove_reader(self._fileno) self._loop.call_soon(self._protocol.eof_received) @@ -294,9 +368,21 @@ class _UnixReadPipeTransport(transports.ReadTransport): if not self._closing: self._close(None) + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if self._pipe is not None: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self._pipe.close() + def _fatal_error(self, exc, message='Fatal error on pipe transport'): # should be called by exception handler only - if not (isinstance(exc, OSError) and exc.errno == errno.EIO): + if (isinstance(exc, OSError) and exc.errno == errno.EIO): + if self._loop.get_debug(): + logger.debug("%r: %s", self, message, exc_info=True) + else: self._loop.call_exception_handler({ 'message': message, 'exception': exc, @@ -324,9 +410,8 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, transports.WriteTransport): def __init__(self, loop, pipe, protocol, waiter=None, extra=None): - super().__init__(extra) + super().__init__(extra, loop) self._extra['pipe'] = pipe - self._loop = loop self._pipe = pipe self._fileno = pipe.fileno() mode = os.fstat(self._fileno).st_mode @@ -342,21 +427,49 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, self._conn_lost = 0 self._closing = False # Set when close() or write_eof() called. - # On AIX, the reader trick only works for sockets. - # On other platforms it works for pipes and sockets. - # (Exception: OS X 10.4? Issue #19294.) - if is_socket or not sys.platform.startswith("aix"): - self._loop.add_reader(self._fileno, self._read_ready) - self._loop.call_soon(self._protocol.connection_made, self) + + # On AIX, the reader trick (to be notified when the read end of the + # socket is closed) only works for sockets. On other platforms it + # works for pipes and sockets. (Exception: OS X 10.4? Issue #19294.) 
+ if is_socket or not sys.platform.startswith("aix"): + # only start reading when connection_made() has been called + self._loop.call_soon(self._loop.add_reader, + self._fileno, self._read_ready) + if waiter is not None: - self._loop.call_soon(waiter.set_result, None) + # only wake up the waiter when connection_made() has been called + self._loop.call_soon(waiter._set_result_unless_cancelled, None) + + def __repr__(self): + info = [self.__class__.__name__] + if self._pipe is None: + info.append('closed') + elif self._closing: + info.append('closing') + info.append('fd=%s' % self._fileno) + if self._pipe is not None: + polling = selector_events._test_selector_event( + self._loop._selector, + self._fileno, selectors.EVENT_WRITE) + if polling: + info.append('polling') + else: + info.append('idle') + + bufsize = self.get_write_buffer_size() + info.append('bufsize=%s' % bufsize) + else: + info.append('closed') + return '<%s>' % ' '.join(info) def get_write_buffer_size(self): return sum(len(data) for data in self._buffer) def _read_ready(self): # Pipe was closed by peer. + if self._loop.get_debug(): + logger.info("%r was closed by peer", self) if self._buffer: self._close(BrokenPipeError()) else: @@ -426,9 +539,6 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, def can_write_eof(self): return True - # TODO: Make the relationships between write_eof(), close(), - # abort(), _fatal_error() and _close() more straightforward. - def write_eof(self): if self._closing: return @@ -439,16 +549,28 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, self._loop.call_soon(self._call_connection_lost, None) def close(self): - if not self._closing: + if self._pipe is not None and not self._closing: # write_eof is all what we needed to close the write pipe self.write_eof() + # On Python 3.3 and older, objects with a destructor part of a reference + # cycle are never destroyed. It's not more the case on Python 3.4 thanks + # to the PEP 442. + if compat.PY34: + def __del__(self): + if self._pipe is not None: + warnings.warn("unclosed transport %r" % self, ResourceWarning) + self._pipe.close() + def abort(self): self._close(None) def _fatal_error(self, exc, message='Fatal error on pipe transport'): # should be called by exception handler only - if not isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if isinstance(exc, (BrokenPipeError, ConnectionResetError)): + if self._loop.get_debug(): + logger.debug("%r: %s", self, message, exc_info=True) + else: self._loop.call_exception_handler({ 'message': message, 'exception': exc, @@ -475,6 +597,22 @@ class _UnixWritePipeTransport(transports._FlowControlMixin, self._loop = None +if hasattr(os, 'set_inheritable'): + # Python 3.4 and newer + _set_inheritable = os.set_inheritable +else: + import fcntl + + def _set_inheritable(fd, inheritable): + cloexec_flag = getattr(fcntl, 'FD_CLOEXEC', 1) + + old = fcntl.fcntl(fd, fcntl.F_GETFD) + if not inheritable: + fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag) + else: + fcntl.fcntl(fd, fcntl.F_SETFD, old & ~cloexec_flag) + + class _UnixSubprocessTransport(base_subprocess.BaseSubprocessTransport): def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs): @@ -486,12 +624,18 @@ class _UnixSubprocessTransport(base_subprocess.BaseSubprocessTransport): # other end). Notably this is needed on AIX, and works # just fine on other platforms. 
stdin, stdin_w = self._loop._socketpair() + + # Mark the write end of the stdin pipe as non-inheritable, + # needed by close_fds=False on Python 3.3 and older + # (Python 3.4 implements the PEP 446, socketpair returns + # non-inheritable sockets) + _set_inheritable(stdin_w.fileno(), False) self._proc = subprocess.Popen( args, shell=shell, stdin=stdin, stdout=stdout, stderr=stderr, universal_newlines=False, bufsize=bufsize, **kwargs) if stdin_w is not None: stdin.close() - self._proc.stdin = open(stdin_w.detach(), 'rb', buffering=bufsize) + self._proc.stdin = open(stdin_w.detach(), 'wb', buffering=bufsize) class AbstractChildWatcher: @@ -524,7 +668,7 @@ class AbstractChildWatcher: process 'pid' terminates. Specifying another callback for the same process replaces the previous handler. - Note: callback() must be thread-safe + Note: callback() must be thread-safe. """ raise NotImplementedError() @@ -644,7 +788,7 @@ class SafeChildWatcher(BaseChildWatcher): pass def add_child_handler(self, pid, callback, *args): - self._callbacks[pid] = callback, args + self._callbacks[pid] = (callback, args) # Prevent a race condition in case the child is already terminated. self._do_waitpid(pid) @@ -680,13 +824,18 @@ class SafeChildWatcher(BaseChildWatcher): return returncode = self._compute_returncode(status) + if self._loop.get_debug(): + logger.debug('process %s exited with returncode %s', + expected_pid, returncode) try: callback, args = self._callbacks.pop(pid) except KeyError: # pragma: no cover # May happen if .remove_child_handler() is called # after os.waitpid() returns. - pass + if self._loop.get_debug(): + logger.warning("Child watcher got an unexpected pid: %r", + pid, exc_info=True) else: callback(pid, returncode, *args) @@ -777,8 +926,16 @@ class FastChildWatcher(BaseChildWatcher): if self._forks: # It may not be registered yet. self._zombies[pid] = returncode + if self._loop.get_debug(): + logger.debug('unknown process %s exited ' + 'with returncode %s', + pid, returncode) continue callback = None + else: + if self._loop.get_debug(): + logger.debug('process %s exited with returncode %s', + pid, returncode) if callback is None: logger.warning( @@ -789,7 +946,7 @@ class FastChildWatcher(BaseChildWatcher): class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - """XXX""" + """UNIX event loop policy with a watcher for child processes.""" _loop_factory = _UnixSelectorEventLoop def __init__(self): @@ -819,7 +976,7 @@ class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): self._watcher.attach_loop(loop) def get_child_watcher(self): - """Get the child watcher + """Get the watcher for child processes. If not yet set, a SafeChildWatcher object is automatically created. """ @@ -829,7 +986,7 @@ class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): return self._watcher def set_child_watcher(self, watcher): - """Set the child watcher""" + """Set the watcher for child processes.""" assert watcher is None or isinstance(watcher, AbstractChildWatcher) diff --git a/Darwin/lib/python3.4/asyncio/windows_events.py b/Darwin/lib/python3.5/asyncio/windows_events.py similarity index 51% rename from Darwin/lib/python3.4/asyncio/windows_events.py rename to Darwin/lib/python3.5/asyncio/windows_events.py index 19f2588..922594f 100644 --- a/Darwin/lib/python3.4/asyncio/windows_events.py +++ b/Darwin/lib/python3.5/asyncio/windows_events.py @@ -14,8 +14,9 @@ from . import proactor_events from . import selector_events from . import tasks from . 
import windows_utils -from .log import logger from . import _overlapped +from .coroutines import coroutine +from .log import logger __all__ = ['SelectorEventLoop', 'ProactorEventLoop', 'IocpProactor', @@ -28,6 +29,12 @@ INFINITE = 0xffffffff ERROR_CONNECTION_REFUSED = 1225 ERROR_CONNECTION_ABORTED = 1236 +# Initial delay in seconds for connect_pipe() before retrying to connect +CONNECT_PIPE_INIT_DELAY = 0.001 + +# Maximum delay in seconds for connect_pipe() before retrying to connect +CONNECT_PIPE_MAX_DELAY = 0.100 + class _OverlappedFuture(futures.Future): """Subclass of Future which represents an overlapped operation. @@ -37,30 +44,189 @@ class _OverlappedFuture(futures.Future): def __init__(self, ov, *, loop=None): super().__init__(loop=loop) - self.ov = ov + if self._source_traceback: + del self._source_traceback[-1] + self._ov = ov + + def _repr_info(self): + info = super()._repr_info() + if self._ov is not None: + state = 'pending' if self._ov.pending else 'completed' + info.insert(1, 'overlapped=<%s, %#x>' % (state, self._ov.address)) + return info + + def _cancel_overlapped(self): + if self._ov is None: + return + try: + self._ov.cancel() + except OSError as exc: + context = { + 'message': 'Cancelling an overlapped future failed', + 'exception': exc, + 'future': self, + } + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + self._ov = None def cancel(self): - try: - self.ov.cancel() - except OSError: - pass + self._cancel_overlapped() return super().cancel() + def set_exception(self, exception): + super().set_exception(exception) + self._cancel_overlapped() -class _WaitHandleFuture(futures.Future): + def set_result(self, result): + super().set_result(result) + self._ov = None + + +class _BaseWaitHandleFuture(futures.Future): """Subclass of Future which represents a wait handle.""" - def __init__(self, wait_handle, *, loop=None): + def __init__(self, ov, handle, wait_handle, *, loop=None): super().__init__(loop=loop) + if self._source_traceback: + del self._source_traceback[-1] + # Keep a reference to the Overlapped object to keep it alive until the + # wait is unregistered + self._ov = ov + self._handle = handle self._wait_handle = wait_handle - def cancel(self): - super().cancel() + # Should we call UnregisterWaitEx() if the wait completes + # or is cancelled? 
+ self._registered = True + + def _poll(self): + # non-blocking wait: use a timeout of 0 millisecond + return (_winapi.WaitForSingleObject(self._handle, 0) == + _winapi.WAIT_OBJECT_0) + + def _repr_info(self): + info = super()._repr_info() + info.append('handle=%#x' % self._handle) + if self._handle is not None: + state = 'signaled' if self._poll() else 'waiting' + info.append(state) + if self._wait_handle is not None: + info.append('wait_handle=%#x' % self._wait_handle) + return info + + def _unregister_wait_cb(self, fut): + # The wait was unregistered: it's not safe to destroy the Overlapped + # object + self._ov = None + + def _unregister_wait(self): + if not self._registered: + return + self._registered = False + + wait_handle = self._wait_handle + self._wait_handle = None try: - _overlapped.UnregisterWait(self._wait_handle) - except OSError as e: - if e.winerror != _overlapped.ERROR_IO_PENDING: - raise + _overlapped.UnregisterWait(wait_handle) + except OSError as exc: + if exc.winerror != _overlapped.ERROR_IO_PENDING: + context = { + 'message': 'Failed to unregister the wait handle', + 'exception': exc, + 'future': self, + } + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + return + # ERROR_IO_PENDING means that the unregister is pending + + self._unregister_wait_cb(None) + + def cancel(self): + self._unregister_wait() + return super().cancel() + + def set_exception(self, exception): + self._unregister_wait() + super().set_exception(exception) + + def set_result(self, result): + self._unregister_wait() + super().set_result(result) + + +class _WaitCancelFuture(_BaseWaitHandleFuture): + """Subclass of Future which represents a wait for the cancellation of a + _WaitHandleFuture using an event. + """ + + def __init__(self, ov, event, wait_handle, *, loop=None): + super().__init__(ov, event, wait_handle, loop=loop) + + self._done_callback = None + + def cancel(self): + raise RuntimeError("_WaitCancelFuture must not be cancelled") + + def _schedule_callbacks(self): + super(_WaitCancelFuture, self)._schedule_callbacks() + if self._done_callback is not None: + self._done_callback(self) + + +class _WaitHandleFuture(_BaseWaitHandleFuture): + def __init__(self, ov, handle, wait_handle, proactor, *, loop=None): + super().__init__(ov, handle, wait_handle, loop=loop) + self._proactor = proactor + self._unregister_proactor = True + self._event = _overlapped.CreateEvent(None, True, False, None) + self._event_fut = None + + def _unregister_wait_cb(self, fut): + if self._event is not None: + _winapi.CloseHandle(self._event) + self._event = None + self._event_fut = None + + # If the wait was cancelled, the wait may never be signalled, so + # it's required to unregister it. Otherwise, IocpProactor.close() will + # wait forever for an event which will never come. + # + # If the IocpProactor already received the event, it's safe to call + # _unregister() because we kept a reference to the Overlapped object + # which is used as an unique key. 
+ self._proactor._unregister(self._ov) + self._proactor = None + + super()._unregister_wait_cb(fut) + + def _unregister_wait(self): + if not self._registered: + return + self._registered = False + + wait_handle = self._wait_handle + self._wait_handle = None + try: + _overlapped.UnregisterWaitEx(wait_handle, self._event) + except OSError as exc: + if exc.winerror != _overlapped.ERROR_IO_PENDING: + context = { + 'message': 'Failed to unregister the wait handle', + 'exception': exc, + 'future': self, + } + if self._source_traceback: + context['source_traceback'] = self._source_traceback + self._loop.call_exception_handler(context) + return + # ERROR_IO_PENDING is not an error, the wait was unregistered + + self._event_fut = self._proactor._wait_cancel(self._event, + self._unregister_wait_cb) class PipeServer(object): @@ -71,6 +237,11 @@ class PipeServer(object): def __init__(self, address): self._address = address self._free_instances = weakref.WeakSet() + # initialize the pipe attribute before calling _server_pipe_handle() + # because this function can raise an exception and the destructor calls + # the close() method + self._pipe = None + self._accept_pipe_future = None self._pipe = self._server_pipe_handle(True) def _get_unconnected_pipe(self): @@ -83,7 +254,7 @@ class PipeServer(object): def _server_pipe_handle(self, first): # Return a wrapper for a new pipe handle. - if self._address is None: + if self.closed(): return None flags = _winapi.PIPE_ACCESS_DUPLEX | _winapi.FILE_FLAG_OVERLAPPED if first: @@ -99,7 +270,13 @@ class PipeServer(object): self._free_instances.add(pipe) return pipe + def closed(self): + return (self._address is None) + def close(self): + if self._accept_pipe_future is not None: + self._accept_pipe_future.cancel() + self._accept_pipe_future = None # Close all instances which have not been connected to by a client. 
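The comment added to PipeServer.__init__ above describes a defensive pattern worth spelling out: every attribute that close() or the destructor touches is assigned before the first call that can raise, so a half-constructed object can still be torn down cleanly. A generic sketch of the idea (the class and factory names here are hypothetical, not from the patch):

# Generic "initialize before you can fail" sketch.
class Holder:
    def __init__(self, factory):
        self._resource = None          # set first so close() is always safe
        self._resource = factory()     # may raise; __del__/close still works

    def close(self):
        if self._resource is not None:
            self._resource.close()
            self._resource = None

    def __del__(self):
        self.close()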
if self._address is not None: for pipe in self._free_instances: @@ -129,7 +306,7 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop): def _socketpair(self): return windows_utils.socketpair() - @tasks.coroutine + @coroutine def create_pipe_connection(self, protocol_factory, address): f = self._proactor.connect_pipe(address) pipe = yield from f @@ -138,22 +315,31 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop): extra={'addr': address}) return trans, protocol - @tasks.coroutine + @coroutine def start_serving_pipe(self, protocol_factory, address): server = PipeServer(address) - def loop(f=None): + def loop_accept_pipe(f=None): pipe = None try: if f: pipe = f.result() server._free_instances.discard(pipe) + + if server.closed(): + # A client connected before the server was closed: + # drop the client (close the pipe) and exit + pipe.close() + return + protocol = protocol_factory() self._make_duplex_pipe_transport( pipe, protocol, extra={'addr': address}) + pipe = server._get_unconnected_pipe() if pipe is None: return + f = self._proactor.accept_pipe(pipe) except OSError as exc: if pipe and pipe.fileno() != -1: @@ -163,23 +349,42 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop): 'pipe': pipe, }) pipe.close() + elif self._debug: + logger.warning("Accept pipe failed on pipe %r", + pipe, exc_info=True) except futures.CancelledError: if pipe: pipe.close() else: - f.add_done_callback(loop) + server._accept_pipe_future = f + f.add_done_callback(loop_accept_pipe) - self.call_soon(loop) + self.call_soon(loop_accept_pipe) return [server] - @tasks.coroutine + @coroutine def _make_subprocess_transport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, extra=None, **kwargs): + waiter = futures.Future(loop=self) transp = _WindowsSubprocessTransport(self, protocol, args, shell, stdin, stdout, stderr, bufsize, - extra=extra, **kwargs) - yield from transp._post_init() + waiter=waiter, extra=extra, + **kwargs) + try: + yield from waiter + except Exception as exc: + # Workaround CPython bug #23353: using yield/yield-from in an + # except block of a generator doesn't clear properly sys.exc_info() + err = exc + else: + err = None + + if err is not None: + transp.close() + yield from transp._wait() + raise err + return transp @@ -193,8 +398,14 @@ class IocpProactor: _overlapped.INVALID_HANDLE_VALUE, NULL, 0, concurrency) self._cache = {} self._registered = weakref.WeakSet() + self._unregistered = [] self._stopped_serving = weakref.WeakSet() + def __repr__(self): + return ('<%s overlapped#=%s result#=%s>' + % (self.__class__.__name__, len(self._cache), + len(self._results))) + def set_loop(self, loop): self._loop = loop @@ -205,13 +416,21 @@ class IocpProactor: self._results = [] return tmp + def _result(self, value): + fut = futures.Future(loop=self._loop) + fut.set_result(value) + return fut + def recv(self, conn, nbytes, flags=0): self._register_with_iocp(conn) ov = _overlapped.Overlapped(NULL) - if isinstance(conn, socket.socket): - ov.WSARecv(conn.fileno(), nbytes, flags) - else: - ov.ReadFile(conn.fileno(), nbytes) + try: + if isinstance(conn, socket.socket): + ov.WSARecv(conn.fileno(), nbytes, flags) + else: + ov.ReadFile(conn.fileno(), nbytes) + except BrokenPipeError: + return self._result(b'') def finish_recv(trans, key, ov): try: @@ -258,7 +477,7 @@ class IocpProactor: conn.settimeout(listener.gettimeout()) return conn, conn.getpeername() - @tasks.coroutine + @coroutine def accept_coro(future, conn): # Coroutine closing the accept socket if the 
future is cancelled try: @@ -269,7 +488,7 @@ class IocpProactor: future = self._register(ov, listener, finish_accept) coro = accept_coro(future, conn) - tasks.async(coro, loop=self._loop) + tasks.ensure_future(coro, loop=self._loop) return future def connect(self, conn, address): @@ -298,7 +517,13 @@ class IocpProactor: def accept_pipe(self, pipe): self._register_with_iocp(pipe) ov = _overlapped.Overlapped(NULL) - ov.ConnectNamedPipe(pipe.fileno()) + connected = ov.ConnectNamedPipe(pipe.fileno()) + + if connected: + # ConnectNamePipe() failed with ERROR_PIPE_CONNECTED which means + # that the pipe is connected. There is no need to wait for the + # completion of the connection. + return self._result(pipe) def finish_accept_pipe(trans, key, ov): ov.getresult() @@ -306,26 +531,42 @@ class IocpProactor: return self._register(ov, pipe, finish_accept_pipe) + @coroutine def connect_pipe(self, address): - ov = _overlapped.Overlapped(NULL) - ov.WaitNamedPipeAndConnect(address, self._iocp, ov.address) + delay = CONNECT_PIPE_INIT_DELAY + while True: + # Unfortunately there is no way to do an overlapped connect to a pipe. + # Call CreateFile() in a loop until it doesn't fail with + # ERROR_PIPE_BUSY + try: + handle = _overlapped.ConnectPipe(address) + break + except OSError as exc: + if exc.winerror != _overlapped.ERROR_PIPE_BUSY: + raise - def finish_connect_pipe(err, handle, ov): - # err, handle were arguments passed to PostQueuedCompletionStatus() - # in a function run in a thread pool. - if err == _overlapped.ERROR_SEM_TIMEOUT: - # Connection did not succeed within time limit. - msg = _overlapped.FormatMessage(err) - raise ConnectionRefusedError(0, msg, None, err) - elif err != 0: - msg = _overlapped.FormatMessage(err) - raise OSError(0, msg, None, err) - else: - return windows_utils.PipeHandle(handle) + # ConnectPipe() failed with ERROR_PIPE_BUSY: retry later + delay = min(delay * 2, CONNECT_PIPE_MAX_DELAY) + yield from tasks.sleep(delay, loop=self._loop) - return self._register(ov, None, finish_connect_pipe, wait_for_post=True) + return windows_utils.PipeHandle(handle) def wait_for_handle(self, handle, timeout=None): + """Wait for a handle. + + Return a Future object. The result of the future is True if the wait + completed, or False if the wait did not complete (on timeout). + """ + return self._wait_for_handle(handle, timeout, False) + + def _wait_cancel(self, event, done_callback): + fut = self._wait_for_handle(event, None, True) + # add_done_callback() cannot be used because the wait may only complete + # in IocpProactor.close(), while the event loop is not running. + fut._done_callback = done_callback + return fut + + def _wait_for_handle(self, handle, timeout, _is_cancel): if timeout is None: ms = _winapi.INFINITE else: @@ -335,27 +576,26 @@ class IocpProactor: # We only create ov so we can use ov.address as a key for the cache. 
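connect_pipe() above drops the old thread-pool based WaitNamedPipeAndConnect() in favour of a plain polling loop: retry the non-blocking connect and sleep between attempts, doubling the delay from CONNECT_PIPE_INIT_DELAY up to CONNECT_PIPE_MAX_DELAY. A platform-neutral sketch of that backoff shape, using 3.4-style generator coroutines like the rest of the file (attempt() and the use of BlockingIOError as the "busy" error are stand-ins, not from the patch):

# Generic retry-with-exponential-backoff loop, illustrative only.
import asyncio

INIT_DELAY = 0.001
MAX_DELAY = 0.100

@asyncio.coroutine
def retry_until_ready(attempt, *, loop=None):
    delay = INIT_DELAY
    while True:
        try:
            return attempt()            # non-blocking try, like ConnectPipe()
        except BlockingIOError:         # stands in for ERROR_PIPE_BUSY
            delay = min(delay * 2, MAX_DELAY)
            yield from asyncio.sleep(delay, loop=loop)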
ov = _overlapped.Overlapped(NULL) - wh = _overlapped.RegisterWaitWithQueue( + wait_handle = _overlapped.RegisterWaitWithQueue( handle, self._iocp, ov.address, ms) - f = _WaitHandleFuture(wh, loop=self._loop) + if _is_cancel: + f = _WaitCancelFuture(ov, handle, wait_handle, loop=self._loop) + else: + f = _WaitHandleFuture(ov, handle, wait_handle, self, + loop=self._loop) + if f._source_traceback: + del f._source_traceback[-1] def finish_wait_for_handle(trans, key, ov): - if not f.cancelled(): - try: - _overlapped.UnregisterWait(wh) - except OSError as e: - if e.winerror != _overlapped.ERROR_IO_PENDING: - raise # Note that this second wait means that we should only use # this with handles types where a successful wait has no # effect. So events or processes are all right, but locks # or semaphores are not. Also note if the handle is # signalled and then quickly reset, then we may return # False even though we have not timed out. - return (_winapi.WaitForSingleObject(handle, 0) == - _winapi.WAIT_OBJECT_0) + return f._poll() - self._cache[ov.address] = (f, ov, None, finish_wait_for_handle) + self._cache[ov.address] = (f, ov, 0, finish_wait_for_handle) return f def _register_with_iocp(self, obj): @@ -368,17 +608,14 @@ class IocpProactor: # to avoid sending notifications to completion port of ops # that succeed immediately. - def _register(self, ov, obj, callback, wait_for_post=False): + def _register(self, ov, obj, callback): # Return a future which will be set with the result of the # operation when it completes. The future's value is actually # the value returned by callback(). f = _OverlappedFuture(ov, loop=self._loop) - if ov.pending or wait_for_post: - # Register the overlapped operation for later. Note that - # we only store obj to prevent it from being garbage - # collected too early. - self._cache[ov.address] = (f, ov, obj, callback) - else: + if f._source_traceback: + del f._source_traceback[-1] + if not ov.pending: # The operation has completed, so no need to postpone the # work. We cannot take this short cut if we need the # NumberOfBytes, CompletionKey values returned by @@ -389,8 +626,27 @@ class IocpProactor: f.set_exception(e) else: f.set_result(value) + # Even if GetOverlappedResult() was called, we have to wait for the + # notification of the completion in GetQueuedCompletionStatus(). + # Register the overlapped operation to keep a reference to the + # OVERLAPPED object, otherwise the memory is freed and Windows may + # read uninitialized memory. + + # Register the overlapped operation for later. Note that + # we only store obj to prevent it from being garbage + # collected too early. + self._cache[ov.address] = (f, ov, obj, callback) return f + def _unregister(self, ov): + """Unregister an overlapped object. + + Call this method when its future has been cancelled. The event can + already be signalled (pending in the proactor event queue). It is also + safe if the event is never signalled (because it was cancelled). 
+ """ + self._unregistered.append(ov) + def _get_accept_socket(self, family): s = socket.socket(family) s.settimeout(0) @@ -407,23 +663,36 @@ class IocpProactor: ms = math.ceil(timeout * 1e3) if ms >= INFINITE: raise ValueError("timeout too big") + while True: status = _overlapped.GetQueuedCompletionStatus(self._iocp, ms) if status is None: - return + break + ms = 0 + err, transferred, key, address = status try: f, ov, obj, callback = self._cache.pop(address) except KeyError: + if self._loop.get_debug(): + self._loop.call_exception_handler({ + 'message': ('GetQueuedCompletionStatus() returned an ' + 'unexpected event'), + 'status': ('err=%s transferred=%s key=%#x address=%#x' + % (err, transferred, key, address)), + }) + # key is either zero, or it is used to return a pipe # handle which should be closed to avoid a leak. if key not in (0, _overlapped.INVALID_HANDLE_VALUE): _winapi.CloseHandle(key) - ms = 0 continue + if obj in self._stopped_serving: f.cancel() - elif not f.cancelled(): + # Don't call the callback if _register() already read the result or + # if the overlapped has been cancelled + elif not f.done(): try: value = callback(transferred, key, ov) except OSError as e: @@ -432,7 +701,11 @@ class IocpProactor: else: f.set_result(value) self._results.append(f) - ms = 0 + + # Remove unregisted futures + for ov in self._unregistered: + self._cache.pop(ov.address, None) + self._unregistered.clear() def _stop_serving(self, obj): # obj is a socket or pipe handle. It will be closed in @@ -442,17 +715,26 @@ class IocpProactor: def close(self): # Cancel remaining registered operations. - for address, (f, ov, obj, callback) in list(self._cache.items()): - if obj is None: - # The operation was started with connect_pipe() which - # queues a task to Windows' thread pool. This cannot - # be cancelled, so just forget it. 
- del self._cache[address] + for address, (fut, ov, obj, callback) in list(self._cache.items()): + if fut.cancelled(): + # Nothing to do with cancelled futures + pass + elif isinstance(fut, _WaitCancelFuture): + # _WaitCancelFuture must not be cancelled + pass else: try: - ov.cancel() - except OSError: - pass + fut.cancel() + except OSError as exc: + if self._loop is not None: + context = { + 'message': 'Cancelling a future failed', + 'exception': exc, + 'future': fut, + } + if fut._source_traceback: + context['source_traceback'] = fut._source_traceback + self._loop.call_exception_handler(context) while self._cache: if not self._poll(1): @@ -463,6 +745,9 @@ class IocpProactor: _winapi.CloseHandle(self._iocp) self._iocp = None + def __del__(self): + self.close() + class _WindowsSubprocessTransport(base_subprocess.BaseSubprocessTransport): diff --git a/Darwin/lib/python3.4/asyncio/windows_utils.py b/Darwin/lib/python3.5/asyncio/windows_utils.py similarity index 66% rename from Darwin/lib/python3.4/asyncio/windows_utils.py rename to Darwin/lib/python3.5/asyncio/windows_utils.py index 2a196cc..870cd13 100644 --- a/Darwin/lib/python3.4/asyncio/windows_utils.py +++ b/Darwin/lib/python3.5/asyncio/windows_utils.py @@ -7,13 +7,14 @@ import sys if sys.platform != 'win32': # pragma: no cover raise ImportError('win32 only') -import socket +import _winapi import itertools import msvcrt import os +import socket import subprocess import tempfile -import _winapi +import warnings __all__ = ['socketpair', 'pipe', 'Popen', 'PIPE', 'PipeHandle'] @@ -28,47 +29,52 @@ STDOUT = subprocess.STDOUT _mmap_counter = itertools.count() -# Replacement for socket.socketpair() +if hasattr(socket, 'socketpair'): + # Since Python 3.5, socket.socketpair() is now also available on Windows + socketpair = socket.socketpair +else: + # Replacement for socket.socketpair() + def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0): + """A socket pair usable as a self-pipe, for Windows. + Origin: https://gist.github.com/4325783, by Geert Jansen. + Public domain. + """ + if family == socket.AF_INET: + host = '127.0.0.1' + elif family == socket.AF_INET6: + host = '::1' + else: + raise ValueError("Only AF_INET and AF_INET6 socket address " + "families are supported") + if type != socket.SOCK_STREAM: + raise ValueError("Only SOCK_STREAM socket type is supported") + if proto != 0: + raise ValueError("Only protocol zero is supported") -def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0): - """A socket pair usable as a self-pipe, for Windows. - - Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain. - """ - if family == socket.AF_INET: - host = '127.0.0.1' - elif family == socket.AF_INET6: - host = '::1' - else: - raise ValueError("Ony AF_INET and AF_INET6 socket address families " - "are supported") - if type != socket.SOCK_STREAM: - raise ValueError("Only SOCK_STREAM socket type is supported") - if proto != 0: - raise ValueError("Only protocol zero is supported") - - # We create a connected TCP socket. Note the trick with setblocking(0) - # that prevents us from having to create a thread. 
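The socketpair() emulation below matters because the event loop uses a connected socket pair as, among other things, a self-pipe it can write to in order to wake a blocked wait. A small usage sketch of that idiom (illustrative only), written against the portable socket.socketpair() that Python 3.5 now also provides on Windows:

# Usage sketch: a socket pair as a "self-pipe" to wake a blocked select().
import select
import socket

rsock, wsock = socket.socketpair()
rsock.setblocking(False)

def wake():                      # can be called from another thread
    wsock.send(b'\0')

def wait_for_wakeup(timeout):
    ready, _, _ = select.select([rsock], [], [], timeout)
    if ready:
        rsock.recv(4096)         # drain the wake-up bytes
        return True
    return False

wake()
assert wait_for_wakeup(1.0)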
- lsock = socket.socket(family, type, proto) - lsock.bind((host, 0)) - lsock.listen(1) - # On IPv6, ignore flow_info and scope_id - addr, port = lsock.getsockname()[:2] - csock = socket.socket(family, type, proto) - csock.setblocking(False) - try: - csock.connect((addr, port)) - except (BlockingIOError, InterruptedError): - pass - except Exception: - lsock.close() - csock.close() - raise - ssock, _ = lsock.accept() - csock.setblocking(True) - lsock.close() - return (ssock, csock) + # We create a connected TCP socket. Note the trick with setblocking(0) + # that prevents us from having to create a thread. + lsock = socket.socket(family, type, proto) + try: + lsock.bind((host, 0)) + lsock.listen(1) + # On IPv6, ignore flow_info and scope_id + addr, port = lsock.getsockname()[:2] + csock = socket.socket(family, type, proto) + try: + csock.setblocking(False) + try: + csock.connect((addr, port)) + except (BlockingIOError, InterruptedError): + pass + csock.setblocking(True) + ssock, _ = lsock.accept() + except: + csock.close() + raise + finally: + lsock.close() + return (ssock, csock) # Replacement for os.pipe() using handles instead of fds @@ -130,19 +136,31 @@ class PipeHandle: def __init__(self, handle): self._handle = handle + def __repr__(self): + if self._handle is not None: + handle = 'handle=%r' % self._handle + else: + handle = 'closed' + return '<%s %s>' % (self.__class__.__name__, handle) + @property def handle(self): return self._handle def fileno(self): + if self._handle is None: + raise ValueError("I/O operatioon on closed pipe") return self._handle def close(self, *, CloseHandle=_winapi.CloseHandle): - if self._handle != -1: + if self._handle is not None: CloseHandle(self._handle) - self._handle = -1 + self._handle = None - __del__ = close + def __del__(self): + if self._handle is not None: + warnings.warn("unclosed %r" % self, ResourceWarning) + self.close() def __enter__(self): return self diff --git a/Darwin/lib/python3.4/asyncore.py b/Darwin/lib/python3.5/asyncore.py similarity index 94% rename from Darwin/lib/python3.4/asyncore.py rename to Darwin/lib/python3.5/asyncore.py index 75481dd..3b51f0f 100644 --- a/Darwin/lib/python3.4/asyncore.py +++ b/Darwin/lib/python3.5/asyncore.py @@ -57,8 +57,8 @@ from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, EINVAL, \ ENOTCONN, ESHUTDOWN, EISCONN, EBADF, ECONNABORTED, EPIPE, EAGAIN, \ errorcode -_DISCONNECTED = frozenset((ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE, - EBADF)) +_DISCONNECTED = frozenset({ECONNRESET, ENOTCONN, ESHUTDOWN, ECONNABORTED, EPIPE, + EBADF}) try: socket_map @@ -141,10 +141,7 @@ def poll(timeout=0.0, map=None): time.sleep(timeout) return - try: - r, w, e = select.select(r, w, e, timeout) - except InterruptedError: - return + r, w, e = select.select(r, w, e, timeout) for fd in r: obj = map.get(fd) @@ -182,10 +179,8 @@ def poll2(timeout=0.0, map=None): flags |= select.POLLOUT if flags: pollster.register(fd, flags) - try: - r = pollster.poll(timeout) - except InterruptedError: - r = [] + + r = pollster.poll(timeout) for fd, flags in r: obj = map.get(fd) if obj is None: @@ -220,7 +215,7 @@ class dispatcher: connecting = False closing = False addr = None - ignore_log_types = frozenset(['warning']) + ignore_log_types = frozenset({'warning'}) def __init__(self, sock=None, map=None): if map is None: @@ -255,7 +250,7 @@ class dispatcher: self.socket = None def __repr__(self): - status = [self.__class__.__module__+"."+self.__class__.__name__] + status = 
[self.__class__.__module__+"."+self.__class__.__qualname__] if self.accepting and self.addr: status.append('listening') elif self.connected: @@ -404,20 +399,6 @@ class dispatcher: if why.args[0] not in (ENOTCONN, EBADF): raise - # cheap inheritance, used to pass all other attribute - # references to the underlying socket object. - def __getattr__(self, attr): - try: - retattr = getattr(self.socket, attr) - except AttributeError: - raise AttributeError("%s instance has no attribute '%s'" - %(self.__class__.__name__, attr)) - else: - msg = "%(me)s.%(attr)s is deprecated; use %(me)s.socket.%(attr)s " \ - "instead" % {'me' : self.__class__.__name__, 'attr' : attr} - warnings.warn(msg, DeprecationWarning, stacklevel=2) - return retattr - # log and log_info may be overridden to provide more sophisticated # logging and warning methods. In general, log is for 'hit' logging # and 'log_info' is for informational, warning and error logging. @@ -604,8 +585,6 @@ def close_all(map=None, ignore_all=False): # Regardless, this is useful for pipes, and stdin/stdout... if os.name == 'posix': - import fcntl - class file_wrapper: # Here we override just enough to make a file # look like a socket for the purposes of asyncore. @@ -614,6 +593,11 @@ if os.name == 'posix': def __init__(self, fd): self.fd = os.dup(fd) + def __del__(self): + if self.fd >= 0: + warnings.warn("unclosed file %r" % self, ResourceWarning) + self.close() + def recv(self, *args): return os.read(self.fd, *args) @@ -632,7 +616,10 @@ if os.name == 'posix': write = send def close(self): + if self.fd < 0: + return os.close(self.fd) + self.fd = -1 def fileno(self): return self.fd @@ -648,9 +635,7 @@ if os.name == 'posix': pass self.set_file(fd) # set it to non-blocking mode - flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0) - flags = flags | os.O_NONBLOCK - fcntl.fcntl(fd, fcntl.F_SETFL, flags) + os.set_blocking(fd, False) def set_file(self, fd): self.socket = file_wrapper(fd) diff --git a/Darwin/lib/python3.4/base64.py b/Darwin/lib/python3.5/base64.py similarity index 99% rename from Darwin/lib/python3.4/base64.py rename to Darwin/lib/python3.5/base64.py index 36c68a6..640f787 100755 --- a/Darwin/lib/python3.4/base64.py +++ b/Darwin/lib/python3.5/base64.py @@ -324,7 +324,7 @@ def a85encode(b, *, foldspaces=False, wrapcol=0, pad=False, adobe=False): instead of 4 consecutive spaces (ASCII 0x20) as supported by 'btoa'. This feature is not supported by the "standard" Adobe encoding. - wrapcol controls whether the output should have newline ('\n') characters + wrapcol controls whether the output should have newline ('\\n') characters added to it. If this is non-zero, each output line will be at most this many characters long. @@ -434,7 +434,7 @@ _b85dec = None def b85encode(b, pad=False): """Encode an ASCII-encoded byte array in base85 format. - If pad is true, the input is padded with "\0" so its length is a multiple of + If pad is true, the input is padded with "\\0" so its length is a multiple of 4 characters before encoding. 
""" global _b85chars, _b85chars2 diff --git a/Darwin/lib/python3.4/bdb.py b/Darwin/lib/python3.5/bdb.py similarity index 100% rename from Darwin/lib/python3.4/bdb.py rename to Darwin/lib/python3.5/bdb.py diff --git a/Darwin/lib/python3.4/binhex.py b/Darwin/lib/python3.5/binhex.py similarity index 91% rename from Darwin/lib/python3.4/binhex.py rename to Darwin/lib/python3.5/binhex.py index 7bf9278..56b5f85 100644 --- a/Darwin/lib/python3.4/binhex.py +++ b/Darwin/lib/python3.5/binhex.py @@ -32,7 +32,8 @@ class Error(Exception): pass # States (what have we written) -[_DID_HEADER, _DID_DATA, _DID_RSRC] = range(3) +_DID_HEADER = 0 +_DID_DATA = 1 # Various constants REASONABLY_LARGE = 32768 # Minimal amount we pass the rle-coder @@ -213,30 +214,34 @@ class BinHex: self._write(data) def close(self): - if self.state < _DID_DATA: - self.close_data() - if self.state != _DID_DATA: - raise Error('Close at the wrong time') - if self.rlen != 0: - raise Error("Incorrect resource-datasize, diff=%r" % (self.rlen,)) - self._writecrc() - self.ofp.close() - self.state = None - del self.ofp + if self.state is None: + return + try: + if self.state < _DID_DATA: + self.close_data() + if self.state != _DID_DATA: + raise Error('Close at the wrong time') + if self.rlen != 0: + raise Error("Incorrect resource-datasize, diff=%r" % (self.rlen,)) + self._writecrc() + finally: + self.state = None + ofp = self.ofp + del self.ofp + ofp.close() def binhex(inp, out): """binhex(infilename, outfilename): create binhex-encoded copy of a file""" finfo = getfileinfo(inp) ofp = BinHex(finfo, out) - ifp = io.open(inp, 'rb') - # XXXX Do textfile translation on non-mac systems - while True: - d = ifp.read(128000) - if not d: break - ofp.write(d) - ofp.close_data() - ifp.close() + with io.open(inp, 'rb') as ifp: + # XXXX Do textfile translation on non-mac systems + while True: + d = ifp.read(128000) + if not d: break + ofp.write(d) + ofp.close_data() ifp = openrsrc(inp, 'rb') while True: @@ -436,11 +441,15 @@ class HexBin: return self._read(n) def close(self): - if self.rlen: - dummy = self.read_rsrc(self.rlen) - self._checkcrc() - self.state = _DID_RSRC - self.ifp.close() + if self.state is None: + return + try: + if self.rlen: + dummy = self.read_rsrc(self.rlen) + self._checkcrc() + finally: + self.state = None + self.ifp.close() def hexbin(inp, out): """hexbin(infilename, outfilename) - Decode binhexed file""" @@ -449,13 +458,12 @@ def hexbin(inp, out): if not out: out = ifp.FName - ofp = io.open(out, 'wb') - # XXXX Do translation on non-mac systems - while True: - d = ifp.read(128000) - if not d: break - ofp.write(d) - ofp.close() + with io.open(out, 'wb') as ofp: + # XXXX Do translation on non-mac systems + while True: + d = ifp.read(128000) + if not d: break + ofp.write(d) ifp.close_data() d = ifp.read_rsrc(128000) diff --git a/Darwin/lib/python3.4/bisect.py b/Darwin/lib/python3.5/bisect.py similarity index 100% rename from Darwin/lib/python3.4/bisect.py rename to Darwin/lib/python3.5/bisect.py diff --git a/Darwin/lib/python3.4/bz2.py b/Darwin/lib/python3.5/bz2.py similarity index 57% rename from Darwin/lib/python3.4/bz2.py rename to Darwin/lib/python3.5/bz2.py index 6f47bfa..bc78c54 100644 --- a/Darwin/lib/python3.4/bz2.py +++ b/Darwin/lib/python3.5/bz2.py @@ -9,8 +9,10 @@ __all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", __author__ = "Nadeem Vawda " +from builtins import open as _builtin_open import io import warnings +import _compression try: from threading import RLock @@ -22,15 +24,11 @@ from _bz2 import 
BZ2Compressor, BZ2Decompressor _MODE_CLOSED = 0 _MODE_READ = 1 -_MODE_READ_EOF = 2 +# Value 2 no longer used _MODE_WRITE = 3 -_BUFFER_SIZE = 8192 -_builtin_open = open - - -class BZ2File(io.BufferedIOBase): +class BZ2File(_compression.BaseStream): """A file object providing transparent bzip2 (de)compression. @@ -62,13 +60,11 @@ class BZ2File(io.BufferedIOBase): multiple compressed streams. """ # This lock must be recursive, so that BufferedIOBase's - # readline(), readlines() and writelines() don't deadlock. + # writelines() does not deadlock. self._lock = RLock() self._fp = None self._closefp = False self._mode = _MODE_CLOSED - self._pos = 0 - self._size = -1 if buffering is not None: warnings.warn("Use of 'buffering' argument is deprecated", @@ -80,9 +76,6 @@ class BZ2File(io.BufferedIOBase): if mode in ("", "r", "rb"): mode = "rb" mode_code = _MODE_READ - self._decompressor = BZ2Decompressor() - self._buffer = b"" - self._buffer_offset = 0 elif mode in ("w", "wb"): mode = "wb" mode_code = _MODE_WRITE @@ -108,6 +101,13 @@ class BZ2File(io.BufferedIOBase): else: raise TypeError("filename must be a str or bytes object, or a file") + if self._mode == _MODE_READ: + raw = _compression.DecompressReader(self._fp, + BZ2Decompressor, trailing_error=OSError) + self._buffer = io.BufferedReader(raw) + else: + self._pos = 0 + def close(self): """Flush and close the file. @@ -118,8 +118,8 @@ class BZ2File(io.BufferedIOBase): if self._mode == _MODE_CLOSED: return try: - if self._mode in (_MODE_READ, _MODE_READ_EOF): - self._decompressor = None + if self._mode == _MODE_READ: + self._buffer.close() elif self._mode == _MODE_WRITE: self._fp.write(self._compressor.flush()) self._compressor = None @@ -131,8 +131,7 @@ class BZ2File(io.BufferedIOBase): self._fp = None self._closefp = False self._mode = _MODE_CLOSED - self._buffer = b"" - self._buffer_offset = 0 + self._buffer = None @property def closed(self): @@ -146,125 +145,18 @@ class BZ2File(io.BufferedIOBase): def seekable(self): """Return whether the file supports seeking.""" - return self.readable() and self._fp.seekable() + return self.readable() and self._buffer.seekable() def readable(self): """Return whether the file was opened for reading.""" self._check_not_closed() - return self._mode in (_MODE_READ, _MODE_READ_EOF) + return self._mode == _MODE_READ def writable(self): """Return whether the file was opened for writing.""" self._check_not_closed() return self._mode == _MODE_WRITE - # Mode-checking helper functions. - - def _check_not_closed(self): - if self.closed: - raise ValueError("I/O operation on closed file") - - def _check_can_read(self): - if self._mode not in (_MODE_READ, _MODE_READ_EOF): - self._check_not_closed() - raise io.UnsupportedOperation("File not open for reading") - - def _check_can_write(self): - if self._mode != _MODE_WRITE: - self._check_not_closed() - raise io.UnsupportedOperation("File not open for writing") - - def _check_can_seek(self): - if self._mode not in (_MODE_READ, _MODE_READ_EOF): - self._check_not_closed() - raise io.UnsupportedOperation("Seeking is only supported " - "on files open for reading") - if not self._fp.seekable(): - raise io.UnsupportedOperation("The underlying file object " - "does not support seeking") - - # Fill the readahead buffer if it is empty. Returns False on EOF. - def _fill_buffer(self): - if self._mode == _MODE_READ_EOF: - return False - # Depending on the input data, our call to the decompressor may not - # return any data. In this case, try again after reading another block. 
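The large deletion below removes BZ2File's hand-rolled read buffering; the class now wraps a _compression.DecompressReader in io.BufferedReader, which supplies peek(), readline(), readinto() and seek() without custom code. Externally visible behaviour such as multi-stream support is unchanged, as this small check suggests (illustrative only, not part of the patch):

# Illustrative check: BZ2File still reads concatenated (multi-stream) data.
import bz2
import io

payload = bz2.compress(b'first stream\n') + bz2.compress(b'second stream\n')
with bz2.BZ2File(io.BytesIO(payload)) as f:
    assert f.readline() == b'first stream\n'   # readline() via BufferedReader
    assert f.read() == b'second stream\n'      # continues into the next stream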
- while self._buffer_offset == len(self._buffer): - rawblock = (self._decompressor.unused_data or - self._fp.read(_BUFFER_SIZE)) - - if not rawblock: - if self._decompressor.eof: - # End-of-stream marker and end of file. We're good. - self._mode = _MODE_READ_EOF - self._size = self._pos - return False - else: - # Problem - we were expecting more compressed data. - raise EOFError("Compressed file ended before the " - "end-of-stream marker was reached") - - if self._decompressor.eof: - # Continue to next stream. - self._decompressor = BZ2Decompressor() - try: - self._buffer = self._decompressor.decompress(rawblock) - except OSError: - # Trailing data isn't a valid bzip2 stream. We're done here. - self._mode = _MODE_READ_EOF - self._size = self._pos - return False - else: - self._buffer = self._decompressor.decompress(rawblock) - self._buffer_offset = 0 - return True - - # Read data until EOF. - # If return_data is false, consume the data without returning it. - def _read_all(self, return_data=True): - # The loop assumes that _buffer_offset is 0. Ensure that this is true. - self._buffer = self._buffer[self._buffer_offset:] - self._buffer_offset = 0 - - blocks = [] - while self._fill_buffer(): - if return_data: - blocks.append(self._buffer) - self._pos += len(self._buffer) - self._buffer = b"" - if return_data: - return b"".join(blocks) - - # Read a block of up to n bytes. - # If return_data is false, consume the data without returning it. - def _read_block(self, n, return_data=True): - # If we have enough data buffered, return immediately. - end = self._buffer_offset + n - if end <= len(self._buffer): - data = self._buffer[self._buffer_offset : end] - self._buffer_offset = end - self._pos += len(data) - return data if return_data else None - - # The loop assumes that _buffer_offset is 0. Ensure that this is true. - self._buffer = self._buffer[self._buffer_offset:] - self._buffer_offset = 0 - - blocks = [] - while n > 0 and self._fill_buffer(): - if n < len(self._buffer): - data = self._buffer[:n] - self._buffer_offset = n - else: - data = self._buffer - self._buffer = b"" - if return_data: - blocks.append(data) - self._pos += len(data) - n -= len(data) - if return_data: - return b"".join(blocks) - def peek(self, n=0): """Return buffered data without advancing the file position. @@ -273,9 +165,10 @@ class BZ2File(io.BufferedIOBase): """ with self._lock: self._check_can_read() - if not self._fill_buffer(): - return b"" - return self._buffer[self._buffer_offset:] + # Relies on the undocumented fact that BufferedReader.peek() + # always returns at least one byte (except at EOF), independent + # of the value of n + return self._buffer.peek(n) def read(self, size=-1): """Read up to size uncompressed bytes from the file. @@ -285,47 +178,29 @@ class BZ2File(io.BufferedIOBase): """ with self._lock: self._check_can_read() - if size == 0: - return b"" - elif size < 0: - return self._read_all() - else: - return self._read_block(size) + return self._buffer.read(size) def read1(self, size=-1): """Read up to size uncompressed bytes, while trying to avoid - making multiple reads from the underlying stream. + making multiple reads from the underlying stream. Reads up to a + buffer's worth of data if size is negative. Returns b'' if the file is at EOF. """ - # Usually, read1() calls _fp.read() at most once. However, sometimes - # this does not give enough data for the decompressor to make progress. - # In this case we make multiple reads, to avoid returning b"". 
with self._lock: self._check_can_read() - if (size == 0 or - # Only call _fill_buffer() if the buffer is actually empty. - # This gives a significant speedup if *size* is small. - (self._buffer_offset == len(self._buffer) and not self._fill_buffer())): - return b"" - if size > 0: - data = self._buffer[self._buffer_offset : - self._buffer_offset + size] - self._buffer_offset += len(data) - else: - data = self._buffer[self._buffer_offset:] - self._buffer = b"" - self._buffer_offset = 0 - self._pos += len(data) - return data + if size < 0: + size = io.DEFAULT_BUFFER_SIZE + return self._buffer.read1(size) def readinto(self, b): - """Read up to len(b) bytes into b. + """Read bytes into b. Returns the number of bytes read (0 for EOF). """ with self._lock: - return io.BufferedIOBase.readinto(self, b) + self._check_can_read() + return self._buffer.readinto(b) def readline(self, size=-1): """Read a line of uncompressed bytes from the file. @@ -340,15 +215,7 @@ class BZ2File(io.BufferedIOBase): size = size.__index__() with self._lock: self._check_can_read() - # Shortcut for the common case - the whole line is in the buffer. - if size < 0: - end = self._buffer.find(b"\n", self._buffer_offset) + 1 - if end > 0: - line = self._buffer[self._buffer_offset : end] - self._buffer_offset = end - self._pos += len(line) - return line - return io.BufferedIOBase.readline(self, size) + return self._buffer.readline(size) def readlines(self, size=-1): """Read a list of lines of uncompressed bytes from the file. @@ -362,7 +229,8 @@ class BZ2File(io.BufferedIOBase): raise TypeError("Integer argument expected") size = size.__index__() with self._lock: - return io.BufferedIOBase.readlines(self, size) + self._check_can_read() + return self._buffer.readlines(size) def write(self, data): """Write a byte string to the file. @@ -387,18 +255,9 @@ class BZ2File(io.BufferedIOBase): Line separators are not added between the written byte strings. """ with self._lock: - return io.BufferedIOBase.writelines(self, seq) + return _compression.BaseStream.writelines(self, seq) - # Rewind the file to the beginning of the data stream. - def _rewind(self): - self._fp.seek(0, 0) - self._mode = _MODE_READ - self._pos = 0 - self._decompressor = BZ2Decompressor() - self._buffer = b"" - self._buffer_offset = 0 - - def seek(self, offset, whence=0): + def seek(self, offset, whence=io.SEEK_SET): """Change the file position. The new position is specified by offset, relative to the @@ -415,35 +274,14 @@ class BZ2File(io.BufferedIOBase): """ with self._lock: self._check_can_seek() - - # Recalculate offset as an absolute file position. - if whence == 0: - pass - elif whence == 1: - offset = self._pos + offset - elif whence == 2: - # Seeking relative to EOF - we need to know the file's size. - if self._size < 0: - self._read_all(return_data=False) - offset = self._size + offset - else: - raise ValueError("Invalid value for whence: %s" % (whence,)) - - # Make it so that offset is the number of bytes to skip forward. - if offset < self._pos: - self._rewind() - else: - offset -= self._pos - - # Read and discard data until we reach the desired position. 
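Seeking a compressed stream is still emulated rather than native: a backward seek rewinds to the start of the file and forward positions are reached by decompressing and discarding, only now inside the shared DecompressReader instead of BZ2File itself, so seek() remains potentially expensive. A small usage sketch (illustrative only):

# Usage sketch: seek()/tell() work on BZ2File, but a backward seek restarts
# decompression from the beginning, so random access costs O(position).
import bz2
import io

buf = io.BytesIO(bz2.compress(b'abcdefghij'))
with bz2.BZ2File(buf) as f:
    f.read(5)
    assert f.tell() == 5
    f.seek(0)                      # rewind: decompression starts over
    assert f.read(3) == b'abc'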
- self._read_block(offset, return_data=False) - - return self._pos + return self._buffer.seek(offset, whence) def tell(self): """Return the current file position.""" with self._lock: self._check_not_closed() + if self._mode == _MODE_READ: + return self._buffer.tell() return self._pos diff --git a/Darwin/lib/python3.4/cProfile.py b/Darwin/lib/python3.5/cProfile.py similarity index 100% rename from Darwin/lib/python3.4/cProfile.py rename to Darwin/lib/python3.5/cProfile.py diff --git a/Darwin/lib/python3.4/calendar.py b/Darwin/lib/python3.5/calendar.py similarity index 100% rename from Darwin/lib/python3.4/calendar.py rename to Darwin/lib/python3.5/calendar.py diff --git a/Darwin/lib/python3.4/cgi.py b/Darwin/lib/python3.5/cgi.py similarity index 98% rename from Darwin/lib/python3.4/cgi.py rename to Darwin/lib/python3.5/cgi.py index 1ef780c..4be28ba 100755 --- a/Darwin/lib/python3.4/cgi.py +++ b/Darwin/lib/python3.5/cgi.py @@ -566,6 +566,12 @@ class FieldStorage: except AttributeError: pass + def __enter__(self): + return self + + def __exit__(self, *args): + self.file.close() + def __repr__(self): """Return a printable representation.""" return "FieldStorage(%r, %r, %r)" % ( @@ -693,8 +699,13 @@ class FieldStorage: raise ValueError("%s should return bytes, got %s" \ % (self.fp, type(first_line).__name__)) self.bytes_read += len(first_line) - # first line holds boundary ; ignore it, or check that - # b"--" + ib == first_line.strip() ? + + # Ensure that we consume the file until we've hit our inner boundary + while (first_line.strip() != (b"--" + self.innerboundary) and + first_line): + first_line = self.fp.readline() + self.bytes_read += len(first_line) + while True: parser = FeedParser() hdr_text = b"" diff --git a/Darwin/lib/python3.4/cgitb.py b/Darwin/lib/python3.5/cgitb.py similarity index 99% rename from Darwin/lib/python3.4/cgitb.py rename to Darwin/lib/python3.5/cgitb.py index 6eb52e7..b291100 100644 --- a/Darwin/lib/python3.4/cgitb.py +++ b/Darwin/lib/python3.5/cgitb.py @@ -294,9 +294,8 @@ class Hook: (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir) try: - file = os.fdopen(fd, 'w') - file.write(doc) - file.close() + with os.fdopen(fd, 'w') as file: + file.write(doc) msg = '%s contains the description of this error.' % path except: msg = 'Tried to save traceback to %s, but failed.' 
% path diff --git a/Darwin/lib/python3.4/chunk.py b/Darwin/lib/python3.5/chunk.py similarity index 97% rename from Darwin/lib/python3.4/chunk.py rename to Darwin/lib/python3.5/chunk.py index dc90a75..84b77cc 100644 --- a/Darwin/lib/python3.4/chunk.py +++ b/Darwin/lib/python3.5/chunk.py @@ -85,8 +85,10 @@ class Chunk: def close(self): if not self.closed: - self.skip() - self.closed = True + try: + self.skip() + finally: + self.closed = True def isatty(self): if self.closed: @@ -126,7 +128,7 @@ class Chunk: if self.closed: raise ValueError("I/O operation on closed file") if self.size_read >= self.chunksize: - return '' + return b'' if size < 0: size = self.chunksize - self.size_read if size > self.chunksize - self.size_read: diff --git a/Darwin/lib/python3.4/cmd.py b/Darwin/lib/python3.5/cmd.py similarity index 100% rename from Darwin/lib/python3.4/cmd.py rename to Darwin/lib/python3.5/cmd.py diff --git a/Darwin/lib/python3.4/code.py b/Darwin/lib/python3.5/code.py similarity index 91% rename from Darwin/lib/python3.4/code.py rename to Darwin/lib/python3.5/code.py index f8184b6..53244e3 100644 --- a/Darwin/lib/python3.4/code.py +++ b/Darwin/lib/python3.5/code.py @@ -7,6 +7,7 @@ import sys import traceback +import argparse from codeop import CommandCompiler, compile_command __all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", @@ -136,25 +137,18 @@ class InteractiveInterpreter: The output is written by self.write(), below. """ + sys.last_type, sys.last_value, last_tb = ei = sys.exc_info() + sys.last_traceback = last_tb try: - type, value, tb = sys.exc_info() - sys.last_type = type - sys.last_value = value - sys.last_traceback = tb - tblist = traceback.extract_tb(tb) - del tblist[:1] - lines = traceback.format_list(tblist) - if lines: - lines.insert(0, "Traceback (most recent call last):\n") - lines.extend(traceback.format_exception_only(type, value)) + lines = traceback.format_exception(ei[0], ei[1], last_tb.tb_next) + if sys.excepthook is sys.__excepthook__: + self.write(''.join(lines)) + else: + # If someone has set sys.excepthook, we let that take precedence + # over self.write + sys.excepthook(ei[0], ei[1], last_tb) finally: - tblist = tb = None - if sys.excepthook is sys.__excepthook__: - self.write(''.join(lines)) - else: - # If someone has set sys.excepthook, we let that take precedence - # over self.write - sys.excepthook(type, value, tb) + last_tb = ei = None def write(self, data): """Write a string. 
@@ -299,4 +293,12 @@ def interact(banner=None, readfunc=None, local=None): if __name__ == "__main__": - interact() + parser = argparse.ArgumentParser() + parser.add_argument('-q', action='store_true', + help="don't print version and copyright messages") + args = parser.parse_args() + if args.q or sys.flags.quiet: + banner = '' + else: + banner = None + interact(banner) diff --git a/Darwin/lib/python3.4/codecs.py b/Darwin/lib/python3.5/codecs.py similarity index 91% rename from Darwin/lib/python3.4/codecs.py rename to Darwin/lib/python3.5/codecs.py index c2065da..22d5f82 100644 --- a/Darwin/lib/python3.4/codecs.py +++ b/Darwin/lib/python3.5/codecs.py @@ -20,8 +20,15 @@ __all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE", "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE", "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE", "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE", + "CodecInfo", "Codec", "IncrementalEncoder", "IncrementalDecoder", + "StreamReader", "StreamWriter", + "StreamReaderWriter", "StreamRecoder", + "getencoder", "getdecoder", "getincrementalencoder", + "getincrementaldecoder", "getreader", "getwriter", + "encode", "decode", "iterencode", "iterdecode", "strict_errors", "ignore_errors", "replace_errors", "xmlcharrefreplace_errors", + "backslashreplace_errors", "namereplace_errors", "register_error", "lookup_error"] ### Constants @@ -99,8 +106,8 @@ class CodecInfo(tuple): return self def __repr__(self): - return "<%s.%s object for encoding %s at 0x%x>" % \ - (self.__class__.__module__, self.__class__.__name__, + return "<%s.%s object for encoding %s at %#x>" % \ + (self.__class__.__module__, self.__class__.__qualname__, self.name, id(self)) class Codec: @@ -117,10 +124,11 @@ class Codec: Python will use the official U+FFFD REPLACEMENT CHARACTER for the builtin Unicode codecs on decoding and '?' on encoding. - 'surrogateescape' - replace with private codepoints U+DCnn. + 'surrogateescape' - replace with private code points U+DCnn. 'xmlcharrefreplace' - Replace with the appropriate XML character reference (only for encoding). - 'backslashreplace' - Replace with backslashed escape sequences + 'backslashreplace' - Replace with backslashed escape sequences. + 'namereplace' - Replace with \\N{...} escape sequences (only for encoding). The set of allowed values can be extended via register_error. @@ -135,8 +143,8 @@ class Codec: 'strict' handling. The method may not store state in the Codec instance. Use - StreamCodec for codecs which have to keep state in order to - make encoding/decoding efficient. + StreamWriter for codecs which have to keep state in order to + make encoding efficient. The encoder must be able to handle zero length input and return an empty object of the output object type in this @@ -158,8 +166,8 @@ class Codec: 'strict' handling. The method may not store state in the Codec instance. Use - StreamCodec for codecs which have to keep state in order to - make encoding/decoding efficient. + StreamReader for codecs which have to keep state in order to + make decoding efficient. The decoder must be able to handle zero length input and return an empty object of the output object type in this @@ -340,8 +348,7 @@ class StreamWriter(Codec): """ Creates a StreamWriter instance. - stream must be a file-like object open for writing - (binary) data. + stream must be a file-like object open for writing. The StreamWriter may use different error handling schemes by providing the errors keyword argument. 
These @@ -353,7 +360,8 @@ class StreamWriter(Codec): 'xmlcharrefreplace' - Replace with the appropriate XML character reference. 'backslashreplace' - Replace with backslashed escape - sequences (only for encoding). + sequences. + 'namereplace' - Replace with \\N{...} escape sequences. The set of allowed parameter values can be extended via register_error. @@ -415,8 +423,7 @@ class StreamReader(Codec): """ Creates a StreamReader instance. - stream must be a file-like object open for reading - (binary) data. + stream must be a file-like object open for reading. The StreamReader may use different error handling schemes by providing the errors keyword argument. These @@ -424,7 +431,8 @@ class StreamReader(Codec): 'strict' - raise a ValueError (or a subclass) 'ignore' - ignore the character and continue with the next - 'replace'- replace with a suitable replacement character; + 'replace'- replace with a suitable replacement character + 'backslashreplace' - Replace with backslashed escape sequences; The set of allowed parameter values can be extended via register_error. @@ -444,13 +452,12 @@ class StreamReader(Codec): """ Decodes data from the stream self.stream and returns the resulting object. - chars indicates the number of characters to read from the - stream. read() will never return more than chars - characters, but it might return less, if there are not enough - characters available. + chars indicates the number of decoded code points or bytes to + return. read() will never return more data than requested, + but it might return less, if there is not enough available. - size indicates the approximate maximum number of bytes to - read from the stream for decoding purposes. The decoder + size indicates the approximate maximum number of decoded + bytes or code points to read for decoding. The decoder can modify this setting as appropriate. The default value -1 indicates to read and decode as much as possible. size is intended to prevent having to decode huge files in one @@ -461,7 +468,7 @@ class StreamReader(Codec): will be returned, the rest of the input will be kept until the next call to read(). - The method should use a greedy read strategy meaning that + The method should use a greedy read strategy, meaning that it should read as much data as is allowed within the definition of the encoding and the given size, e.g. if optional encoding endings or state markers are available @@ -596,7 +603,7 @@ class StreamReader(Codec): def readlines(self, sizehint=None, keepends=True): """ Read all lines available on the input stream - and return them as list of lines. + and return them as a list. Line breaks are implemented using the codec's decoder method and are included in the list entries. @@ -744,19 +751,18 @@ class StreamReaderWriter: class StreamRecoder: - """ StreamRecoder instances provide a frontend - backend - view of encoding data. + """ StreamRecoder instances translate data from one encoding to another. They use the complete set of APIs returned by the codecs.lookup() function to implement their task. - Data written to the stream is first decoded into an - intermediate format (which is dependent on the given codec - combination) and then written to the stream using an instance - of the provided Writer class. + Data written to the StreamRecoder is first decoded into an + intermediate format (depending on the "decode" codec) and then + written to the underlying stream using an instance of the provided + Writer class. 
- In the other direction, data is read from the stream using a - Reader instance and then return encoded data to the caller. + In the other direction, data is read from the underlying stream using + a Reader instance and then encoded and returned to the caller. """ # Optional attributes set by the file wrappers below @@ -768,22 +774,17 @@ class StreamRecoder: """ Creates a StreamRecoder instance which implements a two-way conversion: encode and decode work on the frontend (the - input to .read() and output of .write()) while - Reader and Writer work on the backend (reading and - writing to the stream). + data visible to .read() and .write()) while Reader and Writer + work on the backend (the data in stream). - You can use these objects to do transparent direct - recodings from e.g. latin-1 to utf-8 and back. + You can use these objects to do transparent + transcodings from e.g. latin-1 to utf-8 and back. stream must be a file-like object. - encode, decode must adhere to the Codec interface, Reader, + encode and decode must adhere to the Codec interface; Reader and Writer must be factory functions or classes providing the - StreamReader, StreamWriter interface resp. - - encode and decode are needed for the frontend translation, - Reader and Writer for the backend translation. Unicode is - used as intermediate encoding. + StreamReader and StreamWriter interfaces resp. Error handling is done in the same way as defined for the StreamWriter/Readers. @@ -858,7 +859,7 @@ class StreamRecoder: ### Shortcuts -def open(filename, mode='rb', encoding=None, errors='strict', buffering=1): +def open(filename, mode='r', encoding=None, errors='strict', buffering=1): """ Open an encoded file using the given mode and return a wrapped version providing transparent encoding/decoding. @@ -868,10 +869,8 @@ def open(filename, mode='rb', encoding=None, errors='strict', buffering=1): codecs. Output is also codec dependent and will usually be Unicode as well. - Files are always opened in binary mode, even if no binary mode - was specified. This is done to avoid data loss due to encodings - using 8-bit values. The default file mode is 'rb' meaning to - open the file in binary read mode. + Underlying encoded files are always opened in binary mode. + The default file mode is 'r', meaning to open the file in read mode. encoding specifies the encoding which is to be used for the file. @@ -907,13 +906,13 @@ def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'): """ Return a wrapped version of file which provides transparent encoding translation. - Strings written to the wrapped file are interpreted according - to the given data_encoding and then written to the original - file as string using file_encoding. The intermediate encoding + Data written to the wrapped file is decoded according + to the given data_encoding and then encoded to the underlying + file using file_encoding. The intermediate data type will usually be Unicode but depends on the specified codecs. - Strings are read from the file using file_encoding and then - passed back to the caller as string using data_encoding. + Bytes read from the file are decoded using file_encoding and then + passed back to the caller encoded using data_encoding. If file_encoding is not given, it defaults to data_encoding. @@ -1066,7 +1065,7 @@ def make_encoding_map(decoding_map): during translation. One example where this happens is cp875.py which decodes - multiple character to \u001a. + multiple character to \\u001a. 
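A minimal usage sketch of the codecs.open() default-mode change above ('rb' to 'r'); per the updated docstring the underlying file is still opened in binary mode, only the default mode string changes. The file name here is a placeholder:

import codecs

# Mode now defaults to 'r'; reads still return decoded str objects.
with codecs.open('example.txt', encoding='utf-8') as f:   # 'example.txt' is hypothetical
    text = f.read()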
""" m = {} @@ -1085,6 +1084,7 @@ try: replace_errors = lookup_error("replace") xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace") backslashreplace_errors = lookup_error("backslashreplace") + namereplace_errors = lookup_error("namereplace") except LookupError: # In --disable-unicode builds, these error handler are missing strict_errors = None @@ -1092,6 +1092,7 @@ except LookupError: replace_errors = None xmlcharrefreplace_errors = None backslashreplace_errors = None + namereplace_errors = None # Tell modulefinder that using codecs probably needs the encodings # package diff --git a/Darwin/lib/python3.4/codeop.py b/Darwin/lib/python3.5/codeop.py similarity index 100% rename from Darwin/lib/python3.4/codeop.py rename to Darwin/lib/python3.5/codeop.py diff --git a/Darwin/lib/python3.4/collections/__init__.py b/Darwin/lib/python3.5/collections/__init__.py similarity index 92% rename from Darwin/lib/python3.4/collections/__init__.py rename to Darwin/lib/python3.5/collections/__init__.py index d6deb6a..80dc4f6 100644 --- a/Darwin/lib/python3.4/collections/__init__.py +++ b/Darwin/lib/python3.5/collections/__init__.py @@ -7,7 +7,6 @@ from _collections_abc import * import _collections_abc __all__ += _collections_abc.__all__ -from _collections import deque, defaultdict from operator import itemgetter as _itemgetter, eq as _eq from keyword import iskeyword as _iskeyword import sys as _sys @@ -16,10 +15,40 @@ from _weakref import proxy as _proxy from itertools import repeat as _repeat, chain as _chain, starmap as _starmap from reprlib import recursive_repr as _recursive_repr +try: + from _collections import deque +except ImportError: + pass +else: + MutableSequence.register(deque) + +try: + from _collections import defaultdict +except ImportError: + pass + + ################################################################################ ### OrderedDict ################################################################################ +class _OrderedDictKeysView(KeysView): + + def __reversed__(self): + yield from reversed(self._mapping) + +class _OrderedDictItemsView(ItemsView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield (key, self._mapping[key]) + +class _OrderedDictValuesView(ValuesView): + + def __reversed__(self): + for key in reversed(self._mapping): + yield self._mapping[key] + class _Link(object): __slots__ = 'prev', 'next', 'key', '__weakref__' @@ -38,12 +67,16 @@ class OrderedDict(dict): # Individual links are kept alive by the hard reference in self.__map. # Those hard references disappear when a key is deleted from an OrderedDict. - def __init__(self, *args, **kwds): + def __init__(*args, **kwds): '''Initialize an ordered dictionary. The signature is the same as regular dictionaries, but keyword arguments are not recommended because their insertion order is arbitrary. 
''' + if not args: + raise TypeError("descriptor '__init__' of 'OrderedDict' object " + "needs an argument") + self, *args = args if len(args) > 1: raise TypeError('expected at most 1 arguments, got %d' % len(args)) try: @@ -79,6 +112,8 @@ class OrderedDict(dict): link_next = link.next link_prev.next = link_next link_next.prev = link_prev + link.prev = None + link.next = None def __iter__(self): 'od.__iter__() <==> iter(od)' @@ -162,9 +197,19 @@ class OrderedDict(dict): return size update = __update = MutableMapping.update - keys = MutableMapping.keys - values = MutableMapping.values - items = MutableMapping.items + + def keys(self): + "D.keys() -> a set-like object providing a view on D's keys" + return _OrderedDictKeysView(self) + + def items(self): + "D.items() -> a set-like object providing a view on D's items" + return _OrderedDictItemsView(self) + + def values(self): + "D.values() -> an object providing a view on D's values" + return _OrderedDictValuesView(self) + __ne__ = MutableMapping.__ne__ __marker = object() @@ -229,6 +274,13 @@ class OrderedDict(dict): return dict.__eq__(self, other) +try: + from _collections import OrderedDict +except ImportError: + # Leave the pure Python version in place. + pass + + ################################################################################ ### namedtuple ################################################################################ @@ -323,6 +375,7 @@ def namedtuple(typename, field_names, verbose=False, rename=False): if isinstance(field_names, str): field_names = field_names.replace(',', ' ').split() field_names = list(map(str, field_names)) + typename = str(typename) if rename: seen = set() for index, name in enumerate(field_names): @@ -333,6 +386,8 @@ def namedtuple(typename, field_names, verbose=False, rename=False): field_names[index] = '_%d' % index seen.add(name) for name in [typename] + field_names: + if type(name) != str: + raise TypeError('Type names and field names must be strings') if not name.isidentifier(): raise ValueError('Type names and field names must be valid ' 'identifiers: %r' % name) @@ -447,7 +502,7 @@ class Counter(dict): # http://code.activestate.com/recipes/259174/ # Knuth, TAOCP Vol. II section 4.6.3 - def __init__(self, iterable=None, **kwds): + def __init__(*args, **kwds): '''Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts. @@ -458,8 +513,14 @@ class Counter(dict): >>> c = Counter(a=4, b=2) # a new counter from keyword args ''' - super().__init__() - self.update(iterable, **kwds) + if not args: + raise TypeError("descriptor '__init__' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + super(Counter, self).__init__() + self.update(*args, **kwds) def __missing__(self, key): 'The count of elements not in the Counter is zero.' @@ -510,7 +571,7 @@ class Counter(dict): raise NotImplementedError( 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') - def update(self, iterable=None, **kwds): + def update(*args, **kwds): '''Like dict.update() but add counts instead of replacing them. Source can be an iterable, a dictionary, or another Counter instance. @@ -530,6 +591,13 @@ class Counter(dict): # contexts. Instead, we implement straight-addition. Both the inputs # and outputs are allowed to contain zero and negative counts. 
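Because __init__ and update() above now take *args rather than named 'self'/'iterable' parameters, keyword arguments that collide with those names are simply counted; a sketch assuming Python 3.5:

from collections import Counter

# 'self' and 'iterable' are no longer reserved parameter names.
c = Counter(self=2, iterable=3, spam=1)
print(c['self'], c['iterable'], c['spam'])   # 2 3 1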
+ if not args: + raise TypeError("descriptor 'update' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None if iterable is not None: if isinstance(iterable, Mapping): if self: @@ -537,13 +605,13 @@ class Counter(dict): for elem, count in iterable.items(): self[elem] = count + self_get(elem, 0) else: - super().update(iterable) # fast path when counter is empty + super(Counter, self).update(iterable) # fast path when counter is empty else: _count_elements(self, iterable) if kwds: self.update(kwds) - def subtract(self, iterable=None, **kwds): + def subtract(*args, **kwds): '''Like dict.update() but subtracts counts instead of replacing them. Counts can be reduced below zero. Both the inputs and outputs are allowed to contain zero and negative counts. @@ -559,6 +627,13 @@ class Counter(dict): -1 ''' + if not args: + raise TypeError("descriptor 'subtract' of 'Counter' object " + "needs an argument") + self, *args = args + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + iterable = args[0] if args else None if iterable is not None: self_get = self.get if isinstance(iterable, Mapping): @@ -678,14 +753,22 @@ class Counter(dict): def __pos__(self): 'Adds an empty counter, effectively stripping negative and zero counts' - return self + Counter() + result = Counter() + for elem, count in self.items(): + if count > 0: + result[elem] = count + return result def __neg__(self): '''Subtracts from an empty counter. Strips positive and zero counts, and flips the sign on negative counts. ''' - return Counter() - self + result = Counter() + for elem, count in self.items(): + if count < 0: + result[elem] = 0 - count + return result def _keep_positive(self): '''Internal method to strip elements with a negative or zero count''' @@ -931,7 +1014,6 @@ class UserList(MutableSequence): def __lt__(self, other): return self.data < self.__cast(other) def __le__(self, other): return self.data <= self.__cast(other) def __eq__(self, other): return self.data == self.__cast(other) - def __ne__(self, other): return self.data != self.__cast(other) def __gt__(self, other): return self.data > self.__cast(other) def __ge__(self, other): return self.data >= self.__cast(other) def __cast(self, other): @@ -1003,15 +1085,13 @@ class UserString(Sequence): def __float__(self): return float(self.data) def __complex__(self): return complex(self.data) def __hash__(self): return hash(self.data) + def __getnewargs__(self): + return (self.data[:],) def __eq__(self, string): if isinstance(string, UserString): return self.data == string.data return self.data == string - def __ne__(self, string): - if isinstance(string, UserString): - return self.data != string.data - return self.data != string def __lt__(self, string): if isinstance(string, UserString): return self.data < string.data @@ -1051,9 +1131,13 @@ class UserString(Sequence): __rmul__ = __mul__ def __mod__(self, args): return self.__class__(self.data % args) + def __rmod__(self, format): + return self.__class__(format % args) # the following methods are defined in alphabetical order: def capitalize(self): return self.__class__(self.data.capitalize()) + def casefold(self): + return self.__class__(self.data.casefold()) def center(self, width, *args): return self.__class__(self.data.center(width, *args)) def count(self, sub, start=0, end=_sys.maxsize): @@ -1076,6 +1160,8 @@ class UserString(Sequence): return 
self.data.find(sub, start, end) def format(self, *args, **kwds): return self.data.format(*args, **kwds) + def format_map(self, mapping): + return self.data.format_map(mapping) def index(self, sub, start=0, end=_sys.maxsize): return self.data.index(sub, start, end) def isalpha(self): return self.data.isalpha() @@ -1085,6 +1171,7 @@ class UserString(Sequence): def isidentifier(self): return self.data.isidentifier() def islower(self): return self.data.islower() def isnumeric(self): return self.data.isnumeric() + def isprintable(self): return self.data.isprintable() def isspace(self): return self.data.isspace() def istitle(self): return self.data.istitle() def isupper(self): return self.data.isupper() @@ -1093,6 +1180,7 @@ class UserString(Sequence): return self.__class__(self.data.ljust(width, *args)) def lower(self): return self.__class__(self.data.lower()) def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + maketrans = str.maketrans def partition(self, sep): return self.data.partition(sep) def replace(self, old, new, maxsplit=-1): diff --git a/Darwin/lib/python3.4/collections/__main__.py b/Darwin/lib/python3.5/collections/__main__.py similarity index 100% rename from Darwin/lib/python3.4/collections/__main__.py rename to Darwin/lib/python3.5/collections/__main__.py diff --git a/Darwin/lib/python3.4/collections/abc.py b/Darwin/lib/python3.5/collections/abc.py similarity index 100% rename from Darwin/lib/python3.4/collections/abc.py rename to Darwin/lib/python3.5/collections/abc.py diff --git a/Darwin/lib/python3.4/colorsys.py b/Darwin/lib/python3.5/colorsys.py similarity index 100% rename from Darwin/lib/python3.4/colorsys.py rename to Darwin/lib/python3.5/colorsys.py diff --git a/Darwin/lib/python3.4/compileall.py b/Darwin/lib/python3.5/compileall.py similarity index 72% rename from Darwin/lib/python3.4/compileall.py rename to Darwin/lib/python3.5/compileall.py index d957ee5..64c0a9a 100644 --- a/Darwin/lib/python3.4/compileall.py +++ b/Darwin/lib/python3.5/compileall.py @@ -1,4 +1,4 @@ -"""Module/script to byte-compile all .py files to .pyc (or .pyo) files. +"""Module/script to byte-compile all .py files to .pyc files. When called as a script with arguments, this compiles the directories given as arguments recursively; the -l option prevents it from @@ -16,32 +16,24 @@ import importlib.util import py_compile import struct +try: + from concurrent.futures import ProcessPoolExecutor +except ImportError: + ProcessPoolExecutor = None +from functools import partial + __all__ = ["compile_dir","compile_file","compile_path"] -def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None, - quiet=False, legacy=False, optimize=-1): - """Byte-compile all modules in the given directory tree. - - Arguments (only dir is required): - - dir: the directory to byte-compile - maxlevels: maximum recursion level (default 10) - ddir: the directory that will be prepended to the path to the - file as it is compiled into each byte-code file. 
- force: if True, force compilation, even if timestamps are up-to-date - quiet: if True, be quiet during compilation - legacy: if True, produce legacy pyc paths instead of PEP 3147 paths - optimize: optimization level or -1 for level of the interpreter - """ +def _walk_dir(dir, ddir=None, maxlevels=10, quiet=0): if not quiet: print('Listing {!r}...'.format(dir)) try: names = os.listdir(dir) except OSError: - print("Can't list {!r}".format(dir)) + if quiet < 2: + print("Can't list {!r}".format(dir)) names = [] names.sort() - success = 1 for name in names: if name == '__pycache__': continue @@ -51,17 +43,53 @@ def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None, else: dfile = None if not os.path.isdir(fullname): - if not compile_file(fullname, ddir, force, rx, quiet, - legacy, optimize): - success = 0 + yield fullname elif (maxlevels > 0 and name != os.curdir and name != os.pardir and os.path.isdir(fullname) and not os.path.islink(fullname)): - if not compile_dir(fullname, maxlevels - 1, dfile, force, rx, - quiet, legacy, optimize): + yield from _walk_dir(fullname, ddir=dfile, + maxlevels=maxlevels - 1, quiet=quiet) + +def compile_dir(dir, maxlevels=10, ddir=None, force=False, rx=None, + quiet=0, legacy=False, optimize=-1, workers=1): + """Byte-compile all modules in the given directory tree. + + Arguments (only dir is required): + + dir: the directory to byte-compile + maxlevels: maximum recursion level (default 10) + ddir: the directory that will be prepended to the path to the + file as it is compiled into each byte-code file. + force: if True, force compilation, even if timestamps are up-to-date + quiet: full output with False or 0, errors only with 1, + no output with 2 + legacy: if True, produce legacy pyc paths instead of PEP 3147 paths + optimize: optimization level or -1 for level of the interpreter + workers: maximum number of parallel workers + """ + files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels, + ddir=ddir) + success = 1 + if workers is not None and workers != 1 and ProcessPoolExecutor is not None: + if workers < 0: + raise ValueError('workers must be greater or equal to 0') + + workers = workers or None + with ProcessPoolExecutor(max_workers=workers) as executor: + results = executor.map(partial(compile_file, + ddir=ddir, force=force, + rx=rx, quiet=quiet, + legacy=legacy, + optimize=optimize), + files) + success = min(results, default=1) + else: + for file in files: + if not compile_file(file, ddir, force, rx, quiet, + legacy, optimize): success = 0 return success -def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, +def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0, legacy=False, optimize=-1): """Byte-compile one file. @@ -71,7 +99,8 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, ddir: if given, the directory name compiled in to the byte-code file. 
force: if True, force compilation, even if timestamps are up-to-date - quiet: if True, be quiet during compilation + quiet: full output with False or 0, errors only with 1, + no output with 2 legacy: if True, produce legacy pyc paths instead of PEP 3147 paths optimize: optimization level or -1 for level of the interpreter """ @@ -87,11 +116,12 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, return success if os.path.isfile(fullname): if legacy: - cfile = fullname + ('c' if __debug__ else 'o') + cfile = fullname + 'c' else: if optimize >= 0: + opt = optimize if optimize >= 1 else '' cfile = importlib.util.cache_from_source( - fullname, debug_override=not optimize) + fullname, optimization=opt) else: cfile = importlib.util.cache_from_source(fullname) cache_dir = os.path.dirname(cfile) @@ -114,7 +144,10 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, ok = py_compile.compile(fullname, cfile, dfile, True, optimize=optimize) except py_compile.PyCompileError as err: - if quiet: + success = 0 + if quiet >= 2: + return success + elif quiet: print('*** Error compiling {!r}...'.format(fullname)) else: print('*** ', end='') @@ -123,20 +156,21 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=False, errors='backslashreplace') msg = msg.decode(sys.stdout.encoding) print(msg) - success = 0 except (SyntaxError, UnicodeError, OSError) as e: - if quiet: + success = 0 + if quiet >= 2: + return success + elif quiet: print('*** Error compiling {!r}...'.format(fullname)) else: print('*** ', end='') print(e.__class__.__name__ + ':', e) - success = 0 else: if ok == 0: success = 0 return success -def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=False, +def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=0, legacy=False, optimize=-1): """Byte-compile all module on sys.path. @@ -145,14 +179,15 @@ def compile_path(skip_curdir=1, maxlevels=0, force=False, quiet=False, skip_curdir: if true, skip current directory (default True) maxlevels: max recursion level (default 0) force: as for compile_dir() (default False) - quiet: as for compile_dir() (default False) + quiet: as for compile_dir() (default 0) legacy: as for compile_dir() (default False) optimize: as for compile_dir() (default -1) """ success = 1 for dir in sys.path: if (not dir or dir == os.curdir) and skip_curdir: - print('Skipping current directory') + if quiet < 2: + print('Skipping current directory') else: success = success and compile_dir(dir, maxlevels, None, force, quiet=quiet, @@ -169,10 +204,15 @@ def main(): parser.add_argument('-l', action='store_const', const=0, default=10, dest='maxlevels', help="don't recurse into subdirectories") + parser.add_argument('-r', type=int, dest='recursion', + help=('control the maximum recursion level. 
' + 'if `-l` and `-r` options are specified, ' + 'then `-r` takes precedence.')) parser.add_argument('-f', action='store_true', dest='force', help='force rebuild even if timestamps are up to date') - parser.add_argument('-q', action='store_true', dest='quiet', - help='output only error messages') + parser.add_argument('-q', action='count', dest='quiet', default=0, + help='output only error messages; -qq will suppress ' + 'the error messages as well.') parser.add_argument('-b', action='store_true', dest='legacy', help='use legacy (pre-PEP3147) compiled file locations') parser.add_argument('-d', metavar='DESTDIR', dest='ddir', default=None, @@ -192,8 +232,10 @@ def main(): help=('zero or more file and directory names ' 'to compile; if no arguments given, defaults ' 'to the equivalent of -l sys.path')) - args = parser.parse_args() + parser.add_argument('-j', '--workers', default=1, + type=int, help='Run compileall concurrently') + args = parser.parse_args() compile_dests = args.compile_dest if (args.ddir and (len(compile_dests) != 1 @@ -203,6 +245,12 @@ def main(): import re args.rx = re.compile(args.rx) + + if args.recursion is not None: + maxlevels = args.recursion + else: + maxlevels = args.maxlevels + # if flist is provided then load it if args.flist: try: @@ -210,9 +258,13 @@ def main(): for line in f: compile_dests.append(line.strip()) except OSError: - print("Error reading file list {}".format(args.flist)) + if args.quiet < 2: + print("Error reading file list {}".format(args.flist)) return False + if args.workers is not None: + args.workers = args.workers or None + success = True try: if compile_dests: @@ -222,16 +274,17 @@ def main(): args.quiet, args.legacy): success = False else: - if not compile_dir(dest, args.maxlevels, args.ddir, + if not compile_dir(dest, maxlevels, args.ddir, args.force, args.rx, args.quiet, - args.legacy): + args.legacy, workers=args.workers): success = False return success else: return compile_path(legacy=args.legacy, force=args.force, quiet=args.quiet) except KeyboardInterrupt: - print("\n[interrupted]") + if args.quiet < 2: + print("\n[interrupted]") return False return True diff --git a/Darwin/lib/python3.4/concurrent/__init__.py b/Darwin/lib/python3.5/concurrent/__init__.py similarity index 100% rename from Darwin/lib/python3.4/concurrent/__init__.py rename to Darwin/lib/python3.5/concurrent/__init__.py diff --git a/Darwin/lib/python3.4/concurrent/futures/__init__.py b/Darwin/lib/python3.5/concurrent/futures/__init__.py similarity index 100% rename from Darwin/lib/python3.4/concurrent/futures/__init__.py rename to Darwin/lib/python3.5/concurrent/futures/__init__.py diff --git a/Darwin/lib/python3.4/concurrent/futures/_base.py b/Darwin/lib/python3.5/concurrent/futures/_base.py similarity index 96% rename from Darwin/lib/python3.4/concurrent/futures/_base.py rename to Darwin/lib/python3.5/concurrent/futures/_base.py index acd05d0..9e44713 100644 --- a/Darwin/lib/python3.4/concurrent/futures/_base.py +++ b/Darwin/lib/python3.5/concurrent/futures/_base.py @@ -302,17 +302,20 @@ class Future(object): with self._condition: if self._state == FINISHED: if self._exception: - return '' % ( - hex(id(self)), + return '<%s at %#x state=%s raised %s>' % ( + self.__class__.__name__, + id(self), _STATE_TO_DESCRIPTION_MAP[self._state], self._exception.__class__.__name__) else: - return '' % ( - hex(id(self)), + return '<%s at %#x state=%s returned %s>' % ( + self.__class__.__name__, + id(self), _STATE_TO_DESCRIPTION_MAP[self._state], self._result.__class__.__name__) - 
return '' % ( - hex(id(self)), + return '<%s at %#x state=%s>' % ( + self.__class__.__name__, + id(self), _STATE_TO_DESCRIPTION_MAP[self._state]) def cancel(self): @@ -517,7 +520,7 @@ class Executor(object): """ raise NotImplementedError() - def map(self, fn, *iterables, timeout=None): + def map(self, fn, *iterables, timeout=None, chunksize=1): """Returns a iterator equivalent to map(fn, iter). Args: @@ -525,6 +528,10 @@ class Executor(object): passed iterables. timeout: The maximum number of seconds to wait. If None, then there is no limit on the wait time. + chunksize: The size of the chunks the iterable will be broken into + before being passed to a child process. This argument is only + used by ProcessPoolExecutor; it is ignored by + ThreadPoolExecutor. Returns: An iterator equivalent to: map(func, *iterables) but the calls may diff --git a/Darwin/lib/python3.4/concurrent/futures/process.py b/Darwin/lib/python3.5/concurrent/futures/process.py similarity index 86% rename from Darwin/lib/python3.4/concurrent/futures/process.py rename to Darwin/lib/python3.5/concurrent/futures/process.py index 07b5225..3dd6da1 100644 --- a/Darwin/lib/python3.4/concurrent/futures/process.py +++ b/Darwin/lib/python3.5/concurrent/futures/process.py @@ -55,6 +55,9 @@ from multiprocessing import SimpleQueue from multiprocessing.connection import wait import threading import weakref +from functools import partial +import itertools +import traceback # Workers are created as daemon threads and processes. This is done to allow the # interpreter to exit when there are still idle processes in a @@ -88,6 +91,27 @@ def _python_exit(): # (Futures in the call queue cannot be cancelled). EXTRA_QUEUED_CALLS = 1 +# Hack to embed stringification of remote traceback in local traceback + +class _RemoteTraceback(Exception): + def __init__(self, tb): + self.tb = tb + def __str__(self): + return self.tb + +class _ExceptionWithTraceback: + def __init__(self, exc, tb): + tb = traceback.format_exception(type(exc), exc, tb) + tb = ''.join(tb) + self.exc = exc + self.tb = '\n"""\n%s"""' % tb + def __reduce__(self): + return _rebuild_exc, (self.exc, self.tb) + +def _rebuild_exc(exc, tb): + exc.__cause__ = _RemoteTraceback(tb) + return exc + class _WorkItem(object): def __init__(self, future, fn, args, kwargs): self.future = future @@ -108,6 +132,26 @@ class _CallItem(object): self.args = args self.kwargs = kwargs +def _get_chunks(*iterables, chunksize): + """ Iterates over zip()ed iterables in chunks. """ + it = zip(*iterables) + while True: + chunk = tuple(itertools.islice(it, chunksize)) + if not chunk: + return + yield chunk + +def _process_chunk(fn, chunk): + """ Processes a chunk of an iterable passed to map. + + Runs the function passed to map() on a chunk of the + iterable passed to map. + + This function is run in a separate process. + + """ + return [fn(*args) for args in chunk] + def _process_worker(call_queue, result_queue): """Evaluates calls from call_queue and places the results in result_queue. 
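The _RemoteTraceback/_ExceptionWithTraceback pair above lets an exception raised in a worker process re-raise in the parent with the remote traceback attached as __cause__; a small sketch assuming Python 3.5 (the fail() helper is illustrative):

from concurrent.futures import ProcessPoolExecutor

def fail(x):
    raise ValueError(x)

if __name__ == '__main__':
    with ProcessPoolExecutor(max_workers=1) as ex:
        try:
            ex.submit(fail, 42).result()
        except ValueError as err:
            # err.__cause__ carries the stringified traceback from the worker process.
            print(err.__cause__)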
@@ -130,8 +174,8 @@ def _process_worker(call_queue, result_queue): try: r = call_item.fn(*call_item.args, **call_item.kwargs) except BaseException as e: - result_queue.put(_ResultItem(call_item.work_id, - exception=e)) + exc = _ExceptionWithTraceback(e, e.__traceback__) + result_queue.put(_ResultItem(call_item.work_id, exception=exc)) else: result_queue.put(_ResultItem(call_item.work_id, result=r)) @@ -334,6 +378,9 @@ class ProcessPoolExecutor(_base.Executor): if max_workers is None: self._max_workers = os.cpu_count() or 1 else: + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + self._max_workers = max_workers # Make the call queue slightly larger than the number of processes to @@ -408,6 +455,35 @@ class ProcessPoolExecutor(_base.Executor): return f submit.__doc__ = _base.Executor.submit.__doc__ + def map(self, fn, *iterables, timeout=None, chunksize=1): + """Returns a iterator equivalent to map(fn, iter). + + Args: + fn: A callable that will take as many arguments as there are + passed iterables. + timeout: The maximum number of seconds to wait. If None, then there + is no limit on the wait time. + chunksize: If greater than one, the iterables will be chopped into + chunks of size chunksize and submitted to the process pool. + If set to one, the items in the list will be sent one at a time. + + Returns: + An iterator equivalent to: map(func, *iterables) but the calls may + be evaluated out-of-order. + + Raises: + TimeoutError: If the entire result iterator could not be generated + before the given timeout. + Exception: If fn(*args) raises for any values. + """ + if chunksize < 1: + raise ValueError("chunksize must be >= 1.") + + results = super().map(partial(_process_chunk, fn), + _get_chunks(*iterables, chunksize=chunksize), + timeout=timeout) + return itertools.chain.from_iterable(results) + def shutdown(self, wait=True): with self._shutdown_lock: self._shutdown_thread = True diff --git a/Darwin/lib/python3.4/concurrent/futures/thread.py b/Darwin/lib/python3.5/concurrent/futures/thread.py similarity index 92% rename from Darwin/lib/python3.4/concurrent/futures/thread.py rename to Darwin/lib/python3.5/concurrent/futures/thread.py index f9beb0f..3ae442d 100644 --- a/Darwin/lib/python3.4/concurrent/futures/thread.py +++ b/Darwin/lib/python3.5/concurrent/futures/thread.py @@ -10,6 +10,7 @@ from concurrent.futures import _base import queue import threading import weakref +import os # Workers are created as daemon threads. This is done to allow the interpreter # to exit when there are still idle threads in a ThreadPoolExecutor's thread @@ -80,13 +81,20 @@ def _worker(executor_reference, work_queue): _base.LOGGER.critical('Exception in worker', exc_info=True) class ThreadPoolExecutor(_base.Executor): - def __init__(self, max_workers): + def __init__(self, max_workers=None): """Initializes a new ThreadPoolExecutor instance. Args: max_workers: The maximum number of threads that can be used to execute the given calls. """ + if max_workers is None: + # Use this number because ThreadPoolExecutor is often + # used to overlap I/O instead of CPU work. 
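A usage sketch of the new chunksize parameter to ProcessPoolExecutor.map() shown above; batching items reduces per-item pickling and IPC overhead for long iterables (square() is an illustrative helper, assuming Python 3.5):

from concurrent.futures import ProcessPoolExecutor

def square(n):
    return n * n

if __name__ == '__main__':
    with ProcessPoolExecutor() as ex:
        # Items are dispatched to workers in batches of 100 instead of one at a time.
        results = list(ex.map(square, range(1000), chunksize=100))
    print(results[:5])   # [0, 1, 4, 9, 16]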
+ max_workers = (os.cpu_count() or 1) * 5 + if max_workers <= 0: + raise ValueError("max_workers must be greater than 0") + self._max_workers = max_workers self._work_queue = queue.Queue() self._threads = set() diff --git a/Darwin/lib/python3.4/config-3.4m/Makefile b/Darwin/lib/python3.5/config-3.5m/Makefile similarity index 84% rename from Darwin/lib/python3.4/config-3.4m/Makefile rename to Darwin/lib/python3.5/config-3.5m/Makefile index 24f8953..dfd693e 100644 --- a/Darwin/lib/python3.4/config-3.4m/Makefile +++ b/Darwin/lib/python3.5/config-3.5m/Makefile @@ -21,25 +21,25 @@ # === Variables set by makesetup === -MODOBJS= Modules/_threadmodule.o Modules/signalmodule.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/_operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/atexitmodule.o Modules/_stat.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/_tracemalloc.o Modules/hashtable.o Modules/symtablemodule.o Modules/xxsubtype.o +MODOBJS= Modules/_threadmodule.o Modules/signalmodule.o Modules/posixmodule.o Modules/errnomodule.o Modules/pwdmodule.o Modules/_sre.o Modules/_codecsmodule.o Modules/_weakref.o Modules/_functoolsmodule.o Modules/_operator.o Modules/_collectionsmodule.o Modules/itertoolsmodule.o Modules/atexitmodule.o Modules/_stat.o Modules/timemodule.o Modules/_localemodule.o Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o Modules/zipimport.o Modules/faulthandler.o Modules/_tracemalloc.o Modules/hashtable.o Modules/symtablemodule.o Modules/xxsubtype.o MODLIBS= $(LOCALMODLIBS) $(BASEMODLIBS) # === Variables set by configure -VERSION= 3.4 -srcdir= /Users/build/platform_darwin/src/Python-3.4.1 -VPATH= /Users/build/platform_darwin/src/Python-3.4.1 -abs_srcdir= /Users/build/platform_darwin/src/Python-3.4.1 -abs_builddir= /Users/build/platform_darwin/build/Python/Python-3.4.1 +VERSION= 3.5 +srcdir= . + +abs_srcdir= /private/tmp/python320151125-76692-lzmenz/Python-3.5.0 +abs_builddir= /private/tmp/python320151125-76692-lzmenz/Python-3.5.0 -CC= gcc -CXX= g++ +CC= clang +CXX= clang++ MAINCC= $(CC) LINKCC= $(PURIFY) $(MAINCC) AR= ar RANLIB= ranlib READELF= : -SOABI= cpython-34m +SOABI= cpython-35m-darwin LDVERSION= $(VERSION)$(ABIFLAGS) HGVERSION= HGTAG= @@ -63,21 +63,26 @@ INSTALL_DATA= ${INSTALL} -m 644 # Also, making them read-only seems to be a good idea... INSTALL_SHARED= ${INSTALL} -m 555 -MKDIR_P= /Users/build/platform_darwin/src/Python-3.4.1/install-sh -c -d +MKDIR_P= ./install-sh -c -d MAKESETUP= $(srcdir)/Modules/makesetup # Compiler options OPT= -DNDEBUG -g -fwrapv -O3 -Wall -Wstrict-prototypes -BASECFLAGS= -fno-strict-aliasing -Werror=declaration-after-statement +BASECFLAGS= -Wno-unused-result -Wsign-compare -Wunreachable-code -fno-common -dynamic BASECPPFLAGS= CONFIGURE_CFLAGS= +# CFLAGS_NODIST is used for building the interpreter and stdlib C extensions. +# Use it when a compiler flag should _not_ be part of the distutils CFLAGS +# once Python is installed (Issue #21121). +CONFIGURE_CFLAGS_NODIST= -Werror=declaration-after-statement CONFIGURE_CPPFLAGS= CONFIGURE_LDFLAGS= # Avoid assigning CFLAGS, LDFLAGS, etc. so users can use them on the # command line to append to these values without stomping the pre-set # values. 
PY_CFLAGS= $(BASECFLAGS) $(OPT) $(CONFIGURE_CFLAGS) $(CFLAGS) $(EXTRA_CFLAGS) +PY_CFLAGS_NODIST=$(CONFIGURE_CFLAGS_NODIST) $(CFLAGS_NODIST) # Both CPPFLAGS and LDFLAGS need to contain the shell's value for setup.py to # be able to build extension modules using the directories specified in the # environment variables @@ -87,12 +92,12 @@ NO_AS_NEEDED= LDLAST= SGI_ABI= CCSHARED= -LINKFORSHARED= -Wl,-stack_size,1000000 -framework CoreFoundation +LINKFORSHARED=-u _PyMac_Error /Users/build/.local/opt/python3/Frameworks/Python.framework/Versions/3.5/Python ARFLAGS= rc # Extra C flags added for building the interpreter object files. -CFLAGSFORSHARED= +CFLAGSFORSHARED=$(CCSHARED) # C flags used for building the interpreter object files -PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE +PY_CORE_CFLAGS= $(PY_CFLAGS) $(PY_CFLAGS_NODIST) $(PY_CPPFLAGS) $(CFLAGSFORSHARED) -DPy_BUILD_CORE # Machine-dependent subdirectories @@ -102,13 +107,13 @@ MACHDEP= darwin MULTIARCH= # Install prefix for architecture-independent files -prefix= /Users/build/platform_darwin +prefix= /Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5 # Install prefix for architecture-dependent files exec_prefix= ${prefix} # Install prefix for data files -datarootdir= ${prefix}/share +datarootdir= /Users/build/.local/Cellar/python3/3.5.0/share # Expanded directories BINDIR= ${exec_prefix}/bin @@ -127,7 +132,7 @@ CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION) # Symbols used for using shared libraries SHLIB_SUFFIX= .so -EXT_SUFFIX= .so +EXT_SUFFIX= .cpython-35m-darwin.so LDSHARED= $(CC) -bundle -undefined dynamic_lookup $(PY_LDFLAGS) BLDSHARED= $(CC) -bundle -undefined dynamic_lookup $(PY_LDFLAGS) LDCXXSHARED= $(CXX) -bundle -undefined dynamic_lookup @@ -139,14 +144,14 @@ BUILDEXE= .exe # Short name and location for Mac OS X Python framework UNIVERSALSDK= -PYTHONFRAMEWORK= -PYTHONFRAMEWORKDIR= no-framework -PYTHONFRAMEWORKPREFIX= -PYTHONFRAMEWORKINSTALLDIR= +PYTHONFRAMEWORK= Python +PYTHONFRAMEWORKDIR= Python.framework +PYTHONFRAMEWORKPREFIX= /Users/build/.local/Cellar/python3/3.5.0/Frameworks +PYTHONFRAMEWORKINSTALLDIR= /Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework # Deployment target selected during configure, to be checked # by distutils. The export statement is needed to ensure that the # deployment target is active during build. -MACOSX_DEPLOYMENT_TARGET=10.6 +MACOSX_DEPLOYMENT_TARGET=10.11 export MACOSX_DEPLOYMENT_TARGET # Option to install to strip binaries @@ -159,10 +164,10 @@ LIPO_32BIT_FLAGS= OTHER_LIBTOOL_OPT= # Environment to run shared python without installed libraries -RUNSHARED= +RUNSHARED= DYLD_FRAMEWORK_PATH=/private/tmp/python320151125-76692-lzmenz/Python-3.5.0 # ensurepip options -ENSUREPIP= upgrade +ENSUREPIP= no # Modes for directories, executables and data files created by the # install process. Default to user-only-writable for all file types. 
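The ABI-tagged EXT_SUFFIX and the deployment target set above are exposed to Python through sysconfig, which reads this Makefile on POSIX builds; a quick sketch, with the values this particular build would report shown in comments:

import sysconfig

print(sysconfig.get_config_var('EXT_SUFFIX'))                 # '.cpython-35m-darwin.so'
print(sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET'))   # '10.11'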
@@ -171,11 +176,11 @@ EXEMODE= 755 FILEMODE= 644 # configure script arguments -CONFIG_ARGS= '--prefix=/Users/build/platform_darwin' +CONFIG_ARGS= '--prefix=/Users/build/.local/Cellar/python3/3.5.0' '--enable-ipv6' '--datarootdir=/Users/build/.local/Cellar/python3/3.5.0/share' '--datadir=/Users/build/.local/Cellar/python3/3.5.0/share' '--enable-framework=/Users/build/.local/Cellar/python3/3.5.0/Frameworks' '--without-ensurepip' '--without-gcc' 'MACOSX_DEPLOYMENT_TARGET=10.11' 'CC=clang' 'PKG_CONFIG_PATH=/Users/build/.local/opt/xz/lib/pkgconfig:/Users/build/.local/opt/sqlite/lib/pkgconfig:/Users/build/.local/opt/openssl/lib/pkgconfig' 'PKG_CONFIG_LIBDIR=/usr/lib/pkgconfig:/Users/build/.local/Library/ENV/pkgconfig/10.11' # Subdirectories with code -SRCDIRS= Parser Grammar Objects Python Modules Mac +SRCDIRS= Parser Grammar Objects Python Modules Mac Programs # Other subdirectories SUBDIRSTOO= Include Lib Misc @@ -188,8 +193,8 @@ DIST= $(DISTFILES) $(DISTDIRS) LIBRARY= libpython$(VERSION)$(ABIFLAGS).a -LDLIBRARY= libpython$(VERSION)$(ABIFLAGS).a -BLDLIBRARY= $(LDLIBRARY) +LDLIBRARY= $(PYTHONFRAMEWORKDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK) +BLDLIBRARY= PY3LIBRARY= DLLLIBRARY= LDLIBRARYDIR= @@ -214,8 +219,8 @@ BUILDPYTHON= python$(BUILDEXE) PYTHON_FOR_BUILD=./$(BUILDPYTHON) -E _PYTHON_HOST_PLATFORM= -BUILD_GNU_TYPE= i386-apple-darwin10.8.0 -HOST_GNU_TYPE= i386-apple-darwin10.8.0 +BUILD_GNU_TYPE= x86_64-apple-darwin15.2.0 +HOST_GNU_TYPE= x86_64-apple-darwin15.2.0 # Tcl and Tk config info from --with-tcltk-includes and -libs options TCLTK_INCLUDES= @@ -233,12 +238,12 @@ COVERAGE_REPORT_OPTIONS=--no-branch-coverage --title "CPython lcov report" # === Definitions added by makesetup === -LOCALMODLIBS= +LOCALMODLIBS= BASEMODLIBS= PYTHONPATH=$(COREPYTHONPATH) COREPYTHONPATH=$(DESTPATH)$(SITEPATH)$(TESTPATH)$(MACHDEPPATH)$(EXTRAMACHDEPPATH) EXTRAMACHDEPPATH= -MACHDEPPATH=:plat-$(MACHDEP) +MACHDEPPATH=:$(PLATDIR) TESTPATH= SITEPATH= DESTPATH= @@ -335,6 +340,13 @@ PARSER_HEADERS= \ PGENSRCS= $(PSRCS) $(PGSRCS) PGENOBJS= $(POBJS) $(PGOBJS) +########################################################################## +# opcode.h generation +OPCODE_H_DIR= $(srcdir)/Include +OPCODE_H_SCRIPT= $(srcdir)/Tools/scripts/generate_opcode_h.py +OPCODE_H= $(OPCODE_H_DIR)/opcode.h +OPCODE_H_GEN= python $(OPCODE_H_SCRIPT) $(srcdir)/Lib/opcode.py $(OPCODE_H) +# ########################################################################## # AST AST_H_DIR= Include @@ -344,7 +356,8 @@ AST_C= $(AST_C_DIR)/Python-ast.c AST_ASDL= $(srcdir)/Parser/Python.asdl ASDLGEN_FILES= $(srcdir)/Parser/asdl.py $(srcdir)/Parser/asdl_c.py -# XXX Note that a build now requires Python exist before the build starts +# Note that a build now requires Python to exist before the build starts. +# Use "hg touch" to fix up screwed up file mtimes in a checkout. 
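With the OPCODE_H rule above, Include/opcode.h is regenerated from Lib/opcode.py, so the Python-level opcode module is the single source of truth for opcode numbers; a quick check, offered as a sketch rather than part of the build:

import opcode

# The same name/number pairs that generate_opcode_h.py writes into Include/opcode.h.
print(opcode.opmap['POP_TOP'])                  # 1
print(opcode.opname[opcode.opmap['POP_TOP']])   # 'POP_TOP'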
ASDLGEN= python $(srcdir)/Parser/asdl_c.py ########################################################################## @@ -389,6 +402,7 @@ PYTHON_OBJS= \ Python/pyctype.o \ Python/pyfpe.o \ Python/pyhash.o \ + Python/pylifecycle.o \ Python/pymath.o \ Python/pystate.o \ Python/pythonrun.o \ @@ -401,6 +415,7 @@ PYTHON_OBJS= \ Python/getopt.o \ Python/pystrcmp.o \ Python/pystrtod.o \ + Python/pystrhex.o \ Python/dtoa.o \ Python/formatter_unicode.o \ Python/fileutils.o \ @@ -435,6 +450,7 @@ OBJECT_OBJS= \ Objects/listobject.o \ Objects/longobject.o \ Objects/dictobject.o \ + Objects/odictobject.o \ Objects/memoryobject.o \ Objects/methodobject.o \ Objects/moduleobject.o \ @@ -472,7 +488,7 @@ LIBRARY_OBJS= \ # Default target all: build_all -build_all: $(BUILDPYTHON) oldsharedmods sharedmods gdbhooks Modules/_testembed python-config +build_all: $(BUILDPYTHON) oldsharedmods sharedmods gdbhooks Programs/_testembed python-config # Compile a binary with gcc profile guided optimization. profile-opt: @@ -487,14 +503,14 @@ profile-opt: $(MAKE) build_all_use_profile build_all_generate_profile: - $(MAKE) all CFLAGS="$(CFLAGS) -fprofile-generate" LIBS="$(LIBS) -lgcov" + $(MAKE) all CFLAGS_NODIST="$(CFLAGS) -fprofile-generate" LDFLAGS="-fprofile-generate" LIBS="$(LIBS) -lgcov" run_profile_task: : # FIXME: can't run for a cross build $(RUNSHARED) ./$(BUILDPYTHON) $(PROFILE_TASK) build_all_use_profile: - $(MAKE) all CFLAGS="$(CFLAGS) -fprofile-use -fprofile-correction" + $(MAKE) all CFLAGS_NODIST="$(CFLAGS) -fprofile-use -fprofile-correction" # Compile and run with gcov .PHONY=coverage coverage-lcov coverage-report @@ -531,6 +547,7 @@ coverage-report: : # force rebuilding of parser and importlib @touch $(GRAMMAR_INPUT) @touch $(srcdir)/Lib/importlib/_bootstrap.py + @touch $(srcdir)/Lib/importlib/_bootstrap_external.py : # build with coverage info $(MAKE) coverage : # run tests, ignore failures @@ -545,8 +562,8 @@ clinic: $(BUILDPYTHON) $(RUNSHARED) $(PYTHON_FOR_BUILD) ./Tools/clinic/clinic.py --make # Build the interpreter -$(BUILDPYTHON): Modules/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) - $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Modules/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) +$(BUILDPYTHON): Programs/python.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/python.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) platform: $(BUILDPYTHON) pybuilddir.txt $(RUNSHARED) $(PYTHON_FOR_BUILD) -c 'import sys ; from sysconfig import get_platform ; print(get_platform()+"-"+sys.version[0:3])' >platform @@ -554,8 +571,18 @@ platform: $(BUILDPYTHON) pybuilddir.txt # Create build directory and generate the sysconfig build-time data there. # pybuilddir.txt contains the name of the build dir and is used for # sys.path fixup -- see Modules/getpath.c. +# Since this step runs before shared modules are built, try to avoid bootstrap +# problems by creating a dummy pybuilddir.txt just to allow interpreter +# initialization to succeed. It will be overwritten by generate-posix-vars +# or removed in case of failure. pybuilddir.txt: $(BUILDPYTHON) - $(RUNSHARED) $(PYTHON_FOR_BUILD) -S -m sysconfig --generate-posix-vars + @echo "none" > ./pybuilddir.txt + $(RUNSHARED) $(PYTHON_FOR_BUILD) -S -m sysconfig --generate-posix-vars ;\ + if test $$? 
-ne 0 ; then \ + echo "generate-posix-vars failed" ; \ + rm -f ./pybuilddir.txt ; \ + exit 1 ; \ + fi # Build the shared modules # Under GNU make, MAKEFLAGS are sorted and normalized; the 's' for @@ -671,18 +698,23 @@ Modules/Setup: $(srcdir)/Modules/Setup.dist echo "-----------------------------------------------"; \ fi -Modules/_testembed: Modules/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) - $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Modules/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) +Programs/_testembed: Programs/_testembed.o $(LIBRARY) $(LDLIBRARY) $(PY3LIBRARY) + $(LINKCC) $(PY_LDFLAGS) $(LINKFORSHARED) -o $@ Programs/_testembed.o $(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) ############################################################################ # Importlib -Modules/_freeze_importlib: Modules/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) - $(LINKCC) $(PY_LDFLAGS) -o $@ Modules/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) +Programs/_freeze_importlib.o: Programs/_freeze_importlib.c Makefile -Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py Modules/_freeze_importlib.c - $(MAKE) Modules/_freeze_importlib - ./Modules/_freeze_importlib \ +Programs/_freeze_importlib: Programs/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) + $(LINKCC) $(PY_LDFLAGS) -o $@ Programs/_freeze_importlib.o $(LIBRARY_OBJS_OMIT_FROZEN) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST) + +Python/importlib_external.h: $(srcdir)/Lib/importlib/_bootstrap_external.py Programs/_freeze_importlib + ./Programs/_freeze_importlib \ + $(srcdir)/Lib/importlib/_bootstrap_external.py Python/importlib_external.h + +Python/importlib.h: $(srcdir)/Lib/importlib/_bootstrap.py Programs/_freeze_importlib + ./Programs/_freeze_importlib \ $(srcdir)/Lib/importlib/_bootstrap.py Python/importlib.h @@ -710,11 +742,11 @@ Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile -DVPATH='"$(VPATH)"' \ -o $@ $(srcdir)/Modules/getpath.c -Modules/python.o: $(srcdir)/Modules/python.c - $(MAINCC) -c $(PY_CORE_CFLAGS) -o $@ $(srcdir)/Modules/python.c +Programs/python.o: $(srcdir)/Programs/python.c + $(MAINCC) -c $(PY_CORE_CFLAGS) -o $@ $(srcdir)/Programs/python.c -Modules/_testembed.o: $(srcdir)/Modules/_testembed.c - $(MAINCC) -c $(PY_CORE_CFLAGS) -o $@ $(srcdir)/Modules/_testembed.c +Programs/_testembed.o: $(srcdir)/Programs/_testembed.c + $(MAINCC) -c $(PY_CORE_CFLAGS) -o $@ $(srcdir)/Programs/_testembed.c Modules/_sre.o: $(srcdir)/Modules/_sre.c $(srcdir)/Modules/sre.h $(srcdir)/Modules/sre_constants.h $(srcdir)/Modules/sre_lib.h @@ -743,15 +775,13 @@ Python/sysmodule.o: $(srcdir)/Python/sysmodule.c Makefile $(IO_OBJS): $(IO_H) -$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGENSRCS) +$(GRAMMAR_H): $(GRAMMAR_INPUT) $(PGEN) @$(MKDIR_P) Include - $(MAKE) $(PGEN) $(PGEN) $(GRAMMAR_INPUT) $(GRAMMAR_H) $(GRAMMAR_C) -$(GRAMMAR_C): $(GRAMMAR_H) $(GRAMMAR_INPUT) $(PGENSRCS) - $(MAKE) $(GRAMMAR_H) +$(GRAMMAR_C): $(GRAMMAR_H) touch $(GRAMMAR_C) -$(PGEN): $(PGENOBJS) +$(PGEN): $(PGENOBJS) $(CC) $(OPT) $(PY_LDFLAGS) $(PGENOBJS) $(LIBS) -o $(PGEN) Parser/grammar.o: $(srcdir)/Parser/grammar.c \ @@ -773,6 +803,9 @@ $(AST_C): $(AST_H) $(AST_ASDL) $(ASDLGEN_FILES) $(MKDIR_P) $(AST_C_DIR) $(ASDLGEN) -c $(AST_C_DIR) $(AST_ASDL) +$(OPCODE_H): $(srcdir)/Lib/opcode.py $(OPCODE_H_SCRIPT) + $(OPCODE_H_GEN) + Python/compile.o Python/symtable.o Python/ast.o: $(GRAMMAR_H) $(AST_H) Python/getplatform.o: $(srcdir)/Python/getplatform.c @@ -827,7 +860,7 @@ $(OPCODETARGETS_H): $(OPCODETARGETGEN_FILES) 
Python/ceval.o: $(OPCODETARGETS_H) $(srcdir)/Python/ceval_gil.h -Python/frozen.o: Python/importlib.h +Python/frozen.o: Python/importlib.h Python/importlib_external.h Objects/typeobject.o: Objects/typeslots.inc Objects/typeslots.inc: $(srcdir)/Include/typeslots.h $(srcdir)/Objects/typeslots.py @@ -884,7 +917,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/node.h \ $(srcdir)/Include/object.h \ $(srcdir)/Include/objimpl.h \ - $(srcdir)/Include/opcode.h \ + $(OPCODE_H) \ $(srcdir)/Include/osdefs.h \ $(srcdir)/Include/patchlevel.h \ $(srcdir)/Include/pgen.h \ @@ -897,6 +930,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/pyerrors.h \ $(srcdir)/Include/pyfpe.h \ $(srcdir)/Include/pyhash.h \ + $(srcdir)/Include/pylifecycle.h \ $(srcdir)/Include/pymath.h \ $(srcdir)/Include/pygetopt.h \ $(srcdir)/Include/pymacro.h \ @@ -905,6 +939,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/pystate.h \ $(srcdir)/Include/pystrcmp.h \ $(srcdir)/Include/pystrtod.h \ + $(srcdir)/Include/pystrhex.h \ $(srcdir)/Include/pythonrun.h \ $(srcdir)/Include/pythread.h \ $(srcdir)/Include/pytime.h \ @@ -925,7 +960,7 @@ PYTHON_HEADERS= \ $(PARSER_HEADERS) \ $(AST_H) -$(LIBRARY_OBJS) $(MODOBJS) Modules/python.o: $(PYTHON_HEADERS) +$(LIBRARY_OBJS) $(MODOBJS) Programs/python.o: $(PYTHON_HEADERS) ###################################################################### @@ -983,7 +1018,7 @@ quicktest: all platform $(TESTRUNNER) $(QUICKTESTOPTS) -install: commoninstall bininstall maninstall +install: frameworkinstallstructure commoninstall bininstall maninstall frameworkinstallmaclib frameworkinstallapps frameworkinstallunixtools if test "x$(ENSUREPIP)" != "xno" ; then \ case $(ENSUREPIP) in \ upgrade) ensurepip="--upgrade" ;; \ @@ -1003,10 +1038,10 @@ altinstall: commoninstall $$ensurepip --root=$(DESTDIR)/ ; \ fi -commoninstall: \ +commoninstall: frameworkinstallstructure \ altbininstall libinstall inclinstall libainstall \ sharedinstall oldsharedinstall altmaninstall \ - + frameworkinstallmaclib frameworkinstallapps frameworkaltinstallunixtools # Install shared libraries enabled by Setup DESTDIRS= $(exec_prefix) $(LIBDIR) $(BINLIBDEST) $(DESTSHARED) @@ -1031,7 +1066,7 @@ $(DESTSHARED): # Install the interpreter with $(VERSION) affixed # This goes into $(exec_prefix) -altbininstall: $(BUILDPYTHON) +altbininstall: $(BUILDPYTHON) frameworkpythonw @for i in $(BINDIR) $(LIBDIR); \ do \ if test ! 
-d $(DESTDIR)$$i; then \ @@ -1120,18 +1155,22 @@ maninstall: altmaninstall (cd $(DESTDIR)$(MANDIR)/man1; $(LN) -s python$(VERSION).1 python3.1) # Install the library -PLATDIR= plat-$(MACHDEP) -EXTRAPLATDIR= @EXTRAPLATDIR@ -MACHDEPS= $(PLATDIR) $(EXTRAPLATDIR) +PLATDIR= plat-darwin +MACHDEPS= $(PLATDIR) XMLLIBSUBDIRS= xml xml/dom xml/etree xml/parsers xml/sax LIBSUBDIRS= tkinter tkinter/test tkinter/test/test_tkinter \ tkinter/test/test_ttk site-packages test \ test/audiodata \ test/capath test/data \ test/cjkencodings test/decimaltestdata test/xmltestdata \ + test/eintrdata \ test/imghdrdata \ test/subprocessdata test/sndhdrdata test/support \ test/tracedmodules test/encoded_modules \ + test/test_import \ + test/test_import/data \ + test/test_import/data/circular_imports \ + test/test_import/data/circular_imports/subpkg \ test/test_importlib/namespace_pkgs \ test/test_importlib/namespace_pkgs/both_portions \ test/test_importlib/namespace_pkgs/both_portions/foo \ @@ -1244,7 +1283,12 @@ libinstall: build_all $(srcdir)/Lib/$(PLATDIR) $(srcdir)/Modules/xxmodule.c -d $(LIBDEST) -f \ -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \ $(DESTDIR)$(LIBDEST) - -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ + -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ + $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ + -d $(LIBDEST) -f \ + -x 'bad_coding|badsyntax|site-packages|lib2to3/tests/data' \ + $(DESTDIR)$(LIBDEST) + -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -Wi $(DESTDIR)$(LIBDEST)/compileall.py \ -d $(LIBDEST)/site-packages -f \ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages @@ -1252,6 +1296,10 @@ libinstall: build_all $(srcdir)/Lib/$(PLATDIR) $(srcdir)/Modules/xxmodule.c $(PYTHON_FOR_BUILD) -Wi -O $(DESTDIR)$(LIBDEST)/compileall.py \ -d $(LIBDEST)/site-packages -f \ -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages + -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ + $(PYTHON_FOR_BUILD) -Wi -OO $(DESTDIR)$(LIBDEST)/compileall.py \ + -d $(LIBDEST)/site-packages -f \ + -x badsyntax $(DESTDIR)$(LIBDEST)/site-packages -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ $(PYTHON_FOR_BUILD) -m lib2to3.pgen2.driver $(DESTDIR)$(LIBDEST)/lib2to3/Grammar.txt -PYTHONPATH=$(DESTDIR)$(LIBDEST) $(RUNSHARED) \ @@ -1308,7 +1356,7 @@ inclinstall: # Install the library and miscellaneous stuff needed for extending/embedding # This goes into $(exec_prefix) -LIBPL= $(LIBDEST)/config-$(LDVERSION) +LIBPL= $(prefix)/lib/python3.5/config-$(VERSION)$(ABIFLAGS) # pkgconfig directory LIBPC= $(LIBDIR)/pkgconfig @@ -1335,7 +1383,7 @@ libainstall: all python-config fi; \ fi $(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c - $(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o + $(INSTALL_DATA) Programs/python.o $(DESTDIR)$(LIBPL)/python.o $(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in $(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile $(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup @@ -1346,10 +1394,10 @@ libainstall: all python-config $(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh $(INSTALL_SCRIPT) python-config.py $(DESTDIR)$(LIBPL)/python-config.py $(INSTALL_SCRIPT) python-config $(DESTDIR)$(BINDIR)/python$(LDVERSION)-config - @if [ -s Modules/python.exp -a \ + @if [ -s Programs/python.exp -a \ "`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \ echo; echo "Installing support files for building shared extension modules on AIX:"; \ - $(INSTALL_DATA) Modules/python.exp \ + $(INSTALL_DATA) 
Programs/python.exp \ $(DESTDIR)$(LIBPL)/python.exp; \ echo; echo "$(LIBPL)/python.exp"; \ $(INSTALL_SCRIPT) $(srcdir)/Modules/makexp_aix \ @@ -1498,7 +1546,10 @@ TAGS:: etags Include/*.h; \ for i in $(SRCDIRS); do etags -a $$i/*.[ch]; done -# Touch generated files +# This fixes up the mtimes of checked-in generated files, assuming that they +# only *appear* to be outdated because of checkout order. +# This is run while preparing a source release tarball, and can be run manually +# to avoid bootstrap issues. touch: cd $(srcdir); \ hg --config extensions.touch=Tools/hg/hgtouch.py touch -v @@ -1529,7 +1580,7 @@ clean: pycremoval find build -name '*.py[co]' -exec rm -f {} ';' || true -rm -f pybuilddir.txt -rm -f Lib/lib2to3/*Grammar*.pickle - -rm -f Modules/_testembed Modules/_freeze_importlib + -rm -f Programs/_testembed Programs/_freeze_importlib profile-removal: find . -name '*.gc??' -exec rm -f {} ';' @@ -1553,7 +1604,7 @@ distclean: clobber done -rm -f core Makefile Makefile.pre config.status \ Modules/Setup Modules/Setup.local Modules/Setup.config \ - Modules/ld_so_aix Modules/python.exp Misc/python.pc + Modules/ld_so_aix Programs/python.exp Misc/python.pc -rm -f python*-gdb.py find $(srcdir)/[a-zA-Z]* '(' -name '*.fdc' -o -name '*~' \ -o -name '[@,#]*' -o -name '*.old' \ @@ -1600,7 +1651,7 @@ funny: -o -print # Perform some verification checks on any modified files. -patchcheck: +patchcheck: all $(RUNSHARED) ./$(BUILDPYTHON) $(srcdir)/Tools/scripts/patchcheck.py # Dependencies @@ -1625,35 +1676,37 @@ Python/thread.o: $(srcdir)/Python/thread_foobar.h $(srcdir)/Python/thread_nt.h # Rules appended by makedepend Modules/_threadmodule.o: $(srcdir)/Modules/_threadmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_threadmodule.c -o Modules/_threadmodule.o -Modules/_threadmodule$(SO): Modules/_threadmodule.o; $(BLDSHARED) Modules/_threadmodule.o -o Modules/_threadmodule$(SO) +Modules/_thread$(EXT_SUFFIX): Modules/_threadmodule.o; $(BLDSHARED) Modules/_threadmodule.o -o Modules/_thread$(EXT_SUFFIX) Modules/signalmodule.o: $(srcdir)/Modules/signalmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/signalmodule.c -o Modules/signalmodule.o -Modules/signalmodule$(SO): Modules/signalmodule.o; $(BLDSHARED) Modules/signalmodule.o -o Modules/signalmodule$(SO) +Modules/_signal$(EXT_SUFFIX): Modules/signalmodule.o; $(BLDSHARED) Modules/signalmodule.o -o Modules/_signal$(EXT_SUFFIX) Modules/posixmodule.o: $(srcdir)/Modules/posixmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/posixmodule.c -o Modules/posixmodule.o -Modules/posixmodule$(SO): Modules/posixmodule.o; $(BLDSHARED) Modules/posixmodule.o -o Modules/posixmodule$(SO) +Modules/posix$(EXT_SUFFIX): Modules/posixmodule.o; $(BLDSHARED) Modules/posixmodule.o -o Modules/posix$(EXT_SUFFIX) Modules/errnomodule.o: $(srcdir)/Modules/errnomodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/errnomodule.c -o Modules/errnomodule.o -Modules/errnomodule$(SO): Modules/errnomodule.o; $(BLDSHARED) Modules/errnomodule.o -o Modules/errnomodule$(SO) +Modules/errno$(EXT_SUFFIX): Modules/errnomodule.o; $(BLDSHARED) Modules/errnomodule.o -o Modules/errno$(EXT_SUFFIX) Modules/pwdmodule.o: $(srcdir)/Modules/pwdmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/pwdmodule.c -o Modules/pwdmodule.o -Modules/pwdmodule$(SO): Modules/pwdmodule.o; $(BLDSHARED) Modules/pwdmodule.o -o Modules/pwdmodule$(SO) +Modules/pwd$(EXT_SUFFIX): Modules/pwdmodule.o; $(BLDSHARED) Modules/pwdmodule.o -o Modules/pwd$(EXT_SUFFIX) Modules/_sre.o: $(srcdir)/Modules/_sre.c; $(CC) 
$(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_sre.c -o Modules/_sre.o -Modules/_sre$(SO): Modules/_sre.o; $(BLDSHARED) Modules/_sre.o -o Modules/_sre$(SO) +Modules/_sre$(EXT_SUFFIX): Modules/_sre.o; $(BLDSHARED) Modules/_sre.o -o Modules/_sre$(EXT_SUFFIX) Modules/_codecsmodule.o: $(srcdir)/Modules/_codecsmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_codecsmodule.c -o Modules/_codecsmodule.o -Modules/_codecsmodule$(SO): Modules/_codecsmodule.o; $(BLDSHARED) Modules/_codecsmodule.o -o Modules/_codecsmodule$(SO) +Modules/_codecs$(EXT_SUFFIX): Modules/_codecsmodule.o; $(BLDSHARED) Modules/_codecsmodule.o -o Modules/_codecs$(EXT_SUFFIX) Modules/_weakref.o: $(srcdir)/Modules/_weakref.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_weakref.c -o Modules/_weakref.o -Modules/_weakref$(SO): Modules/_weakref.o; $(BLDSHARED) Modules/_weakref.o -o Modules/_weakref$(SO) +Modules/_weakref$(EXT_SUFFIX): Modules/_weakref.o; $(BLDSHARED) Modules/_weakref.o -o Modules/_weakref$(EXT_SUFFIX) Modules/_functoolsmodule.o: $(srcdir)/Modules/_functoolsmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_functoolsmodule.c -o Modules/_functoolsmodule.o -Modules/_functoolsmodule$(SO): Modules/_functoolsmodule.o; $(BLDSHARED) Modules/_functoolsmodule.o -o Modules/_functoolsmodule$(SO) +Modules/_functools$(EXT_SUFFIX): Modules/_functoolsmodule.o; $(BLDSHARED) Modules/_functoolsmodule.o -o Modules/_functools$(EXT_SUFFIX) Modules/_operator.o: $(srcdir)/Modules/_operator.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_operator.c -o Modules/_operator.o -Modules/_operator$(SO): Modules/_operator.o; $(BLDSHARED) Modules/_operator.o -o Modules/_operator$(SO) +Modules/_operator$(EXT_SUFFIX): Modules/_operator.o; $(BLDSHARED) Modules/_operator.o -o Modules/_operator$(EXT_SUFFIX) Modules/_collectionsmodule.o: $(srcdir)/Modules/_collectionsmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_collectionsmodule.c -o Modules/_collectionsmodule.o -Modules/_collectionsmodule$(SO): Modules/_collectionsmodule.o; $(BLDSHARED) Modules/_collectionsmodule.o -o Modules/_collectionsmodule$(SO) +Modules/_collections$(EXT_SUFFIX): Modules/_collectionsmodule.o; $(BLDSHARED) Modules/_collectionsmodule.o -o Modules/_collections$(EXT_SUFFIX) Modules/itertoolsmodule.o: $(srcdir)/Modules/itertoolsmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/itertoolsmodule.c -o Modules/itertoolsmodule.o -Modules/itertoolsmodule$(SO): Modules/itertoolsmodule.o; $(BLDSHARED) Modules/itertoolsmodule.o -o Modules/itertoolsmodule$(SO) +Modules/itertools$(EXT_SUFFIX): Modules/itertoolsmodule.o; $(BLDSHARED) Modules/itertoolsmodule.o -o Modules/itertools$(EXT_SUFFIX) Modules/atexitmodule.o: $(srcdir)/Modules/atexitmodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/atexitmodule.c -o Modules/atexitmodule.o -Modules/atexitmodule$(SO): Modules/atexitmodule.o; $(BLDSHARED) Modules/atexitmodule.o -o Modules/atexitmodule$(SO) +Modules/atexit$(EXT_SUFFIX): Modules/atexitmodule.o; $(BLDSHARED) Modules/atexitmodule.o -o Modules/atexit$(EXT_SUFFIX) Modules/_stat.o: $(srcdir)/Modules/_stat.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_stat.c -o Modules/_stat.o -Modules/_stat$(SO): Modules/_stat.o; $(BLDSHARED) Modules/_stat.o -o Modules/_stat$(SO) +Modules/_stat$(EXT_SUFFIX): Modules/_stat.o; $(BLDSHARED) Modules/_stat.o -o Modules/_stat$(EXT_SUFFIX) +Modules/timemodule.o: $(srcdir)/Modules/timemodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/timemodule.c -o Modules/timemodule.o +Modules/time$(EXT_SUFFIX): Modules/timemodule.o; $(BLDSHARED) 
Modules/timemodule.o -o Modules/time$(EXT_SUFFIX) Modules/_localemodule.o: $(srcdir)/Modules/_localemodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_localemodule.c -o Modules/_localemodule.o -Modules/_localemodule$(SO): Modules/_localemodule.o; $(BLDSHARED) Modules/_localemodule.o -o Modules/_localemodule$(SO) +Modules/_locale$(EXT_SUFFIX): Modules/_localemodule.o; $(BLDSHARED) Modules/_localemodule.o -o Modules/_locale$(EXT_SUFFIX) Modules/_iomodule.o: $(srcdir)/Modules/_io/_iomodule.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/_iomodule.c -o Modules/_iomodule.o Modules/iobase.o: $(srcdir)/Modules/_io/iobase.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/iobase.c -o Modules/iobase.o Modules/fileio.o: $(srcdir)/Modules/_io/fileio.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/fileio.c -o Modules/fileio.o @@ -1661,15 +1714,15 @@ Modules/bytesio.o: $(srcdir)/Modules/_io/bytesio.c; $(CC) $(PY_CORE_CFLAGS) -I$ Modules/bufferedio.o: $(srcdir)/Modules/_io/bufferedio.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/bufferedio.c -o Modules/bufferedio.o Modules/textio.o: $(srcdir)/Modules/_io/textio.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/textio.c -o Modules/textio.o Modules/stringio.o: $(srcdir)/Modules/_io/stringio.c; $(CC) $(PY_CORE_CFLAGS) -I$(srcdir)/Modules/_io -c $(srcdir)/Modules/_io/stringio.c -o Modules/stringio.o -Modules/_iomodule$(SO): Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o; $(BLDSHARED) Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o -o Modules/_iomodule$(SO) +Modules/_io$(EXT_SUFFIX): Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o; $(BLDSHARED) Modules/_iomodule.o Modules/iobase.o Modules/fileio.o Modules/bytesio.o Modules/bufferedio.o Modules/textio.o Modules/stringio.o -o Modules/_io$(EXT_SUFFIX) Modules/zipimport.o: $(srcdir)/Modules/zipimport.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/zipimport.c -o Modules/zipimport.o -Modules/zipimport$(SO): Modules/zipimport.o; $(BLDSHARED) Modules/zipimport.o -o Modules/zipimport$(SO) +Modules/zipimport$(EXT_SUFFIX): Modules/zipimport.o; $(BLDSHARED) Modules/zipimport.o -o Modules/zipimport$(EXT_SUFFIX) Modules/faulthandler.o: $(srcdir)/Modules/faulthandler.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/faulthandler.c -o Modules/faulthandler.o -Modules/faulthandler$(SO): Modules/faulthandler.o; $(BLDSHARED) Modules/faulthandler.o -o Modules/faulthandler$(SO) +Modules/faulthandler$(EXT_SUFFIX): Modules/faulthandler.o; $(BLDSHARED) Modules/faulthandler.o -o Modules/faulthandler$(EXT_SUFFIX) Modules/_tracemalloc.o: $(srcdir)/Modules/_tracemalloc.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/_tracemalloc.c -o Modules/_tracemalloc.o Modules/hashtable.o: $(srcdir)/Modules/hashtable.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/hashtable.c -o Modules/hashtable.o -Modules/_tracemalloc$(SO): Modules/_tracemalloc.o Modules/hashtable.o; $(BLDSHARED) Modules/_tracemalloc.o Modules/hashtable.o -o Modules/_tracemalloc$(SO) +Modules/_tracemalloc$(EXT_SUFFIX): Modules/_tracemalloc.o Modules/hashtable.o; $(BLDSHARED) Modules/_tracemalloc.o Modules/hashtable.o -o Modules/_tracemalloc$(EXT_SUFFIX) Modules/symtablemodule.o: 
$(srcdir)/Modules/symtablemodule.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/symtablemodule.c -o Modules/symtablemodule.o -Modules/_symtablemodule$(SO): Modules/symtablemodule.o; $(BLDSHARED) Modules/symtablemodule.o -o Modules/_symtablemodule$(SO) +Modules/_symtable$(EXT_SUFFIX): Modules/symtablemodule.o; $(BLDSHARED) Modules/symtablemodule.o -o Modules/_symtable$(EXT_SUFFIX) Modules/xxsubtype.o: $(srcdir)/Modules/xxsubtype.c; $(CC) $(PY_CORE_CFLAGS) -c $(srcdir)/Modules/xxsubtype.c -o Modules/xxsubtype.o -Modules/xxsubtype$(SO): Modules/xxsubtype.o; $(BLDSHARED) Modules/xxsubtype.o -o Modules/xxsubtype$(SO) +Modules/xxsubtype$(EXT_SUFFIX): Modules/xxsubtype.o; $(BLDSHARED) Modules/xxsubtype.o -o Modules/xxsubtype$(EXT_SUFFIX) diff --git a/Darwin/lib/python3.4/config-3.4m/Setup b/Darwin/lib/python3.5/config-3.5m/Setup similarity index 99% rename from Darwin/lib/python3.4/config-3.4m/Setup rename to Darwin/lib/python3.5/config-3.5m/Setup index 01fb85f..06ba6ad 100644 --- a/Darwin/lib/python3.4/config-3.4m/Setup +++ b/Darwin/lib/python3.5/config-3.5m/Setup @@ -91,7 +91,7 @@ SITEPATH= TESTPATH= # Path components for machine- or system-dependent modules and shared libraries -MACHDEPPATH=:plat-$(MACHDEP) +MACHDEPPATH=:$(PLATDIR) EXTRAMACHDEPPATH= COREPYTHONPATH=$(DESTPATH)$(SITEPATH)$(TESTPATH)$(MACHDEPPATH)$(EXTRAMACHDEPPATH) @@ -118,6 +118,7 @@ _collections _collectionsmodule.c # Container types itertools itertoolsmodule.c # Functions creating iterators for efficient looping atexit atexitmodule.c # Register functions to be run at interpreter-shutdown _stat _stat.c # stat.h interface +time timemodule.c # -lm # time operations and variables # access to ISO C locale support _locale _localemodule.c # -lintl @@ -171,7 +172,6 @@ _symtable symtablemodule.c #cmath cmathmodule.c _math.c # -lm # complex math library functions #math mathmodule.c _math.c # -lm # math library functions, e.g. sin() #_struct _struct.c # binary structure packing/unpacking -#time timemodule.c # -lm # time operations and variables #_weakref _weakref.c # basic weak reference support #_testcapi _testcapimodule.c # Python C API test module #_random _randommodule.c # Random number generator diff --git a/Darwin/lib/python3.4/config-3.4m/Setup.config b/Darwin/lib/python3.5/config-3.5m/Setup.config similarity index 93% rename from Darwin/lib/python3.4/config-3.4m/Setup.config rename to Darwin/lib/python3.5/config-3.5m/Setup.config index a195331..7afe555 100644 --- a/Darwin/lib/python3.4/config-3.4m/Setup.config +++ b/Darwin/lib/python3.5/config-3.5m/Setup.config @@ -7,7 +7,7 @@ _thread _threadmodule.c # The signal module -signal signalmodule.c +_signal signalmodule.c # The rest of the modules previously listed in this file are built # by the setup.py script in Python 2.1 and later. diff --git a/Darwin/lib/python3.4/config-3.4m/Setup.local b/Darwin/lib/python3.5/config-3.5m/Setup.local similarity index 100% rename from Darwin/lib/python3.4/config-3.4m/Setup.local rename to Darwin/lib/python3.5/config-3.5m/Setup.local diff --git a/Darwin/lib/python3.4/config-3.4m/config.c b/Darwin/lib/python3.5/config-3.5m/config.c similarity index 92% rename from Darwin/lib/python3.4/config-3.4m/config.c rename to Darwin/lib/python3.5/config-3.5m/config.c index 9e2a673..ef131df 100644 --- a/Darwin/lib/python3.4/config-3.4m/config.c +++ b/Darwin/lib/python3.5/config-3.5m/config.c @@ -1,4 +1,4 @@ -/* Generated automatically from /Users/build/platform_darwin/src/Python-3.4.1/Modules/config.c.in by makesetup. 
*/ +/* Generated automatically from ./Modules/config.c.in by makesetup. */ /* -*- C -*- *********************************************** Copyright (c) 2000, BeOpen.com. Copyright (c) 1995-2000, Corporation for National Research Initiatives. @@ -14,7 +14,7 @@ redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES. /* !!! !!! !!! This file is edited by the makesetup script !!! !!! !!! */ /* This file contains the table of built-in modules. - See init_builtin() in import.c. */ + See create_builtin() in import.c. */ #include "Python.h" @@ -24,7 +24,7 @@ extern "C" { extern PyObject* PyInit__thread(void); -extern PyObject* PyInit_signal(void); +extern PyObject* PyInit__signal(void); extern PyObject* PyInit_posix(void); extern PyObject* PyInit_errno(void); extern PyObject* PyInit_pwd(void); @@ -37,6 +37,7 @@ extern PyObject* PyInit__collections(void); extern PyObject* PyInit_itertools(void); extern PyObject* PyInit_atexit(void); extern PyObject* PyInit__stat(void); +extern PyObject* PyInit_time(void); extern PyObject* PyInit__locale(void); extern PyObject* PyInit__io(void); extern PyObject* PyInit_zipimport(void); @@ -56,7 +57,7 @@ extern PyObject* PyInit__string(void); struct _inittab _PyImport_Inittab[] = { {"_thread", PyInit__thread}, -{"signal", PyInit_signal}, +{"_signal", PyInit__signal}, {"posix", PyInit_posix}, {"errno", PyInit_errno}, {"pwd", PyInit_pwd}, @@ -69,6 +70,7 @@ struct _inittab _PyImport_Inittab[] = { {"itertools", PyInit_itertools}, {"atexit", PyInit_atexit}, {"_stat", PyInit__stat}, +{"time", PyInit_time}, {"_locale", PyInit__locale}, {"_io", PyInit__io}, {"zipimport", PyInit_zipimport}, diff --git a/Darwin/lib/python3.4/config-3.4m/config.c.in b/Darwin/lib/python3.5/config-3.5m/config.c.in similarity index 97% rename from Darwin/lib/python3.4/config-3.4m/config.c.in rename to Darwin/lib/python3.5/config-3.5m/config.c.in index 7a24e2d..7b77199 100644 --- a/Darwin/lib/python3.4/config-3.4m/config.c.in +++ b/Darwin/lib/python3.5/config-3.5m/config.c.in @@ -13,7 +13,7 @@ redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES. /* !!! !!! !!! This file is edited by the makesetup script !!! !!! !!! */ /* This file contains the table of built-in modules. - See init_builtin() in import.c. */ + See create_builtin() in import.c. 
*/ #include "Python.h" diff --git a/Darwin/lib/python3.4/config-3.4m/install-sh b/Darwin/lib/python3.5/config-3.5m/install-sh similarity index 100% rename from Darwin/lib/python3.4/config-3.4m/install-sh rename to Darwin/lib/python3.5/config-3.5m/install-sh diff --git a/Darwin/lib/python3.5/config-3.5m/libpython3.5.a b/Darwin/lib/python3.5/config-3.5m/libpython3.5.a new file mode 120000 index 0000000..e3878af --- /dev/null +++ b/Darwin/lib/python3.5/config-3.5m/libpython3.5.a @@ -0,0 +1 @@ +../../../Python \ No newline at end of file diff --git a/Darwin/lib/python3.5/config-3.5m/libpython3.5.dylib b/Darwin/lib/python3.5/config-3.5m/libpython3.5.dylib new file mode 120000 index 0000000..e3878af --- /dev/null +++ b/Darwin/lib/python3.5/config-3.5m/libpython3.5.dylib @@ -0,0 +1 @@ +../../../Python \ No newline at end of file diff --git a/Darwin/lib/python3.5/config-3.5m/libpython3.5m.a b/Darwin/lib/python3.5/config-3.5m/libpython3.5m.a new file mode 120000 index 0000000..e3878af --- /dev/null +++ b/Darwin/lib/python3.5/config-3.5m/libpython3.5m.a @@ -0,0 +1 @@ +../../../Python \ No newline at end of file diff --git a/Darwin/lib/python3.5/config-3.5m/libpython3.5m.dylib b/Darwin/lib/python3.5/config-3.5m/libpython3.5m.dylib new file mode 120000 index 0000000..e3878af --- /dev/null +++ b/Darwin/lib/python3.5/config-3.5m/libpython3.5m.dylib @@ -0,0 +1 @@ +../../../Python \ No newline at end of file diff --git a/Darwin/lib/python3.4/config-3.4m/makesetup b/Darwin/lib/python3.5/config-3.5m/makesetup similarity index 97% rename from Darwin/lib/python3.4/config-3.4m/makesetup rename to Darwin/lib/python3.5/config-3.5m/makesetup index 40dfa9d..90db42e 100755 --- a/Darwin/lib/python3.4/config-3.4m/makesetup +++ b/Darwin/lib/python3.5/config-3.5m/makesetup @@ -217,7 +217,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | *) src='$(srcdir)/'"$srcdir/$src";; esac case $doconfig in - no) cc="$cc \$(CCSHARED) \$(CFLAGS) \$(CPPFLAGS)";; + no) cc="$cc \$(CCSHARED) \$(PY_CFLAGS) \$(PY_CPPFLAGS)";; *) cc="$cc \$(PY_CORE_CFLAGS)";; esac @@ -229,11 +229,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | esac for mod in $mods do - case $objs in - *$mod.o*) base=$mod;; - *) base=${mod}module;; - esac - file="$srcdir/$base\$(SO)" + file="$srcdir/$mod\$(EXT_SUFFIX)" case $doconfig in no) SHAREDMODS="$SHAREDMODS $file";; esac @@ -270,7 +266,7 @@ sed -e 's/[ ]*#.*//' -e '/^[ ]*$/d' | *) sed -e " 1i$NL/* Generated automatically from $config by makesetup. 
*/ /MARKER 1/i$NL$EXTDECLS - + /MARKER 2/i$NL$INITBITS " $config >config.c diff --git a/Darwin/lib/python3.4/config-3.4m/python-config.py b/Darwin/lib/python3.5/config-3.5m/python-config.py similarity index 89% rename from Darwin/lib/python3.4/config-3.4m/python-config.py rename to Darwin/lib/python3.5/config-3.5m/python-config.py index f4a7706..55e4d14 100755 --- a/Darwin/lib/python3.4/config-3.4m/python-config.py +++ b/Darwin/lib/python3.5/config-3.5m/python-config.py @@ -1,4 +1,4 @@ -#!/Users/build/platform_darwin/bin/python3.4m +#!/Users/build/.local/Cellar/python3/3.5.0/Frameworks/Python.framework/Versions/3.5/bin/python3.5m # -*- python -*- # Keep this script in sync with python-config.sh.in @@ -47,8 +47,9 @@ for opt in opt_flags: print(' '.join(flags)) elif opt in ('--libs', '--ldflags'): - libs = getvar('LIBS').split() + getvar('SYSLIBS').split() - libs.append('-lpython' + pyver + sys.abiflags) + libs = ['-lpython' + pyver + sys.abiflags] + libs += getvar('LIBS').split() + libs += getvar('SYSLIBS').split() # add the prefix/lib/pythonX.Y/config dir, but only if there is no # shared library in prefix/lib/. if opt == '--ldflags': diff --git a/Darwin/lib/python3.5/config-3.5m/python.o b/Darwin/lib/python3.5/config-3.5m/python.o new file mode 100644 index 0000000..5036186 Binary files /dev/null and b/Darwin/lib/python3.5/config-3.5m/python.o differ diff --git a/Darwin/lib/python3.4/configparser.py b/Darwin/lib/python3.5/configparser.py similarity index 89% rename from Darwin/lib/python3.4/configparser.py rename to Darwin/lib/python3.5/configparser.py index 4ee8307..ecd0660 100644 --- a/Darwin/lib/python3.4/configparser.py +++ b/Darwin/lib/python3.5/configparser.py @@ -17,7 +17,8 @@ ConfigParser -- responsible for parsing a list of __init__(defaults=None, dict_type=_default_dict, allow_no_value=False, delimiters=('=', ':'), comment_prefixes=('#', ';'), inline_comment_prefixes=None, strict=True, - empty_lines_in_values=True): + empty_lines_in_values=True, default_section='DEFAULT', + interpolation=<unset>, converters=<unset>): Create the parser. When `defaults' is given, it is initialized into the dictionary or intrinsic defaults. The keys must be strings, the values must be appropriate for %()s string interpolation. @@ -47,6 +48,25 @@ ConfigParser -- responsible for parsing a list of When `allow_no_value' is True (default: False), options without values are accepted; the value presented for these is None. + When `default_section' is given, the name of the special section is + named accordingly. By default it is called ``"DEFAULT"`` but this can + be customized to point to any other valid section name. Its current + value can be retrieved using the ``parser_instance.default_section`` + attribute and may be modified at runtime. + + When `interpolation` is given, it should be an Interpolation subclass + instance. It will be used as the handler for option value + pre-processing when using getters. RawConfigParser objects don't do + any sort of interpolation, whereas ConfigParser uses an instance of + BasicInterpolation. The library also provides a ``zc.buildout`` + inspired ExtendedInterpolation implementation. + + When `converters` is given, it should be a dictionary where each key + represents the name of a type converter and each value is a callable + implementing the conversion from string to the desired datatype. Every + converter gets its corresponding get*() method on the parser object and + section proxies. + sections() Return all the configuration section names, sans DEFAULT.
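(For context only, not part of the patch itself: the `converters` and `default_section` arguments described in the docstring above are new in Python 3.5's configparser. A minimal usage sketch, assuming that module; the converter name 'list' and the option values are made up for illustration:)

    from configparser import ConfigParser

    # Registering a 'list' converter makes configparser generate a matching
    # getlist() method on the parser and on every section proxy.
    parser = ConfigParser(
        converters={'list': lambda value: [item.strip() for item in value.split(',')]})
    parser.read_string("[server]\nhosts = alpha, beta, gamma\nport = 8080\n")

    assert parser.getlist('server', 'hosts') == ['alpha', 'beta', 'gamma']
    assert parser['server'].getlist('hosts') == ['alpha', 'beta', 'gamma']
    assert parser['server'].getint('port') == 8080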
@@ -129,9 +149,11 @@ import warnings __all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError", "NoOptionError", "InterpolationError", "InterpolationDepthError", - "InterpolationSyntaxError", "ParsingError", - "MissingSectionHeaderError", + "InterpolationMissingOptionError", "InterpolationSyntaxError", + "ParsingError", "MissingSectionHeaderError", "ConfigParser", "SafeConfigParser", "RawConfigParser", + "Interpolation", "BasicInterpolation", "ExtendedInterpolation", + "LegacyInterpolation", "SectionProxy", "ConverterMapping", "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] DEFAULTSECT = "DEFAULT" @@ -410,7 +432,7 @@ class BasicInterpolation(Interpolation): v = map[var] except KeyError: raise InterpolationMissingOptionError( - option, section, rest, var) + option, section, rest, var) from None if "%" in v: self._interpolate_some(parser, option, accum, v, section, map, depth + 1) @@ -482,7 +504,7 @@ class ExtendedInterpolation(Interpolation): "More than one ':' found: %r" % (rest,)) except (KeyError, NoSectionError, NoOptionError): raise InterpolationMissingOptionError( - option, section, rest, ":".join(path)) + option, section, rest, ":".join(path)) from None if "$" in v: self._interpolate_some(parser, opt, accum, v, sect, dict(parser.items(sect, raw=True)), @@ -515,7 +537,7 @@ class LegacyInterpolation(Interpolation): value = value % vars except KeyError as e: raise InterpolationMissingOptionError( - option, section, rawval, e.args[0]) + option, section, rawval, e.args[0]) from None else: break if value and "%(" in value: @@ -580,11 +602,12 @@ class RawConfigParser(MutableMapping): comment_prefixes=('#', ';'), inline_comment_prefixes=None, strict=True, empty_lines_in_values=True, default_section=DEFAULTSECT, - interpolation=_UNSET): + interpolation=_UNSET, converters=_UNSET): self._dict = dict_type self._sections = self._dict() self._defaults = self._dict() + self._converters = ConverterMapping(self) self._proxies = self._dict() self._proxies[default_section] = SectionProxy(self, default_section) if defaults: @@ -612,6 +635,8 @@ class RawConfigParser(MutableMapping): self._interpolation = self._DEFAULT_INTERPOLATION if self._interpolation is None: self._interpolation = Interpolation() + if converters is not _UNSET: + self._converters.update(converters) def defaults(self): return self._defaults @@ -647,7 +672,7 @@ class RawConfigParser(MutableMapping): try: opts = self._sections[section].copy() except KeyError: - raise NoSectionError(section) + raise NoSectionError(section) from None opts.update(self._defaults) return list(opts.keys()) @@ -775,36 +800,31 @@ class RawConfigParser(MutableMapping): def _get(self, section, conv, option, **kwargs): return conv(self.get(section, option, **kwargs)) - def getint(self, section, option, *, raw=False, vars=None, - fallback=_UNSET): + def _get_conv(self, section, option, conv, *, raw=False, vars=None, + fallback=_UNSET, **kwargs): try: - return self._get(section, int, option, raw=raw, vars=vars) + return self._get(section, conv, option, raw=raw, vars=vars, + **kwargs) except (NoSectionError, NoOptionError): if fallback is _UNSET: raise - else: - return fallback + return fallback + + # getint, getfloat and getboolean provided directly for backwards compat + def getint(self, section, option, *, raw=False, vars=None, + fallback=_UNSET, **kwargs): + return self._get_conv(section, option, int, raw=raw, vars=vars, + fallback=fallback, **kwargs) def getfloat(self, section, option, *, raw=False, vars=None, - fallback=_UNSET): - try: - return 
self._get(section, float, option, raw=raw, vars=vars) - except (NoSectionError, NoOptionError): - if fallback is _UNSET: - raise - else: - return fallback + fallback=_UNSET, **kwargs): + return self._get_conv(section, option, float, raw=raw, vars=vars, + fallback=fallback, **kwargs) def getboolean(self, section, option, *, raw=False, vars=None, - fallback=_UNSET): - try: - return self._get(section, self._convert_to_boolean, option, - raw=raw, vars=vars) - except (NoSectionError, NoOptionError): - if fallback is _UNSET: - raise - else: - return fallback + fallback=_UNSET, **kwargs): + return self._get_conv(section, option, self._convert_to_boolean, + raw=raw, vars=vars, fallback=fallback, **kwargs) def items(self, section=_UNSET, raw=False, vars=None): """Return a list of (name, value) tuples for each option in a section. @@ -876,7 +896,7 @@ class RawConfigParser(MutableMapping): try: sectdict = self._sections[section] except KeyError: - raise NoSectionError(section) + raise NoSectionError(section) from None sectdict[self.optionxform(option)] = value def write(self, fp, space_around_delimiters=True): @@ -917,7 +937,7 @@ class RawConfigParser(MutableMapping): try: sectdict = self._sections[section] except KeyError: - raise NoSectionError(section) + raise NoSectionError(section) from None option = self.optionxform(option) existed = option in sectdict if existed: @@ -1154,6 +1174,10 @@ class RawConfigParser(MutableMapping): if not isinstance(value, str): raise TypeError("option values must be strings") + @property + def converters(self): + return self._converters + class ConfigParser(RawConfigParser): """ConfigParser implementing interpolation.""" @@ -1194,6 +1218,10 @@ class SectionProxy(MutableMapping): """Creates a view on a section of the specified `name` in `parser`.""" self._parser = parser self._name = name + for conv in parser.converters: + key = 'get' + conv + getter = functools.partial(self.get, _impl=getattr(parser, key)) + setattr(self, key, getter) def __repr__(self): return '<Section: {}>'.format(self._name) @@ -1227,22 +1255,6 @@ class SectionProxy(MutableMapping): else: return self._parser.defaults() - def get(self, option, fallback=None, *, raw=False, vars=None): - return self._parser.get(self._name, option, raw=raw, vars=vars, - fallback=fallback) - - def getint(self, option, fallback=None, *, raw=False, vars=None): - return self._parser.getint(self._name, option, raw=raw, vars=vars, - fallback=fallback) - - def getfloat(self, option, fallback=None, *, raw=False, vars=None): - return self._parser.getfloat(self._name, option, raw=raw, vars=vars, - fallback=fallback) - - def getboolean(self, option, fallback=None, *, raw=False, vars=None): - return self._parser.getboolean(self._name, option, raw=raw, vars=vars, - fallback=fallback) - @property def parser(self): # The parser object of the proxy is read-only. @@ -1252,3 +1264,77 @@ class SectionProxy(MutableMapping): def name(self): # The name of the section on a proxy is read-only. return self._name + + def get(self, option, fallback=None, *, raw=False, vars=None, + _impl=None, **kwargs): + """Get an option value. + + Unless `fallback` is provided, `None` will be returned if the option + is not found. + + """ + # If `_impl` is provided, it should be a getter method on the parser + # object that provides the desired type conversion.
+ if not _impl: + _impl = self._parser.get + return _impl(self._name, option, raw=raw, vars=vars, + fallback=fallback, **kwargs) + + +class ConverterMapping(MutableMapping): + """Enables reuse of get*() methods between the parser and section proxies. + + If a parser class implements a getter directly, the value for the given + key will be ``None``. The presence of the converter name here enables + section proxies to find and use the implementation on the parser class. + """ + + GETTERCRE = re.compile(r"^get(?P<name>.+)$") + + def __init__(self, parser): + self._parser = parser + self._data = {} + for getter in dir(self._parser): + m = self.GETTERCRE.match(getter) + if not m or not callable(getattr(self._parser, getter)): + continue + self._data[m.group('name')] = None # See class docstring. + + def __getitem__(self, key): + return self._data[key] + + def __setitem__(self, key, value): + try: + k = 'get' + key + except TypeError: + raise ValueError('Incompatible key: {} (type: {})' + ''.format(key, type(key))) + if k == 'get': + raise ValueError('Incompatible key: cannot use "" as a name') + self._data[key] = value + func = functools.partial(self._parser._get_conv, conv=value) + func.converter = value + setattr(self._parser, k, func) + for proxy in self._parser.values(): + getter = functools.partial(proxy.get, _impl=func) + setattr(proxy, k, getter) + + def __delitem__(self, key): + try: + k = 'get' + (key or None) + except TypeError: + raise KeyError(key) + del self._data[key] + for inst in itertools.chain((self._parser,), self._parser.values()): + try: + delattr(inst, k) + except AttributeError: + # don't raise since the entry was present in _data, silently + # clean up + continue + + def __iter__(self): + return iter(self._data) + + def __len__(self): + return len(self._data) diff --git a/Darwin/lib/python3.4/contextlib.py b/Darwin/lib/python3.5/contextlib.py similarity index 91% rename from Darwin/lib/python3.4/contextlib.py rename to Darwin/lib/python3.5/contextlib.py index 82ee955..5377987 100644 --- a/Darwin/lib/python3.4/contextlib.py +++ b/Darwin/lib/python3.5/contextlib.py @@ -5,7 +5,7 @@ from collections import deque from functools import wraps __all__ = ["contextmanager", "closing", "ContextDecorator", "ExitStack", - "redirect_stdout", "suppress"] + "redirect_stdout", "redirect_stderr", "suppress"] class ContextDecorator(object): @@ -34,7 +34,7 @@ class ContextDecorator(object): class _GeneratorContextManager(ContextDecorator): """Helper for @contextmanager decorator.""" - def __init__(self, func, *args, **kwds): + def __init__(self, func, args, kwds): self.gen = func(*args, **kwds) self.func, self.args, self.kwds = func, args, kwds # Issue 19330: ensure context manager instances have good docstrings @@ -52,7 +52,7 @@ class _GeneratorContextManager(ContextDecorator): # _GCM instances are one-shot context managers, so the # CM must be recreated each time a decorated function is # called - return self.__class__(self.func, *self.args, **self.kwds) + return self.__class__(self.func, self.args, self.kwds) def __enter__(self): try: @@ -77,10 +77,17 @@ class _GeneratorContextManager(ContextDecorator): self.gen.throw(type, value, traceback) raise RuntimeError("generator didn't stop after throw()") except StopIteration as exc: - # Suppress the exception *unless* it's the same exception that + # Suppress StopIteration *unless* it's the same exception that # was passed to throw().
This prevents a StopIteration - # raised inside the "with" statement from being suppressed + # raised inside the "with" statement from being suppressed. return exc is not value + except RuntimeError as exc: + # Likewise, avoid suppressing if a StopIteration exception + # was passed to throw() and later wrapped into a RuntimeError + # (see PEP 479). + if exc.__cause__ is value: + return False + raise except: # only re-raise if it's *not* the exception that was # passed to throw(), because __exit__() must not raise @@ -123,7 +130,7 @@ def contextmanager(func): """ @wraps(func) def helper(*args, **kwds): - return _GeneratorContextManager(func, *args, **kwds) + return _GeneratorContextManager(func, args, kwds) return helper @@ -151,8 +158,27 @@ class closing(object): def __exit__(self, *exc_info): self.thing.close() -class redirect_stdout: - """Context manager for temporarily redirecting stdout to another file + +class _RedirectStream: + + _stream = None + + def __init__(self, new_target): + self._new_target = new_target + # We use a list of old targets to make this CM re-entrant + self._old_targets = [] + + def __enter__(self): + self._old_targets.append(getattr(sys, self._stream)) + setattr(sys, self._stream, self._new_target) + return self._new_target + + def __exit__(self, exctype, excinst, exctb): + setattr(sys, self._stream, self._old_targets.pop()) + + +class redirect_stdout(_RedirectStream): + """Context manager for temporarily redirecting stdout to another file. # How to send help() to stderr with redirect_stdout(sys.stderr): @@ -164,18 +190,13 @@ class redirect_stdout: help(pow) """ - def __init__(self, new_target): - self._new_target = new_target - # We use a list of old targets to make this CM re-entrant - self._old_targets = [] + _stream = "stdout" - def __enter__(self): - self._old_targets.append(sys.stdout) - sys.stdout = self._new_target - return self._new_target - def __exit__(self, exctype, excinst, exctb): - sys.stdout = self._old_targets.pop() +class redirect_stderr(_RedirectStream): + """Context manager for temporarily redirecting stderr to another file.""" + + _stream = "stderr" class suppress: diff --git a/Darwin/lib/python3.4/copy.py b/Darwin/lib/python3.5/copy.py similarity index 97% rename from Darwin/lib/python3.4/copy.py rename to Darwin/lib/python3.5/copy.py index bb8840e..3a45fdf 100644 --- a/Darwin/lib/python3.4/copy.py +++ b/Darwin/lib/python3.5/copy.py @@ -94,7 +94,7 @@ def copy(x): else: reductor = getattr(x, "__reduce_ex__", None) if reductor: - rv = reductor(2) + rv = reductor(4) else: reductor = getattr(x, "__reduce__", None) if reductor: @@ -171,7 +171,7 @@ def deepcopy(x, memo=None, _nil=[]): else: reductor = getattr(x, "__reduce_ex__", None) if reductor: - rv = reductor(2) + rv = reductor(4) else: reductor = getattr(x, "__reduce__", None) if reductor: @@ -221,17 +221,15 @@ def _deepcopy_list(x, memo): d[list] = _deepcopy_list def _deepcopy_tuple(x, memo): - y = [] - for a in x: - y.append(deepcopy(a, memo)) + y = [deepcopy(a, memo) for a in x] # We're not going to put the tuple in the memo, but it's still important we # check for it, in case the tuple contains recursive mutable structures. 
try: return memo[id(x)] except KeyError: pass - for i in range(len(x)): - if x[i] is not y[i]: + for k, j in zip(x, y): + if k is not j: y = tuple(y) break else: diff --git a/Darwin/lib/python3.4/copyreg.py b/Darwin/lib/python3.5/copyreg.py similarity index 100% rename from Darwin/lib/python3.4/copyreg.py rename to Darwin/lib/python3.5/copyreg.py diff --git a/Darwin/lib/python3.4/crypt.py b/Darwin/lib/python3.5/crypt.py similarity index 100% rename from Darwin/lib/python3.4/crypt.py rename to Darwin/lib/python3.5/crypt.py diff --git a/Darwin/lib/python3.4/csv.py b/Darwin/lib/python3.5/csv.py similarity index 97% rename from Darwin/lib/python3.4/csv.py rename to Darwin/lib/python3.5/csv.py index a56eed8..ca40e5e 100644 --- a/Darwin/lib/python3.4/csv.py +++ b/Darwin/lib/python3.5/csv.py @@ -147,16 +147,13 @@ class DictWriter: if wrong_fields: raise ValueError("dict contains fields not in fieldnames: " + ", ".join([repr(x) for x in wrong_fields])) - return [rowdict.get(key, self.restval) for key in self.fieldnames] + return (rowdict.get(key, self.restval) for key in self.fieldnames) def writerow(self, rowdict): return self.writer.writerow(self._dict_to_list(rowdict)) def writerows(self, rowdicts): - rows = [] - for rowdict in rowdicts: - rows.append(self._dict_to_list(rowdict)) - return self.writer.writerows(rows) + return self.writer.writerows(map(self._dict_to_list, rowdicts)) # Guard Sniffer's type checking against builds that exclude complex() try: @@ -231,20 +228,21 @@ class Sniffer: quotes = {} delims = {} spaces = 0 + groupindex = regexp.groupindex for m in matches: - n = regexp.groupindex['quote'] - 1 + n = groupindex['quote'] - 1 key = m[n] if key: quotes[key] = quotes.get(key, 0) + 1 try: - n = regexp.groupindex['delim'] - 1 + n = groupindex['delim'] - 1 key = m[n] except KeyError: continue if key and (delimiters is None or key in delimiters): delims[key] = delims.get(key, 0) + 1 try: - n = regexp.groupindex['space'] - 1 + n = groupindex['space'] - 1 except KeyError: continue if m[n]: diff --git a/Darwin/lib/python3.4/ctypes/__init__.py b/Darwin/lib/python3.5/ctypes/__init__.py similarity index 96% rename from Darwin/lib/python3.4/ctypes/__init__.py rename to Darwin/lib/python3.5/ctypes/__init__.py index e34c646..4cb6d0d 100644 --- a/Darwin/lib/python3.4/ctypes/__init__.py +++ b/Darwin/lib/python3.5/ctypes/__init__.py @@ -49,7 +49,7 @@ def create_string_buffer(init, size=None): create_string_buffer(anInteger) -> character array create_string_buffer(aString, anInteger) -> character array """ - if isinstance(init, (str, bytes)): + if isinstance(init, bytes): if size is None: size = len(init)+1 buftype = c_char * size @@ -237,14 +237,8 @@ _check_size(c_char) class c_char_p(_SimpleCData): _type_ = "z" - if _os.name == "nt": - def __repr__(self): - if not windll.kernel32.IsBadStringPtrA(self, -1): - return "%s(%r)" % (self.__class__.__name__, self.value) - return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) - else: - def __repr__(self): - return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value) + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, c_void_p.from_buffer(self).value) _check_size(c_char_p, "P") class c_void_p(_SimpleCData): @@ -259,6 +253,8 @@ from _ctypes import POINTER, pointer, _pointer_type_cache class c_wchar_p(_SimpleCData): _type_ = "Z" + def __repr__(self): + return "%s(%s)" % (self.__class__.__name__, c_void_p.from_buffer(self).value) class c_wchar(_SimpleCData): _type_ = "u" @@ -284,7 +280,7 @@ def 
create_unicode_buffer(init, size=None): create_unicode_buffer(anInteger) -> character array create_unicode_buffer(aString, anInteger) -> character array """ - if isinstance(init, (str, bytes)): + if isinstance(init, str): if size is None: size = len(init)+1 buftype = c_wchar * size @@ -353,7 +349,7 @@ class CDLL(object): self._handle = handle def __repr__(self): - return "<%s '%s', handle %x at %x>" % \ + return "<%s '%s', handle %x at %#x>" % \ (self.__class__.__name__, self._name, (self._handle & (_sys.maxsize*2 + 1)), id(self) & (_sys.maxsize*2 + 1)) diff --git a/Darwin/lib/python3.4/ctypes/_endian.py b/Darwin/lib/python3.5/ctypes/_endian.py similarity index 97% rename from Darwin/lib/python3.4/ctypes/_endian.py rename to Darwin/lib/python3.5/ctypes/_endian.py index dae65fc..37444bd 100644 --- a/Darwin/lib/python3.4/ctypes/_endian.py +++ b/Darwin/lib/python3.5/ctypes/_endian.py @@ -45,6 +45,7 @@ if sys.byteorder == "little": class BigEndianStructure(Structure, metaclass=_swapped_meta): """Structure with big endian byte order""" + __slots__ = () _swappedbytes_ = None elif sys.byteorder == "big": @@ -53,6 +54,7 @@ elif sys.byteorder == "big": BigEndianStructure = Structure class LittleEndianStructure(Structure, metaclass=_swapped_meta): """Structure with little endian byte order""" + __slots__ = () _swappedbytes_ = None else: diff --git a/Darwin/lib/python3.4/ctypes/macholib/README.ctypes b/Darwin/lib/python3.5/ctypes/macholib/README.ctypes similarity index 100% rename from Darwin/lib/python3.4/ctypes/macholib/README.ctypes rename to Darwin/lib/python3.5/ctypes/macholib/README.ctypes diff --git a/Darwin/lib/python3.4/ctypes/macholib/__init__.py b/Darwin/lib/python3.5/ctypes/macholib/__init__.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/macholib/__init__.py rename to Darwin/lib/python3.5/ctypes/macholib/__init__.py diff --git a/Darwin/lib/python3.4/ctypes/macholib/dyld.py b/Darwin/lib/python3.5/ctypes/macholib/dyld.py similarity index 97% rename from Darwin/lib/python3.4/ctypes/macholib/dyld.py rename to Darwin/lib/python3.5/ctypes/macholib/dyld.py index dc7052e..1b54cc1 100644 --- a/Darwin/lib/python3.4/ctypes/macholib/dyld.py +++ b/Darwin/lib/python3.5/ctypes/macholib/dyld.py @@ -14,14 +14,14 @@ __all__ = [ # These are the defaults as per man dyld(1) # -DEFAULT_FRAMEWORK_FALLBACK = [ +DEFAULT_FRAMEWORK_FALLBACK = [ '/Users/build/.local/Frameworks', os.path.expanduser("~/Library/Frameworks"), "/Library/Frameworks", "/Network/Library/Frameworks", "/System/Library/Frameworks", ] -DEFAULT_LIBRARY_FALLBACK = [ +DEFAULT_LIBRARY_FALLBACK = [ '/Users/build/.local/lib', os.path.expanduser("~/lib"), "/usr/local/lib", "/lib", diff --git a/Darwin/lib/python3.4/ctypes/macholib/dylib.py b/Darwin/lib/python3.5/ctypes/macholib/dylib.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/macholib/dylib.py rename to Darwin/lib/python3.5/ctypes/macholib/dylib.py diff --git a/Darwin/lib/python3.4/ctypes/macholib/fetch_macholib b/Darwin/lib/python3.5/ctypes/macholib/fetch_macholib similarity index 100% rename from Darwin/lib/python3.4/ctypes/macholib/fetch_macholib rename to Darwin/lib/python3.5/ctypes/macholib/fetch_macholib diff --git a/Darwin/lib/python3.4/ctypes/macholib/fetch_macholib.bat b/Darwin/lib/python3.5/ctypes/macholib/fetch_macholib.bat similarity index 98% rename from Darwin/lib/python3.4/ctypes/macholib/fetch_macholib.bat rename to Darwin/lib/python3.5/ctypes/macholib/fetch_macholib.bat index f9e1c0d..f474d5c 100644 --- 
a/Darwin/lib/python3.4/ctypes/macholib/fetch_macholib.bat +++ b/Darwin/lib/python3.5/ctypes/macholib/fetch_macholib.bat @@ -1 +1 @@ -svn export --force http://svn.red-bean.com/bob/macholib/trunk/macholib/ . +svn export --force http://svn.red-bean.com/bob/macholib/trunk/macholib/ . diff --git a/Darwin/lib/python3.4/ctypes/macholib/framework.py b/Darwin/lib/python3.5/ctypes/macholib/framework.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/macholib/framework.py rename to Darwin/lib/python3.5/ctypes/macholib/framework.py diff --git a/Darwin/lib/python3.5/ctypes/test/__init__.py b/Darwin/lib/python3.5/ctypes/test/__init__.py new file mode 100644 index 0000000..26a70b7 --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/__init__.py @@ -0,0 +1,14 @@ +import os +import unittest +from test import support + +# skip tests if _ctypes was not built +ctypes = support.import_module('ctypes') +ctypes_symbols = dir(ctypes) + +def need_symbol(name): + return unittest.skipUnless(name in ctypes_symbols, + '{!r} is required'.format(name)) + +def load_tests(*args): + return support.load_package_tests(os.path.dirname(__file__), *args) diff --git a/Darwin/lib/python3.5/ctypes/test/__main__.py b/Darwin/lib/python3.5/ctypes/test/__main__.py new file mode 100644 index 0000000..362a9ec --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/__main__.py @@ -0,0 +1,4 @@ +from ctypes.test import load_tests +import unittest + +unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_anon.py b/Darwin/lib/python3.5/ctypes/test/test_anon.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_anon.py rename to Darwin/lib/python3.5/ctypes/test/test_anon.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_array_in_pointer.py b/Darwin/lib/python3.5/ctypes/test/test_array_in_pointer.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_array_in_pointer.py rename to Darwin/lib/python3.5/ctypes/test/test_array_in_pointer.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_arrays.py b/Darwin/lib/python3.5/ctypes/test/test_arrays.py similarity index 91% rename from Darwin/lib/python3.4/ctypes/test/test_arrays.py rename to Darwin/lib/python3.5/ctypes/test/test_arrays.py index 99b97aa..8ca77e0 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_arrays.py +++ b/Darwin/lib/python3.5/ctypes/test/test_arrays.py @@ -1,6 +1,8 @@ import unittest from ctypes import * +from ctypes.test import need_symbol + formats = "bBhHiIlLqQfd" formats = c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, \ @@ -98,20 +100,16 @@ class ArrayTestCase(unittest.TestCase): self.assertEqual(sz[1:4:2], b"o") self.assertEqual(sz.value, b"foo") - try: - create_unicode_buffer - except NameError: - pass - else: - def test_from_addressW(self): - p = create_unicode_buffer("foo") - sz = (c_wchar * 3).from_address(addressof(p)) - self.assertEqual(sz[:], "foo") - self.assertEqual(sz[::], "foo") - self.assertEqual(sz[::-1], "oof") - self.assertEqual(sz[::3], "f") - self.assertEqual(sz[1:4:2], "o") - self.assertEqual(sz.value, "foo") + @need_symbol('create_unicode_buffer') + def test_from_addressW(self): + p = create_unicode_buffer("foo") + sz = (c_wchar * 3).from_address(addressof(p)) + self.assertEqual(sz[:], "foo") + self.assertEqual(sz[::], "foo") + self.assertEqual(sz[::-1], "oof") + self.assertEqual(sz[::3], "f") + self.assertEqual(sz[1:4:2], "o") + self.assertEqual(sz.value, "foo") def test_cache(self): # Array types are cached internally in the _ctypes extension, diff --git 
a/Darwin/lib/python3.4/ctypes/test/test_as_parameter.py b/Darwin/lib/python3.5/ctypes/test/test_as_parameter.py similarity index 98% rename from Darwin/lib/python3.4/ctypes/test/test_as_parameter.py rename to Darwin/lib/python3.5/ctypes/test/test_as_parameter.py index 43703e3..2a3484b 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_as_parameter.py +++ b/Darwin/lib/python3.5/ctypes/test/test_as_parameter.py @@ -1,5 +1,6 @@ import unittest from ctypes import * +from ctypes.test import need_symbol import _ctypes_test dll = CDLL(_ctypes_test.__file__) @@ -17,11 +18,8 @@ class BasicWrapTestCase(unittest.TestCase): def wrap(self, param): return param + @need_symbol('c_wchar') def test_wchar_parm(self): - try: - c_wchar - except NameError: - return f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double] result = f(self.wrap(1), self.wrap("x"), self.wrap(3), self.wrap(4), self.wrap(5.0), self.wrap(6.0)) @@ -196,7 +194,7 @@ class BasicWrapTestCase(unittest.TestCase): a = A() a._as_parameter_ = a - with self.assertRaises(RuntimeError): + with self.assertRaises(RecursionError): c_int.from_param(a) diff --git a/Darwin/lib/python3.4/ctypes/test/test_bitfields.py b/Darwin/lib/python3.5/ctypes/test/test_bitfields.py similarity index 88% rename from Darwin/lib/python3.4/ctypes/test/test_bitfields.py rename to Darwin/lib/python3.5/ctypes/test/test_bitfields.py index 77de606..b39d82c 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_bitfields.py +++ b/Darwin/lib/python3.5/ctypes/test/test_bitfields.py @@ -1,4 +1,5 @@ from ctypes import * +from ctypes.test import need_symbol import unittest import os @@ -127,20 +128,18 @@ class BitFieldTest(unittest.TestCase): result = self.fail_fields(("a", c_char, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_char')) - try: - c_wchar - except NameError: - pass - else: - result = self.fail_fields(("a", c_wchar, 1)) - self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_wchar')) - class Dummy(Structure): _fields_ = [] result = self.fail_fields(("a", Dummy, 1)) self.assertEqual(result, (TypeError, 'bit fields not allowed for type Dummy')) + @need_symbol('c_wchar') + def test_c_wchar(self): + result = self.fail_fields(("a", c_wchar, 1)) + self.assertEqual(result, + (TypeError, 'bit fields not allowed for type c_wchar')) + def test_single_bitfield_size(self): for c_typ in int_types: result = self.fail_fields(("a", c_typ, -1)) @@ -240,7 +239,7 @@ class BitFieldTest(unittest.TestCase): _anonymous_ = ["_"] _fields_ = [("_", X)] - @unittest.skipUnless(hasattr(ctypes, "c_uint32"), "c_int32 is required") + @need_symbol('c_uint32') def test_uint32(self): class X(Structure): _fields_ = [("a", c_uint32, 32)] @@ -250,7 +249,7 @@ class BitFieldTest(unittest.TestCase): x.a = 0xFDCBA987 self.assertEqual(x.a, 0xFDCBA987) - @unittest.skipUnless(hasattr(ctypes, "c_uint64"), "c_int64 is required") + @need_symbol('c_uint64') def test_uint64(self): class X(Structure): _fields_ = [("a", c_uint64, 64)] @@ -260,5 +259,33 @@ class BitFieldTest(unittest.TestCase): x.a = 0xFEDCBA9876543211 self.assertEqual(x.a, 0xFEDCBA9876543211) + @need_symbol('c_uint32') + def test_uint32_swap_little_endian(self): + # Issue #23319 + class Little(LittleEndianStructure): + _fields_ = [("a", c_uint32, 24), + ("b", c_uint32, 4), + ("c", c_uint32, 4)] + b = bytearray(4) + x = Little.from_buffer(b) + x.a = 0xabcdef + x.b = 1 + x.c = 2 + self.assertEqual(b, b'\xef\xcd\xab\x21') + + @need_symbol('c_uint32') + def 
test_uint32_swap_big_endian(self): + # Issue #23319 + class Big(BigEndianStructure): + _fields_ = [("a", c_uint32, 24), + ("b", c_uint32, 4), + ("c", c_uint32, 4)] + b = bytearray(4) + x = Big.from_buffer(b) + x.a = 0xabcdef + x.b = 1 + x.c = 2 + self.assertEqual(b, b'\xab\xcd\xef\x12') + if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.5/ctypes/test/test_buffers.py b/Darwin/lib/python3.5/ctypes/test/test_buffers.py new file mode 100644 index 0000000..166faaf --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_buffers.py @@ -0,0 +1,64 @@ +from ctypes import * +from ctypes.test import need_symbol +import unittest + +class StringBufferTestCase(unittest.TestCase): + + def test_buffer(self): + b = create_string_buffer(32) + self.assertEqual(len(b), 32) + self.assertEqual(sizeof(b), 32 * sizeof(c_char)) + self.assertIs(type(b[0]), bytes) + + b = create_string_buffer(b"abc") + self.assertEqual(len(b), 4) # trailing nul char + self.assertEqual(sizeof(b), 4 * sizeof(c_char)) + self.assertIs(type(b[0]), bytes) + self.assertEqual(b[0], b"a") + self.assertEqual(b[:], b"abc\0") + self.assertEqual(b[::], b"abc\0") + self.assertEqual(b[::-1], b"\0cba") + self.assertEqual(b[::2], b"ac") + self.assertEqual(b[::5], b"a") + + self.assertRaises(TypeError, create_string_buffer, "abc") + + def test_buffer_interface(self): + self.assertEqual(len(bytearray(create_string_buffer(0))), 0) + self.assertEqual(len(bytearray(create_string_buffer(1))), 1) + + @need_symbol('c_wchar') + def test_unicode_buffer(self): + b = create_unicode_buffer(32) + self.assertEqual(len(b), 32) + self.assertEqual(sizeof(b), 32 * sizeof(c_wchar)) + self.assertIs(type(b[0]), str) + + b = create_unicode_buffer("abc") + self.assertEqual(len(b), 4) # trailing nul char + self.assertEqual(sizeof(b), 4 * sizeof(c_wchar)) + self.assertIs(type(b[0]), str) + self.assertEqual(b[0], "a") + self.assertEqual(b[:], "abc\0") + self.assertEqual(b[::], "abc\0") + self.assertEqual(b[::-1], "\0cba") + self.assertEqual(b[::2], "ac") + self.assertEqual(b[::5], "a") + + self.assertRaises(TypeError, create_unicode_buffer, b"abc") + + @need_symbol('c_wchar') + def test_unicode_conversion(self): + b = create_unicode_buffer("abc") + self.assertEqual(len(b), 4) # trailing nul char + self.assertEqual(sizeof(b), 4 * sizeof(c_wchar)) + self.assertIs(type(b[0]), str) + self.assertEqual(b[0], "a") + self.assertEqual(b[:], "abc\0") + self.assertEqual(b[::], "abc\0") + self.assertEqual(b[::-1], "\0cba") + self.assertEqual(b[::2], "ac") + self.assertEqual(b[::5], "a") + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_bytes.py b/Darwin/lib/python3.5/ctypes/test/test_bytes.py similarity index 51% rename from Darwin/lib/python3.4/ctypes/test/test_bytes.py rename to Darwin/lib/python3.5/ctypes/test/test_bytes.py index ee49c45..20fa056 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_bytes.py +++ b/Darwin/lib/python3.5/ctypes/test/test_bytes.py @@ -6,27 +6,40 @@ from ctypes import * class BytesTest(unittest.TestCase): def test_c_char(self): x = c_char(b"x") + self.assertRaises(TypeError, c_char, "x") x.value = b"y" + with self.assertRaises(TypeError): + x.value = "y" c_char.from_param(b"x") + self.assertRaises(TypeError, c_char.from_param, "x") (c_char * 3)(b"a", b"b", b"c") + self.assertRaises(TypeError, c_char * 3, "a", "b", "c") def test_c_wchar(self): x = c_wchar("x") + self.assertRaises(TypeError, c_wchar, b"x") x.value = "y" + with self.assertRaises(TypeError): + x.value = b"y" 
c_wchar.from_param("x") + self.assertRaises(TypeError, c_wchar.from_param, b"x") (c_wchar * 3)("a", "b", "c") + self.assertRaises(TypeError, c_wchar * 3, b"a", b"b", b"c") def test_c_char_p(self): c_char_p(b"foo bar") + self.assertRaises(TypeError, c_char_p, "foo bar") def test_c_wchar_p(self): c_wchar_p("foo bar") + self.assertRaises(TypeError, c_wchar_p, b"foo bar") def test_struct(self): class X(Structure): _fields_ = [("a", c_char * 3)] x = X(b"abc") + self.assertRaises(TypeError, X, "abc") self.assertEqual(x.a, b"abc") self.assertEqual(type(x.a), bytes) @@ -35,16 +48,18 @@ class BytesTest(unittest.TestCase): _fields_ = [("a", c_wchar * 3)] x = X("abc") + self.assertRaises(TypeError, X, b"abc") self.assertEqual(x.a, "abc") self.assertEqual(type(x.a), str) - if sys.platform == "win32": - def test_BSTR(self): - from _ctypes import _SimpleCData - class BSTR(_SimpleCData): - _type_ = "X" + @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') + def test_BSTR(self): + from _ctypes import _SimpleCData + class BSTR(_SimpleCData): + _type_ = "X" + + BSTR("abc") - BSTR("abc") if __name__ == '__main__': unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_byteswap.py b/Darwin/lib/python3.5/ctypes/test/test_byteswap.py similarity index 94% rename from Darwin/lib/python3.4/ctypes/test/test_byteswap.py rename to Darwin/lib/python3.5/ctypes/test/test_byteswap.py index 63dde13..01c97e8 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_byteswap.py +++ b/Darwin/lib/python3.5/ctypes/test/test_byteswap.py @@ -14,13 +14,34 @@ def bin(s): # For Structures and Unions, these types are created on demand. class Test(unittest.TestCase): - def X_test(self): + @unittest.skip('test disabled') + def test_X(self): print(sys.byteorder, file=sys.stderr) for i in range(32): bits = BITS() setattr(bits, "i%s" % i, 1) dump(bits) + def test_slots(self): + class BigPoint(BigEndianStructure): + __slots__ = () + _fields_ = [("x", c_int), ("y", c_int)] + + class LowPoint(LittleEndianStructure): + __slots__ = () + _fields_ = [("x", c_int), ("y", c_int)] + + big = BigPoint() + little = LowPoint() + big.x = 4 + big.y = 2 + little.x = 2 + little.y = 4 + with self.assertRaises(AttributeError): + big.z = 42 + with self.assertRaises(AttributeError): + little.z = 24 + def test_endian_short(self): if sys.byteorder == "little": self.assertIs(c_short.__ctype_le__, c_short) diff --git a/Darwin/lib/python3.4/ctypes/test/test_callbacks.py b/Darwin/lib/python3.5/ctypes/test/test_callbacks.py similarity index 91% rename from Darwin/lib/python3.4/ctypes/test/test_callbacks.py rename to Darwin/lib/python3.5/ctypes/test/test_callbacks.py index 5600b43..3824f7c 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_callbacks.py +++ b/Darwin/lib/python3.5/ctypes/test/test_callbacks.py @@ -1,5 +1,6 @@ import unittest from ctypes import * +from ctypes.test import need_symbol import _ctypes_test class Callbacks(unittest.TestCase): @@ -88,9 +89,10 @@ class Callbacks(unittest.TestCase): # disabled: would now (correctly) raise a RuntimeWarning about # a memory leak. A callback function cannot return a non-integral # C type without causing a memory leak. 
-## def test_char_p(self): -## self.check_type(c_char_p, "abc") -## self.check_type(c_char_p, "def") + @unittest.skip('test disabled') + def test_char_p(self): + self.check_type(c_char_p, "abc") + self.check_type(c_char_p, "def") def test_pyobject(self): o = () @@ -142,13 +144,12 @@ class Callbacks(unittest.TestCase): CFUNCTYPE(None)(lambda x=Nasty(): None) -try: - WINFUNCTYPE -except NameError: - pass -else: - class StdcallCallbacks(Callbacks): +@need_symbol('WINFUNCTYPE') +class StdcallCallbacks(Callbacks): + try: functype = WINFUNCTYPE + except NameError: + pass ################################################################ @@ -178,7 +179,7 @@ class SampleCallbacksTestCase(unittest.TestCase): from ctypes.util import find_library libc_path = find_library("c") if not libc_path: - return # cannot test + self.skipTest('could not find libc') libc = CDLL(libc_path) @CFUNCTYPE(c_int, POINTER(c_int), POINTER(c_int)) @@ -190,23 +191,19 @@ class SampleCallbacksTestCase(unittest.TestCase): libc.qsort(array, len(array), sizeof(c_int), cmp_func) self.assertEqual(array[:], [1, 5, 7, 33, 99]) - try: - WINFUNCTYPE - except NameError: - pass - else: - def test_issue_8959_b(self): - from ctypes.wintypes import BOOL, HWND, LPARAM + @need_symbol('WINFUNCTYPE') + def test_issue_8959_b(self): + from ctypes.wintypes import BOOL, HWND, LPARAM + global windowCount + windowCount = 0 + + @WINFUNCTYPE(BOOL, HWND, LPARAM) + def EnumWindowsCallbackFunc(hwnd, lParam): global windowCount - windowCount = 0 + windowCount += 1 + return True #Allow windows to keep enumerating - @WINFUNCTYPE(BOOL, HWND, LPARAM) - def EnumWindowsCallbackFunc(hwnd, lParam): - global windowCount - windowCount += 1 - return True #Allow windows to keep enumerating - - windll.user32.EnumWindows(EnumWindowsCallbackFunc, 0) + windll.user32.EnumWindows(EnumWindowsCallbackFunc, 0) def test_callback_register_int(self): # Issue #8275: buggy handling of callback args under Win64 diff --git a/Darwin/lib/python3.4/ctypes/test/test_cast.py b/Darwin/lib/python3.5/ctypes/test/test_cast.py similarity index 92% rename from Darwin/lib/python3.4/ctypes/test/test_cast.py rename to Darwin/lib/python3.5/ctypes/test/test_cast.py index 32496f6..187d2bd 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_cast.py +++ b/Darwin/lib/python3.5/ctypes/test/test_cast.py @@ -1,4 +1,5 @@ from ctypes import * +from ctypes.test import need_symbol import unittest import sys @@ -75,15 +76,11 @@ class Test(unittest.TestCase): self.assertEqual(cast(cast(s, c_void_p), c_char_p).value, b"hiho") - try: - c_wchar_p - except NameError: - pass - else: - def test_wchar_p(self): - s = c_wchar_p("hiho") - self.assertEqual(cast(cast(s, c_void_p), c_wchar_p).value, - "hiho") + @need_symbol('c_wchar_p') + def test_wchar_p(self): + s = c_wchar_p("hiho") + self.assertEqual(cast(cast(s, c_void_p), c_wchar_p).value, + "hiho") if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_cfuncs.py b/Darwin/lib/python3.5/ctypes/test/test_cfuncs.py similarity index 97% rename from Darwin/lib/python3.4/ctypes/test/test_cfuncs.py rename to Darwin/lib/python3.5/ctypes/test/test_cfuncs.py index a080496..ac2240f 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_cfuncs.py +++ b/Darwin/lib/python3.5/ctypes/test/test_cfuncs.py @@ -3,6 +3,7 @@ import unittest from ctypes import * +from ctypes.test import need_symbol import _ctypes_test @@ -193,7 +194,7 @@ class CFunctions(unittest.TestCase): try: WinDLL except NameError: - pass + def stdcall_dll(*_): pass else: class 
stdcall_dll(WinDLL): def __getattr__(self, name): @@ -203,9 +204,9 @@ else: setattr(self, name, func) return func - class stdcallCFunctions(CFunctions): - _dll = stdcall_dll(_ctypes_test.__file__) - pass +@need_symbol('WinDLL') +class stdcallCFunctions(CFunctions): + _dll = stdcall_dll(_ctypes_test.__file__) if __name__ == '__main__': unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_checkretval.py b/Darwin/lib/python3.5/ctypes/test/test_checkretval.py similarity index 74% rename from Darwin/lib/python3.4/ctypes/test/test_checkretval.py rename to Darwin/lib/python3.5/ctypes/test/test_checkretval.py index 19bb813..e9567dc 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_checkretval.py +++ b/Darwin/lib/python3.5/ctypes/test/test_checkretval.py @@ -1,6 +1,7 @@ import unittest from ctypes import * +from ctypes.test import need_symbol class CHECKED(c_int): def _check_retval_(value): @@ -25,15 +26,11 @@ class Test(unittest.TestCase): del dll._testfunc_p_p.restype self.assertEqual(42, dll._testfunc_p_p(42)) - try: - oledll - except NameError: - pass - else: - def test_oledll(self): - self.assertRaises(OSError, - oledll.oleaut32.CreateTypeLib2, - 0, None, None) + @need_symbol('oledll') + def test_oledll(self): + self.assertRaises(OSError, + oledll.oleaut32.CreateTypeLib2, + 0, None, None) if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_delattr.py b/Darwin/lib/python3.5/ctypes/test/test_delattr.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_delattr.py rename to Darwin/lib/python3.5/ctypes/test/test_delattr.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_errno.py b/Darwin/lib/python3.5/ctypes/test/test_errno.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_errno.py rename to Darwin/lib/python3.5/ctypes/test/test_errno.py diff --git a/Darwin/lib/python3.5/ctypes/test/test_find.py b/Darwin/lib/python3.5/ctypes/test/test_find.py new file mode 100644 index 0000000..e6bc19d --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_find.py @@ -0,0 +1,91 @@ +import unittest +import os +import sys +import test.support +from ctypes import * +from ctypes.util import find_library + +# On some systems, loading the OpenGL libraries needs the RTLD_GLOBAL mode. 
+class Test_OpenGL_libs(unittest.TestCase): + @classmethod + def setUpClass(cls): + lib_gl = lib_glu = lib_gle = None + if sys.platform == "win32": + lib_gl = find_library("OpenGL32") + lib_glu = find_library("Glu32") + elif sys.platform == "darwin": + lib_gl = lib_glu = find_library("OpenGL") + else: + lib_gl = find_library("GL") + lib_glu = find_library("GLU") + lib_gle = find_library("gle") + + ## print, for debugging + if test.support.verbose: + print("OpenGL libraries:") + for item in (("GL", lib_gl), + ("GLU", lib_glu), + ("gle", lib_gle)): + print("\t", item) + + cls.gl = cls.glu = cls.gle = None + if lib_gl: + try: + cls.gl = CDLL(lib_gl, mode=RTLD_GLOBAL) + except OSError: + pass + if lib_glu: + try: + cls.glu = CDLL(lib_glu, RTLD_GLOBAL) + except OSError: + pass + if lib_gle: + try: + cls.gle = CDLL(lib_gle) + except OSError: + pass + + @classmethod + def tearDownClass(cls): + cls.gl = cls.glu = cls.gle = None + + def test_gl(self): + if self.gl is None: + self.skipTest('lib_gl not available') + self.gl.glClearIndex + + def test_glu(self): + if self.glu is None: + self.skipTest('lib_glu not available') + self.glu.gluBeginCurve + + def test_gle(self): + if self.gle is None: + self.skipTest('lib_gle not available') + self.gle.gleGetJoinStyle + +# On platforms where the default shared library suffix is '.so', +# at least some libraries can be loaded as attributes of the cdll +# object, since ctypes now tries loading the lib again +# with '.so' appended of the first try fails. +# +# Won't work for libc, unfortunately. OTOH, it isn't +# needed for libc since this is already mapped into the current +# process (?) +# +# On MAC OSX, it won't work either, because dlopen() needs a full path, +# and the default suffix is either none or '.dylib'. +@unittest.skip('test disabled') +@unittest.skipUnless(os.name=="posix" and sys.platform != "darwin", + 'test not suitable for this platform') +class LoadLibs(unittest.TestCase): + def test_libm(self): + import math + libm = cdll.libm + sqrt = libm.sqrt + sqrt.argtypes = (c_double,) + sqrt.restype = c_double + self.assertEqual(sqrt(2), math.sqrt(2)) + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_frombuffer.py b/Darwin/lib/python3.5/ctypes/test/test_frombuffer.py similarity index 53% rename from Darwin/lib/python3.4/ctypes/test/test_frombuffer.py rename to Darwin/lib/python3.5/ctypes/test/test_frombuffer.py index ffb27a6..6aa2d1c 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_frombuffer.py +++ b/Darwin/lib/python3.5/ctypes/test/test_frombuffer.py @@ -10,7 +10,7 @@ class X(Structure): self._init_called = True class Test(unittest.TestCase): - def test_fom_buffer(self): + def test_from_buffer(self): a = array.array("i", range(16)) x = (c_int * 16).from_buffer(a) @@ -23,25 +23,37 @@ class Test(unittest.TestCase): a[0], a[-1] = 200, -200 self.assertEqual(x[:], a.tolist()) - self.assertIn(a, x._objects.values()) + self.assertRaises(BufferError, a.append, 100) + self.assertRaises(BufferError, a.pop) - self.assertRaises(ValueError, - c_int.from_buffer, a, -1) + del x; del y; gc.collect(); gc.collect(); gc.collect() + a.append(100) + a.pop() + x = (c_int * 16).from_buffer(a) + + self.assertIn(a, [obj.obj if isinstance(obj, memoryview) else obj + for obj in x._objects.values()]) expected = x[:] del a; gc.collect(); gc.collect(); gc.collect() self.assertEqual(x[:], expected) - self.assertRaises(TypeError, - (c_char * 16).from_buffer, "a" * 16) + with self.assertRaises(TypeError): + (c_char * 
16).from_buffer(b"a" * 16) + with self.assertRaises(TypeError): + (c_char * 16).from_buffer("a" * 16) - def test_fom_buffer_with_offset(self): + def test_from_buffer_with_offset(self): a = array.array("i", range(16)) x = (c_int * 15).from_buffer(a, sizeof(c_int)) self.assertEqual(x[:], a.tolist()[1:]) - self.assertRaises(ValueError, lambda: (c_int * 16).from_buffer(a, sizeof(c_int))) - self.assertRaises(ValueError, lambda: (c_int * 1).from_buffer(a, 16 * sizeof(c_int))) + with self.assertRaises(ValueError): + c_int.from_buffer(a, -1) + with self.assertRaises(ValueError): + (c_int * 16).from_buffer(a, sizeof(c_int)) + with self.assertRaises(ValueError): + (c_int * 1).from_buffer(a, 16 * sizeof(c_int)) def test_from_buffer_copy(self): a = array.array("i", range(16)) @@ -56,26 +68,30 @@ class Test(unittest.TestCase): a[0], a[-1] = 200, -200 self.assertEqual(x[:], list(range(16))) - self.assertEqual(x._objects, None) + a.append(100) + self.assertEqual(x[:], list(range(16))) - self.assertRaises(ValueError, - c_int.from_buffer, a, -1) + self.assertEqual(x._objects, None) del a; gc.collect(); gc.collect(); gc.collect() self.assertEqual(x[:], list(range(16))) x = (c_char * 16).from_buffer_copy(b"a" * 16) self.assertEqual(x[:], b"a" * 16) + with self.assertRaises(TypeError): + (c_char * 16).from_buffer_copy("a" * 16) - def test_fom_buffer_copy_with_offset(self): + def test_from_buffer_copy_with_offset(self): a = array.array("i", range(16)) x = (c_int * 15).from_buffer_copy(a, sizeof(c_int)) self.assertEqual(x[:], a.tolist()[1:]) - self.assertRaises(ValueError, - (c_int * 16).from_buffer_copy, a, sizeof(c_int)) - self.assertRaises(ValueError, - (c_int * 1).from_buffer_copy, a, 16 * sizeof(c_int)) + with self.assertRaises(ValueError): + c_int.from_buffer_copy(a, -1) + with self.assertRaises(ValueError): + (c_int * 16).from_buffer_copy(a, sizeof(c_int)) + with self.assertRaises(ValueError): + (c_int * 1).from_buffer_copy(a, 16 * sizeof(c_int)) if __name__ == '__main__': unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_funcptr.py b/Darwin/lib/python3.5/ctypes/test/test_funcptr.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_funcptr.py rename to Darwin/lib/python3.5/ctypes/test/test_funcptr.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_functions.py b/Darwin/lib/python3.5/ctypes/test/test_functions.py similarity index 88% rename from Darwin/lib/python3.4/ctypes/test/test_functions.py rename to Darwin/lib/python3.5/ctypes/test/test_functions.py index 07eeb68..7562892 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_functions.py +++ b/Darwin/lib/python3.5/ctypes/test/test_functions.py @@ -6,6 +6,7 @@ Later... 
""" from ctypes import * +from ctypes.test import need_symbol import sys, unittest try: @@ -63,22 +64,16 @@ class FunctionTestCase(unittest.TestCase): pass + @need_symbol('c_wchar') def test_wchar_parm(self): - try: - c_wchar - except NameError: - return f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double] result = f(1, "x", 3, 4, 5.0, 6.0) self.assertEqual(result, 139) self.assertEqual(type(result), int) + @need_symbol('c_wchar') def test_wchar_result(self): - try: - c_wchar - except NameError: - return f = dll._testfunc_i_bhilfd f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double] f.restype = c_wchar @@ -155,11 +150,8 @@ class FunctionTestCase(unittest.TestCase): self.assertEqual(result, -21) self.assertEqual(type(result), float) + @need_symbol('c_longlong') def test_longlongresult(self): - try: - c_longlong - except NameError: - return f = dll._testfunc_q_bhilfd f.restype = c_longlong f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_double] @@ -296,6 +288,7 @@ class FunctionTestCase(unittest.TestCase): result = f(-10, cb) self.assertEqual(result, -18) + @need_symbol('c_longlong') def test_longlong_callbacks(self): f = dll._testfunc_callback_q_qf @@ -348,16 +341,16 @@ class FunctionTestCase(unittest.TestCase): s2h = dll.ret_2h_func(inp) self.assertEqual((s2h.x, s2h.y), (99*2, 88*3)) - if sys.platform == "win32": - def test_struct_return_2H_stdcall(self): - class S2H(Structure): - _fields_ = [("x", c_short), - ("y", c_short)] + @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') + def test_struct_return_2H_stdcall(self): + class S2H(Structure): + _fields_ = [("x", c_short), + ("y", c_short)] - windll.s_ret_2h_func.restype = S2H - windll.s_ret_2h_func.argtypes = [S2H] - s2h = windll.s_ret_2h_func(S2H(99, 88)) - self.assertEqual((s2h.x, s2h.y), (99*2, 88*3)) + windll.s_ret_2h_func.restype = S2H + windll.s_ret_2h_func.argtypes = [S2H] + s2h = windll.s_ret_2h_func(S2H(99, 88)) + self.assertEqual((s2h.x, s2h.y), (99*2, 88*3)) def test_struct_return_8H(self): class S8I(Structure): @@ -376,23 +369,24 @@ class FunctionTestCase(unittest.TestCase): self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h), (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) - if sys.platform == "win32": - def test_struct_return_8H_stdcall(self): - class S8I(Structure): - _fields_ = [("a", c_int), - ("b", c_int), - ("c", c_int), - ("d", c_int), - ("e", c_int), - ("f", c_int), - ("g", c_int), - ("h", c_int)] - windll.s_ret_8i_func.restype = S8I - windll.s_ret_8i_func.argtypes = [S8I] - inp = S8I(9, 8, 7, 6, 5, 4, 3, 2) - s8i = windll.s_ret_8i_func(inp) - self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h), - (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) + @unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') + def test_struct_return_8H_stdcall(self): + class S8I(Structure): + _fields_ = [("a", c_int), + ("b", c_int), + ("c", c_int), + ("d", c_int), + ("e", c_int), + ("f", c_int), + ("g", c_int), + ("h", c_int)] + windll.s_ret_8i_func.restype = S8I + windll.s_ret_8i_func.argtypes = [S8I] + inp = S8I(9, 8, 7, 6, 5, 4, 3, 2) + s8i = windll.s_ret_8i_func(inp) + self.assertEqual( + (s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h), + (9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9)) def test_sf1651235(self): # see http://www.python.org/sf/1651235 diff --git a/Darwin/lib/python3.4/ctypes/test/test_incomplete.py b/Darwin/lib/python3.5/ctypes/test/test_incomplete.py similarity index 100% rename from 
Darwin/lib/python3.4/ctypes/test/test_incomplete.py rename to Darwin/lib/python3.5/ctypes/test/test_incomplete.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_init.py b/Darwin/lib/python3.5/ctypes/test/test_init.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_init.py rename to Darwin/lib/python3.5/ctypes/test/test_init.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_internals.py b/Darwin/lib/python3.5/ctypes/test/test_internals.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_internals.py rename to Darwin/lib/python3.5/ctypes/test/test_internals.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_keeprefs.py b/Darwin/lib/python3.5/ctypes/test/test_keeprefs.py similarity index 98% rename from Darwin/lib/python3.4/ctypes/test/test_keeprefs.py rename to Darwin/lib/python3.5/ctypes/test/test_keeprefs.py index db8adfb..94c0257 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_keeprefs.py +++ b/Darwin/lib/python3.5/ctypes/test/test_keeprefs.py @@ -94,7 +94,8 @@ class PointerTestCase(unittest.TestCase): self.assertEqual(x._objects, {'1': i}) class DeletePointerTestCase(unittest.TestCase): - def X_test(self): + @unittest.skip('test disabled') + def test_X(self): class X(Structure): _fields_ = [("p", POINTER(c_char_p))] x = X() diff --git a/Darwin/lib/python3.4/ctypes/test/test_libc.py b/Darwin/lib/python3.5/ctypes/test/test_libc.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_libc.py rename to Darwin/lib/python3.5/ctypes/test/test_libc.py diff --git a/Darwin/lib/python3.5/ctypes/test/test_loading.py b/Darwin/lib/python3.5/ctypes/test/test_loading.py new file mode 100644 index 0000000..28468c1 --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_loading.py @@ -0,0 +1,121 @@ +from ctypes import * +import os +import sys +import unittest +import test.support +from ctypes.util import find_library + +libc_name = None + +def setUpModule(): + global libc_name + if os.name == "nt": + libc_name = find_library("c") + elif os.name == "ce": + libc_name = "coredll" + elif sys.platform == "cygwin": + libc_name = "cygwin1.dll" + else: + libc_name = find_library("c") + + if test.support.verbose: + print("libc_name is", libc_name) + +class LoaderTest(unittest.TestCase): + + unknowndll = "xxrandomnamexx" + + def test_load(self): + if libc_name is None: + self.skipTest('could not find libc') + CDLL(libc_name) + CDLL(os.path.basename(libc_name)) + self.assertRaises(OSError, CDLL, self.unknowndll) + + def test_load_version(self): + if libc_name is None: + self.skipTest('could not find libc') + if os.path.basename(libc_name) != 'libc.so.6': + self.skipTest('wrong libc path for test') + cdll.LoadLibrary("libc.so.6") + # linux uses version, libc 9 should not exist + self.assertRaises(OSError, cdll.LoadLibrary, "libc.so.9") + self.assertRaises(OSError, cdll.LoadLibrary, self.unknowndll) + + def test_find(self): + for name in ("c", "m"): + lib = find_library(name) + if lib: + cdll.LoadLibrary(lib) + CDLL(lib) + + @unittest.skipUnless(os.name in ("nt", "ce"), + 'test specific to Windows (NT/CE)') + def test_load_library(self): + # CRT is no longer directly loadable. See issue23606 for the + # discussion about alternative approaches. 
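As the comment above notes, newer MSVC runtimes can no longer be located with find_library, which is why the hard assertion on libc_name was dropped. The portable pattern used throughout these tests is "look the library up, skip when nothing is found"; a minimal sketch of it (the test name is illustrative, not part of the patch):

    import unittest
    from ctypes import CDLL
    from ctypes.util import find_library

    class LibcSmokeTest(unittest.TestCase):
        def test_open_libc(self):
            name = find_library("c")        # may be None, e.g. on recent Windows
            if name is None:
                self.skipTest("could not find libc")
            CDLL(name)                      # raises OSError if the load fails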
+ #self.assertIsNotNone(libc_name) + if test.support.verbose: + print(find_library("kernel32")) + print(find_library("user32")) + + if os.name == "nt": + windll.kernel32.GetModuleHandleW + windll["kernel32"].GetModuleHandleW + windll.LoadLibrary("kernel32").GetModuleHandleW + WinDLL("kernel32").GetModuleHandleW + elif os.name == "ce": + windll.coredll.GetModuleHandleW + windll["coredll"].GetModuleHandleW + windll.LoadLibrary("coredll").GetModuleHandleW + WinDLL("coredll").GetModuleHandleW + + @unittest.skipUnless(os.name in ("nt", "ce"), + 'test specific to Windows (NT/CE)') + def test_load_ordinal_functions(self): + import _ctypes_test + dll = WinDLL(_ctypes_test.__file__) + # We load the same function both via ordinal and name + func_ord = dll[2] + func_name = dll.GetString + # addressof gets the address where the function pointer is stored + a_ord = addressof(func_ord) + a_name = addressof(func_name) + f_ord_addr = c_void_p.from_address(a_ord).value + f_name_addr = c_void_p.from_address(a_name).value + self.assertEqual(hex(f_ord_addr), hex(f_name_addr)) + + self.assertRaises(AttributeError, dll.__getitem__, 1234) + + @unittest.skipUnless(os.name == "nt", 'Windows-specific test') + def test_1703286_A(self): + from _ctypes import LoadLibrary, FreeLibrary + # On winXP 64-bit, advapi32 loads at an address that does + # NOT fit into a 32-bit integer. FreeLibrary must be able + # to accept this address. + + # These are tests for http://www.python.org/sf/1703286 + handle = LoadLibrary("advapi32") + FreeLibrary(handle) + + @unittest.skipUnless(os.name == "nt", 'Windows-specific test') + def test_1703286_B(self): + # Since on winXP 64-bit advapi32 loads like described + # above, the (arbitrarily selected) CloseEventLog function + # also has a high address. 'call_function' should accept + # addresses so large. + from _ctypes import call_function + advapi32 = windll.advapi32 + # Calling CloseEventLog with a NULL argument should fail, + # but the call should not segfault or so. + self.assertEqual(0, advapi32.CloseEventLog(None)) + windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p + windll.kernel32.GetProcAddress.restype = c_void_p + proc = windll.kernel32.GetProcAddress(advapi32._handle, + b"CloseEventLog") + self.assertTrue(proc) + # This is the real test: call the function via 'call_function' + self.assertEqual(0, call_function(proc, (None,))) + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_macholib.py b/Darwin/lib/python3.5/ctypes/test/test_macholib.py similarity index 63% rename from Darwin/lib/python3.4/ctypes/test/test_macholib.py rename to Darwin/lib/python3.5/ctypes/test/test_macholib.py index 8a2b2c3..6b35269 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_macholib.py +++ b/Darwin/lib/python3.5/ctypes/test/test_macholib.py @@ -43,21 +43,21 @@ def find_lib(name): raise ValueError("%s not found" % (name,)) class MachOTest(unittest.TestCase): - if sys.platform == "darwin": - def test_find(self): + @unittest.skipUnless(sys.platform == "darwin", 'OSX-specific test') + def test_find(self): - self.assertEqual(find_lib('pthread'), - '/usr/lib/libSystem.B.dylib') + self.assertEqual(find_lib('pthread'), + '/usr/lib/libSystem.B.dylib') - result = find_lib('z') - # Issue #21093: dyld default search path includes $HOME/lib and - # /usr/local/lib before /usr/lib, which caused test failures if - # a local copy of libz exists in one of them. Now ignore the head - # of the path. 
- self.assertRegex(result, r".*/lib/libz\..*.*\.dylib") + result = find_lib('z') + # Issue #21093: dyld default search path includes $HOME/lib and + # /usr/local/lib before /usr/lib, which caused test failures if + # a local copy of libz exists in one of them. Now ignore the head + # of the path. + self.assertRegex(result, r".*/lib/libz\..*.*\.dylib") - self.assertEqual(find_lib('IOKit'), - '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit') + self.assertEqual(find_lib('IOKit'), + '/System/Library/Frameworks/IOKit.framework/Versions/A/IOKit') if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_memfunctions.py b/Darwin/lib/python3.5/ctypes/test/test_memfunctions.py similarity index 65% rename from Darwin/lib/python3.4/ctypes/test/test_memfunctions.py rename to Darwin/lib/python3.5/ctypes/test/test_memfunctions.py index aec4aaa..e784b9a 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_memfunctions.py +++ b/Darwin/lib/python3.5/ctypes/test/test_memfunctions.py @@ -2,17 +2,19 @@ import sys from test import support import unittest from ctypes import * +from ctypes.test import need_symbol class MemFunctionsTest(unittest.TestCase): -## def test_overflow(self): -## # string_at and wstring_at must use the Python calling -## # convention (which acquires the GIL and checks the Python -## # error flag). Provoke an error and catch it; see also issue -## # #3554: -## self.assertRaises((OverflowError, MemoryError, SystemError), -## lambda: wstring_at(u"foo", sys.maxint - 1)) -## self.assertRaises((OverflowError, MemoryError, SystemError), -## lambda: string_at("foo", sys.maxint - 1)) + @unittest.skip('test disabled') + def test_overflow(self): + # string_at and wstring_at must use the Python calling + # convention (which acquires the GIL and checks the Python + # error flag). 
Provoke an error and catch it; see also issue + # #3554: + self.assertRaises((OverflowError, MemoryError, SystemError), + lambda: wstring_at(u"foo", sys.maxint - 1)) + self.assertRaises((OverflowError, MemoryError, SystemError), + lambda: string_at("foo", sys.maxint - 1)) def test_memmove(self): # large buffers apparently increase the chance that the memory @@ -61,21 +63,17 @@ class MemFunctionsTest(unittest.TestCase): self.assertEqual(string_at(b"foo bar", 7), b"foo bar") self.assertEqual(string_at(b"foo bar", 3), b"foo") - try: - create_unicode_buffer - except NameError: - pass - else: - def test_wstring_at(self): - p = create_unicode_buffer("Hello, World") - a = create_unicode_buffer(1000000) - result = memmove(a, p, len(p) * sizeof(c_wchar)) - self.assertEqual(a.value, "Hello, World") + @need_symbol('create_unicode_buffer') + def test_wstring_at(self): + p = create_unicode_buffer("Hello, World") + a = create_unicode_buffer(1000000) + result = memmove(a, p, len(p) * sizeof(c_wchar)) + self.assertEqual(a.value, "Hello, World") - self.assertEqual(wstring_at(a), "Hello, World") - self.assertEqual(wstring_at(a, 5), "Hello") - self.assertEqual(wstring_at(a, 16), "Hello, World\0\0\0\0") - self.assertEqual(wstring_at(a, 0), "") + self.assertEqual(wstring_at(a), "Hello, World") + self.assertEqual(wstring_at(a, 5), "Hello") + self.assertEqual(wstring_at(a, 16), "Hello, World\0\0\0\0") + self.assertEqual(wstring_at(a, 0), "") if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_numbers.py b/Darwin/lib/python3.5/ctypes/test/test_numbers.py similarity index 91% rename from Darwin/lib/python3.4/ctypes/test/test_numbers.py rename to Darwin/lib/python3.5/ctypes/test/test_numbers.py index 3b7194f..2afca26 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_numbers.py +++ b/Darwin/lib/python3.5/ctypes/test/test_numbers.py @@ -82,12 +82,13 @@ class NumberTestCase(unittest.TestCase): self.assertRaises(TypeError, t, "") self.assertRaises(TypeError, t, None) -## def test_valid_ranges(self): -## # invalid values of the correct type -## # raise ValueError (not OverflowError) -## for t, (l, h) in zip(unsigned_types, unsigned_ranges): -## self.assertRaises(ValueError, t, l-1) -## self.assertRaises(ValueError, t, h+1) + @unittest.skip('test disabled') + def test_valid_ranges(self): + # invalid values of the correct type + # raise ValueError (not OverflowError) + for t, (l, h) in zip(unsigned_types, unsigned_ranges): + self.assertRaises(ValueError, t, l-1) + self.assertRaises(ValueError, t, h+1) def test_from_param(self): # the from_param class method attribute always @@ -200,16 +201,17 @@ class NumberTestCase(unittest.TestCase): self.assertEqual(v.value, b'?') # array does not support c_bool / 't' - # def test_bool_from_address(self): - # from ctypes import c_bool - # from array import array - # a = array(c_bool._type_, [True]) - # v = t.from_address(a.buffer_info()[0]) - # self.assertEqual(v.value, a[0]) - # self.assertEqual(type(v) is t) - # a[0] = False - # self.assertEqual(v.value, a[0]) - # self.assertEqual(type(v) is t) + @unittest.skip('test disabled') + def test_bool_from_address(self): + from ctypes import c_bool + from array import array + a = array(c_bool._type_, [True]) + v = t.from_address(a.buffer_info()[0]) + self.assertEqual(v.value, a[0]) + self.assertEqual(type(v) is t) + a[0] = False + self.assertEqual(v.value, a[0]) + self.assertEqual(type(v) is t) def test_init(self): # c_int() can be initialized from Python's int, and c_int. 
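The hunks above (and throughout this patch) replace try/except NameError guards with a @need_symbol(...) decorator imported from ctypes.test. Its definition is not part of this diff; a helper with that behaviour would look roughly like the following sketch (the real code lives in ctypes/test/__init__.py and may differ in detail):

    import ctypes
    import unittest

    def need_symbol(name):
        # Skip the decorated test or test class when ctypes does not provide
        # `name` (for example c_wchar_p on builds without wide-char support).
        return unittest.skipUnless(hasattr(ctypes, name),
                                   '{!r} is required'.format(name))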
@@ -227,8 +229,9 @@ class NumberTestCase(unittest.TestCase): if (hasattr(t, "__ctype_le__")): self.assertRaises(OverflowError, t.__ctype_le__, big_int) -## def test_perf(self): -## check_perf() + @unittest.skip('test disabled') + def test_perf(self): + check_perf() from ctypes import _SimpleCData class c_int_S(_SimpleCData): diff --git a/Darwin/lib/python3.4/ctypes/test/test_objects.py b/Darwin/lib/python3.5/ctypes/test/test_objects.py similarity index 83% rename from Darwin/lib/python3.4/ctypes/test/test_objects.py rename to Darwin/lib/python3.5/ctypes/test/test_objects.py index f075c20..ef7b20b 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_objects.py +++ b/Darwin/lib/python3.5/ctypes/test/test_objects.py @@ -59,12 +59,9 @@ import unittest, doctest, sys import ctypes.test.test_objects class TestCase(unittest.TestCase): - if sys.hexversion > 0x02040000: - # Python 2.3 has no ELLIPSIS flag, so we don't test with this - # version: - def test(self): - doctest.testmod(ctypes.test.test_objects) + def test(self): + failures, tests = doctest.testmod(ctypes.test.test_objects) + self.assertFalse(failures, 'doctests failed, see output above') if __name__ == '__main__': - if sys.hexversion > 0x02040000: - doctest.testmod(ctypes.test.test_objects) + doctest.testmod(ctypes.test.test_objects) diff --git a/Darwin/lib/python3.4/ctypes/test/test_parameters.py b/Darwin/lib/python3.5/ctypes/test/test_parameters.py similarity index 94% rename from Darwin/lib/python3.4/ctypes/test/test_parameters.py rename to Darwin/lib/python3.5/ctypes/test/test_parameters.py index 12b5bd5..e56bccf 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_parameters.py +++ b/Darwin/lib/python3.5/ctypes/test/test_parameters.py @@ -1,4 +1,5 @@ import unittest, sys +from ctypes.test import need_symbol class SimpleTypesTestCase(unittest.TestCase): @@ -35,10 +36,9 @@ class SimpleTypesTestCase(unittest.TestCase): self.assertEqual(CVOIDP.from_param("abc"), "abcabc") self.assertEqual(CCHARP.from_param("abc"), "abcabcabcabc") - try: - from ctypes import c_wchar_p - except ImportError: - return + @need_symbol('c_wchar_p') + def test_subclasses_c_wchar_p(self): + from ctypes import c_wchar_p class CWCHARP(c_wchar_p): def from_param(cls, value): @@ -66,13 +66,9 @@ class SimpleTypesTestCase(unittest.TestCase): a = c_char_p(b"123") self.assertIs(c_char_p.from_param(a), a) + @need_symbol('c_wchar_p') def test_cw_strings(self): - from ctypes import byref - try: - from ctypes import c_wchar_p - except ImportError: -## print "(No c_wchar_p)" - return + from ctypes import byref, c_wchar_p c_wchar_p.from_param("123") @@ -139,9 +135,6 @@ class SimpleTypesTestCase(unittest.TestCase): self.assertRaises(TypeError, LPINT.from_param, c_long*3) self.assertRaises(TypeError, LPINT.from_param, c_uint*3) -## def test_performance(self): -## check_perf() - def test_noctypes_argtype(self): import _ctypes_test from ctypes import CDLL, c_void_p, ArgumentError diff --git a/Darwin/lib/python3.4/ctypes/test/test_pep3118.py b/Darwin/lib/python3.5/ctypes/test/test_pep3118.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_pep3118.py rename to Darwin/lib/python3.5/ctypes/test/test_pep3118.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_pickling.py b/Darwin/lib/python3.5/ctypes/test/test_pickling.py similarity index 84% rename from Darwin/lib/python3.4/ctypes/test/test_pickling.py rename to Darwin/lib/python3.5/ctypes/test/test_pickling.py index 8c91222..c4a79b9 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_pickling.py +++ 
b/Darwin/lib/python3.5/ctypes/test/test_pickling.py @@ -14,9 +14,9 @@ class X(Structure): class Y(X): _fields_ = [("str", c_char_p)] -class PickleTest(unittest.TestCase): +class PickleTest: def dumps(self, item): - return pickle.dumps(item) + return pickle.dumps(item, self.proto) def loads(self, item): return pickle.loads(item) @@ -67,17 +67,15 @@ class PickleTest(unittest.TestCase): self.assertRaises(ValueError, lambda: self.dumps(item)) def test_wchar(self): - pickle.dumps(c_char(b"x")) + self.dumps(c_char(b"x")) # Issue 5049 - pickle.dumps(c_wchar("x")) + self.dumps(c_wchar("x")) -class PickleTest_1(PickleTest): - def dumps(self, item): - return pickle.dumps(item, 1) - -class PickleTest_2(PickleTest): - def dumps(self, item): - return pickle.dumps(item, 2) +for proto in range(pickle.HIGHEST_PROTOCOL + 1): + name = 'PickleTest_%s' % proto + globals()[name] = type(name, + (PickleTest, unittest.TestCase), + {'proto': proto}) if __name__ == "__main__": unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_pointers.py b/Darwin/lib/python3.5/ctypes/test/test_pointers.py similarity index 93% rename from Darwin/lib/python3.4/ctypes/test/test_pointers.py rename to Darwin/lib/python3.5/ctypes/test/test_pointers.py index f8ef0ab..40738f7 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_pointers.py +++ b/Darwin/lib/python3.5/ctypes/test/test_pointers.py @@ -22,7 +22,10 @@ class PointersTestCase(unittest.TestCase): def test_pass_pointers(self): dll = CDLL(_ctypes_test.__file__) func = dll._testfunc_p_p - func.restype = c_long + if sizeof(c_longlong) == sizeof(c_void_p): + func.restype = c_longlong + else: + func.restype = c_long i = c_int(12345678) ## func.argtypes = (POINTER(c_int),) @@ -188,5 +191,13 @@ class PointersTestCase(unittest.TestCase): mth = WINFUNCTYPE(None)(42, "name", (), None) self.assertEqual(bool(mth), True) + def test_pointer_type_name(self): + LargeNamedType = type('T' * 2 ** 25, (Structure,), {}) + self.assertTrue(POINTER(LargeNamedType)) + + def test_pointer_type_str_name(self): + large_string = 'T' * 2 ** 25 + self.assertTrue(POINTER(large_string)) + if __name__ == '__main__': unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_prototypes.py b/Darwin/lib/python3.5/ctypes/test/test_prototypes.py similarity index 73% rename from Darwin/lib/python3.4/ctypes/test/test_prototypes.py rename to Darwin/lib/python3.5/ctypes/test/test_prototypes.py index 6ef1b1b..cd0c649 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_prototypes.py +++ b/Darwin/lib/python3.5/ctypes/test/test_prototypes.py @@ -1,4 +1,5 @@ from ctypes import * +from ctypes.test import need_symbol import unittest # IMPORTANT INFO: @@ -68,7 +69,10 @@ class CharPointersTestCase(unittest.TestCase): def test_int_pointer_arg(self): func = testdll._testfunc_p_p - func.restype = c_long + if sizeof(c_longlong) == sizeof(c_void_p): + func.restype = c_longlong + else: + func.restype = c_long self.assertEqual(0, func(0)) ci = c_int(0) @@ -135,13 +139,14 @@ class CharPointersTestCase(unittest.TestCase): func(pointer(c_int())) func((c_int * 3)()) - try: - func.restype = c_wchar_p - except NameError: - pass - else: - self.assertEqual(None, func(c_wchar_p(None))) - self.assertEqual("123", func(c_wchar_p("123"))) + @need_symbol('c_wchar_p') + def test_c_void_p_arg_with_c_wchar_p(self): + func = testdll._testfunc_p_p + func.restype = c_wchar_p + func.argtypes = c_void_p, + + self.assertEqual(None, func(c_wchar_p(None))) + self.assertEqual("123", func(c_wchar_p("123"))) def test_instance(self): func = 
testdll._testfunc_p_p @@ -156,51 +161,47 @@ class CharPointersTestCase(unittest.TestCase): func.argtypes = None self.assertEqual(None, func(X())) -try: - c_wchar -except NameError: - pass -else: - class WCharPointersTestCase(unittest.TestCase): +@need_symbol('c_wchar') +class WCharPointersTestCase(unittest.TestCase): - def setUp(self): - func = testdll._testfunc_p_p - func.restype = c_int - func.argtypes = None + def setUp(self): + func = testdll._testfunc_p_p + func.restype = c_int + func.argtypes = None - def test_POINTER_c_wchar_arg(self): - func = testdll._testfunc_p_p - func.restype = c_wchar_p - func.argtypes = POINTER(c_wchar), + def test_POINTER_c_wchar_arg(self): + func = testdll._testfunc_p_p + func.restype = c_wchar_p + func.argtypes = POINTER(c_wchar), - self.assertEqual(None, func(None)) - self.assertEqual("123", func("123")) - self.assertEqual(None, func(c_wchar_p(None))) - self.assertEqual("123", func(c_wchar_p("123"))) + self.assertEqual(None, func(None)) + self.assertEqual("123", func("123")) + self.assertEqual(None, func(c_wchar_p(None))) + self.assertEqual("123", func(c_wchar_p("123"))) - self.assertEqual("123", func(c_wbuffer("123"))) - ca = c_wchar("a") - self.assertEqual("a", func(pointer(ca))[0]) - self.assertEqual("a", func(byref(ca))[0]) + self.assertEqual("123", func(c_wbuffer("123"))) + ca = c_wchar("a") + self.assertEqual("a", func(pointer(ca))[0]) + self.assertEqual("a", func(byref(ca))[0]) - def test_c_wchar_p_arg(self): - func = testdll._testfunc_p_p - func.restype = c_wchar_p - func.argtypes = c_wchar_p, + def test_c_wchar_p_arg(self): + func = testdll._testfunc_p_p + func.restype = c_wchar_p + func.argtypes = c_wchar_p, - c_wchar_p.from_param("123") + c_wchar_p.from_param("123") - self.assertEqual(None, func(None)) - self.assertEqual("123", func("123")) - self.assertEqual(None, func(c_wchar_p(None))) - self.assertEqual("123", func(c_wchar_p("123"))) + self.assertEqual(None, func(None)) + self.assertEqual("123", func("123")) + self.assertEqual(None, func(c_wchar_p(None))) + self.assertEqual("123", func(c_wchar_p("123"))) - # XXX Currently, these raise TypeErrors, although they shouldn't: - self.assertEqual("123", func(c_wbuffer("123"))) - ca = c_wchar("a") - self.assertEqual("a", func(pointer(ca))[0]) - self.assertEqual("a", func(byref(ca))[0]) + # XXX Currently, these raise TypeErrors, although they shouldn't: + self.assertEqual("123", func(c_wbuffer("123"))) + ca = c_wchar("a") + self.assertEqual("a", func(pointer(ca))[0]) + self.assertEqual("a", func(byref(ca))[0]) class ArrayTest(unittest.TestCase): def test(self): diff --git a/Darwin/lib/python3.4/ctypes/test/test_python_api.py b/Darwin/lib/python3.5/ctypes/test/test_python_api.py similarity index 75% rename from Darwin/lib/python3.4/ctypes/test/test_python_api.py rename to Darwin/lib/python3.5/ctypes/test/test_python_api.py index 5eb882a..9c13746 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_python_api.py +++ b/Darwin/lib/python3.5/ctypes/test/test_python_api.py @@ -1,7 +1,6 @@ from ctypes import * import unittest, sys from test import support -from ctypes.test import is_resource_enabled ################################################################ # This section should be moved into ctypes\__init__.py, when it's ready. @@ -39,24 +38,21 @@ class PythonAPITestCase(unittest.TestCase): del pyob self.assertEqual(grc(s), refcnt) - if is_resource_enabled("refcount"): - # This test is unreliable, because it is possible that code in - # unittest changes the refcount of the '42' integer. 
So, it - # is disabled by default. - def test_PyLong_Long(self): - ref42 = grc(42) - pythonapi.PyLong_FromLong.restype = py_object - self.assertEqual(pythonapi.PyLong_FromLong(42), 42) + @support.refcount_test + def test_PyLong_Long(self): + ref42 = grc(42) + pythonapi.PyLong_FromLong.restype = py_object + self.assertEqual(pythonapi.PyLong_FromLong(42), 42) - self.assertEqual(grc(42), ref42) + self.assertEqual(grc(42), ref42) - pythonapi.PyLong_AsLong.argtypes = (py_object,) - pythonapi.PyLong_AsLong.restype = c_long + pythonapi.PyLong_AsLong.argtypes = (py_object,) + pythonapi.PyLong_AsLong.restype = c_long - res = pythonapi.PyLong_AsLong(42) - self.assertEqual(grc(res), ref42 + 1) - del res - self.assertEqual(grc(42), ref42) + res = pythonapi.PyLong_AsLong(42) + self.assertEqual(grc(res), ref42 + 1) + del res + self.assertEqual(grc(42), ref42) @support.refcount_test def test_PyObj_FromPtr(self): diff --git a/Darwin/lib/python3.4/ctypes/test/test_random_things.py b/Darwin/lib/python3.5/ctypes/test/test_random_things.py similarity index 72% rename from Darwin/lib/python3.4/ctypes/test/test_random_things.py rename to Darwin/lib/python3.5/ctypes/test/test_random_things.py index 515acf5..4555ecd 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_random_things.py +++ b/Darwin/lib/python3.5/ctypes/test/test_random_things.py @@ -5,23 +5,22 @@ def callback_func(arg): 42 / arg raise ValueError(arg) -if sys.platform == "win32": +@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') +class call_function_TestCase(unittest.TestCase): + # _ctypes.call_function is deprecated and private, but used by + # Gary Bishp's readline module. If we have it, we must test it as well. - class call_function_TestCase(unittest.TestCase): - # _ctypes.call_function is deprecated and private, but used by - # Gary Bishp's readline module. If we have it, we must test it as well. 
+ def test(self): + from _ctypes import call_function + windll.kernel32.LoadLibraryA.restype = c_void_p + windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p + windll.kernel32.GetProcAddress.restype = c_void_p - def test(self): - from _ctypes import call_function - windll.kernel32.LoadLibraryA.restype = c_void_p - windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p - windll.kernel32.GetProcAddress.restype = c_void_p + hdll = windll.kernel32.LoadLibraryA(b"kernel32") + funcaddr = windll.kernel32.GetProcAddress(hdll, b"GetModuleHandleA") - hdll = windll.kernel32.LoadLibraryA(b"kernel32") - funcaddr = windll.kernel32.GetProcAddress(hdll, b"GetModuleHandleA") - - self.assertEqual(call_function(funcaddr, (None,)), - windll.kernel32.GetModuleHandleA(None)) + self.assertEqual(call_function(funcaddr, (None,)), + windll.kernel32.GetModuleHandleA(None)) class CallbackTracbackTestCase(unittest.TestCase): # When an exception is raised in a ctypes callback function, the C diff --git a/Darwin/lib/python3.4/ctypes/test/test_refcounts.py b/Darwin/lib/python3.5/ctypes/test/test_refcounts.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_refcounts.py rename to Darwin/lib/python3.5/ctypes/test/test_refcounts.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_repr.py b/Darwin/lib/python3.5/ctypes/test/test_repr.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_repr.py rename to Darwin/lib/python3.5/ctypes/test/test_repr.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_returnfuncptrs.py b/Darwin/lib/python3.5/ctypes/test/test_returnfuncptrs.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_returnfuncptrs.py rename to Darwin/lib/python3.5/ctypes/test/test_returnfuncptrs.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_simplesubclasses.py b/Darwin/lib/python3.5/ctypes/test/test_simplesubclasses.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_simplesubclasses.py rename to Darwin/lib/python3.5/ctypes/test/test_simplesubclasses.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_sizes.py b/Darwin/lib/python3.5/ctypes/test/test_sizes.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_sizes.py rename to Darwin/lib/python3.5/ctypes/test/test_sizes.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_slicing.py b/Darwin/lib/python3.5/ctypes/test/test_slicing.py similarity index 75% rename from Darwin/lib/python3.4/ctypes/test/test_slicing.py rename to Darwin/lib/python3.5/ctypes/test/test_slicing.py index 82fee96..240dc0c 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_slicing.py +++ b/Darwin/lib/python3.5/ctypes/test/test_slicing.py @@ -1,5 +1,6 @@ import unittest from ctypes import * +from ctypes.test import need_symbol import _ctypes_test @@ -125,44 +126,40 @@ class SlicesTestCase(unittest.TestCase): self.assertEqual(p[2:5:-3], s[2:5:-3]) - try: - c_wchar - except NameError: - pass - else: - def test_wchar_ptr(self): - s = "abcdefghijklmnopqrstuvwxyz\0" + @need_symbol('c_wchar') + def test_wchar_ptr(self): + s = "abcdefghijklmnopqrstuvwxyz\0" - dll = CDLL(_ctypes_test.__file__) - dll.my_wcsdup.restype = POINTER(c_wchar) - dll.my_wcsdup.argtypes = POINTER(c_wchar), - dll.my_free.restype = None - res = dll.my_wcsdup(s) - self.assertEqual(res[:len(s)], s) - self.assertEqual(res[:len(s):], s) - self.assertEqual(res[len(s)-1:-1:-1], s[::-1]) - self.assertEqual(res[len(s)-1:5:-7], s[:5:-7]) + dll = CDLL(_ctypes_test.__file__) + dll.my_wcsdup.restype = 
POINTER(c_wchar) + dll.my_wcsdup.argtypes = POINTER(c_wchar), + dll.my_free.restype = None + res = dll.my_wcsdup(s) + self.assertEqual(res[:len(s)], s) + self.assertEqual(res[:len(s):], s) + self.assertEqual(res[len(s)-1:-1:-1], s[::-1]) + self.assertEqual(res[len(s)-1:5:-7], s[:5:-7]) - import operator - self.assertRaises(TypeError, operator.setitem, - res, slice(0, 5), "abcde") - dll.my_free(res) + import operator + self.assertRaises(TypeError, operator.setitem, + res, slice(0, 5), "abcde") + dll.my_free(res) - if sizeof(c_wchar) == sizeof(c_short): - dll.my_wcsdup.restype = POINTER(c_short) - elif sizeof(c_wchar) == sizeof(c_int): - dll.my_wcsdup.restype = POINTER(c_int) - elif sizeof(c_wchar) == sizeof(c_long): - dll.my_wcsdup.restype = POINTER(c_long) - else: - return - res = dll.my_wcsdup(s) - tmpl = list(range(ord("a"), ord("z")+1)) - self.assertEqual(res[:len(s)-1], tmpl) - self.assertEqual(res[:len(s)-1:], tmpl) - self.assertEqual(res[len(s)-2:-1:-1], tmpl[::-1]) - self.assertEqual(res[len(s)-2:5:-7], tmpl[:5:-7]) - dll.my_free(res) + if sizeof(c_wchar) == sizeof(c_short): + dll.my_wcsdup.restype = POINTER(c_short) + elif sizeof(c_wchar) == sizeof(c_int): + dll.my_wcsdup.restype = POINTER(c_int) + elif sizeof(c_wchar) == sizeof(c_long): + dll.my_wcsdup.restype = POINTER(c_long) + else: + self.skipTest('Pointers to c_wchar are not supported') + res = dll.my_wcsdup(s) + tmpl = list(range(ord("a"), ord("z")+1)) + self.assertEqual(res[:len(s)-1], tmpl) + self.assertEqual(res[:len(s)-1:], tmpl) + self.assertEqual(res[len(s)-2:-1:-1], tmpl[::-1]) + self.assertEqual(res[len(s)-2:5:-7], tmpl[:5:-7]) + dll.my_free(res) ################################################################ diff --git a/Darwin/lib/python3.4/ctypes/test/test_stringptr.py b/Darwin/lib/python3.5/ctypes/test/test_stringptr.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_stringptr.py rename to Darwin/lib/python3.5/ctypes/test/test_stringptr.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_strings.py b/Darwin/lib/python3.5/ctypes/test/test_strings.py similarity index 65% rename from Darwin/lib/python3.4/ctypes/test/test_strings.py rename to Darwin/lib/python3.5/ctypes/test/test_strings.py index 9dc2a29..c7bfbda 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_strings.py +++ b/Darwin/lib/python3.5/ctypes/test/test_strings.py @@ -1,5 +1,6 @@ import unittest from ctypes import * +from ctypes.test import need_symbol class StringArrayTestCase(unittest.TestCase): def test(self): @@ -53,36 +54,33 @@ class StringArrayTestCase(unittest.TestCase): ## print BUF.from_param(c_char_p("python")) ## print BUF.from_param(BUF(*"pyth")) -try: - c_wchar -except NameError: - pass -else: - class WStringArrayTestCase(unittest.TestCase): - def test(self): - BUF = c_wchar * 4 +@need_symbol('c_wchar') +class WStringArrayTestCase(unittest.TestCase): + def test(self): + BUF = c_wchar * 4 - buf = BUF("a", "b", "c") - self.assertEqual(buf.value, "abc") + buf = BUF("a", "b", "c") + self.assertEqual(buf.value, "abc") - buf.value = "ABCD" - self.assertEqual(buf.value, "ABCD") + buf.value = "ABCD" + self.assertEqual(buf.value, "ABCD") - buf.value = "x" - self.assertEqual(buf.value, "x") + buf.value = "x" + self.assertEqual(buf.value, "x") - buf[1] = "Z" - self.assertEqual(buf.value, "xZCD") + buf[1] = "Z" + self.assertEqual(buf.value, "xZCD") - @unittest.skipIf(sizeof(c_wchar) < 4, - "sizeof(wchar_t) is smaller than 4 bytes") - def test_nonbmp(self): - u = chr(0x10ffff) - w = c_wchar(u) - self.assertEqual(w.value, u) + 
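The wchar slicing test above relies on a general ctypes rule: a POINTER instance carries no length, so slices over it need explicit bounds. A small self-contained illustration (separate from the patch):

    from ctypes import POINTER, c_int, cast

    arr = (c_int * 4)(10, 20, 30, 40)
    p = cast(arr, POINTER(c_int))        # pointer to the first element
    assert p[:4] == [10, 20, 30, 40]     # explicit stop required; p has no len()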
@unittest.skipIf(sizeof(c_wchar) < 4, + "sizeof(wchar_t) is smaller than 4 bytes") + def test_nonbmp(self): + u = chr(0x10ffff) + w = c_wchar(u) + self.assertEqual(w.value, u) class StringTestCase(unittest.TestCase): - def XX_test_basic_strings(self): + @unittest.skip('test disabled') + def test_basic_strings(self): cs = c_string("abcdef") # Cannot call len on a c_string any longer @@ -108,7 +106,8 @@ class StringTestCase(unittest.TestCase): self.assertRaises(TypeError, c_string, "123") - def XX_test_sized_strings(self): + @unittest.skip('test disabled') + def test_sized_strings(self): # New in releases later than 0.4.0: self.assertRaises(TypeError, c_string, None) @@ -125,7 +124,8 @@ class StringTestCase(unittest.TestCase): self.assertEqual(c_string(2).raw[-1], "\000") self.assertEqual(len(c_string(2).raw), 2) - def XX_test_initialized_strings(self): + @unittest.skip('test disabled') + def test_initialized_strings(self): self.assertEqual(c_string("ab", 4).raw[:2], "ab") self.assertEqual(c_string("ab", 4).raw[:2:], "ab") @@ -134,7 +134,8 @@ class StringTestCase(unittest.TestCase): self.assertEqual(c_string("ab", 4).raw[-1], "\000") self.assertEqual(c_string("ab", 2).raw, "a\000") - def XX_test_toolong(self): + @unittest.skip('test disabled') + def test_toolong(self): cs = c_string("abcdef") # Much too long string: self.assertRaises(ValueError, setattr, cs, "value", "123456789012345") @@ -142,54 +143,53 @@ class StringTestCase(unittest.TestCase): # One char too long values: self.assertRaises(ValueError, setattr, cs, "value", "1234567") -## def test_perf(self): -## check_perf() + @unittest.skip('test disabled') + def test_perf(self): + check_perf() -try: - c_wchar -except NameError: - pass -else: - class WStringTestCase(unittest.TestCase): - def test_wchar(self): - c_wchar("x") - repr(byref(c_wchar("x"))) - c_wchar("x") +@need_symbol('c_wchar') +class WStringTestCase(unittest.TestCase): + def test_wchar(self): + c_wchar("x") + repr(byref(c_wchar("x"))) + c_wchar("x") - def X_test_basic_wstrings(self): - cs = c_wstring("abcdef") + @unittest.skip('test disabled') + def test_basic_wstrings(self): + cs = c_wstring("abcdef") - # XXX This behaviour is about to change: - # len returns the size of the internal buffer in bytes. - # This includes the terminating NUL character. - self.assertEqual(sizeof(cs), 14) + # XXX This behaviour is about to change: + # len returns the size of the internal buffer in bytes. + # This includes the terminating NUL character. + self.assertEqual(sizeof(cs), 14) - # The value property is the string up to the first terminating NUL. - self.assertEqual(cs.value, "abcdef") - self.assertEqual(c_wstring("abc\000def").value, "abc") + # The value property is the string up to the first terminating NUL. 
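The comments in this skipped legacy test describe how .value stops at the first NUL while .raw returns the whole buffer. The same behaviour is easy to see with the current public API, create_string_buffer (a short illustration, not part of the patch):

    from ctypes import create_string_buffer

    buf = create_string_buffer(b"abc\0def")
    assert buf.value == b"abc"               # .value stops at the first NUL
    assert buf.raw == b"abc\x00def\x00"      # .raw is the whole buffer, trailing NUL included
    buf.value = b"xy"                        # assigning .value writes the bytes plus a NUL
    assert buf.raw.startswith(b"xy\x00")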
+ self.assertEqual(cs.value, "abcdef") + self.assertEqual(c_wstring("abc\000def").value, "abc") - self.assertEqual(c_wstring("abc\000def").value, "abc") + self.assertEqual(c_wstring("abc\000def").value, "abc") - # The raw property is the total buffer contents: - self.assertEqual(cs.raw, "abcdef\000") - self.assertEqual(c_wstring("abc\000def").raw, "abc\000def\000") + # The raw property is the total buffer contents: + self.assertEqual(cs.raw, "abcdef\000") + self.assertEqual(c_wstring("abc\000def").raw, "abc\000def\000") - # We can change the value: - cs.value = "ab" - self.assertEqual(cs.value, "ab") - self.assertEqual(cs.raw, "ab\000\000\000\000\000") + # We can change the value: + cs.value = "ab" + self.assertEqual(cs.value, "ab") + self.assertEqual(cs.raw, "ab\000\000\000\000\000") - self.assertRaises(TypeError, c_wstring, "123") - self.assertRaises(ValueError, c_wstring, 0) + self.assertRaises(TypeError, c_wstring, "123") + self.assertRaises(ValueError, c_wstring, 0) - def X_test_toolong(self): - cs = c_wstring("abcdef") - # Much too long string: - self.assertRaises(ValueError, setattr, cs, "value", "123456789012345") + @unittest.skip('test disabled') + def test_toolong(self): + cs = c_wstring("abcdef") + # Much too long string: + self.assertRaises(ValueError, setattr, cs, "value", "123456789012345") - # One char too long values: - self.assertRaises(ValueError, setattr, cs, "value", "1234567") + # One char too long values: + self.assertRaises(ValueError, setattr, cs, "value", "1234567") def run_test(rep, msg, func, arg): diff --git a/Darwin/lib/python3.4/ctypes/test/test_struct_fields.py b/Darwin/lib/python3.5/ctypes/test/test_struct_fields.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_struct_fields.py rename to Darwin/lib/python3.5/ctypes/test/test_struct_fields.py diff --git a/Darwin/lib/python3.4/ctypes/test/test_structures.py b/Darwin/lib/python3.5/ctypes/test/test_structures.py similarity index 96% rename from Darwin/lib/python3.4/ctypes/test/test_structures.py rename to Darwin/lib/python3.5/ctypes/test/test_structures.py index 87613ad..84d456c 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_structures.py +++ b/Darwin/lib/python3.5/ctypes/test/test_structures.py @@ -1,5 +1,6 @@ import unittest from ctypes import * +from ctypes.test import need_symbol from struct import calcsize import _testcapi @@ -291,12 +292,8 @@ class StructureTestCase(unittest.TestCase): self.assertEqual(p.phone.number, b"5678") self.assertEqual(p.age, 5) + @need_symbol('c_wchar') def test_structures_with_wchar(self): - try: - c_wchar - except NameError: - return # no unicode - class PersonW(Structure): _fields_ = [("name", c_wchar * 12), ("age", c_int)] @@ -325,7 +322,7 @@ class StructureTestCase(unittest.TestCase): self.assertEqual(cls, RuntimeError) self.assertEqual(msg, "(Phone) : " - "expected string, int found") + "expected bytes, int found") cls, msg = self.get_except(Person, b"Someone", (b"a", b"b", b"c")) self.assertEqual(cls, RuntimeError) @@ -354,14 +351,14 @@ class StructureTestCase(unittest.TestCase): except Exception as detail: return detail.__class__, str(detail) - -## def test_subclass_creation(self): -## meta = type(Structure) -## # same as 'class X(Structure): pass' -## # fails, since we need either a _fields_ or a _abstract_ attribute -## cls, msg = self.get_except(meta, "X", (Structure,), {}) -## self.assertEqual((cls, msg), -## (AttributeError, "class must define a '_fields_' attribute")) + @unittest.skip('test disabled') + def test_subclass_creation(self): 
+ meta = type(Structure) + # same as 'class X(Structure): pass' + # fails, since we need either a _fields_ or a _abstract_ attribute + cls, msg = self.get_except(meta, "X", (Structure,), {}) + self.assertEqual((cls, msg), + (AttributeError, "class must define a '_fields_' attribute")) def test_abstract_class(self): class X(Structure): diff --git a/Darwin/lib/python3.4/ctypes/test/test_unaligned_structures.py b/Darwin/lib/python3.5/ctypes/test/test_unaligned_structures.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_unaligned_structures.py rename to Darwin/lib/python3.5/ctypes/test/test_unaligned_structures.py diff --git a/Darwin/lib/python3.5/ctypes/test/test_unicode.py b/Darwin/lib/python3.5/ctypes/test/test_unicode.py new file mode 100644 index 0000000..c200af7 --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_unicode.py @@ -0,0 +1,56 @@ +import unittest +import ctypes +from ctypes.test import need_symbol + +import _ctypes_test + +@need_symbol('c_wchar') +class UnicodeTestCase(unittest.TestCase): + def test_wcslen(self): + dll = ctypes.CDLL(_ctypes_test.__file__) + wcslen = dll.my_wcslen + wcslen.argtypes = [ctypes.c_wchar_p] + + self.assertEqual(wcslen("abc"), 3) + self.assertEqual(wcslen("ab\u2070"), 3) + self.assertRaises(ctypes.ArgumentError, wcslen, b"ab\xe4") + + def test_buffers(self): + buf = ctypes.create_unicode_buffer("abc") + self.assertEqual(len(buf), 3+1) + + buf = ctypes.create_unicode_buffer("ab\xe4\xf6\xfc") + self.assertEqual(buf[:], "ab\xe4\xf6\xfc\0") + self.assertEqual(buf[::], "ab\xe4\xf6\xfc\0") + self.assertEqual(buf[::-1], '\x00\xfc\xf6\xe4ba') + self.assertEqual(buf[::2], 'a\xe4\xfc') + self.assertEqual(buf[6:5:-1], "") + +func = ctypes.CDLL(_ctypes_test.__file__)._testfunc_p_p + +class StringTestCase(UnicodeTestCase): + def setUp(self): + func.argtypes = [ctypes.c_char_p] + func.restype = ctypes.c_char_p + + def tearDown(self): + func.argtypes = None + func.restype = ctypes.c_int + + def test_func(self): + self.assertEqual(func(b"abc\xe4"), b"abc\xe4") + + def test_buffers(self): + buf = ctypes.create_string_buffer(b"abc") + self.assertEqual(len(buf), 3+1) + + buf = ctypes.create_string_buffer(b"ab\xe4\xf6\xfc") + self.assertEqual(buf[:], b"ab\xe4\xf6\xfc\0") + self.assertEqual(buf[::], b"ab\xe4\xf6\xfc\0") + self.assertEqual(buf[::-1], b'\x00\xfc\xf6\xe4ba') + self.assertEqual(buf[::2], b'a\xe4\xfc') + self.assertEqual(buf[6:5:-1], b"") + + +if __name__ == '__main__': + unittest.main() diff --git a/Darwin/lib/python3.5/ctypes/test/test_values.py b/Darwin/lib/python3.5/ctypes/test/test_values.py new file mode 100644 index 0000000..9551e7a --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_values.py @@ -0,0 +1,100 @@ +""" +A testcase which accesses *values* in a dll. +""" + +import unittest +import sys +from ctypes import * + +import _ctypes_test + +class ValuesTestCase(unittest.TestCase): + + def test_an_integer(self): + # This test checks and changes an integer stored inside the + # _ctypes_test dll/shared lib. 
+ ctdll = CDLL(_ctypes_test.__file__) + an_integer = c_int.in_dll(ctdll, "an_integer") + x = an_integer.value + self.assertEqual(x, ctdll.get_an_integer()) + an_integer.value *= 2 + self.assertEqual(x*2, ctdll.get_an_integer()) + # To avoid test failures when this test is repeated several + # times the original value must be restored + an_integer.value = x + self.assertEqual(x, ctdll.get_an_integer()) + + def test_undefined(self): + ctdll = CDLL(_ctypes_test.__file__) + self.assertRaises(ValueError, c_int.in_dll, ctdll, "Undefined_Symbol") + +@unittest.skipUnless(sys.platform == 'win32', 'Windows-specific test') +class Win_ValuesTestCase(unittest.TestCase): + """This test only works when python itself is a dll/shared library""" + + def test_optimizeflag(self): + # This test accesses the Py_OptimizeFlag integer, which is + # exported by the Python dll and should match the sys.flags value + + opt = c_int.in_dll(pythonapi, "Py_OptimizeFlag").value + self.assertEqual(opt, sys.flags.optimize) + + def test_frozentable(self): + # Python exports a PyImport_FrozenModules symbol. This is a + # pointer to an array of struct _frozen entries. The end of the + # array is marked by an entry containing a NULL name and zero + # size. + + # In standard Python, this table contains a __hello__ + # module, and a __phello__ package containing a spam + # module. + class struct_frozen(Structure): + _fields_ = [("name", c_char_p), + ("code", POINTER(c_ubyte)), + ("size", c_int)] + FrozenTable = POINTER(struct_frozen) + + ft = FrozenTable.in_dll(pythonapi, "PyImport_FrozenModules") + # ft is a pointer to the struct_frozen entries: + items = [] + # _frozen_importlib changes size whenever importlib._bootstrap + # changes, so it gets a special case. We should make sure it's + # found, but don't worry about its size too much. The same + # applies to _frozen_importlib_external. + bootstrap_seen = [] + bootstrap_expected = [ + b'_frozen_importlib', + b'_frozen_importlib_external', + ] + for entry in ft: + # This is dangerous. We *can* iterate over a pointer, but + # the loop will not terminate (maybe with an access + # violation;-) because the pointer instance has no size. 
+ if entry.name is None: + break + + if entry.name in bootstrap_expected: + bootstrap_seen.append(entry.name) + self.assertTrue(entry.size, + "{} was reported as having no size".format(entry.name)) + continue + items.append((entry.name, entry.size)) + + expected = [(b"__hello__", 161), + (b"__phello__", -161), + (b"__phello__.spam", 161), + ] + self.assertEqual(items, expected) + + self.assertEqual(sorted(bootstrap_seen), bootstrap_expected, + "frozen bootstrap modules did not match PyImport_FrozenModules") + + from ctypes import _pointer_type_cache + del _pointer_type_cache[struct_frozen] + + def test_undefined(self): + self.assertRaises(ValueError, c_int.in_dll, pythonapi, + "Undefined_Symbol") + +if __name__ == '__main__': + unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_varsize_struct.py b/Darwin/lib/python3.5/ctypes/test/test_varsize_struct.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/test/test_varsize_struct.py rename to Darwin/lib/python3.5/ctypes/test/test_varsize_struct.py diff --git a/Darwin/lib/python3.5/ctypes/test/test_win32.py b/Darwin/lib/python3.5/ctypes/test/test_win32.py new file mode 100644 index 0000000..5867b05 --- /dev/null +++ b/Darwin/lib/python3.5/ctypes/test/test_win32.py @@ -0,0 +1,139 @@ +# Windows specific tests + +from ctypes import * +import unittest, sys +from test import support + +import _ctypes_test + +# Only windows 32-bit has different calling conventions. +@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') +@unittest.skipUnless(sizeof(c_void_p) == sizeof(c_int), + "sizeof c_void_p and c_int differ") +class WindowsTestCase(unittest.TestCase): + def test_callconv_1(self): + # Testing stdcall function + + IsWindow = windll.user32.IsWindow + # ValueError: Procedure probably called with not enough arguments + # (4 bytes missing) + self.assertRaises(ValueError, IsWindow) + + # This one should succeed... + self.assertEqual(0, IsWindow(0)) + + # ValueError: Procedure probably called with too many arguments + # (8 bytes in excess) + self.assertRaises(ValueError, IsWindow, 0, 0, 0) + + def test_callconv_2(self): + # Calling stdcall function as cdecl + + IsWindow = cdll.user32.IsWindow + + # ValueError: Procedure called with not enough arguments + # (4 bytes missing) or wrong calling convention + self.assertRaises(ValueError, IsWindow, None) + +@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') +class FunctionCallTestCase(unittest.TestCase): + @unittest.skipUnless('MSC' in sys.version, "SEH only supported by MSC") + @unittest.skipIf(sys.executable.lower().endswith('_d.exe'), + "SEH not enabled in debug builds") + def test_SEH(self): + # Call functions with invalid arguments, and make sure + # that access violations are trapped and raise an + # exception. 
+ self.assertRaises(OSError, windll.kernel32.GetModuleHandleA, 32) + + def test_noargs(self): + # This is a special case on win32 x64 + windll.user32.GetDesktopWindow() + +@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') +class TestWintypes(unittest.TestCase): + def test_HWND(self): + from ctypes import wintypes + self.assertEqual(sizeof(wintypes.HWND), sizeof(c_void_p)) + + def test_PARAM(self): + from ctypes import wintypes + self.assertEqual(sizeof(wintypes.WPARAM), + sizeof(c_void_p)) + self.assertEqual(sizeof(wintypes.LPARAM), + sizeof(c_void_p)) + + def test_COMError(self): + from _ctypes import COMError + if support.HAVE_DOCSTRINGS: + self.assertEqual(COMError.__doc__, + "Raised when a COM method call failed.") + + ex = COMError(-1, "text", ("details",)) + self.assertEqual(ex.hresult, -1) + self.assertEqual(ex.text, "text") + self.assertEqual(ex.details, ("details",)) + +@unittest.skipUnless(sys.platform == "win32", 'Windows-specific test') +class TestWinError(unittest.TestCase): + def test_winerror(self): + # see Issue 16169 + import errno + ERROR_INVALID_PARAMETER = 87 + msg = FormatError(ERROR_INVALID_PARAMETER).strip() + args = (errno.EINVAL, msg, None, ERROR_INVALID_PARAMETER) + + e = WinError(ERROR_INVALID_PARAMETER) + self.assertEqual(e.args, args) + self.assertEqual(e.errno, errno.EINVAL) + self.assertEqual(e.winerror, ERROR_INVALID_PARAMETER) + + windll.kernel32.SetLastError(ERROR_INVALID_PARAMETER) + try: + raise WinError() + except OSError as exc: + e = exc + self.assertEqual(e.args, args) + self.assertEqual(e.errno, errno.EINVAL) + self.assertEqual(e.winerror, ERROR_INVALID_PARAMETER) + +class Structures(unittest.TestCase): + def test_struct_by_value(self): + class POINT(Structure): + _fields_ = [("x", c_long), + ("y", c_long)] + + class RECT(Structure): + _fields_ = [("left", c_long), + ("top", c_long), + ("right", c_long), + ("bottom", c_long)] + + dll = CDLL(_ctypes_test.__file__) + + pt = POINT(15, 25) + left = c_long.in_dll(dll, 'left') + top = c_long.in_dll(dll, 'top') + right = c_long.in_dll(dll, 'right') + bottom = c_long.in_dll(dll, 'bottom') + rect = RECT(left, top, right, bottom) + PointInRect = dll.PointInRect + PointInRect.argtypes = [POINTER(RECT), POINT] + self.assertEqual(1, PointInRect(byref(rect), pt)) + + ReturnRect = dll.ReturnRect + ReturnRect.argtypes = [c_int, RECT, POINTER(RECT), POINT, RECT, + POINTER(RECT), POINT, RECT] + ReturnRect.restype = RECT + for i in range(4): + ret = ReturnRect(i, rect, pointer(rect), pt, rect, + byref(rect), pt, rect) + # the c function will check and modify ret if something is + # passed in improperly + self.assertEqual(ret.left, left.value) + self.assertEqual(ret.right, right.value) + self.assertEqual(ret.top, top.value) + self.assertEqual(ret.bottom, bottom.value) + +if __name__ == '__main__': + unittest.main() diff --git a/Darwin/lib/python3.4/ctypes/test/test_wintypes.py b/Darwin/lib/python3.5/ctypes/test/test_wintypes.py similarity index 92% rename from Darwin/lib/python3.4/ctypes/test/test_wintypes.py rename to Darwin/lib/python3.5/ctypes/test/test_wintypes.py index 806fcce..71442df 100644 --- a/Darwin/lib/python3.4/ctypes/test/test_wintypes.py +++ b/Darwin/lib/python3.5/ctypes/test/test_wintypes.py @@ -1,14 +1,12 @@ import sys import unittest -if not sys.platform.startswith('win'): - raise unittest.SkipTest('Windows-only test') - from ctypes import * -from ctypes import wintypes +@unittest.skipUnless(sys.platform.startswith('win'), 'Windows-only test') class 
WinTypesTest(unittest.TestCase): def test_variant_bool(self): + from ctypes import wintypes # reads 16-bits from memory, anything non-zero is True for true_value in (1, 32767, 32768, 65535, 65537): true = POINTER(c_int16)(c_int16(true_value)) diff --git a/Darwin/lib/python3.4/ctypes/util.py b/Darwin/lib/python3.5/ctypes/util.py similarity index 97% rename from Darwin/lib/python3.4/ctypes/util.py rename to Darwin/lib/python3.5/ctypes/util.py index 595113b..9e74ccd 100644 --- a/Darwin/lib/python3.4/ctypes/util.py +++ b/Darwin/lib/python3.5/ctypes/util.py @@ -19,6 +19,8 @@ if os.name == "nt": i = i + len(prefix) s, rest = sys.version[i:].split(" ", 1) majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + majorVersion += 1 minorVersion = int(s[2:3]) / 10.0 # I don't think paths are affected by minor version in version 6 if majorVersion == 6: @@ -36,8 +38,12 @@ if os.name == "nt": return None if version <= 6: clibname = 'msvcrt' - else: + elif version <= 13: clibname = 'msvcr%d' % (version * 10) + else: + # CRT is no longer directly loadable. See issue23606 for the + # discussion about alternative approaches. + return None # If python was built with in debug mode import importlib.machinery diff --git a/Darwin/lib/python3.4/ctypes/wintypes.py b/Darwin/lib/python3.5/ctypes/wintypes.py similarity index 100% rename from Darwin/lib/python3.4/ctypes/wintypes.py rename to Darwin/lib/python3.5/ctypes/wintypes.py diff --git a/Darwin/lib/python3.4/curses/__init__.py b/Darwin/lib/python3.5/curses/__init__.py similarity index 100% rename from Darwin/lib/python3.4/curses/__init__.py rename to Darwin/lib/python3.5/curses/__init__.py diff --git a/Darwin/lib/python3.4/curses/ascii.py b/Darwin/lib/python3.5/curses/ascii.py similarity index 100% rename from Darwin/lib/python3.4/curses/ascii.py rename to Darwin/lib/python3.5/curses/ascii.py diff --git a/Darwin/lib/python3.4/curses/has_key.py b/Darwin/lib/python3.5/curses/has_key.py similarity index 100% rename from Darwin/lib/python3.4/curses/has_key.py rename to Darwin/lib/python3.5/curses/has_key.py diff --git a/Darwin/lib/python3.4/curses/panel.py b/Darwin/lib/python3.5/curses/panel.py similarity index 100% rename from Darwin/lib/python3.4/curses/panel.py rename to Darwin/lib/python3.5/curses/panel.py diff --git a/Darwin/lib/python3.4/curses/textpad.py b/Darwin/lib/python3.5/curses/textpad.py similarity index 100% rename from Darwin/lib/python3.4/curses/textpad.py rename to Darwin/lib/python3.5/curses/textpad.py diff --git a/Darwin/lib/python3.4/datetime.py b/Darwin/lib/python3.5/datetime.py similarity index 87% rename from Darwin/lib/python3.4/datetime.py rename to Darwin/lib/python3.5/datetime.py index 1789714..db13b12 100644 --- a/Darwin/lib/python3.4/datetime.py +++ b/Darwin/lib/python3.5/datetime.py @@ -12,7 +12,7 @@ def _cmp(x, y): MINYEAR = 1 MAXYEAR = 9999 -_MAXORDINAL = 3652059 # date.max.toordinal() +_MAXORDINAL = 3652059 # date.max.toordinal() # Utility functions, adapted from Python's Demo/classes/Dates.py, which # also assumes the current Gregorian calendar indefinitely extended in @@ -26,7 +26,7 @@ _MAXORDINAL = 3652059 # date.max.toordinal() # -1 is a placeholder for indexing purposes. _DAYS_IN_MONTH = [-1, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31] -_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes. +_DAYS_BEFORE_MONTH = [-1] # -1 is a placeholder for indexing purposes. 
dbm = 0 for dim in _DAYS_IN_MONTH[1:]: _DAYS_BEFORE_MONTH.append(dbm) @@ -162,9 +162,9 @@ def _format_time(hh, mm, ss, us): # Correctly substitute for %z and %Z escapes in strftime formats. def _wrap_strftime(object, format, timetuple): # Don't call utcoffset() or tzname() unless actually needed. - freplace = None # the string to use for %f - zreplace = None # the string to use for %z - Zreplace = None # the string to use for %Z + freplace = None # the string to use for %f + zreplace = None # the string to use for %z + Zreplace = None # the string to use for %Z # Scan format for %z and %Z escapes, replacing as needed. newformat = [] @@ -217,11 +217,6 @@ def _wrap_strftime(object, format, timetuple): newformat = "".join(newformat) return _time.strftime(newformat, timetuple) -def _call_tzinfo_method(tzinfo, methname, tzinfoarg): - if tzinfo is None: - return None - return getattr(tzinfo, methname)(tzinfoarg) - # Just raise TypeError if the arg isn't None or a string. def _check_tzname(name): if name is not None and not isinstance(name, str): @@ -245,13 +240,31 @@ def _check_utc_offset(name, offset): raise ValueError("tzinfo.%s() must return a whole number " "of minutes, got %s" % (name, offset)) if not -timedelta(1) < offset < timedelta(1): - raise ValueError("%s()=%s, must be must be strictly between" - " -timedelta(hours=24) and timedelta(hours=24)" - % (name, offset)) + raise ValueError("%s()=%s, must be must be strictly between " + "-timedelta(hours=24) and timedelta(hours=24)" % + (name, offset)) + +def _check_int_field(value): + if isinstance(value, int): + return value + if not isinstance(value, float): + try: + value = value.__int__() + except AttributeError: + pass + else: + if isinstance(value, int): + return value + raise TypeError('__int__ returned non-int (type %s)' % + type(value).__name__) + raise TypeError('an integer is required (got type %s)' % + type(value).__name__) + raise TypeError('integer argument expected, got float') def _check_date_fields(year, month, day): - if not isinstance(year, int): - raise TypeError('int expected') + year = _check_int_field(year) + month = _check_int_field(month) + day = _check_int_field(day) if not MINYEAR <= year <= MAXYEAR: raise ValueError('year must be in %d..%d' % (MINYEAR, MAXYEAR), year) if not 1 <= month <= 12: @@ -259,10 +272,13 @@ def _check_date_fields(year, month, day): dim = _days_in_month(year, month) if not 1 <= day <= dim: raise ValueError('day must be in 1..%d' % dim, day) + return year, month, day def _check_time_fields(hour, minute, second, microsecond): - if not isinstance(hour, int): - raise TypeError('int expected') + hour = _check_int_field(hour) + minute = _check_int_field(minute) + second = _check_int_field(second) + microsecond = _check_int_field(microsecond) if not 0 <= hour <= 23: raise ValueError('hour must be in 0..23', hour) if not 0 <= minute <= 59: @@ -271,6 +287,7 @@ def _check_time_fields(hour, minute, second, microsecond): raise ValueError('second must be in 0..59', second) if not 0 <= microsecond <= 999999: raise ValueError('microsecond must be in 0..999999', microsecond) + return hour, minute, second, microsecond def _check_tzinfo_arg(tz): if tz is not None and not isinstance(tz, tzinfo): @@ -280,6 +297,25 @@ def _cmperror(x, y): raise TypeError("can't compare '%s' to '%s'" % ( type(x).__name__, type(y).__name__)) +def _divide_and_round(a, b): + """divide a by b and round result to the nearest integer + + When the ratio is exactly half-way between two integers, + the even integer is returned. 
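The _divide_and_round() helper being added here backs the rewritten timedelta division and multiplication paths later in this file. The resulting round-half-to-even behaviour, shown with the patched module (illustration only, not part of the patch):

    from datetime import timedelta

    # 1 microsecond / 2 is exactly half way; the tie goes to the even quotient 0.
    assert timedelta(microseconds=1) / 2 == timedelta(0)
    # 3 microseconds / 2 is also a tie; the even quotient here is 2.
    assert timedelta(microseconds=3) / 2 == timedelta(microseconds=2)
    # Non-ties round to the nearest whole number of microseconds.
    assert timedelta(microseconds=7) / 3 == timedelta(microseconds=2)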
+ """ + # Based on the reference implementation for divmod_near + # in Objects/longobject.c. + q, r = divmod(a, b) + # round up if either r / b > 0.5, or r / b == 0.5 and q is odd. + # The expression r / b > 0.5 is equivalent to 2 * r > b if b is + # positive, 2 * r < b if b negative. + r *= 2 + greater_than_half = r > b if b > 0 else r < b + if greater_than_half or r == b and q % 2 == 1: + q += 1 + + return q + class timedelta: """Represent the difference between two datetime objects. @@ -297,7 +333,7 @@ class timedelta: Representation: (days, seconds, microseconds). Why? Because I felt like it. """ - __slots__ = '_days', '_seconds', '_microseconds' + __slots__ = '_days', '_seconds', '_microseconds', '_hashcode' def __new__(cls, days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0): @@ -363,38 +399,26 @@ class timedelta: # secondsfrac isn't referenced again if isinstance(microseconds, float): - microseconds += usdouble - microseconds = round(microseconds, 0) - seconds, microseconds = divmod(microseconds, 1e6) - assert microseconds == int(microseconds) - assert seconds == int(seconds) - days, seconds = divmod(seconds, 24.*3600.) - assert days == int(days) - assert seconds == int(seconds) - d += int(days) - s += int(seconds) # can't overflow - assert isinstance(s, int) - assert abs(s) <= 3 * 24 * 3600 - else: + microseconds = round(microseconds + usdouble) seconds, microseconds = divmod(microseconds, 1000000) days, seconds = divmod(seconds, 24*3600) d += days - s += int(seconds) # can't overflow - assert isinstance(s, int) - assert abs(s) <= 3 * 24 * 3600 - microseconds = float(microseconds) - microseconds += usdouble - microseconds = round(microseconds, 0) + s += seconds + else: + microseconds = int(microseconds) + seconds, microseconds = divmod(microseconds, 1000000) + days, seconds = divmod(seconds, 24*3600) + d += days + s += seconds + microseconds = round(microseconds + usdouble) + assert isinstance(s, int) + assert isinstance(microseconds, int) assert abs(s) <= 3 * 24 * 3600 assert abs(microseconds) < 3.1e6 # Just a little bit of carrying possible for microseconds and seconds. - assert isinstance(microseconds, float) - assert int(microseconds) == microseconds - us = int(microseconds) - seconds, us = divmod(us, 1000000) - s += seconds # cant't overflow - assert isinstance(s, int) + seconds, us = divmod(microseconds, 1000000) + s += seconds days, s = divmod(s, 24*3600) d += days @@ -402,27 +426,31 @@ class timedelta: assert isinstance(s, int) and 0 <= s < 24*3600 assert isinstance(us, int) and 0 <= us < 1000000 - self = object.__new__(cls) - - self._days = d - self._seconds = s - self._microseconds = us if abs(d) > 999999999: raise OverflowError("timedelta # of days is too large: %d" % d) + self = object.__new__(cls) + self._days = d + self._seconds = s + self._microseconds = us + self._hashcode = -1 return self def __repr__(self): if self._microseconds: - return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__, - self._days, - self._seconds, - self._microseconds) + return "%s.%s(%d, %d, %d)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._days, + self._seconds, + self._microseconds) if self._seconds: - return "%s(%d, %d)" % ('datetime.' + self.__class__.__name__, - self._days, - self._seconds) - return "%s(%d)" % ('datetime.' 
+ self.__class__.__name__, self._days) + return "%s.%s(%d, %d)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._days, + self._seconds) + return "%s.%s(%d)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._days) def __str__(self): mm, ss = divmod(self._seconds, 60) @@ -438,7 +466,7 @@ class timedelta: def total_seconds(self): """Total seconds in the duration.""" - return ((self.days * 86400 + self.seconds)*10**6 + + return ((self.days * 86400 + self.seconds) * 10**6 + self.microseconds) / 10**6 # Read-only field accessors @@ -506,8 +534,9 @@ class timedelta: self._seconds * other, self._microseconds * other) if isinstance(other, float): + usec = self._to_microseconds() a, b = other.as_integer_ratio() - return self * a / b + return timedelta(0, 0, _divide_and_round(usec * a, b)) return NotImplemented __rmul__ = __mul__ @@ -532,10 +561,10 @@ class timedelta: if isinstance(other, timedelta): return usec / other._to_microseconds() if isinstance(other, int): - return timedelta(0, 0, usec / other) + return timedelta(0, 0, _divide_and_round(usec, other)) if isinstance(other, float): a, b = other.as_integer_ratio() - return timedelta(0, 0, b * usec / a) + return timedelta(0, 0, _divide_and_round(b * usec, a)) def __mod__(self, other): if isinstance(other, timedelta): @@ -558,12 +587,6 @@ class timedelta: else: return False - def __ne__(self, other): - if isinstance(other, timedelta): - return self._cmp(other) != 0 - else: - return True - def __le__(self, other): if isinstance(other, timedelta): return self._cmp(other) <= 0 @@ -593,7 +616,9 @@ class timedelta: return _cmp(self._getstate(), other._getstate()) def __hash__(self): - return hash(self._getstate()) + if self._hashcode == -1: + self._hashcode = hash(self._getstate()) + return self._hashcode def __bool__(self): return (self._days != 0 or @@ -626,7 +651,7 @@ class date: Operators: __repr__, __str__ - __cmp__, __hash__ + __eq__, __le__, __lt__, __ge__, __gt__, __hash__ __add__, __radd__, __sub__ (add/radd only with timedelta arg) Methods: @@ -641,7 +666,7 @@ class date: Properties (readonly): year, month, day """ - __slots__ = '_year', '_month', '_day' + __slots__ = '_year', '_month', '_day', '_hashcode' def __new__(cls, year, month=None, day=None): """Constructor. @@ -650,17 +675,19 @@ class date: year, month, day (required, base 1) """ - if (isinstance(year, bytes) and len(year) == 4 and - 1 <= year[2] <= 12 and month is None): # Month is sane + if month is None and isinstance(year, bytes) and len(year) == 4 and \ + 1 <= year[2] <= 12: # Pickle support self = object.__new__(cls) self.__setstate(year) + self._hashcode = -1 return self - _check_date_fields(year, month, day) + year, month, day = _check_date_fields(year, month, day) self = object.__new__(cls) self._year = year self._month = month self._day = day + self._hashcode = -1 return self # Additional constructors @@ -700,10 +727,11 @@ class date: >>> repr(dt) 'datetime.datetime(2010, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)' """ - return "%s(%d, %d, %d)" % ('datetime.' + self.__class__.__name__, - self._year, - self._month, - self._day) + return "%s.%s(%d, %d, %d)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._year, + self._month, + self._day) # XXX These shouldn't depend on time.localtime(), because that # clips the usable dates to [1970 .. 2038). 
At least ctime() is # easily done without using strftime() -- that's better too because @@ -723,6 +751,8 @@ class date: return _wrap_strftime(self, fmt, self.timetuple()) def __format__(self, fmt): + if not isinstance(fmt, str): + raise TypeError("must be str, not %s" % type(fmt).__name__) if len(fmt) != 0: return self.strftime(fmt) return str(self) @@ -756,7 +786,8 @@ class date: """day (1-31)""" return self._day - # Standard conversions, __cmp__, __hash__ (and helpers) + # Standard conversions, __eq__, __le__, __lt__, __ge__, __gt__, + # __hash__ (and helpers) def timetuple(self): "Return local time tuple compatible with time.localtime()." @@ -779,7 +810,6 @@ class date: month = self._month if day is None: day = self._day - _check_date_fields(year, month, day) return date(year, month, day) # Comparisons of date objects with other. @@ -789,11 +819,6 @@ class date: return self._cmp(other) == 0 return NotImplemented - def __ne__(self, other): - if isinstance(other, date): - return self._cmp(other) != 0 - return NotImplemented - def __le__(self, other): if isinstance(other, date): return self._cmp(other) <= 0 @@ -822,7 +847,9 @@ class date: def __hash__(self): "Hash." - return hash(self._getstate()) + if self._hashcode == -1: + self._hashcode = hash(self._getstate()) + return self._hashcode # Computations @@ -892,8 +919,6 @@ class date: return bytes([yhi, ylo, self._month, self._day]), def __setstate(self, string): - if len(string) != 4 or not (1 <= string[2] <= 12): - raise TypeError("not enough arguments") yhi, ylo, self._month, self._day = string self._year = yhi * 256 + ylo @@ -912,6 +937,7 @@ class tzinfo: Subclasses must override the name(), utcoffset() and dst() methods. """ __slots__ = () + def tzname(self, dt): "datetime -> string name of time zone." raise NotImplementedError("tzinfo subclass must override tzname()") @@ -985,7 +1011,7 @@ class time: Operators: __repr__, __str__ - __cmp__, __hash__ + __eq__, __le__, __lt__, __ge__, __gt__, __hash__ Methods: @@ -998,6 +1024,7 @@ class time: Properties (readonly): hour, minute, second, microsecond, tzinfo """ + __slots__ = '_hour', '_minute', '_second', '_microsecond', '_tzinfo', '_hashcode' def __new__(cls, hour=0, minute=0, second=0, microsecond=0, tzinfo=None): """Constructor. 
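timedelta, date and time above (and datetime further below) each gain a _hashcode slot initialised to -1 and filled lazily in __hash__. The pattern in isolation, as an illustrative sketch rather than code taken from the patch:

    class _CachedHashExample:
        """Sketch of the lazy hash caching used by the datetime classes."""
        __slots__ = ('_value', '_hashcode')

        def __init__(self, value):
            self._value = value
            self._hashcode = -1          # -1 means "not computed yet"

        def __hash__(self):
            if self._hashcode == -1:     # compute once, then reuse
                self._hashcode = hash(self._value)
            return self._hashcode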
@@ -1008,18 +1035,22 @@ class time: second, microsecond (default to zero) tzinfo (default to None) """ - self = object.__new__(cls) - if isinstance(hour, bytes) and len(hour) == 6: + if isinstance(hour, bytes) and len(hour) == 6 and hour[0] < 24: # Pickle support + self = object.__new__(cls) self.__setstate(hour, minute or None) + self._hashcode = -1 return self + hour, minute, second, microsecond = _check_time_fields( + hour, minute, second, microsecond) _check_tzinfo_arg(tzinfo) - _check_time_fields(hour, minute, second, microsecond) + self = object.__new__(cls) self._hour = hour self._minute = minute self._second = second self._microsecond = microsecond self._tzinfo = tzinfo + self._hashcode = -1 return self # Read-only field accessors @@ -1058,12 +1089,6 @@ class time: else: return False - def __ne__(self, other): - if isinstance(other, time): - return self._cmp(other, allow_mixed=True) != 0 - else: - return True - def __le__(self, other): if isinstance(other, time): return self._cmp(other) <= 0 @@ -1104,8 +1129,8 @@ class time: if base_compare: return _cmp((self._hour, self._minute, self._second, self._microsecond), - (other._hour, other._minute, other._second, - other._microsecond)) + (other._hour, other._minute, other._second, + other._microsecond)) if myoff is None or otoff is None: if allow_mixed: return 2 # arbitrary non-zero value @@ -1118,16 +1143,20 @@ class time: def __hash__(self): """Hash.""" - tzoff = self.utcoffset() - if not tzoff: # zero or None - return hash(self._getstate()[0]) - h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff, - timedelta(hours=1)) - assert not m % timedelta(minutes=1), "whole minute" - m //= timedelta(minutes=1) - if 0 <= h < 24: - return hash(time(h, m, self.second, self.microsecond)) - return hash((h, m, self.second, self.microsecond)) + if self._hashcode == -1: + tzoff = self.utcoffset() + if not tzoff: # zero or None + self._hashcode = hash(self._getstate()[0]) + else: + h, m = divmod(timedelta(hours=self.hour, minutes=self.minute) - tzoff, + timedelta(hours=1)) + assert not m % timedelta(minutes=1), "whole minute" + m //= timedelta(minutes=1) + if 0 <= h < 24: + self._hashcode = hash(time(h, m, self.second, self.microsecond)) + else: + self._hashcode = hash((h, m, self.second, self.microsecond)) + return self._hashcode # Conversion to string @@ -1155,8 +1184,9 @@ class time: s = ", %d" % self._second else: s = "" - s= "%s(%d, %d%s)" % ('datetime.' + self.__class__.__name__, - self._hour, self._minute, s) + s= "%s.%s(%d, %d%s)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._hour, self._minute, s) if self._tzinfo is not None: assert s[-1:] == ")" s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" @@ -1189,6 +1219,8 @@ class time: return _wrap_strftime(self, fmt, timetuple) def __format__(self, fmt): + if not isinstance(fmt, str): + raise TypeError("must be str, not %s" % type(fmt).__name__) if len(fmt) != 0: return self.strftime(fmt) return str(self) @@ -1245,16 +1277,8 @@ class time: microsecond = self.microsecond if tzinfo is True: tzinfo = self.tzinfo - _check_time_fields(hour, minute, second, microsecond) - _check_tzinfo_arg(tzinfo) return time(hour, minute, second, microsecond, tzinfo) - def __bool__(self): - if self.second or self.microsecond: - return True - offset = self.utcoffset() or timedelta(0) - return timedelta(hours=self.hour, minutes=self.minute) != offset - # Pickle support. 
def _getstate(self): @@ -1268,15 +1292,11 @@ class time: return (basestate, self._tzinfo) def __setstate(self, string, tzinfo): - if len(string) != 6 or string[0] >= 24: - raise TypeError("an integer is required") - (self._hour, self._minute, self._second, - us1, us2, us3) = string + if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): + raise TypeError("bad tzinfo state arg") + self._hour, self._minute, self._second, us1, us2, us3 = string self._microsecond = (((us1 << 8) | us2) << 8) | us3 - if tzinfo is None or isinstance(tzinfo, _tzinfo_class): - self._tzinfo = tzinfo - else: - raise TypeError("bad tzinfo state arg %r" % tzinfo) + self._tzinfo = tzinfo def __reduce__(self): return (time, self._getstate()) @@ -1293,25 +1313,30 @@ class datetime(date): The year, month and day arguments are required. tzinfo may be None, or an instance of a tzinfo subclass. The remaining arguments may be ints. """ + __slots__ = date.__slots__ + time.__slots__ - __slots__ = date.__slots__ + ( - '_hour', '_minute', '_second', - '_microsecond', '_tzinfo') def __new__(cls, year, month=None, day=None, hour=0, minute=0, second=0, microsecond=0, tzinfo=None): - if isinstance(year, bytes) and len(year) == 10: + if isinstance(year, bytes) and len(year) == 10 and 1 <= year[2] <= 12: # Pickle support - self = date.__new__(cls, year[:4]) + self = object.__new__(cls) self.__setstate(year, month) + self._hashcode = -1 return self + year, month, day = _check_date_fields(year, month, day) + hour, minute, second, microsecond = _check_time_fields( + hour, minute, second, microsecond) _check_tzinfo_arg(tzinfo) - _check_time_fields(hour, minute, second, microsecond) - self = date.__new__(cls, year, month, day) + self = object.__new__(cls) + self._year = year + self._month = month + self._day = day self._hour = hour self._minute = minute self._second = second self._microsecond = microsecond self._tzinfo = tzinfo + self._hashcode = -1 return self # Read-only field accessors @@ -1346,7 +1371,6 @@ class datetime(date): A timezone info object may be passed in as well. """ - _check_tzinfo_arg(tz) converter = _time.localtime if tz is None else _time.gmtime @@ -1370,7 +1394,7 @@ class datetime(date): @classmethod def utcfromtimestamp(cls, t): - "Construct a UTC datetime from a POSIX timestamp (like time.time())." + """Construct a naive UTC datetime from a POSIX timestamp.""" t, frac = divmod(t, 1.0) us = int(frac * 1e6) @@ -1385,11 +1409,6 @@ class datetime(date): ss = min(ss, 59) # clamp out leap seconds if the platform has them return cls(y, m, d, hh, mm, ss, us) - # XXX This is supposed to do better than we *can* do by using time.time(), - # XXX if the platform supports a more accurate way. The C implementation - # XXX uses gettimeofday on platforms that have it, but that isn't - # XXX available from Python. So now() may return different results - # XXX across the implementations. @classmethod def now(cls, tz=None): "Construct a datetime from time.time() and optional time zone info." 
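The constructors above now route their arguments through _check_int_field, which accepts ints (or objects whose __int__ returns an int) and rejects floats with a TypeError. A quick illustration of the float case; the exact message is taken from the pure-Python code shown above, the C accelerator words it similarly:

    import datetime

    datetime.date(2015, 1, 1)            # plain ints are fine
    try:
        datetime.date(2015.0, 1, 1)      # floats are rejected outright
    except TypeError as exc:
        print(exc)                       # e.g. "integer argument expected, got float"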
@@ -1476,11 +1495,8 @@ class datetime(date): microsecond = self.microsecond if tzinfo is True: tzinfo = self.tzinfo - _check_date_fields(year, month, day) - _check_time_fields(hour, minute, second, microsecond) - _check_tzinfo_arg(tzinfo) - return datetime(year, month, day, hour, minute, second, - microsecond, tzinfo) + return datetime(year, month, day, hour, minute, second, microsecond, + tzinfo) def astimezone(self, tz=None): if tz is None: @@ -1550,10 +1566,9 @@ class datetime(date): Optional argument sep specifies the separator between date and time, default 'T'. """ - s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, - sep) + - _format_time(self._hour, self._minute, self._second, - self._microsecond)) + s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) + + _format_time(self._hour, self._minute, self._second, + self._microsecond)) off = self.utcoffset() if off is not None: if off.days < 0: @@ -1569,14 +1584,15 @@ class datetime(date): def __repr__(self): """Convert to formal string, for repr().""" - L = [self._year, self._month, self._day, # These are never zero + L = [self._year, self._month, self._day, # These are never zero self._hour, self._minute, self._second, self._microsecond] if L[-1] == 0: del L[-1] if L[-1] == 0: del L[-1] - s = ", ".join(map(str, L)) - s = "%s(%s)" % ('datetime.' + self.__class__.__name__, s) + s = "%s.%s(%s)" % (self.__class__.__module__, + self.__class__.__qualname__, + ", ".join(map(str, L))) if self._tzinfo is not None: assert s[-1:] == ")" s = s[:-1] + ", tzinfo=%r" % self._tzinfo + ")" @@ -1608,7 +1624,9 @@ class datetime(date): it mean anything in particular. For example, "GMT", "UTC", "-500", "-5:00", "EDT", "US/Eastern", "America/New York" are all valid replies. """ - name = _call_tzinfo_method(self._tzinfo, "tzname", self) + if self._tzinfo is None: + return None + name = self._tzinfo.tzname(self) _check_tzname(name) return name @@ -1637,14 +1655,6 @@ class datetime(date): else: return False - def __ne__(self, other): - if isinstance(other, datetime): - return self._cmp(other, allow_mixed=True) != 0 - elif not isinstance(other, date): - return NotImplemented - else: - return True - def __le__(self, other): if isinstance(other, datetime): return self._cmp(other) <= 0 @@ -1694,9 +1704,9 @@ class datetime(date): return _cmp((self._year, self._month, self._day, self._hour, self._minute, self._second, self._microsecond), - (other._year, other._month, other._day, - other._hour, other._minute, other._second, - other._microsecond)) + (other._year, other._month, other._day, + other._hour, other._minute, other._second, + other._microsecond)) if myoff is None or otoff is None: if allow_mixed: return 2 # arbitrary non-zero value @@ -1754,12 +1764,15 @@ class datetime(date): return base + otoff - myoff def __hash__(self): - tzoff = self.utcoffset() - if tzoff is None: - return hash(self._getstate()[0]) - days = _ymd2ord(self.year, self.month, self.day) - seconds = self.hour * 3600 + self.minute * 60 + self.second - return hash(timedelta(days, seconds, self.microsecond) - tzoff) + if self._hashcode == -1: + tzoff = self.utcoffset() + if tzoff is None: + self._hashcode = hash(self._getstate()[0]) + else: + days = _ymd2ord(self.year, self.month, self.day) + seconds = self.hour * 3600 + self.minute * 60 + self.second + self._hashcode = hash(timedelta(days, seconds, self.microsecond) - tzoff) + return self._hashcode # Pickle support. 
@@ -1776,14 +1789,13 @@ class datetime(date): return (basestate, self._tzinfo) def __setstate(self, string, tzinfo): + if tzinfo is not None and not isinstance(tzinfo, _tzinfo_class): + raise TypeError("bad tzinfo state arg") (yhi, ylo, self._month, self._day, self._hour, self._minute, self._second, us1, us2, us3) = string self._year = yhi * 256 + ylo self._microsecond = (((us1 << 8) | us2) << 8) | us3 - if tzinfo is None or isinstance(tzinfo, _tzinfo_class): - self._tzinfo = tzinfo - else: - raise TypeError("bad tzinfo state arg %r" % tzinfo) + self._tzinfo = tzinfo def __reduce__(self): return (self.__class__, self._getstate()) @@ -1799,7 +1811,7 @@ def _isoweek1monday(year): # XXX This could be done more efficiently THURSDAY = 3 firstday = _ymd2ord(year, 1, 1) - firstweekday = (firstday + 6) % 7 # See weekday() above + firstweekday = (firstday + 6) % 7 # See weekday() above week1monday = firstday - firstweekday if firstweekday > THURSDAY: week1monday += 7 @@ -1820,13 +1832,12 @@ class timezone(tzinfo): elif not isinstance(name, str): raise TypeError("name must be a string") if not cls._minoffset <= offset <= cls._maxoffset: - raise ValueError("offset must be a timedelta" - " strictly between -timedelta(hours=24) and" - " timedelta(hours=24).") - if (offset.microseconds != 0 or - offset.seconds % 60 != 0): - raise ValueError("offset must be a timedelta" - " representing a whole number of minutes") + raise ValueError("offset must be a timedelta " + "strictly between -timedelta(hours=24) and " + "timedelta(hours=24).") + if (offset.microseconds != 0 or offset.seconds % 60 != 0): + raise ValueError("offset must be a timedelta " + "representing a whole number of minutes") return cls._create(offset, name) @classmethod @@ -1863,10 +1874,12 @@ class timezone(tzinfo): if self is self.utc: return 'datetime.timezone.utc' if self._name is None: - return "%s(%r)" % ('datetime.' + self.__class__.__name__, - self._offset) - return "%s(%r, %r)" % ('datetime.' + self.__class__.__name__, - self._offset, self._name) + return "%s.%s(%r)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._offset) + return "%s.%s(%r, %r)" % (self.__class__.__module__, + self.__class__.__qualname__, + self._offset, self._name) def __str__(self): return self.tzname(None) @@ -2121,14 +2134,13 @@ except ImportError: pass else: # Clean up unused names - del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, - _DI100Y, _DI400Y, _DI4Y, _MAXORDINAL, _MONTHNAMES, - _build_struct_time, _call_tzinfo_method, _check_date_fields, - _check_time_fields, _check_tzinfo_arg, _check_tzname, - _check_utc_offset, _cmp, _cmperror, _date_class, _days_before_month, - _days_before_year, _days_in_month, _format_time, _is_leap, - _isoweek1monday, _math, _ord2ymd, _time, _time_class, _tzinfo_class, - _wrap_strftime, _ymd2ord) + del (_DAYNAMES, _DAYS_BEFORE_MONTH, _DAYS_IN_MONTH, _DI100Y, _DI400Y, + _DI4Y, _EPOCH, _MAXORDINAL, _MONTHNAMES, _build_struct_time, + _check_date_fields, _check_int_field, _check_time_fields, + _check_tzinfo_arg, _check_tzname, _check_utc_offset, _cmp, _cmperror, + _date_class, _days_before_month, _days_before_year, _days_in_month, + _format_time, _is_leap, _isoweek1monday, _math, _ord2ymd, + _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord) # XXX Since import * above excludes names that start with _, # docstring does not get overwritten. 
In the future, it may be # appropriate to maintain a single module level docstring and diff --git a/Darwin/lib/python3.4/dbm/__init__.py b/Darwin/lib/python3.5/dbm/__init__.py similarity index 98% rename from Darwin/lib/python3.4/dbm/__init__.py rename to Darwin/lib/python3.5/dbm/__init__.py index 5f4664a..6831a84 100644 --- a/Darwin/lib/python3.4/dbm/__init__.py +++ b/Darwin/lib/python3.5/dbm/__init__.py @@ -153,9 +153,9 @@ def whichdb(filename): except OSError: return None - # Read the start of the file -- the magic number - s16 = f.read(16) - f.close() + with f: + # Read the start of the file -- the magic number + s16 = f.read(16) s = s16[0:4] # Return "" if not at least 4 bytes diff --git a/Darwin/lib/python3.4/dbm/dumb.py b/Darwin/lib/python3.5/dbm/dumb.py similarity index 73% rename from Darwin/lib/python3.4/dbm/dumb.py rename to Darwin/lib/python3.5/dbm/dumb.py index 15dad9c..7777a7c 100644 --- a/Darwin/lib/python3.4/dbm/dumb.py +++ b/Darwin/lib/python3.5/dbm/dumb.py @@ -21,6 +21,7 @@ is read when the database is opened, and some updates rewrite the whole index) """ +import ast as _ast import io as _io import os as _os import collections @@ -44,7 +45,7 @@ class _Database(collections.MutableMapping): _os = _os # for _commit() _io = _io # for _commit() - def __init__(self, filebasename, mode): + def __init__(self, filebasename, mode, flag='c'): self._mode = mode # The directory file is a text file. Each line looks like @@ -64,14 +65,25 @@ class _Database(collections.MutableMapping): # The index is an in-memory dict, mirroring the directory file. self._index = None # maps keys to (pos, siz) pairs + # Handle the creation + self._create(flag) + self._update() + + def _create(self, flag): + if flag == 'n': + for filename in (self._datfile, self._bakfile, self._dirfile): + try: + _os.remove(filename) + except OSError: + pass # Mod by Jack: create data file if needed try: f = _io.open(self._datfile, 'r', encoding="Latin-1") except OSError: - f = _io.open(self._datfile, 'w', encoding="Latin-1") - self._chmod(self._datfile) - f.close() - self._update() + with _io.open(self._datfile, 'w', encoding="Latin-1") as f: + self._chmod(self._datfile) + else: + f.close() # Read directory file into the in-memory index dict. def _update(self): @@ -81,12 +93,12 @@ class _Database(collections.MutableMapping): except OSError: pass else: - for line in f: - line = line.rstrip() - key, pos_and_siz_pair = eval(line) - key = key.encode('Latin-1') - self._index[key] = pos_and_siz_pair - f.close() + with f: + for line in f: + line = line.rstrip() + key, pos_and_siz_pair = _ast.literal_eval(line) + key = key.encode('Latin-1') + self._index[key] = pos_and_siz_pair # Write the index dict to the directory file. The original directory # file (if any) is renamed with a .bak extension first. If a .bak @@ -108,13 +120,13 @@ class _Database(collections.MutableMapping): except OSError: pass - f = self._io.open(self._dirfile, 'w', encoding="Latin-1") - self._chmod(self._dirfile) - for key, pos_and_siz_pair in self._index.items(): - # Use Latin-1 since it has no qualms with any value in any - # position; UTF-8, though, does care sometimes. - f.write("%r, %r\n" % (key.decode('Latin-1'), pos_and_siz_pair)) - f.close() + with self._io.open(self._dirfile, 'w', encoding="Latin-1") as f: + self._chmod(self._dirfile) + for key, pos_and_siz_pair in self._index.items(): + # Use Latin-1 since it has no qualms with any value in any + # position; UTF-8, though, does care sometimes. 
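Each line of the dumbdbm directory file is the repr of a key string followed by a (pos, siz) pair, and the _update() hunk above now parses it with ast.literal_eval instead of eval, so only Python literals are accepted. The same step in isolation, with hypothetical line contents:

    import ast

    line = "'spam', (512, 3)"            # shape written by _commit()/_addkey()
    key, pos_and_siz_pair = ast.literal_eval(line)
    assert (key, pos_and_siz_pair) == ('spam', (512, 3))

    try:
        ast.literal_eval("__import__('os').system('echo pwned')")
    except ValueError:
        pass                             # arbitrary expressions are refused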
+ entry = "%r, %r\n" % (key.decode('Latin-1'), pos_and_siz_pair) + f.write(entry) sync = _commit @@ -127,10 +139,9 @@ class _Database(collections.MutableMapping): key = key.encode('utf-8') self._verify_open() pos, siz = self._index[key] # may raise KeyError - f = _io.open(self._datfile, 'rb') - f.seek(pos) - dat = f.read(siz) - f.close() + with _io.open(self._datfile, 'rb') as f: + f.seek(pos) + dat = f.read(siz) return dat # Append val to the data file, starting at a _BLOCKSIZE-aligned @@ -138,14 +149,13 @@ class _Database(collections.MutableMapping): # to get to an aligned offset. Return pair # (starting offset of val, len(val)) def _addval(self, val): - f = _io.open(self._datfile, 'rb+') - f.seek(0, 2) - pos = int(f.tell()) - npos = ((pos + _BLOCKSIZE - 1) // _BLOCKSIZE) * _BLOCKSIZE - f.write(b'\0'*(npos-pos)) - pos = npos - f.write(val) - f.close() + with _io.open(self._datfile, 'rb+') as f: + f.seek(0, 2) + pos = int(f.tell()) + npos = ((pos + _BLOCKSIZE - 1) // _BLOCKSIZE) * _BLOCKSIZE + f.write(b'\0'*(npos-pos)) + pos = npos + f.write(val) return (pos, len(val)) # Write val to the data file, starting at offset pos. The caller @@ -153,10 +163,9 @@ class _Database(collections.MutableMapping): # pos to hold val, without overwriting some other value. Return # pair (pos, len(val)). def _setval(self, pos, val): - f = _io.open(self._datfile, 'rb+') - f.seek(pos) - f.write(val) - f.close() + with _io.open(self._datfile, 'rb+') as f: + f.seek(pos) + f.write(val) return (pos, len(val)) # key is a new key whose associated value starts in the data file @@ -164,10 +173,9 @@ class _Database(collections.MutableMapping): # the in-memory index dict, and append one to the directory file. def _addkey(self, key, pos_and_siz_pair): self._index[key] = pos_and_siz_pair - f = _io.open(self._dirfile, 'a', encoding="Latin-1") - self._chmod(self._dirfile) - f.write("%r, %r\n" % (key.decode("Latin-1"), pos_and_siz_pair)) - f.close() + with _io.open(self._dirfile, 'a', encoding="Latin-1") as f: + self._chmod(self._dirfile) + f.write("%r, %r\n" % (key.decode("Latin-1"), pos_and_siz_pair)) def __setitem__(self, key, val): if isinstance(key, str): @@ -216,8 +224,10 @@ class _Database(collections.MutableMapping): self._commit() def keys(self): - self._verify_open() - return list(self._index.keys()) + try: + return list(self._index) + except TypeError: + raise error('DBM object has already been closed') from None def items(self): self._verify_open() @@ -226,21 +236,32 @@ class _Database(collections.MutableMapping): def __contains__(self, key): if isinstance(key, str): key = key.encode('utf-8') - self._verify_open() - return key in self._index + try: + return key in self._index + except TypeError: + if self._index is None: + raise error('DBM object has already been closed') from None + else: + raise def iterkeys(self): - self._verify_open() - return iter(self._index.keys()) + try: + return iter(self._index) + except TypeError: + raise error('DBM object has already been closed') from None __iter__ = iterkeys def __len__(self): - self._verify_open() - return len(self._index) + try: + return len(self._index) + except TypeError: + raise error('DBM object has already been closed') from None def close(self): - self._commit() - self._index = self._datfile = self._dirfile = self._bakfile = None + try: + self._commit() + finally: + self._index = self._datfile = self._dirfile = self._bakfile = None __del__ = close @@ -255,20 +276,20 @@ class _Database(collections.MutableMapping): self.close() -def open(file, flag=None, 
mode=0o666): +def open(file, flag='c', mode=0o666): """Open the database file, filename, and return corresponding object. The flag argument, used to control how the database is opened in the - other DBM implementations, is ignored in the dbm.dumb module; the - database is always opened for update, and will be created if it does - not exist. + other DBM implementations, supports only the semantics of 'c' and 'n' + values. Other values will default to the semantics of 'c' value: + the database will always opened for update and will be created if it + does not exist. The optional mode argument is the UNIX mode of the file, used only when the database has to be created. It defaults to octal code 0o666 (and will be modified by the prevailing umask). """ - # flag argument is currently ignored # Modify mode depending on the umask try: @@ -279,5 +300,4 @@ def open(file, flag=None, mode=0o666): else: # Turn off any bits that are set in the umask mode = mode & (~um) - - return _Database(file, mode) + return _Database(file, mode, flag=flag) diff --git a/Darwin/lib/python3.4/dbm/gnu.py b/Darwin/lib/python3.5/dbm/gnu.py similarity index 100% rename from Darwin/lib/python3.4/dbm/gnu.py rename to Darwin/lib/python3.5/dbm/gnu.py diff --git a/Darwin/lib/python3.4/dbm/ndbm.py b/Darwin/lib/python3.5/dbm/ndbm.py similarity index 100% rename from Darwin/lib/python3.4/dbm/ndbm.py rename to Darwin/lib/python3.5/dbm/ndbm.py diff --git a/Darwin/lib/python3.5/decimal.py b/Darwin/lib/python3.5/decimal.py new file mode 100644 index 0000000..7746ea2 --- /dev/null +++ b/Darwin/lib/python3.5/decimal.py @@ -0,0 +1,11 @@ + +try: + from _decimal import * + from _decimal import __doc__ + from _decimal import __version__ + from _decimal import __libmpdec_version__ +except ImportError: + from _pydecimal import * + from _pydecimal import __doc__ + from _pydecimal import __version__ + from _pydecimal import __libmpdec_version__ diff --git a/Darwin/lib/python3.4/difflib.py b/Darwin/lib/python3.5/difflib.py similarity index 95% rename from Darwin/lib/python3.4/difflib.py rename to Darwin/lib/python3.5/difflib.py index 38dfef4..22d9145 100644 --- a/Darwin/lib/python3.4/difflib.py +++ b/Darwin/lib/python3.5/difflib.py @@ -28,9 +28,9 @@ Class HtmlDiff: __all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher', 'Differ','IS_CHARACTER_JUNK', 'IS_LINE_JUNK', 'context_diff', - 'unified_diff', 'HtmlDiff', 'Match'] + 'unified_diff', 'diff_bytes', 'HtmlDiff', 'Match'] -import heapq +from heapq import nlargest as _nlargest from collections import namedtuple as _namedtuple Match = _namedtuple('Match', 'a b size') @@ -511,8 +511,8 @@ class SequenceMatcher: non_adjacent.append((i1, j1, k1)) non_adjacent.append( (la, lb, 0) ) - self.matching_blocks = non_adjacent - return map(Match._make, self.matching_blocks) + self.matching_blocks = list(map(Match._make, non_adjacent)) + return self.matching_blocks def get_opcodes(self): """Return list of 5-tuples describing how to turn a into b. @@ -729,7 +729,7 @@ def get_close_matches(word, possibilities, n=3, cutoff=0.6): result.append((s.ratio(), x)) # Move the best scorers to head of list - result = heapq.nlargest(n, result) + result = _nlargest(n, result) # Strip scores for the best n matches return [x for score, x in result] @@ -852,10 +852,9 @@ class Differ: and return true iff the string is junk. The module-level function `IS_LINE_JUNK` may be used to filter out lines without visible characters, except for at most one splat ('#'). 
It is recommended - to leave linejunk None; as of Python 2.3, the underlying - SequenceMatcher class has grown an adaptive notion of "noise" lines - that's better than any static definition the author has ever been - able to craft. + to leave linejunk None; the underlying SequenceMatcher class has + an adaptive notion of "noise" lines that's better than any static + definition the author has ever been able to craft. - `charjunk`: A function that should accept a string of length 1. The module-level function `IS_CHARACTER_JUNK` may be used to filter out @@ -1175,6 +1174,7 @@ def unified_diff(a, b, fromfile='', tofile='', fromfiledate='', four """ + _check_types(a, b, fromfile, tofile, fromfiledate, tofiledate, lineterm) started = False for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n): if not started: @@ -1262,6 +1262,7 @@ def context_diff(a, b, fromfile='', tofile='', four """ + _check_types(a, b, fromfile, tofile, fromfiledate, tofiledate, lineterm) prefix = dict(insert='+ ', delete='- ', replace='! ', equal=' ') started = False for group in SequenceMatcher(None,a,b).get_grouped_opcodes(n): @@ -1293,22 +1294,70 @@ def context_diff(a, b, fromfile='', tofile='', for line in b[j1:j2]: yield prefix[tag] + line +def _check_types(a, b, *args): + # Checking types is weird, but the alternative is garbled output when + # someone passes mixed bytes and str to {unified,context}_diff(). E.g. + # without this check, passing filenames as bytes results in output like + # --- b'oldfile.txt' + # +++ b'newfile.txt' + # because of how str.format() incorporates bytes objects. + if a and not isinstance(a[0], str): + raise TypeError('lines to compare must be str, not %s (%r)' % + (type(a[0]).__name__, a[0])) + if b and not isinstance(b[0], str): + raise TypeError('lines to compare must be str, not %s (%r)' % + (type(b[0]).__name__, b[0])) + for arg in args: + if not isinstance(arg, str): + raise TypeError('all arguments must be str, not: %r' % (arg,)) + +def diff_bytes(dfunc, a, b, fromfile=b'', tofile=b'', + fromfiledate=b'', tofiledate=b'', n=3, lineterm=b'\n'): + r""" + Compare `a` and `b`, two sequences of lines represented as bytes rather + than str. This is a wrapper for `dfunc`, which is typically either + unified_diff() or context_diff(). Inputs are losslessly converted to + strings so that `dfunc` only has to worry about strings, and encoded + back to bytes on return. This is necessary to compare files with + unknown or inconsistent encoding. All other inputs (except `n`) must be + bytes rather than str. + """ + def decode(s): + try: + return s.decode('ascii', 'surrogateescape') + except AttributeError as err: + msg = ('all arguments must be bytes, not %s (%r)' % + (type(s).__name__, s)) + raise TypeError(msg) from err + a = list(map(decode, a)) + b = list(map(decode, b)) + fromfile = decode(fromfile) + tofile = decode(tofile) + fromfiledate = decode(fromfiledate) + tofiledate = decode(tofiledate) + lineterm = decode(lineterm) + + lines = dfunc(a, b, fromfile, tofile, fromfiledate, tofiledate, n, lineterm) + for line in lines: + yield line.encode('ascii', 'surrogateescape') + def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK): r""" Compare `a` and `b` (lists of strings); return a `Differ`-style delta. 
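diff_bytes(), added above, lets unified_diff() or context_diff() be driven with bytes inputs of unknown or inconsistent encoding. Hypothetical usage, with invented file names and contents:

    import difflib

    old = [b'one\n', b'two\n']
    new = [b'one\n', b'three\n']
    patch = list(difflib.diff_bytes(difflib.unified_diff, old, new,
                                    fromfile=b'a/data.txt', tofile=b'b/data.txt'))
    # Every element is bytes, e.g. patch[0] == b'--- a/data.txt\n'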
Optional keyword parameters `linejunk` and `charjunk` are for filter - functions (or None): + functions, or can be None: - - linejunk: A function that should accept a single string argument, and + - linejunk: A function that should accept a single string argument and return true iff the string is junk. The default is None, and is - recommended; as of Python 2.3, an adaptive notion of "noise" lines is - used that does a good job on its own. + recommended; the underlying SequenceMatcher class has an adaptive + notion of "noise" lines. - - charjunk: A function that should accept a string of length 1. The - default is module-level function IS_CHARACTER_JUNK, which filters out - whitespace characters (a blank or tab; note: bad idea to include newline - in this!). + - charjunk: A function that accepts a character (string of length + 1), and returns true iff the character is junk. The default is + the module-level function IS_CHARACTER_JUNK, which filters out + whitespace characters (a blank or tab; note: it's a bad idea to + include newline in this!). Tools/scripts/ndiff.py is a command-line front-end to this function. @@ -1410,7 +1459,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, change_re.sub(record_sub_info,markers) # process each tuple inserting our special marks that won't be # noticed by an xml/html escaper. - for key,(begin,end) in sub_info[::-1]: + for key,(begin,end) in reversed(sub_info): text = text[0:begin]+'\0'+key+text[begin:end]+'\1'+text[end:] text = text[2:] # Handle case of add/delete entire line @@ -1448,10 +1497,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, # are a concatenation of the first character of each of the 4 lines # so we can do some very readable comparisons. while len(lines) < 4: - try: - lines.append(next(diff_lines_iterator)) - except StopIteration: - lines.append('X') + lines.append(next(diff_lines_iterator, 'X')) s = ''.join([line[0] for line in lines]) if s.startswith('X'): # When no more lines, pump out any remaining blank lines so the @@ -1514,7 +1560,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, num_blanks_to_yield -= 1 yield ('','\n'),None,True if s.startswith('X'): - raise StopIteration + return else: yield from_line,to_line,True @@ -1536,7 +1582,10 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, while True: # Collecting lines of text until we have a from/to pair while (len(fromlines)==0 or len(tolines)==0): - from_line, to_line, found_diff = next(line_iterator) + try: + from_line, to_line, found_diff = next(line_iterator) + except StopIteration: + return if from_line is not None: fromlines.append((from_line,found_diff)) if to_line is not None: @@ -1550,8 +1599,7 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, # them up without doing anything else with them. line_pair_iterator = _line_pair_iterator() if context is None: - while True: - yield next(line_pair_iterator) + yield from line_pair_iterator # Handle case where user wants context differencing. We must do some # storage of lines until we know for sure that they are to be yielded. 
else: @@ -1564,7 +1612,10 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None, index, contextLines = 0, [None]*(context) found_diff = False while(found_diff is False): - from_line, to_line, found_diff = next(line_pair_iterator) + try: + from_line, to_line, found_diff = next(line_pair_iterator) + except StopIteration: + return i = index % context contextLines[i] = (from_line, to_line, found_diff) index += 1 @@ -1601,7 +1652,7 @@ _file_template = """ + content="text/html; charset=%(charset)s" /> @@ -1679,7 +1730,7 @@ class HtmlDiff(object): tabsize -- tab stop spacing, defaults to 8. wrapcolumn -- column number where lines are broken and wrapped, defaults to None where lines are not wrapped. - linejunk,charjunk -- keyword arguments passed into ndiff() (used to by + linejunk,charjunk -- keyword arguments passed into ndiff() (used by HtmlDiff() to generate the side by side HTML differences). See ndiff() documentation for argument default values and descriptions. """ @@ -1688,8 +1739,8 @@ class HtmlDiff(object): self._linejunk = linejunk self._charjunk = charjunk - def make_file(self,fromlines,tolines,fromdesc='',todesc='',context=False, - numlines=5): + def make_file(self, fromlines, tolines, fromdesc='', todesc='', + context=False, numlines=5, *, charset='utf-8'): """Returns HTML file of side by side comparison with change highlights Arguments: @@ -1704,13 +1755,16 @@ class HtmlDiff(object): When context is False, controls the number of lines to place the "next" link anchors before the next change (so click of "next" link jumps to just before the change). + charset -- charset of the HTML document """ - return self._file_template % dict( - styles = self._styles, - legend = self._legend, - table = self.make_table(fromlines,tolines,fromdesc,todesc, - context=context,numlines=numlines)) + return (self._file_template % dict( + styles=self._styles, + legend=self._legend, + table=self.make_table(fromlines, tolines, fromdesc, todesc, + context=context, numlines=numlines), + charset=charset + )).encode(charset, 'xmlcharrefreplace').decode(charset) def _tab_newline_replace(self,fromlines,tolines): """Returns from/to line lists with tabs expanded and newlines removed. diff --git a/Darwin/lib/python3.4/dis.py b/Darwin/lib/python3.5/dis.py similarity index 98% rename from Darwin/lib/python3.4/dis.py rename to Darwin/lib/python3.5/dis.py index 81cbe7f..af37cdf 100644 --- a/Darwin/lib/python3.4/dis.py +++ b/Darwin/lib/python3.5/dis.py @@ -29,7 +29,7 @@ def _try_compile(source, name): return c def dis(x=None, *, file=None): - """Disassemble classes, methods, functions, or code. + """Disassemble classes, methods, functions, generators, or code. With no argument, disassemble the last traceback. 
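The docstring change above goes with the gi_code handling added in the next hunk: dis() and _get_code_object() now accept generator objects directly. Illustrative usage:

    import dis

    def countdown(n):
        while n:
            yield n
            n -= 1

    gen = countdown(3)
    dis.dis(gen)        # disassembles the underlying code object via gi_code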
@@ -41,6 +41,8 @@ def dis(x=None, *, file=None): x = x.__func__ if hasattr(x, '__code__'): # Function x = x.__code__ + if hasattr(x, 'gi_code'): # Generator + x = x.gi_code if hasattr(x, '__dict__'): # Class or module items = sorted(x.__dict__.items()) for name, x1 in items: @@ -82,6 +84,8 @@ COMPILER_FLAG_NAMES = { 16: "NESTED", 32: "GENERATOR", 64: "NOFREE", + 128: "COROUTINE", + 256: "ITERABLE_COROUTINE", } def pretty_flags(flags): @@ -99,11 +103,13 @@ def pretty_flags(flags): return ", ".join(names) def _get_code_object(x): - """Helper to handle methods, functions, strings and raw code objects""" + """Helper to handle methods, functions, generators, strings and raw code objects""" if hasattr(x, '__func__'): # Method x = x.__func__ if hasattr(x, '__code__'): # Function x = x.__code__ + if hasattr(x, 'gi_code'): # Generator + x = x.gi_code if isinstance(x, str): # Source code x = _try_compile(x, "") if hasattr(x, 'co_code'): # Code object diff --git a/Darwin/lib/python3.4/distutils/README b/Darwin/lib/python3.5/distutils/README similarity index 100% rename from Darwin/lib/python3.4/distutils/README rename to Darwin/lib/python3.5/distutils/README diff --git a/Darwin/lib/python3.4/distutils/__init__.py b/Darwin/lib/python3.5/distutils/__init__.py similarity index 55% rename from Darwin/lib/python3.4/distutils/__init__.py rename to Darwin/lib/python3.5/distutils/__init__.py index 9463a35..d823d04 100644 --- a/Darwin/lib/python3.4/distutils/__init__.py +++ b/Darwin/lib/python3.5/distutils/__init__.py @@ -8,10 +8,6 @@ used from a setup script as setup (...) """ -# Distutils version -# -# Updated automatically by the Python release process. -# -#--start constants-- -__version__ = "3.4.1" -#--end constants-- +import sys + +__version__ = sys.version[:sys.version.index(' ')] diff --git a/Darwin/lib/python3.5/distutils/_msvccompiler.py b/Darwin/lib/python3.5/distutils/_msvccompiler.py new file mode 100644 index 0000000..82b78a0 --- /dev/null +++ b/Darwin/lib/python3.5/distutils/_msvccompiler.py @@ -0,0 +1,544 @@ +"""distutils._msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for Microsoft Visual Studio 2015. + +The module is compatible with VS 2015 and later. You can find legacy support +for older versions in distutils.msvc9compiler and distutils.msvccompiler. 
+""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +# finding DevStudio (through the registry) +# ported to VS 2005 and VS 2008 by Christian Heimes +# ported to VS 2015 by Steve Dower + +import os +import shutil +import stat +import subprocess + +from distutils.errors import DistutilsExecError, DistutilsPlatformError, \ + CompileError, LibError, LinkError +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +import winreg +from itertools import count + +def _find_vcvarsall(plat_spec): + with winreg.OpenKeyEx( + winreg.HKEY_LOCAL_MACHINE, + r"Software\Microsoft\VisualStudio\SxS\VC7", + access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ) as key: + if not key: + log.debug("Visual C++ is not registered") + return None, None + + best_version = 0 + best_dir = None + for i in count(): + try: + v, vc_dir, vt = winreg.EnumValue(key, i) + except OSError: + break + if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): + try: + version = int(float(v)) + except (ValueError, TypeError): + continue + if version >= 14 and version > best_version: + best_version, best_dir = version, vc_dir + if not best_version: + log.debug("No suitable Visual C++ version found") + return None, None + + vcvarsall = os.path.join(best_dir, "vcvarsall.bat") + if not os.path.isfile(vcvarsall): + log.debug("%s cannot be found", vcvarsall) + return None, None + + vcruntime = None + vcruntime_spec = _VCVARS_PLAT_TO_VCRUNTIME_REDIST.get(plat_spec) + if vcruntime_spec: + vcruntime = os.path.join(best_dir, + vcruntime_spec.format(best_version)) + if not os.path.isfile(vcruntime): + log.debug("%s cannot be found", vcruntime) + vcruntime = None + + return vcvarsall, vcruntime + +def _get_vc_env(plat_spec): + if os.getenv("DISTUTILS_USE_SDK"): + return { + key.lower(): value + for key, value in os.environ.items() + } + + vcvarsall, vcruntime = _find_vcvarsall(plat_spec) + if not vcvarsall: + raise DistutilsPlatformError("Unable to find vcvarsall.bat") + + try: + out = subprocess.check_output( + '"{}" {} && set'.format(vcvarsall, plat_spec), + shell=True, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) + except subprocess.CalledProcessError as exc: + log.error(exc.output) + raise DistutilsPlatformError("Error executing {}" + .format(exc.cmd)) + + env = { + key.lower(): value + for key, _, value in + (line.partition('=') for line in out.splitlines()) + if key and value + } + + if vcruntime: + env['py_vcruntime_redist'] = vcruntime + return env + +def _find_exe(exe, paths=None): + """Return path to an MSVC executable program. + + Tries to find the program in several places: first, one of the + MSVC program search paths from the registry; next, the directories + in the PATH environment variable. If any of those work, return an + absolute path that is known to exist. If none of them work, just + return the original program name, 'exe'. + """ + if not paths: + paths = os.getenv('path').split(os.pathsep) + for p in paths: + fn = os.path.join(os.path.abspath(p), exe) + if os.path.isfile(fn): + return fn + return exe + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is +# the param to cross-compile on x86 targetting amd64.) 
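_get_vc_env() above captures the compiler environment by running '"vcvarsall.bat" <plat_spec> && set' in a shell and splitting the resulting KEY=VALUE lines. The same parsing step in isolation, with invented output text:

    out = "PATH=C:\\VC\\bin\nINCLUDE=C:\\VC\\include\nJUNK\n"
    env = {
        key.lower(): value
        for key, _, value in (line.partition('=') for line in out.splitlines())
        if key and value      # lines without '=' (or with empty values) are dropped
    }
    assert env == {'path': 'C:\\VC\\bin', 'include': 'C:\\VC\\include'}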
+PLAT_TO_VCVARS = { + 'win32' : 'x86', + 'win-amd64' : 'amd64', +} + +# A map keyed by get_platform() return values to the file under +# the VC install directory containing the vcruntime redistributable. +_VCVARS_PLAT_TO_VCRUNTIME_REDIST = { + 'x86' : 'redist\\x86\\Microsoft.VC{0}0.CRT\\vcruntime{0}0.dll', + 'amd64' : 'redist\\x64\\Microsoft.VC{0}0.CRT\\vcruntime{0}0.dll', + 'x86_amd64' : 'redist\\x64\\Microsoft.VC{0}0.CRT\\vcruntime{0}0.dll', +} + +# A set containing the DLLs that are guaranteed to be available for +# all micro versions of this Python version. Known extension +# dependencies that are not in this set will be copied to the output +# path. +_BUNDLED_DLLS = frozenset(['vcruntime140.dll']) + +class MSVCCompiler(CCompiler) : + """Concrete class that implements an interface to Microsoft Visual C++, + as defined by the CCompiler abstract class.""" + + compiler_type = 'msvc' + + # Just set this so CCompiler's constructor doesn't barf. We currently + # don't use the 'set_executables()' bureaucracy provided by CCompiler, + # as it really isn't necessary for this sort of single-compiler class. + # Would be nice to have a consistent interface with UnixCCompiler, + # though, so it's worth thinking about. + executables = {} + + # Private class data (need to distinguish C from C++ source for compiler) + _c_extensions = ['.c'] + _cpp_extensions = ['.cc', '.cpp', '.cxx'] + _rc_extensions = ['.rc'] + _mc_extensions = ['.mc'] + + # Needed for the filename generation methods provided by the + # base class, CCompiler. + src_extensions = (_c_extensions + _cpp_extensions + + _rc_extensions + _mc_extensions) + res_extension = '.res' + obj_extension = '.obj' + static_lib_extension = '.lib' + shared_lib_extension = '.dll' + static_lib_format = shared_lib_format = '%s%s' + exe_extension = '.exe' + + + def __init__(self, verbose=0, dry_run=0, force=0): + CCompiler.__init__ (self, verbose, dry_run, force) + # target platform (.plat_name is consistent with 'bdist') + self.plat_name = None + self.initialized = False + + def initialize(self, plat_name=None): + # multi-init means we would need to check platform same each time... + assert not self.initialized, "don't init multiple times" + if plat_name is None: + plat_name = get_platform() + # sanity check for platforms to prevent obscure errors later. + if plat_name not in PLAT_TO_VCVARS: + raise DistutilsPlatformError("--plat-name must be one of {}" + .format(tuple(PLAT_TO_VCVARS))) + + # On x86, 'vcvarsall.bat amd64' creates an env that doesn't work; + # to cross compile, you use 'x86_amd64'. 
+ # On AMD64, 'vcvarsall.bat amd64' is a native build env; to cross + # compile use 'x86' (ie, it runs the x86 compiler directly) + if plat_name == get_platform() or plat_name == 'win32': + # native build or cross-compile to win32 + plat_spec = PLAT_TO_VCVARS[plat_name] + else: + # cross compile from win32 -> some 64bit + plat_spec = '{}_{}'.format( + PLAT_TO_VCVARS[get_platform()], + PLAT_TO_VCVARS[plat_name] + ) + + vc_env = _get_vc_env(plat_spec) + if not vc_env: + raise DistutilsPlatformError("Unable to find a compatible " + "Visual Studio installation.") + + self._paths = vc_env.get('path', '') + paths = self._paths.split(os.pathsep) + self.cc = _find_exe("cl.exe", paths) + self.linker = _find_exe("link.exe", paths) + self.lib = _find_exe("lib.exe", paths) + self.rc = _find_exe("rc.exe", paths) # resource compiler + self.mc = _find_exe("mc.exe", paths) # message compiler + self.mt = _find_exe("mt.exe", paths) # message compiler + self._vcruntime_redist = vc_env.get('py_vcruntime_redist', '') + + for dir in vc_env.get('include', '').split(os.pathsep): + if dir: + self.add_include_dir(dir) + + for dir in vc_env.get('lib', '').split(os.pathsep): + if dir: + self.add_library_dir(dir) + + self.preprocess_options = None + # If vcruntime_redist is available, link against it dynamically. Otherwise, + # use /MT[d] to build statically, then switch from libucrt[d].lib to ucrt[d].lib + # later to dynamically link to ucrtbase but not vcruntime. + self.compile_options = [ + '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG' + ] + self.compile_options.append('/MD' if self._vcruntime_redist else '/MT') + + self.compile_options_debug = [ + '/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG' + ] + + ldflags = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG' + ] + if not self._vcruntime_redist: + ldflags.extend(('/nodefaultlib:libucrt.lib', 'ucrt.lib')) + + ldflags_debug = [ + '/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL' + ] + + self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] + self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] + self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO'] + self.ldflags_static = [*ldflags] + self.ldflags_static_debug = [*ldflags_debug] + + self._ldflags = { + (CCompiler.EXECUTABLE, None): self.ldflags_exe, + (CCompiler.EXECUTABLE, False): self.ldflags_exe, + (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, + (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, + (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, + (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, + (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, + } + + self.initialized = True + + # -- Worker methods ------------------------------------------------ + + def object_filenames(self, + source_filenames, + strip_dir=0, + output_dir=''): + ext_map = { + **{ext: self.obj_extension for ext in self.src_extensions}, + **{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions}, + } + + output_dir = output_dir or '' + + def make_out_path(p): + base, ext = os.path.splitext(p) + if strip_dir: + base = os.path.basename(base) + else: + _, base = os.path.splitdrive(base) + if base.startswith((os.path.sep, os.path.altsep)): + base = base[1:] + try: + # XXX: This may produce absurdly long paths. 
We should check + # the length of the result and trim base until we fit within + # 260 characters. + return os.path.join(output_dir, base + ext_map[ext]) + except LookupError: + # Better to raise an exception instead of silently continuing + # and later complain about sources and targets having + # different lengths + raise CompileError("Don't know how to compile {}".format(p)) + + return list(map(make_out_path, source_filenames)) + + + def compile(self, sources, + output_dir=None, macros=None, include_dirs=None, debug=0, + extra_preargs=None, extra_postargs=None, depends=None): + + if not self.initialized: + self.initialize() + compile_info = self._setup_compile(output_dir, macros, include_dirs, + sources, depends, extra_postargs) + macros, objects, extra_postargs, pp_opts, build = compile_info + + compile_opts = extra_preargs or [] + compile_opts.append('/c') + if debug: + compile_opts.extend(self.compile_options_debug) + else: + compile_opts.extend(self.compile_options) + + + add_cpp_opts = False + + for obj in objects: + try: + src, ext = build[obj] + except KeyError: + continue + if debug: + # pass the full pathname to MSVC in debug mode, + # this allows the debugger to find the source file + # without asking the user to browse for it + src = os.path.abspath(src) + + if ext in self._c_extensions: + input_opt = "/Tc" + src + elif ext in self._cpp_extensions: + input_opt = "/Tp" + src + add_cpp_opts = True + elif ext in self._rc_extensions: + # compile .RC to .RES file + input_opt = src + output_opt = "/fo" + obj + try: + self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) + except DistutilsExecError as msg: + raise CompileError(msg) + continue + elif ext in self._mc_extensions: + # Compile .MC to .RC file to .RES file. + # * '-h dir' specifies the directory for the + # generated include file + # * '-r dir' specifies the target directory of the + # generated RC file and the binary message resource + # it includes + # + # For now (since there are no options to change this), + # we use the source-directory for the include file and + # the build directory for the RC file and message + # resources. This works at least for win32all. + h_dir = os.path.dirname(src) + rc_dir = os.path.dirname(obj) + try: + # first compile .MC to .RC and .H file + self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) + base, _ = os.path.splitext(os.path.basename (src)) + rc_file = os.path.join(rc_dir, base + '.rc') + # then compile .RC to .RES file + self.spawn([self.rc, "/fo" + obj, rc_file]) + + except DistutilsExecError as msg: + raise CompileError(msg) + continue + else: + # how to handle this file? + raise CompileError("Don't know how to compile {} to {}" + .format(src, obj)) + + args = [self.cc] + compile_opts + pp_opts + if add_cpp_opts: + args.append('/EHsc') + args.append(input_opt) + args.append("/Fo" + obj) + args.extend(extra_postargs) + + try: + self.spawn(args) + except DistutilsExecError as msg: + raise CompileError(msg) + + return objects + + + def create_static_lib(self, + objects, + output_libname, + output_dir=None, + debug=0, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + output_filename = self.library_filename(output_libname, + output_dir=output_dir) + + if self._need_link(objects, output_filename): + lib_args = objects + ['/OUT:' + output_filename] + if debug: + pass # XXX what goes here? 
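
A sketch under assumed names of the per-source dispatch that compile() above performs; input_option_for is not part of the patch.

    def input_option_for(src, ext,
                         c_exts=('.c',),
                         cpp_exts=('.cc', '.cpp', '.cxx')):
        # C sources get /Tc<file>, C++ sources get /Tp<file> (the caller
        # also appends /EHsc for C++); anything else is rejected
        if ext in c_exts:
            return '/Tc' + src
        if ext in cpp_exts:
            return '/Tp' + src
        raise ValueError('do not know how to compile %s' % src)

    assert input_option_for('spam.cpp', '.cpp') == '/Tpspam.cpp'
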
+ try: + log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) + self.spawn([self.lib] + lib_args) + except DistutilsExecError as msg: + raise LibError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + + def link(self, + target_desc, + objects, + output_filename, + output_dir=None, + libraries=None, + library_dirs=None, + runtime_library_dirs=None, + export_symbols=None, + debug=0, + extra_preargs=None, + extra_postargs=None, + build_temp=None, + target_lang=None): + + if not self.initialized: + self.initialize() + objects, output_dir = self._fix_object_args(objects, output_dir) + fixed_args = self._fix_lib_args(libraries, library_dirs, + runtime_library_dirs) + libraries, library_dirs, runtime_library_dirs = fixed_args + + if runtime_library_dirs: + self.warn("I don't know what to do with 'runtime_library_dirs': " + + str(runtime_library_dirs)) + + lib_opts = gen_lib_options(self, + library_dirs, runtime_library_dirs, + libraries) + if output_dir is not None: + output_filename = os.path.join(output_dir, output_filename) + + if self._need_link(objects, output_filename): + ldflags = self._ldflags[target_desc, debug] + + export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] + + ld_args = (ldflags + lib_opts + export_opts + + objects + ['/OUT:' + output_filename]) + + # The MSVC linker generates .lib and .exp files, which cannot be + # suppressed by any linker switches. The .lib files may even be + # needed! Make sure they are generated in the temporary build + # directory. Since they have different names for debug and release + # builds, they can go into the same directory. + build_temp = os.path.dirname(objects[0]) + if export_symbols is not None: + (dll_name, dll_ext) = os.path.splitext( + os.path.basename(output_filename)) + implib_file = os.path.join( + build_temp, + self.library_filename(dll_name)) + ld_args.append ('/IMPLIB:' + implib_file) + + if extra_preargs: + ld_args[:0] = extra_preargs + if extra_postargs: + ld_args.extend(extra_postargs) + + output_dir = os.path.dirname(os.path.abspath(output_filename)) + self.mkpath(output_dir) + try: + log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) + self.spawn([self.linker] + ld_args) + self._copy_vcruntime(output_dir) + except DistutilsExecError as msg: + raise LinkError(msg) + else: + log.debug("skipping %s (up-to-date)", output_filename) + + def _copy_vcruntime(self, output_dir): + vcruntime = self._vcruntime_redist + if not vcruntime or not os.path.isfile(vcruntime): + return + + if os.path.basename(vcruntime).lower() in _BUNDLED_DLLS: + return + + log.debug('Copying "%s"', vcruntime) + vcruntime = shutil.copy(vcruntime, output_dir) + os.chmod(vcruntime, stat.S_IWRITE) + + def spawn(self, cmd): + old_path = os.getenv('path') + try: + os.environ['path'] = self._paths + return super().spawn(cmd) + finally: + os.environ['path'] = old_path + + # -- Miscellaneous methods ----------------------------------------- + # These are all used by the 'gen_lib_options() function, in + # ccompiler.py. + + def library_dir_option(self, dir): + return "/LIBPATH:" + dir + + def runtime_library_dir_option(self, dir): + raise DistutilsPlatformError( + "don't know how to set runtime library search path for MSVC") + + def library_option(self, lib): + return self.library_filename(lib) + + def find_library_file(self, dirs, lib, debug=0): + # Prefer a debugging library if found (and requested), but deal + # with it if we don't have one. 
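
A sketch, not the patch itself, of the save/swap/restore pattern the spawn() override above uses so the MSVC tools resolve from the vcvarsall PATH. The override stores the lower-cased 'path' key; this standalone version uses 'PATH', which is equivalent on Windows where variable names are case-insensitive.

    import os

    def run_with_paths(paths, run):
        old_path = os.environ.get('PATH', '')
        try:
            os.environ['PATH'] = paths     # swap in the tool directories
            return run()
        finally:
            os.environ['PATH'] = old_path  # always restore the caller's PATH
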
+ if debug: + try_names = [lib + "_d", lib] + else: + try_names = [lib] + for dir in dirs: + for name in try_names: + libfile = os.path.join(dir, self.library_filename(name)) + if os.path.isfile(libfile): + return libfile + else: + # Oops, didn't find it in *any* of 'dirs' + return None diff --git a/Darwin/lib/python3.4/distutils/archive_util.py b/Darwin/lib/python3.5/distutils/archive_util.py similarity index 92% rename from Darwin/lib/python3.4/distutils/archive_util.py rename to Darwin/lib/python3.5/distutils/archive_util.py index 4470bb0..bed1384 100644 --- a/Darwin/lib/python3.4/distutils/archive_util.py +++ b/Darwin/lib/python3.5/distutils/archive_util.py @@ -57,26 +57,28 @@ def make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, """Create a (possibly compressed) tar file from all the files under 'base_dir'. - 'compress' must be "gzip" (the default), "compress", "bzip2", or None. - (compress will be deprecated in Python 3.2) + 'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or + None. ("compress" will be deprecated in Python 3.2) 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_dir' + ".tar", possibly plus - the appropriate compression extension (".gz", ".bz2" or ".Z"). + the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). Returns the output filename. """ - tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', None: '', 'compress': ''} - compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'compress': '.Z'} + tar_compression = {'gzip': 'gz', 'bzip2': 'bz2', 'xz': 'xz', None: '', + 'compress': ''} + compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', + 'compress': '.Z'} # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext.keys(): raise ValueError( - "bad value for 'compress': must be None, 'gzip', 'bzip2' " - "or 'compress'") + "bad value for 'compress': must be None, 'gzip', 'bzip2', " + "'xz' or 'compress'") archive_name = base_name + '.tar' if compress != 'compress': @@ -177,6 +179,7 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0): ARCHIVE_FORMATS = { 'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), 'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), 'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), 'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), 'zip': (make_zipfile, [],"ZIP file") @@ -197,8 +200,8 @@ def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, """Create an archive file (eg. zip or tar). 'base_name' is the name of the file to create, minus any format-specific - extension; 'format' is the archive format: one of "zip", "tar", "ztar", - or "gztar". + extension; 'format' is the archive format: one of "zip", "tar", "gztar", + "bztar", "xztar", or "ztar". 'root_dir' is a directory that will be the root directory of the archive; ie. 
we typically chdir into 'root_dir' before creating the diff --git a/Darwin/lib/python3.4/distutils/bcppcompiler.py b/Darwin/lib/python3.5/distutils/bcppcompiler.py similarity index 100% rename from Darwin/lib/python3.4/distutils/bcppcompiler.py rename to Darwin/lib/python3.5/distutils/bcppcompiler.py diff --git a/Darwin/lib/python3.4/distutils/ccompiler.py b/Darwin/lib/python3.5/distutils/ccompiler.py similarity index 99% rename from Darwin/lib/python3.4/distutils/ccompiler.py rename to Darwin/lib/python3.5/distutils/ccompiler.py index 911e84d..4bb53c9 100644 --- a/Darwin/lib/python3.4/distutils/ccompiler.py +++ b/Darwin/lib/python3.5/distutils/ccompiler.py @@ -875,9 +875,9 @@ main (int argc, char **argv) { def library_filename(self, libname, lib_type='static', # or 'shared' strip_dir=0, output_dir=''): assert output_dir is not None - if lib_type not in ("static", "shared", "dylib"): + if lib_type not in ("static", "shared", "dylib", "xcode_stub"): raise ValueError( - "'lib_type' must be \"static\", \"shared\" or \"dylib\"") + "'lib_type' must be \"static\", \"shared\", \"dylib\", or \"xcode_stub\"") fmt = getattr(self, lib_type + "_lib_format") ext = getattr(self, lib_type + "_lib_extension") @@ -959,7 +959,7 @@ def get_default_compiler(osname=None, platform=None): # is assumed to be in the 'distutils' package.) compiler_class = { 'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"), - 'msvc': ('msvccompiler', 'MSVCCompiler', + 'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"), 'cygwin': ('cygwinccompiler', 'CygwinCCompiler', "Cygwin port of GNU C Compiler for Win32"), diff --git a/Darwin/lib/python3.4/distutils/cmd.py b/Darwin/lib/python3.5/distutils/cmd.py similarity index 100% rename from Darwin/lib/python3.4/distutils/cmd.py rename to Darwin/lib/python3.5/distutils/cmd.py diff --git a/Darwin/lib/python3.4/distutils/command/__init__.py b/Darwin/lib/python3.5/distutils/command/__init__.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/__init__.py rename to Darwin/lib/python3.5/distutils/command/__init__.py diff --git a/Darwin/lib/python3.4/distutils/command/bdist.py b/Darwin/lib/python3.5/distutils/command/bdist.py similarity index 97% rename from Darwin/lib/python3.4/distutils/command/bdist.py rename to Darwin/lib/python3.5/distutils/command/bdist.py index 6814a1c..014871d 100644 --- a/Darwin/lib/python3.4/distutils/command/bdist.py +++ b/Darwin/lib/python3.5/distutils/command/bdist.py @@ -61,13 +61,14 @@ class bdist(Command): 'nt': 'zip'} # Establish the preferred order (for the --help-formats option). - format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar', + format_commands = ['rpm', 'gztar', 'bztar', 'xztar', 'ztar', 'tar', 'wininst', 'zip', 'msi'] # And the real information. 
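
Hypothetical usage of the 'xztar' format wired into ARCHIVE_FORMATS and the bdist format list above; dry_run only reports what would be written, and a real call would also pass root_dir and needs the lzma module for xz compression.

    from distutils.archive_util import make_archive

    name = make_archive('demo-1.0', 'xztar', dry_run=1)
    assert name == 'demo-1.0.tar.xz'
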
format_command = {'rpm': ('bdist_rpm', "RPM distribution"), 'gztar': ('bdist_dumb', "gzip'ed tar file"), 'bztar': ('bdist_dumb', "bzip2'ed tar file"), + 'xztar': ('bdist_dumb', "xz'ed tar file"), 'ztar': ('bdist_dumb', "compressed tar file"), 'tar': ('bdist_dumb', "tar file"), 'wininst': ('bdist_wininst', diff --git a/Darwin/lib/python3.4/distutils/command/bdist_dumb.py b/Darwin/lib/python3.5/distutils/command/bdist_dumb.py similarity index 97% rename from Darwin/lib/python3.4/distutils/command/bdist_dumb.py rename to Darwin/lib/python3.5/distutils/command/bdist_dumb.py index 4405d12..f1bfb24 100644 --- a/Darwin/lib/python3.4/distutils/command/bdist_dumb.py +++ b/Darwin/lib/python3.5/distutils/command/bdist_dumb.py @@ -22,7 +22,8 @@ class bdist_dumb(Command): "platform name to embed in generated filenames " "(default: %s)" % get_platform()), ('format=', 'f', - "archive format to create (tar, ztar, gztar, zip)"), + "archive format to create (tar, gztar, bztar, xztar, " + "ztar, zip)"), ('keep-temp', 'k', "keep the pseudo-installation tree around after " + "creating the distribution archive"), diff --git a/Darwin/lib/python3.4/distutils/command/bdist_msi.py b/Darwin/lib/python3.5/distutils/command/bdist_msi.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/bdist_msi.py rename to Darwin/lib/python3.5/distutils/command/bdist_msi.py diff --git a/Darwin/lib/python3.4/distutils/command/bdist_rpm.py b/Darwin/lib/python3.5/distutils/command/bdist_rpm.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/bdist_rpm.py rename to Darwin/lib/python3.5/distutils/command/bdist_rpm.py diff --git a/Darwin/lib/python3.4/distutils/command/bdist_wininst.py b/Darwin/lib/python3.5/distutils/command/bdist_wininst.py similarity index 93% rename from Darwin/lib/python3.4/distutils/command/bdist_wininst.py rename to Darwin/lib/python3.5/distutils/command/bdist_wininst.py index 959a8bf..0c0e2c1 100644 --- a/Darwin/lib/python3.4/distutils/command/bdist_wininst.py +++ b/Darwin/lib/python3.5/distutils/command/bdist_wininst.py @@ -303,7 +303,6 @@ class bdist_wininst(Command): return installer_name def get_exe_bytes(self): - from distutils.msvccompiler import get_build_version # If a target-version other than the current version has been # specified, then using the MSVC version from *this* build is no good. # Without actually finding and executing the target version and parsing @@ -313,20 +312,33 @@ class bdist_wininst(Command): # We can then execute this program to obtain any info we need, such # as the real sys.version string for the build. cur_version = get_python_version() - if self.target_version and self.target_version != cur_version: - # If the target version is *later* than us, then we assume they - # use what we use - # string compares seem wrong, but are what sysconfig.py itself uses - if self.target_version > cur_version: - bv = get_build_version() + + # If the target version is *later* than us, then we assume they + # use what we use + # string compares seem wrong, but are what sysconfig.py itself uses + if self.target_version and self.target_version < cur_version: + if self.target_version < "2.4": + bv = 6.0 + elif self.target_version == "2.4": + bv = 7.1 + elif self.target_version == "2.5": + bv = 8.0 + elif self.target_version <= "3.2": + bv = 9.0 + elif self.target_version <= "3.4": + bv = 10.0 else: - if self.target_version < "2.4": - bv = 6.0 - else: - bv = 7.1 + bv = 14.0 else: # for current version - use authoritative check. 
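
A sketch of the version parsing used by get_exe_bytes() in this hunk; the string here is made up, the real value comes from msvcrt.CRT_ASSEMBLY_VERSION and is only available on Windows builds.

    CRT_ASSEMBLY_VERSION = "14.0.23026.0"
    bv = float('.'.join(CRT_ASSEMBLY_VERSION.split('.', 2)[:2]))
    assert bv == 14.0   # selects wininst-14.0*.exe
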
- bv = get_build_version() + try: + from msvcrt import CRT_ASSEMBLY_VERSION + except ImportError: + # cross-building, so assume the latest version + bv = 14.0 + else: + bv = float('.'.join(CRT_ASSEMBLY_VERSION.split('.', 2)[:2])) + # wininst-x.y.exe is in the same directory as this file directory = os.path.dirname(__file__) diff --git a/Darwin/lib/python3.4/distutils/command/build.py b/Darwin/lib/python3.5/distutils/command/build.py similarity index 94% rename from Darwin/lib/python3.4/distutils/command/build.py rename to Darwin/lib/python3.5/distutils/command/build.py index cfc15cf..337dd0b 100644 --- a/Darwin/lib/python3.4/distutils/command/build.py +++ b/Darwin/lib/python3.5/distutils/command/build.py @@ -36,6 +36,8 @@ class build(Command): "(default: %s)" % get_platform()), ('compiler=', 'c', "specify the compiler type"), + ('parallel=', 'j', + "number of parallel build jobs"), ('debug', 'g', "compile extensions and libraries with debugging information"), ('force', 'f', @@ -65,6 +67,7 @@ class build(Command): self.debug = None self.force = 0 self.executable = None + self.parallel = None def finalize_options(self): if self.plat_name is None: @@ -116,6 +119,12 @@ class build(Command): if self.executable is None: self.executable = os.path.normpath(sys.executable) + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + def run(self): # Run all relevant sub-commands. This will be some subset of: # - build_py - pure Python modules diff --git a/Darwin/lib/python3.4/distutils/command/build_clib.py b/Darwin/lib/python3.5/distutils/command/build_clib.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/build_clib.py rename to Darwin/lib/python3.5/distutils/command/build_clib.py diff --git a/Darwin/lib/python3.4/distutils/command/build_ext.py b/Darwin/lib/python3.5/distutils/command/build_ext.py similarity index 91% rename from Darwin/lib/python3.4/distutils/command/build_ext.py rename to Darwin/lib/python3.5/distutils/command/build_ext.py index 80689b6..d4cb11e 100644 --- a/Darwin/lib/python3.4/distutils/command/build_ext.py +++ b/Darwin/lib/python3.5/distutils/command/build_ext.py @@ -4,7 +4,10 @@ Implements the Distutils 'build_ext' command, for building extension modules (currently limited to C extensions, should accommodate C++ extensions ASAP).""" -import sys, os, re +import contextlib +import os +import re +import sys from distutils.core import Command from distutils.errors import * from distutils.sysconfig import customize_compiler, get_python_version @@ -14,17 +17,7 @@ from distutils.extension import Extension from distutils.util import get_platform from distutils import log -# this keeps compatibility from 2.3 to 2.5 -if sys.version < "2.6": - USER_BASE = None - HAS_USER_SITE = False -else: - from site import USER_BASE - HAS_USER_SITE = True - -if os.name == 'nt': - from distutils.msvccompiler import get_build_version - MSVC_VERSION = int(get_build_version()) +from site import USER_BASE # An extension name is just a dot-separated list of Python NAMEs (ie. # the same as a fully-qualified module name). 
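
A minimal sketch of the new --parallel/-j option added to the build command above: the command line hands finalize_options() a string, which is coerced to an int or rejected. The literal '4' stands in for a user-supplied value.

    value = '4'                      # e.g. from "setup.py build -j 4"
    try:
        parallel = int(value)
    except ValueError:
        raise SystemExit('parallel should be an integer')
    assert parallel == 4
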
@@ -91,20 +84,19 @@ class build_ext(Command): "forcibly build everything (ignore file timestamps)"), ('compiler=', 'c', "specify the compiler type"), + ('parallel=', 'j', + "number of parallel build jobs"), ('swig-cpp', None, "make SWIG create C++ files (default is C)"), ('swig-opts=', None, "list of SWIG command line options"), ('swig=', None, "path to the SWIG executable"), + ('user', None, + "add user include, library and rpath") ] - boolean_options = ['inplace', 'debug', 'force', 'swig-cpp'] - - if HAS_USER_SITE: - user_options.append(('user', None, - "add user include, library and rpath")) - boolean_options.append('user') + boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] help_options = [ ('help-compiler', None, @@ -133,6 +125,7 @@ class build_ext(Command): self.swig_cpp = None self.swig_opts = None self.user = None + self.parallel = None def finalize_options(self): from distutils import sysconfig @@ -143,6 +136,7 @@ class build_ext(Command): ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force'), + ('parallel', 'parallel'), ('plat_name', 'plat_name'), ) @@ -208,27 +202,17 @@ class build_ext(Command): _sys_home = getattr(sys, '_home', None) if _sys_home: self.library_dirs.append(_sys_home) - if MSVC_VERSION >= 9: - # Use the .lib files for the correct architecture - if self.plat_name == 'win32': - suffix = '' - else: - # win-amd64 or win-ia64 - suffix = self.plat_name[4:] - new_lib = os.path.join(sys.exec_prefix, 'PCbuild') - if suffix: - new_lib = os.path.join(new_lib, suffix) - self.library_dirs.append(new_lib) - elif MSVC_VERSION == 8: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VS8.0')) - elif MSVC_VERSION == 7: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VS7.1')) + # Use the .lib files for the correct architecture + if self.plat_name == 'win32': + suffix = 'win32' else: - self.library_dirs.append(os.path.join(sys.exec_prefix, - 'PC', 'VC6')) + # win-amd64 or win-ia64 + suffix = self.plat_name[4:] + new_lib = os.path.join(sys.exec_prefix, 'PCbuild') + if suffix: + new_lib = os.path.join(new_lib, suffix) + self.library_dirs.append(new_lib) # for extensions under Cygwin and AtheOS Python's library directory must be # appended to library_dirs @@ -246,7 +230,7 @@ class build_ext(Command): # Python's library directory must be appended to library_dirs # See Issues: #1600860, #4366 if (sysconfig.get_config_var('Py_ENABLE_SHARED')): - if sys.executable.startswith(os.path.join(sys.exec_prefix, "bin")): + if not sysconfig.python_build: # building third party extensions self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) else: @@ -283,6 +267,12 @@ class build_ext(Command): self.library_dirs.append(user_lib) self.rpath.append(user_lib) + if isinstance(self.parallel, str): + try: + self.parallel = int(self.parallel) + except ValueError: + raise DistutilsOptionError("parallel should be an integer") + def run(self): from distutils.ccompiler import new_compiler @@ -451,15 +441,45 @@ class build_ext(Command): def build_extensions(self): # First, sanity-check the 'extensions' list self.check_extensions_list(self.extensions) + if self.parallel: + self._build_extensions_parallel() + else: + self._build_extensions_serial() + def _build_extensions_parallel(self): + workers = self.parallel + if self.parallel is True: + workers = os.cpu_count() # may return None + try: + from concurrent.futures import ThreadPoolExecutor + except ImportError: + workers = None + + if workers is None: + self._build_extensions_serial() + return 
+ + with ThreadPoolExecutor(max_workers=workers) as executor: + futures = [executor.submit(self.build_extension, ext) + for ext in self.extensions] + for ext, fut in zip(self.extensions, futures): + with self._filter_build_errors(ext): + fut.result() + + def _build_extensions_serial(self): for ext in self.extensions: - try: + with self._filter_build_errors(ext): self.build_extension(ext) - except (CCompilerError, DistutilsError, CompileError) as e: - if not ext.optional: - raise - self.warn('building extension "%s" failed: %s' % - (ext.name, e)) + + @contextlib.contextmanager + def _filter_build_errors(self, ext): + try: + yield + except (CCompilerError, DistutilsError, CompileError) as e: + if not ext.optional: + raise + self.warn('building extension "%s" failed: %s' % + (ext.name, e)) def build_extension(self, ext): sources = ext.sources @@ -511,15 +531,8 @@ class build_ext(Command): extra_postargs=extra_args, depends=ext.depends) - # XXX -- this is a Vile HACK! - # - # The setup.py script for Python on Unix needs to be able to - # get this list so it can perform all the clean up needed to - # avoid keeping object files around when cleaning out a failed - # build of an extension module. Since Distutils does not - # track dependencies, we have to get rid of intermediates to - # ensure all the intermediates will be properly re-built. - # + # XXX outdated variable, kept here in case third-part code + # needs it. self._built_objects = objects[:] # Now link the object files together into a "shared object" -- @@ -664,10 +677,7 @@ class build_ext(Command): """ from distutils.sysconfig import get_config_var ext_path = ext_name.split('.') - # extensions in debug_mode are named 'module_d.pyd' under windows ext_suffix = get_config_var('EXT_SUFFIX') - if os.name == 'nt' and self.debug: - return os.path.join(*ext_path) + '_d' + ext_suffix return os.path.join(*ext_path) + ext_suffix def get_export_symbols(self, ext): @@ -692,7 +702,7 @@ class build_ext(Command): # to need it mentioned explicitly, though, so that's what we do. # Append '_d' to the python import library on debug builds. 
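
A standalone sketch of the submit-then-collect pattern that _build_extensions_parallel() above relies on; build_one stands in for self.build_extension and items for self.extensions.

    from concurrent.futures import ThreadPoolExecutor

    def build_all(items, build_one, workers=None):
        with ThreadPoolExecutor(max_workers=workers) as executor:
            futures = [executor.submit(build_one, item) for item in items]
            for item, fut in zip(items, futures):
                fut.result()   # re-raises any exception from the worker thread

    build_all(['ext_a', 'ext_b'], lambda name: name.upper(), workers=2)
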
if sys.platform == "win32": - from distutils.msvccompiler import MSVCCompiler + from distutils._msvccompiler import MSVCCompiler if not isinstance(self.compiler, MSVCCompiler): template = "python%d%d" if self.debug: diff --git a/Darwin/lib/python3.4/distutils/command/build_py.py b/Darwin/lib/python3.5/distutils/command/build_py.py similarity index 99% rename from Darwin/lib/python3.4/distutils/command/build_py.py rename to Darwin/lib/python3.5/distutils/command/build_py.py index 9100b96..cf0ca57 100644 --- a/Darwin/lib/python3.4/distutils/command/build_py.py +++ b/Darwin/lib/python3.5/distutils/command/build_py.py @@ -314,10 +314,10 @@ class build_py (Command): if include_bytecode: if self.compile: outputs.append(importlib.util.cache_from_source( - filename, debug_override=True)) + filename, optimization='')) if self.optimize > 0: outputs.append(importlib.util.cache_from_source( - filename, debug_override=False)) + filename, optimization=self.optimize)) outputs += [ os.path.join(build_dir, filename) diff --git a/Darwin/lib/python3.4/distutils/command/build_scripts.py b/Darwin/lib/python3.5/distutils/command/build_scripts.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/build_scripts.py rename to Darwin/lib/python3.5/distutils/command/build_scripts.py diff --git a/Darwin/lib/python3.4/distutils/command/check.py b/Darwin/lib/python3.5/distutils/command/check.py similarity index 95% rename from Darwin/lib/python3.4/distutils/command/check.py rename to Darwin/lib/python3.5/distutils/command/check.py index 22b9349..7ebe707 100644 --- a/Darwin/lib/python3.4/distutils/command/check.py +++ b/Darwin/lib/python3.5/distutils/command/check.py @@ -122,7 +122,7 @@ class check(Command): """Returns warnings when the provided data doesn't compile.""" source_path = StringIO() parser = Parser() - settings = frontend.OptionParser().get_default_values() + settings = frontend.OptionParser(components=(Parser,)).get_default_values() settings.tab_width = 4 settings.pep_references = None settings.rfc_references = None @@ -138,8 +138,8 @@ class check(Command): document.note_source(source_path, -1) try: parser.parse(data, document) - except AttributeError: - reporter.messages.append((-1, 'Could not finish the parsing.', - '', {})) + except AttributeError as e: + reporter.messages.append( + (-1, 'Could not finish the parsing: %s.' 
% e, '', {})) return reporter.messages diff --git a/Darwin/lib/python3.4/distutils/command/clean.py b/Darwin/lib/python3.5/distutils/command/clean.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/clean.py rename to Darwin/lib/python3.5/distutils/command/clean.py diff --git a/Darwin/lib/python3.4/distutils/command/command_template b/Darwin/lib/python3.5/distutils/command/command_template similarity index 100% rename from Darwin/lib/python3.4/distutils/command/command_template rename to Darwin/lib/python3.5/distutils/command/command_template diff --git a/Darwin/lib/python3.4/distutils/command/config.py b/Darwin/lib/python3.5/distutils/command/config.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/config.py rename to Darwin/lib/python3.5/distutils/command/config.py diff --git a/Darwin/lib/python3.4/distutils/command/install.py b/Darwin/lib/python3.5/distutils/command/install.py similarity index 97% rename from Darwin/lib/python3.4/distutils/command/install.py rename to Darwin/lib/python3.5/distutils/command/install.py index 456511c..67db007 100644 --- a/Darwin/lib/python3.4/distutils/command/install.py +++ b/Darwin/lib/python3.5/distutils/command/install.py @@ -15,32 +15,17 @@ from distutils.util import convert_path, subst_vars, change_root from distutils.util import get_platform from distutils.errors import DistutilsOptionError -# this keeps compatibility from 2.3 to 2.5 -if sys.version < "2.6": - USER_BASE = None - USER_SITE = None - HAS_USER_SITE = False -else: - from site import USER_BASE - from site import USER_SITE - HAS_USER_SITE = True +from site import USER_BASE +from site import USER_SITE +HAS_USER_SITE = True -if sys.version < "2.2": - WINDOWS_SCHEME = { - 'purelib': '$base', - 'platlib': '$base', - 'headers': '$base/Include/$dist_name', - 'scripts': '$base/Scripts', - 'data' : '$base', - } -else: - WINDOWS_SCHEME = { - 'purelib': '$base/Lib/site-packages', - 'platlib': '$base/Lib/site-packages', - 'headers': '$base/Include/$dist_name', - 'scripts': '$base/Scripts', - 'data' : '$base', - } +WINDOWS_SCHEME = { + 'purelib': '$base/Lib/site-packages', + 'platlib': '$base/Lib/site-packages', + 'headers': '$base/Include/$dist_name', + 'scripts': '$base/Scripts', + 'data' : '$base', +} INSTALL_SCHEMES = { 'unix_prefix': { @@ -66,7 +51,7 @@ if HAS_USER_SITE: 'purelib': '$usersite', 'platlib': '$usersite', 'headers': '$userbase/Python$py_version_nodot/Include/$dist_name', - 'scripts': '$userbase/Scripts', + 'scripts': '$userbase/Python$py_version_nodot/Scripts', 'data' : '$userbase', } diff --git a/Darwin/lib/python3.4/distutils/command/install_data.py b/Darwin/lib/python3.5/distutils/command/install_data.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/install_data.py rename to Darwin/lib/python3.5/distutils/command/install_data.py diff --git a/Darwin/lib/python3.4/distutils/command/install_egg_info.py b/Darwin/lib/python3.5/distutils/command/install_egg_info.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/install_egg_info.py rename to Darwin/lib/python3.5/distutils/command/install_egg_info.py diff --git a/Darwin/lib/python3.4/distutils/command/install_headers.py b/Darwin/lib/python3.5/distutils/command/install_headers.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/install_headers.py rename to Darwin/lib/python3.5/distutils/command/install_headers.py diff --git a/Darwin/lib/python3.4/distutils/command/install_lib.py 
b/Darwin/lib/python3.5/distutils/command/install_lib.py similarity index 94% rename from Darwin/lib/python3.4/distutils/command/install_lib.py rename to Darwin/lib/python3.5/distutils/command/install_lib.py index 215813b..6154cf0 100644 --- a/Darwin/lib/python3.4/distutils/command/install_lib.py +++ b/Darwin/lib/python3.5/distutils/command/install_lib.py @@ -22,15 +22,15 @@ class install_lib(Command): # possible scenarios: # 1) no compilation at all (--no-compile --no-optimize) # 2) compile .pyc only (--compile --no-optimize; default) - # 3) compile .pyc and "level 1" .pyo (--compile --optimize) - # 4) compile "level 1" .pyo only (--no-compile --optimize) - # 5) compile .pyc and "level 2" .pyo (--compile --optimize-more) - # 6) compile "level 2" .pyo only (--no-compile --optimize-more) + # 3) compile .pyc and "opt-1" .pyc (--compile --optimize) + # 4) compile "opt-1" .pyc only (--no-compile --optimize) + # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) + # 6) compile "opt-2" .pyc only (--no-compile --optimize-more) # - # The UI for this is two option, 'compile' and 'optimize'. + # The UI for this is two options, 'compile' and 'optimize'. # 'compile' is strictly boolean, and only decides whether to # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and - # decides both whether to generate .pyo files and what level of + # decides both whether to generate .pyc files and what level of # optimization to use. user_options = [ @@ -166,10 +166,10 @@ class install_lib(Command): continue if self.compile: bytecode_files.append(importlib.util.cache_from_source( - py_file, debug_override=True)) + py_file, optimization='')) if self.optimize > 0: bytecode_files.append(importlib.util.cache_from_source( - py_file, debug_override=False)) + py_file, optimization=self.optimize)) return bytecode_files diff --git a/Darwin/lib/python3.4/distutils/command/install_scripts.py b/Darwin/lib/python3.5/distutils/command/install_scripts.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/install_scripts.py rename to Darwin/lib/python3.5/distutils/command/install_scripts.py diff --git a/Darwin/lib/python3.4/distutils/command/register.py b/Darwin/lib/python3.5/distutils/command/register.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/register.py rename to Darwin/lib/python3.5/distutils/command/register.py diff --git a/Darwin/lib/python3.4/distutils/command/sdist.py b/Darwin/lib/python3.5/distutils/command/sdist.py similarity index 100% rename from Darwin/lib/python3.4/distutils/command/sdist.py rename to Darwin/lib/python3.5/distutils/command/sdist.py diff --git a/Darwin/lib/python3.4/distutils/command/upload.py b/Darwin/lib/python3.5/distutils/command/upload.py similarity index 85% rename from Darwin/lib/python3.4/distutils/command/upload.py rename to Darwin/lib/python3.5/distutils/command/upload.py index d6762e4..1c4fc48 100644 --- a/Darwin/lib/python3.4/distutils/command/upload.py +++ b/Darwin/lib/python3.5/distutils/command/upload.py @@ -1,24 +1,21 @@ -"""distutils.command.upload +""" +distutils.command.upload -Implements the Distutils 'upload' subcommand (upload package to PyPI).""" +Implements the Distutils 'upload' subcommand (upload package to a package +index). 
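
An illustration of the PEP 488 cache_from_source(optimization=...) calls that build_py and install_lib now use above; the exact cache tag, e.g. cpython-35, depends on the interpreter running the call.

    import importlib.util

    plain = importlib.util.cache_from_source('pkg/mod.py', optimization='')
    opt1 = importlib.util.cache_from_source('pkg/mod.py', optimization=1)
    # plain -> pkg/__pycache__/mod.<tag>.pyc
    # opt1  -> pkg/__pycache__/mod.<tag>.opt-1.pyc
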
+""" -from distutils.errors import * -from distutils.core import PyPIRCCommand -from distutils.spawn import spawn -from distutils import log -import sys -import os, io -import socket +import os +import io import platform +import hashlib from base64 import standard_b64encode from urllib.request import urlopen, Request, HTTPError from urllib.parse import urlparse - -# this keeps compatibility for 2.3 and 2.4 -if sys.version < "2.5": - from md5 import md5 -else: - from hashlib import md5 +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils.core import PyPIRCCommand +from distutils.spawn import spawn +from distutils import log class upload(PyPIRCCommand): @@ -60,7 +57,8 @@ class upload(PyPIRCCommand): def run(self): if not self.distribution.dist_files: - raise DistutilsOptionError("No dist file created in earlier command") + msg = "No dist file created in earlier command" + raise DistutilsOptionError(msg) for command, pyversion, filename in self.distribution.dist_files: self.upload_file(command, pyversion, filename) @@ -103,10 +101,10 @@ class upload(PyPIRCCommand): 'content': (os.path.basename(filename),content), 'filetype': command, 'pyversion': pyversion, - 'md5_digest': md5(content).hexdigest(), + 'md5_digest': hashlib.md5(content).hexdigest(), # additional meta-data - 'metadata_version' : '1.0', + 'metadata_version': '1.0', 'summary': meta.get_description(), 'home_page': meta.get_url(), 'author': meta.get_contact(), @@ -143,13 +141,13 @@ class upload(PyPIRCCommand): # Build up the MIME payload for the POST data boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b'\n--' + boundary.encode('ascii') - end_boundary = sep_boundary + b'--' + sep_boundary = b'\r\n--' + boundary.encode('ascii') + end_boundary = sep_boundary + b'--\r\n' body = io.BytesIO() for key, value in data.items(): - title = '\nContent-Disposition: form-data; name="%s"' % key + title = '\r\nContent-Disposition: form-data; name="%s"' % key # handle multiple entries for the same name - if type(value) != type([]): + if not isinstance(value, list): value = [value] for value in value: if type(value) is tuple: @@ -159,21 +157,22 @@ class upload(PyPIRCCommand): value = str(value).encode('utf-8') body.write(sep_boundary) body.write(title.encode('utf-8')) - body.write(b"\n\n") + body.write(b"\r\n\r\n") body.write(value) if value and value[-1:] == b'\r': body.write(b'\n') # write an extra newline (lurve Macs) body.write(end_boundary) - body.write(b"\n") body = body.getvalue() - self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO) + msg = "Submitting %s to %s" % (filename, self.repository) + self.announce(msg, log.INFO) # build the Request - headers = {'Content-type': - 'multipart/form-data; boundary=%s' % boundary, - 'Content-length': str(len(body)), - 'Authorization': auth} + headers = { + 'Content-type': 'multipart/form-data; boundary=%s' % boundary, + 'Content-length': str(len(body)), + 'Authorization': auth, + } request = Request(self.repository, data=body, headers=headers) @@ -184,7 +183,7 @@ class upload(PyPIRCCommand): reason = result.msg except OSError as e: self.announce(str(e), log.ERROR) - return + raise except HTTPError as e: status = e.code reason = e.msg @@ -193,8 +192,9 @@ class upload(PyPIRCCommand): self.announce('Server response (%s): %s' % (status, reason), log.INFO) else: - self.announce('Upload failed (%s): %s' % (status, reason), - log.ERROR) + msg = 'Upload failed (%s): %s' % (status, reason) + self.announce(msg, log.ERROR) + 
raise DistutilsError(msg) if self.show_response: text = self._read_pypi_response(result) msg = '\n'.join(('-' * 75, text, '-' * 75)) diff --git a/Darwin/lib/python3.4/distutils/command/wininst-10.0-amd64.exe b/Darwin/lib/python3.5/distutils/command/wininst-10.0-amd64.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-10.0-amd64.exe rename to Darwin/lib/python3.5/distutils/command/wininst-10.0-amd64.exe diff --git a/Darwin/lib/python3.4/distutils/command/wininst-10.0.exe b/Darwin/lib/python3.5/distutils/command/wininst-10.0.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-10.0.exe rename to Darwin/lib/python3.5/distutils/command/wininst-10.0.exe diff --git a/Darwin/lib/python3.5/distutils/command/wininst-14.0-amd64.exe b/Darwin/lib/python3.5/distutils/command/wininst-14.0-amd64.exe new file mode 100644 index 0000000..7a5e78d Binary files /dev/null and b/Darwin/lib/python3.5/distutils/command/wininst-14.0-amd64.exe differ diff --git a/Darwin/lib/python3.5/distutils/command/wininst-14.0.exe b/Darwin/lib/python3.5/distutils/command/wininst-14.0.exe new file mode 100644 index 0000000..cc43296 Binary files /dev/null and b/Darwin/lib/python3.5/distutils/command/wininst-14.0.exe differ diff --git a/Darwin/lib/python3.4/distutils/command/wininst-6.0.exe b/Darwin/lib/python3.5/distutils/command/wininst-6.0.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-6.0.exe rename to Darwin/lib/python3.5/distutils/command/wininst-6.0.exe diff --git a/Darwin/lib/python3.4/distutils/command/wininst-7.1.exe b/Darwin/lib/python3.5/distutils/command/wininst-7.1.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-7.1.exe rename to Darwin/lib/python3.5/distutils/command/wininst-7.1.exe diff --git a/Darwin/lib/python3.4/distutils/command/wininst-8.0.exe b/Darwin/lib/python3.5/distutils/command/wininst-8.0.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-8.0.exe rename to Darwin/lib/python3.5/distutils/command/wininst-8.0.exe diff --git a/Darwin/lib/python3.4/distutils/command/wininst-9.0-amd64.exe b/Darwin/lib/python3.5/distutils/command/wininst-9.0-amd64.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-9.0-amd64.exe rename to Darwin/lib/python3.5/distutils/command/wininst-9.0-amd64.exe diff --git a/Darwin/lib/python3.4/distutils/command/wininst-9.0.exe b/Darwin/lib/python3.5/distutils/command/wininst-9.0.exe similarity index 100% rename from Darwin/lib/python3.4/distutils/command/wininst-9.0.exe rename to Darwin/lib/python3.5/distutils/command/wininst-9.0.exe diff --git a/Darwin/lib/python3.4/distutils/config.py b/Darwin/lib/python3.5/distutils/config.py similarity index 100% rename from Darwin/lib/python3.4/distutils/config.py rename to Darwin/lib/python3.5/distutils/config.py diff --git a/Darwin/lib/python3.4/distutils/core.py b/Darwin/lib/python3.5/distutils/core.py similarity index 99% rename from Darwin/lib/python3.4/distutils/core.py rename to Darwin/lib/python3.5/distutils/core.py index 2bfe66a..f05b34b 100644 --- a/Darwin/lib/python3.4/distutils/core.py +++ b/Darwin/lib/python3.5/distutils/core.py @@ -221,8 +221,6 @@ def run_setup (script_name, script_args=None, stop_after="run"): # Hmm, should we do something if exiting with a non-zero code # (ie. error)? 
pass - except: - raise if _setup_distribution is None: raise RuntimeError(("'distutils.core.setup()' was never called -- " diff --git a/Darwin/lib/python3.4/distutils/cygwinccompiler.py b/Darwin/lib/python3.5/distutils/cygwinccompiler.py similarity index 100% rename from Darwin/lib/python3.4/distutils/cygwinccompiler.py rename to Darwin/lib/python3.5/distutils/cygwinccompiler.py diff --git a/Darwin/lib/python3.4/distutils/debug.py b/Darwin/lib/python3.5/distutils/debug.py similarity index 100% rename from Darwin/lib/python3.4/distutils/debug.py rename to Darwin/lib/python3.5/distutils/debug.py diff --git a/Darwin/lib/python3.4/distutils/dep_util.py b/Darwin/lib/python3.5/distutils/dep_util.py similarity index 100% rename from Darwin/lib/python3.4/distutils/dep_util.py rename to Darwin/lib/python3.5/distutils/dep_util.py diff --git a/Darwin/lib/python3.4/distutils/dir_util.py b/Darwin/lib/python3.5/distutils/dir_util.py similarity index 97% rename from Darwin/lib/python3.4/distutils/dir_util.py rename to Darwin/lib/python3.5/distutils/dir_util.py index 9879b0d..d5cd8e3 100644 --- a/Darwin/lib/python3.4/distutils/dir_util.py +++ b/Darwin/lib/python3.5/distutils/dir_util.py @@ -81,7 +81,7 @@ def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): """Create all the empty directories under 'base_dir' needed to put 'files' there. - 'base_dir' is just the a name of a directory which doesn't necessarily + 'base_dir' is just the name of a directory which doesn't necessarily exist yet; 'files' is a list of filenames to be interpreted relative to 'base_dir'. 'base_dir' + the directory portion of every file in 'files' will be created if it doesn't already exist. 'mode', 'verbose' and @@ -125,12 +125,11 @@ def copy_tree(src, dst, preserve_mode=1, preserve_times=1, try: names = os.listdir(src) except OSError as e: - (errno, errstr) = e if dry_run: names = [] else: raise DistutilsFileError( - "error listing files in '%s': %s" % (src, errstr)) + "error listing files in '%s': %s" % (src, e.strerror)) if not dry_run: mkpath(dst, verbose=verbose) diff --git a/Darwin/lib/python3.4/distutils/dist.py b/Darwin/lib/python3.5/distutils/dist.py similarity index 96% rename from Darwin/lib/python3.4/distutils/dist.py rename to Darwin/lib/python3.5/distutils/dist.py index 7eb04bc..ffb33ff 100644 --- a/Darwin/lib/python3.4/distutils/dist.py +++ b/Darwin/lib/python3.5/distutils/dist.py @@ -4,7 +4,9 @@ Provides the Distribution class, which represents the module distribution being built/installed/distributed. """ -import sys, os, re +import sys +import os +import re from email import message_from_file try: @@ -22,7 +24,7 @@ from distutils.debug import DEBUG # the same as a Python NAME -- I don't allow leading underscores. The fact # that they're very similar is no coincidence; the default naming scheme is # to look for a Python module named after the command. -command_re = re.compile (r'^[a-zA-Z]([a-zA-Z0-9_]*)$') +command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') class Distribution: @@ -39,7 +41,6 @@ class Distribution: See the code for 'setup()', in core.py, for details. """ - # 'global_options' describes the command-line options that may be # supplied to the setup script prior to any actual commands. # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of @@ -48,12 +49,13 @@ class Distribution: # don't want to pollute the commands with too many options that they # have minimal control over. # The fourth entry for verbose means that it can be repeated. 
- global_options = [('verbose', 'v', "run verbosely (default)", 1), - ('quiet', 'q', "run quietly (turns verbosity off)"), - ('dry-run', 'n', "don't actually do anything"), - ('help', 'h', "show detailed help message"), - ('no-user-cfg', None, - 'ignore pydistutils.cfg in your home directory'), + global_options = [ + ('verbose', 'v', "run verbosely (default)", 1), + ('quiet', 'q', "run quietly (turns verbosity off)"), + ('dry-run', 'n', "don't actually do anything"), + ('help', 'h', "show detailed help message"), + ('no-user-cfg', None, + 'ignore pydistutils.cfg in your home directory'), ] # 'common_usage' is a short (2-3 line) string describing the common @@ -115,10 +117,9 @@ Common commands: (see '--help-commands' for more) # negative options are options that exclude other options negative_opt = {'quiet': 'verbose'} - # -- Creation/initialization methods ------------------------------- - def __init__ (self, attrs=None): + def __init__(self, attrs=None): """Construct a new Distribution instance: initialize all the attributes of a Distribution, and then use 'attrs' (a dictionary mapping attribute names to values) to assign some of those @@ -532,15 +533,15 @@ Common commands: (see '--help-commands' for more) # to be sure that the basic "command" interface is implemented. if not issubclass(cmd_class, Command): raise DistutilsClassError( - "command class %s must subclass Command" % cmd_class) + "command class %s must subclass Command" % cmd_class) # Also make sure that the command object provides a list of its # known options. if not (hasattr(cmd_class, 'user_options') and isinstance(cmd_class.user_options, list)): - raise DistutilsClassError(("command class %s must provide " + - "'user_options' attribute (a list of tuples)") % \ - cmd_class) + msg = ("command class %s must provide " + "'user_options' attribute (a list of tuples)") + raise DistutilsClassError(msg % cmd_class) # If the command class has a list of negative alias options, # merge it in with the global negative aliases. @@ -552,12 +553,11 @@ Common commands: (see '--help-commands' for more) # Check for help_options in command class. They have a different # format (tuple of four) so we need to preprocess them here. if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): + isinstance(cmd_class.help_options, list)): help_options = fix_help_options(cmd_class.help_options) else: help_options = [] - # All commands support the global options too, just by adding # in 'global_options'. 
parser.set_option_table(self.global_options + @@ -570,7 +570,7 @@ Common commands: (see '--help-commands' for more) return if (hasattr(cmd_class, 'help_options') and - isinstance(cmd_class.help_options, list)): + isinstance(cmd_class.help_options, list)): help_option_found=0 for (help_option, short, desc, func) in cmd_class.help_options: if hasattr(opts, parser.get_attr_name(help_option)): @@ -647,7 +647,7 @@ Common commands: (see '--help-commands' for more) else: klass = self.get_command_class(command) if (hasattr(klass, 'help_options') and - isinstance(klass.help_options, list)): + isinstance(klass.help_options, list)): parser.set_option_table(klass.user_options + fix_help_options(klass.help_options)) else: @@ -814,7 +814,7 @@ Common commands: (see '--help-commands' for more) klass_name = command try: - __import__ (module_name) + __import__(module_name) module = sys.modules[module_name] except ImportError: continue @@ -823,8 +823,8 @@ Common commands: (see '--help-commands' for more) klass = getattr(module, klass_name) except AttributeError: raise DistutilsModuleError( - "invalid command '%s' (no class '%s' in module '%s')" - % (command, klass_name, module_name)) + "invalid command '%s' (no class '%s' in module '%s')" + % (command, klass_name, module_name)) self.cmdclass[command] = klass return klass @@ -840,7 +840,7 @@ Common commands: (see '--help-commands' for more) cmd_obj = self.command_obj.get(command) if not cmd_obj and create: if DEBUG: - self.announce("Distribution.get_command_obj(): " \ + self.announce("Distribution.get_command_obj(): " "creating '%s' command object" % command) klass = self.get_command_class(command) @@ -897,8 +897,8 @@ Common commands: (see '--help-commands' for more) setattr(command_obj, option, value) else: raise DistutilsOptionError( - "error in %s: command '%s' has no such option '%s'" - % (source, command_name, option)) + "error in %s: command '%s' has no such option '%s'" + % (source, command_name, option)) except ValueError as msg: raise DistutilsOptionError(msg) @@ -974,7 +974,6 @@ Common commands: (see '--help-commands' for more) cmd_obj.run() self.have_run[command] = 1 - # -- Distribution query methods ------------------------------------ def has_pure_modules(self): @@ -1112,17 +1111,17 @@ class DistributionMetadata: """ version = '1.0' if (self.provides or self.requires or self.obsoletes or - self.classifiers or self.download_url): + self.classifiers or self.download_url): version = '1.1' file.write('Metadata-Version: %s\n' % version) - file.write('Name: %s\n' % self.get_name() ) - file.write('Version: %s\n' % self.get_version() ) - file.write('Summary: %s\n' % self.get_description() ) - file.write('Home-page: %s\n' % self.get_url() ) - file.write('Author: %s\n' % self.get_contact() ) - file.write('Author-email: %s\n' % self.get_contact_email() ) - file.write('License: %s\n' % self.get_license() ) + file.write('Name: %s\n' % self.get_name()) + file.write('Version: %s\n' % self.get_version()) + file.write('Summary: %s\n' % self.get_description()) + file.write('Home-page: %s\n' % self.get_url()) + file.write('Author: %s\n' % self.get_contact()) + file.write('Author-email: %s\n' % self.get_contact_email()) + file.write('License: %s\n' % self.get_license()) if self.download_url: file.write('Download-URL: %s\n' % self.download_url) @@ -1131,7 +1130,7 @@ class DistributionMetadata: keywords = ','.join(self.get_keywords()) if keywords: - file.write('Keywords: %s\n' % keywords ) + file.write('Keywords: %s\n' % keywords) self._write_list(file, 'Platform', 
self.get_platforms()) self._write_list(file, 'Classifier', self.get_classifiers()) diff --git a/Darwin/lib/python3.5/distutils/distutils.cfg b/Darwin/lib/python3.5/distutils/distutils.cfg new file mode 100644 index 0000000..494f3f3 --- /dev/null +++ b/Darwin/lib/python3.5/distutils/distutils.cfg @@ -0,0 +1,6 @@ +[install] +prefix=/Users/build/.local + +[build_ext] +include_dirs=/Users/build/.local/include:/Users/build/.local/opt/openssl/include:/Users/build/.local/opt/sqlite/include +library_dirs=/Users/build/.local/lib:/Users/build/.local/opt/openssl/lib:/Users/build/.local/opt/sqlite/lib diff --git a/Darwin/lib/python3.4/distutils/errors.py b/Darwin/lib/python3.5/distutils/errors.py similarity index 100% rename from Darwin/lib/python3.4/distutils/errors.py rename to Darwin/lib/python3.5/distutils/errors.py diff --git a/Darwin/lib/python3.4/distutils/extension.py b/Darwin/lib/python3.5/distutils/extension.py similarity index 98% rename from Darwin/lib/python3.4/distutils/extension.py rename to Darwin/lib/python3.5/distutils/extension.py index a93655a..7efbb74 100644 --- a/Darwin/lib/python3.4/distutils/extension.py +++ b/Darwin/lib/python3.5/distutils/extension.py @@ -131,6 +131,14 @@ class Extension: msg = "Unknown Extension options: %s" % options warnings.warn(msg) + def __repr__(self): + return '<%s.%s(%r) at %#x>' % ( + self.__class__.__module__, + self.__class__.__qualname__, + self.name, + id(self)) + + def read_setup_file(filename): """Reads a Setup file and returns Extension instances.""" from distutils.sysconfig import (parse_makefile, expand_makefile_vars, diff --git a/Darwin/lib/python3.4/distutils/fancy_getopt.py b/Darwin/lib/python3.5/distutils/fancy_getopt.py similarity index 100% rename from Darwin/lib/python3.4/distutils/fancy_getopt.py rename to Darwin/lib/python3.5/distutils/fancy_getopt.py diff --git a/Darwin/lib/python3.4/distutils/file_util.py b/Darwin/lib/python3.5/distutils/file_util.py similarity index 89% rename from Darwin/lib/python3.4/distutils/file_util.py rename to Darwin/lib/python3.5/distutils/file_util.py index f6ed290..b3fee35 100644 --- a/Darwin/lib/python3.4/distutils/file_util.py +++ b/Darwin/lib/python3.5/distutils/file_util.py @@ -80,7 +80,8 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, (os.symlink) instead of copying: set it to "hard" or "sym"; if it is None (the default), files are copied. Don't set 'link' on systems that don't support it: 'copy_file()' doesn't check if hard or symbolic - linking is available. + linking is available. If hardlink fails, falls back to + _copy_file_contents(). Under Mac OS, uses the native file copy function in macostools; on other systems, uses '_copy_file_contents()' to copy file contents. @@ -132,24 +133,31 @@ def copy_file(src, dst, preserve_mode=1, preserve_times=1, update=0, # (Unix only, of course, but that's the caller's responsibility) elif link == 'hard': if not (os.path.exists(dst) and os.path.samefile(src, dst)): - os.link(src, dst) + try: + os.link(src, dst) + return (dst, 1) + except OSError: + # If hard linking fails, fall back on copying file + # (some special filesystems don't support hard linking + # even under Unix, see issue #8876). + pass elif link == 'sym': if not (os.path.exists(dst) and os.path.samefile(src, dst)): os.symlink(src, dst) + return (dst, 1) # Otherwise (non-Mac, not linking), copy the file contents and # (optionally) copy the times and mode. 
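
The link-then-fall-back idea from copy_file() above, shown in isolation; shutil.copyfile stands in for the private _copy_file_contents() helper.

    import os
    import shutil

    def link_or_copy(src, dst):
        try:
            os.link(src, dst)          # hard link where the filesystem allows it
        except OSError:
            shutil.copyfile(src, dst)  # otherwise copy the bytes (see issue #8876)
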
- else: - _copy_file_contents(src, dst) - if preserve_mode or preserve_times: - st = os.stat(src) + _copy_file_contents(src, dst) + if preserve_mode or preserve_times: + st = os.stat(src) - # According to David Ascher , utime() should be done - # before chmod() (at least under NT). - if preserve_times: - os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) - if preserve_mode: - os.chmod(dst, S_IMODE(st[ST_MODE])) + # According to David Ascher , utime() should be done + # before chmod() (at least under NT). + if preserve_times: + os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) + if preserve_mode: + os.chmod(dst, S_IMODE(st[ST_MODE])) return (dst, 1) @@ -194,7 +202,7 @@ def move_file (src, dst, try: os.rename(src, dst) except OSError as e: - (num, msg) = e + (num, msg) = e.args if num == errno.EXDEV: copy_it = True else: @@ -206,7 +214,7 @@ def move_file (src, dst, try: os.unlink(src) except OSError as e: - (num, msg) = e + (num, msg) = e.args try: os.unlink(dst) except OSError: diff --git a/Darwin/lib/python3.4/distutils/filelist.py b/Darwin/lib/python3.5/distutils/filelist.py similarity index 100% rename from Darwin/lib/python3.4/distutils/filelist.py rename to Darwin/lib/python3.5/distutils/filelist.py diff --git a/Darwin/lib/python3.4/distutils/log.py b/Darwin/lib/python3.5/distutils/log.py similarity index 100% rename from Darwin/lib/python3.4/distutils/log.py rename to Darwin/lib/python3.5/distutils/log.py diff --git a/Darwin/lib/python3.4/distutils/msvc9compiler.py b/Darwin/lib/python3.5/distutils/msvc9compiler.py similarity index 99% rename from Darwin/lib/python3.4/distutils/msvc9compiler.py rename to Darwin/lib/python3.5/distutils/msvc9compiler.py index 9688f20..da4b21d 100644 --- a/Darwin/lib/python3.4/distutils/msvc9compiler.py +++ b/Darwin/lib/python3.5/distutils/msvc9compiler.py @@ -179,6 +179,9 @@ def get_build_version(): i = i + len(prefix) s, rest = sys.version[i:].split(" ", 1) majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 minorVersion = int(s[2:3]) / 10.0 # I don't think paths are affected by minor version in version 6 if majorVersion == 6: @@ -413,7 +416,7 @@ class MSVCCompiler(CCompiler) : self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] if self.__version >= 7: self.ldflags_shared_debug = [ - '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG', '/pdb:None' + '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG' ] self.ldflags_static = [ '/nologo'] diff --git a/Darwin/lib/python3.4/distutils/msvccompiler.py b/Darwin/lib/python3.5/distutils/msvccompiler.py similarity index 99% rename from Darwin/lib/python3.4/distutils/msvccompiler.py rename to Darwin/lib/python3.5/distutils/msvccompiler.py index 8116656..1048cd4 100644 --- a/Darwin/lib/python3.4/distutils/msvccompiler.py +++ b/Darwin/lib/python3.5/distutils/msvccompiler.py @@ -157,6 +157,9 @@ def get_build_version(): i = i + len(prefix) s, rest = sys.version[i:].split(" ", 1) majorVersion = int(s[:-2]) - 6 + if majorVersion >= 13: + # v13 was skipped and should be v14 + majorVersion += 1 minorVersion = int(s[2:3]) / 10.0 # I don't think paths are affected by minor version in version 6 if majorVersion == 6: diff --git a/Darwin/lib/python3.4/distutils/spawn.py b/Darwin/lib/python3.5/distutils/spawn.py similarity index 98% rename from Darwin/lib/python3.4/distutils/spawn.py rename to Darwin/lib/python3.5/distutils/spawn.py index 22e87e8..5dd415a 100644 --- a/Darwin/lib/python3.4/distutils/spawn.py +++ b/Darwin/lib/python3.5/distutils/spawn.py @@ -137,9 +137,6 @@ def 
_spawn_posix(cmd, search_path=1, verbose=0, dry_run=0): try: pid, status = os.waitpid(pid, 0) except OSError as exc: - import errno - if exc.errno == errno.EINTR: - continue if not DEBUG: cmd = executable raise DistutilsExecError( diff --git a/Darwin/lib/python3.4/distutils/sysconfig.py b/Darwin/lib/python3.5/distutils/sysconfig.py similarity index 90% rename from Darwin/lib/python3.4/distutils/sysconfig.py rename to Darwin/lib/python3.5/distutils/sysconfig.py index 75537db..573724d 100644 --- a/Darwin/lib/python3.4/distutils/sysconfig.py +++ b/Darwin/lib/python3.5/distutils/sysconfig.py @@ -9,6 +9,7 @@ Written by: Fred L. Drake, Jr. Email: """ +import _imp import os import re import sys @@ -22,23 +23,15 @@ BASE_PREFIX = os.path.normpath(sys.base_prefix) BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) # Path to the base directory of the project. On Windows the binary may -# live in project/PCBuild9. If we're dealing with an x64 Windows build, -# it'll live in project/PCbuild/amd64. +# live in project/PCBuild/win32 or project/PCBuild/amd64. # set for cross builds if "_PYTHON_PROJECT_BASE" in os.environ: project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"]) else: project_base = os.path.dirname(os.path.abspath(sys.executable)) -if os.name == "nt" and "pcbuild" in project_base[-8:].lower(): - project_base = os.path.abspath(os.path.join(project_base, os.path.pardir)) -# PC/VS7.1 -if os.name == "nt" and "\\pc\\v" in project_base[-10:].lower(): - project_base = os.path.abspath(os.path.join(project_base, os.path.pardir, - os.path.pardir)) -# PC/AMD64 -if os.name == "nt" and "\\pcbuild\\amd64" in project_base[-14:].lower(): - project_base = os.path.abspath(os.path.join(project_base, os.path.pardir, - os.path.pardir)) +if (os.name == 'nt' and + project_base.lower().endswith(('\\pcbuild\\win32', '\\pcbuild\\amd64'))): + project_base = os.path.dirname(os.path.dirname(project_base)) # python_build: (Boolean) if true, we're either building Python or # building an extension with an un-installed Python, so we use @@ -51,11 +44,9 @@ def _is_python_source_dir(d): return True return False _sys_home = getattr(sys, '_home', None) -if _sys_home and os.name == 'nt' and \ - _sys_home.lower().endswith(('pcbuild', 'pcbuild\\amd64')): - _sys_home = os.path.dirname(_sys_home) - if _sys_home.endswith('pcbuild'): # must be amd64 - _sys_home = os.path.dirname(_sys_home) +if (_sys_home and os.name == 'nt' and + _sys_home.lower().endswith(('\\pcbuild\\win32', '\\pcbuild\\amd64'))): + _sys_home = os.path.dirname(os.path.dirname(_sys_home)) def _python_build(): if _sys_home: return _is_python_source_dir(_sys_home) @@ -151,10 +142,7 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): if standard_lib: return os.path.join(prefix, "Lib") else: - if get_python_version() < "2.2": - return prefix - else: - return os.path.join(prefix, "Lib", "site-packages") + return os.path.join(prefix, "Lib", "site-packages") else: raise DistutilsPlatformError( "I don't know where Python installs its library " @@ -179,7 +167,8 @@ def customize_compiler(compiler): # version and build tools may not support the same set # of CPU architectures for universal builds. global _config_vars - if not _config_vars.get('CUSTOMIZED_OSX_COMPILER', ''): + # Use get_config_var() to ensure _config_vars is initialized. 
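Aside (illustrative, not part of the patch): customize_compiler() now goes through get_config_var() because that call populates the module-level _config_vars cache on first use, whereas reading _config_vars directly can see an uninitialized value. A tiny sketch; the variable queried is only an example, and the printed suffix depends on the build:

    from distutils import sysconfig

    # First call triggers get_config_vars(), which fills the cache.
    ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
    print(ext_suffix)   # e.g. '.cpython-35m-darwin.so'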
+ if not get_config_var('CUSTOMIZED_OSX_COMPILER'): import _osx_support _osx_support.customize_compiler(_config_vars) _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' @@ -243,12 +232,8 @@ def get_config_h_filename(): inc_dir = _sys_home or project_base else: inc_dir = get_python_inc(plat_specific=1) - if get_python_version() < '2.2': - config_h = 'config.h' - else: - # The name of the config.h file changed in 2.2 - config_h = 'pyconfig.h' - return os.path.join(inc_dir, config_h) + + return os.path.join(inc_dir, 'pyconfig.h') def get_makefile_filename(): @@ -460,17 +445,6 @@ def _init_posix(): if python_build: g['LDSHARED'] = g['BLDSHARED'] - elif get_python_version() < '2.1': - # The following two branches are for 1.5.2 compatibility. - if sys.platform == 'aix4': # what about AIX 3.x ? - # Linker script is in the config directory, not in Modules as the - # Makefile says. - python_lib = get_python_lib(standard_lib=1) - ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix') - python_exp = os.path.join(python_lib, 'config', 'python.exp') - - g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp) - global _config_vars _config_vars = g @@ -485,7 +459,7 @@ def _init_nt(): # XXX hmmm.. a normal install puts include files here g['INCLUDEPY'] = get_python_inc(plat_specific=0) - g['EXT_SUFFIX'] = '.pyd' + g['EXT_SUFFIX'] = _imp.extension_suffixes()[0] g['EXE'] = ".exe" g['VERSION'] = get_python_version().replace(".", "") g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable)) diff --git a/Darwin/lib/python3.4/distutils/tests/Setup.sample b/Darwin/lib/python3.5/distutils/tests/Setup.sample similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/Setup.sample rename to Darwin/lib/python3.5/distutils/tests/Setup.sample diff --git a/Darwin/lib/python3.4/distutils/tests/__init__.py b/Darwin/lib/python3.5/distutils/tests/__init__.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/__init__.py rename to Darwin/lib/python3.5/distutils/tests/__init__.py diff --git a/Darwin/lib/python3.4/distutils/tests/support.py b/Darwin/lib/python3.5/distutils/tests/support.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/support.py rename to Darwin/lib/python3.5/distutils/tests/support.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_archive_util.py b/Darwin/lib/python3.5/distutils/tests/test_archive_util.py similarity index 68% rename from Darwin/lib/python3.4/distutils/tests/test_archive_util.py rename to Darwin/lib/python3.5/distutils/tests/test_archive_util.py index 2d72af4..02fa1e2 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_archive_util.py +++ b/Darwin/lib/python3.5/distutils/tests/test_archive_util.py @@ -13,7 +13,7 @@ from distutils.archive_util import (check_archive_formats, make_tarball, ARCHIVE_FORMATS) from distutils.spawn import find_executable, spawn from distutils.tests import support -from test.support import check_warnings, run_unittest, patch +from test.support import check_warnings, run_unittest, patch, change_cwd try: import grp @@ -34,6 +34,16 @@ try: except ImportError: ZLIB_SUPPORT = False +try: + import bz2 +except ImportError: + bz2 = None + +try: + import lzma +except ImportError: + lzma = None + def can_fs_encode(filename): """ Return True if the filename can be saved in the file system. 
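Aside (illustrative, not part of the patch): the bz2/lzma guards added above let the new tarball tests skip cleanly when a codec is missing. A self-contained sketch of the same guard-plus-skipUnless pattern:

    import unittest

    try:
        import lzma
    except ImportError:
        lzma = None

    class XzSupportTest(unittest.TestCase):
        @unittest.skipUnless(lzma, 'Need lzma support to run')
        def test_round_trip(self):
            self.assertEqual(lzma.decompress(lzma.compress(b'data')), b'data')

    if __name__ == '__main__':
        unittest.main()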
@@ -52,19 +62,36 @@ class ArchiveUtilTestCase(support.TempdirManager, unittest.TestCase): @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') - def test_make_tarball(self): - self._make_tarball('archive') + def test_make_tarball(self, name='archive'): + # creating something to tar + tmpdir = self._create_files() + self._make_tarball(tmpdir, name, '.tar.gz') + # trying an uncompressed one + self._make_tarball(tmpdir, name, '.tar', compress=None) @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_make_tarball_gzip(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.gz', compress='gzip') + + @unittest.skipUnless(bz2, 'Need bz2 support to run') + def test_make_tarball_bzip2(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.bz2', compress='bzip2') + + @unittest.skipUnless(lzma, 'Need lzma support to run') + def test_make_tarball_xz(self): + tmpdir = self._create_files() + self._make_tarball(tmpdir, 'archive', '.tar.xz', compress='xz') + @unittest.skipUnless(can_fs_encode('årchiv'), 'File system cannot handle this filename') def test_make_tarball_latin1(self): """ Mirror test_make_tarball, except filename contains latin characters. """ - self._make_tarball('årchiv') # note this isn't a real word + self.test_make_tarball('årchiv') # note this isn't a real word - @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') @unittest.skipUnless(can_fs_encode('のアーカイブ'), 'File system cannot handle this filename') def test_make_tarball_extended(self): @@ -72,16 +99,9 @@ class ArchiveUtilTestCase(support.TempdirManager, Mirror test_make_tarball, except filename contains extended characters outside the latin charset. """ - self._make_tarball('のアーカイブ') # japanese for archive - - def _make_tarball(self, target_name): - # creating something to tar - tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') - os.mkdir(os.path.join(tmpdir, 'sub')) - self.write_file([tmpdir, 'sub', 'file3'], 'xxx') + self.test_make_tarball('のアーカイブ') # japanese for archive + def _make_tarball(self, tmpdir, target_name, suffix, **kwargs): tmpdir2 = self.mkdtemp() unittest.skipUnless(splitdrive(tmpdir)[0] == splitdrive(tmpdir2)[0], "source and target should be on same drive") base_name = os.path.join(tmpdir2, target_name) # working with relative paths to avoid tar warnings - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(splitdrive(base_name)[1], '.') - finally: - os.chdir(old_dir) + with change_cwd(tmpdir): + make_tarball(splitdrive(base_name)[1], 'dist', **kwargs) # check if the compressed tarball was created - tarball = base_name + '.tar.gz' - self.assertTrue(os.path.exists(tarball)) - - # trying an uncompressed one - base_name = os.path.join(tmpdir2, target_name) - old_dir = os.getcwd() - os.chdir(tmpdir) - try: - make_tarball(splitdrive(base_name)[1], '.', compress=None) - finally: - os.chdir(old_dir) - tarball = base_name + '.tar' + tarball = base_name + suffix self.assertTrue(os.path.exists(tarball)) + self.assertEqual(self._tarinfo(tarball), self._created_files) def _tarinfo(self, path): tar = tarfile.open(path) @@ -120,6 +126,9 @@ class ArchiveUtilTestCase(support.TempdirManager, finally: tar.close() + _created_files = ('dist', 'dist/file1', 'dist/file2', + 'dist/sub', 'dist/sub/file3', 'dist/sub2') + def _create_files(self): # creating
something to tar tmpdir = self.mkdtemp() @@ -130,15 +139,15 @@ class ArchiveUtilTestCase(support.TempdirManager, os.mkdir(os.path.join(dist, 'sub')) self.write_file([dist, 'sub', 'file3'], 'xxx') os.mkdir(os.path.join(dist, 'sub2')) - tmpdir2 = self.mkdtemp() - base_name = os.path.join(tmpdir2, 'archive') - return tmpdir, tmpdir2, base_name + return tmpdir @unittest.skipUnless(find_executable('tar') and find_executable('gzip') and ZLIB_SUPPORT, 'Need the tar, gzip and zlib command to run') def test_tarfile_vs_tar(self): - tmpdir, tmpdir2, base_name = self._create_files() + tmpdir = self._create_files() + tmpdir2 = self.mkdtemp() + base_name = os.path.join(tmpdir2, 'archive') old_dir = os.getcwd() os.chdir(tmpdir) try: @@ -164,7 +173,8 @@ class ArchiveUtilTestCase(support.TempdirManager, self.assertTrue(os.path.exists(tarball2)) # let's compare both tarballs - self.assertEqual(self._tarinfo(tarball), self._tarinfo(tarball2)) + self.assertEqual(self._tarinfo(tarball), self._created_files) + self.assertEqual(self._tarinfo(tarball2), self._created_files) # trying an uncompressed one base_name = os.path.join(tmpdir2, 'archive') @@ -191,7 +201,8 @@ class ArchiveUtilTestCase(support.TempdirManager, @unittest.skipUnless(find_executable('compress'), 'The compress program is required') def test_compress_deprecated(self): - tmpdir, tmpdir2, base_name = self._create_files() + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') # using compress and testing the PendingDeprecationWarning old_dir = os.getcwd() @@ -224,17 +235,17 @@ class ArchiveUtilTestCase(support.TempdirManager, 'Need zip and zlib support to run') def test_make_zipfile(self): # creating something to tar - tmpdir = self.mkdtemp() - self.write_file([tmpdir, 'file1'], 'xxx') - self.write_file([tmpdir, 'file2'], 'xxx') - - tmpdir2 = self.mkdtemp() - base_name = os.path.join(tmpdir2, 'archive') - make_zipfile(base_name, tmpdir) + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + with change_cwd(tmpdir): + make_zipfile(base_name, 'dist') # check if the compressed tarball was created tarball = base_name + '.zip' self.assertTrue(os.path.exists(tarball)) + with zipfile.ZipFile(tarball) as zf: + self.assertEqual(sorted(zf.namelist()), + ['dist/file1', 'dist/file2', 'dist/sub/file3']) @unittest.skipUnless(ZIP_SUPPORT, 'Need zip support to run') def test_make_zipfile_no_zlib(self): @@ -250,18 +261,24 @@ class ArchiveUtilTestCase(support.TempdirManager, patch(self, archive_util.zipfile, 'ZipFile', fake_zipfile) # create something to tar and compress - tmpdir, tmpdir2, base_name = self._create_files() - make_zipfile(base_name, tmpdir) + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') + with change_cwd(tmpdir): + make_zipfile(base_name, 'dist') tarball = base_name + '.zip' self.assertEqual(called, [((tarball, "w"), {'compression': zipfile.ZIP_STORED})]) self.assertTrue(os.path.exists(tarball)) + with zipfile.ZipFile(tarball) as zf: + self.assertEqual(sorted(zf.namelist()), + ['dist/file1', 'dist/file2', 'dist/sub/file3']) def test_check_archive_formats(self): self.assertEqual(check_archive_formats(['gztar', 'xxx', 'zip']), 'xxx') - self.assertEqual(check_archive_formats(['gztar', 'zip']), None) + self.assertIsNone(check_archive_formats(['gztar', 'bztar', 'xztar', + 'ztar', 'tar', 'zip'])) def test_make_archive(self): tmpdir = self.mkdtemp() @@ -282,6 +299,41 @@ class ArchiveUtilTestCase(support.TempdirManager, finally: del ARCHIVE_FORMATS['xxx'] + 
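Aside (illustrative, not part of the patch): the test_make_archive_* cases that follow exercise the 'tar', 'gztar', 'bztar' and the new 'xztar' formats. A usage sketch with hypothetical paths ('xztar' requires the lzma module):

    from distutils.archive_util import make_archive

    archive = make_archive('/tmp/demo-archive', 'xztar',
                           root_dir='/tmp', base_dir='demo')
    print(archive)   # '/tmp/demo-archive.tar.xz'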
def test_make_archive_tar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'tar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(ZLIB_SUPPORT, 'Need zlib support to run') + def test_make_archive_gztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'gztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar.gz') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(bz2, 'Need bz2 support to run') + def test_make_archive_bztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'bztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar.bz2') + self.assertEqual(self._tarinfo(res), self._created_files) + + @unittest.skipUnless(lzma, 'Need xz support to run') + def test_make_archive_xztar(self): + base_dir = self._create_files() + base_name = os.path.join(self.mkdtemp() , 'archive') + res = make_archive(base_name, 'xztar', base_dir, 'dist') + self.assertTrue(os.path.exists(res)) + self.assertEqual(os.path.basename(res), 'archive.tar.xz') + self.assertEqual(self._tarinfo(res), self._created_files) + def test_make_archive_owner_group(self): # testing make_archive with owner and group, with various combinations # this works even if there's not gid/uid support @@ -291,7 +343,8 @@ class ArchiveUtilTestCase(support.TempdirManager, else: group = owner = 'root' - base_dir, root_dir, base_name = self._create_files() + base_dir = self._create_files() + root_dir = self.mkdtemp() base_name = os.path.join(self.mkdtemp() , 'archive') res = make_archive(base_name, 'zip', root_dir, base_dir, owner=owner, group=group) @@ -311,7 +364,8 @@ class ArchiveUtilTestCase(support.TempdirManager, @unittest.skipUnless(ZLIB_SUPPORT, "Requires zlib") @unittest.skipUnless(UID_GID_SUPPORT, "Requires grp and pwd support") def test_tarfile_root_owner(self): - tmpdir, tmpdir2, base_name = self._create_files() + tmpdir = self._create_files() + base_name = os.path.join(self.mkdtemp(), 'archive') old_dir = os.getcwd() os.chdir(tmpdir) group = grp.getgrgid(0)[0] diff --git a/Darwin/lib/python3.4/distutils/tests/test_bdist.py b/Darwin/lib/python3.5/distutils/tests/test_bdist.py similarity index 96% rename from Darwin/lib/python3.4/distutils/tests/test_bdist.py rename to Darwin/lib/python3.5/distutils/tests/test_bdist.py index 503a6e8..f762f5d 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_bdist.py +++ b/Darwin/lib/python3.5/distutils/tests/test_bdist.py @@ -21,7 +21,7 @@ class BuildTestCase(support.TempdirManager, # what formats does bdist offer? 
formats = ['bztar', 'gztar', 'msi', 'rpm', 'tar', - 'wininst', 'zip', 'ztar'] + 'wininst', 'xztar', 'zip', 'ztar'] found = sorted(cmd.format_command) self.assertEqual(found, formats) diff --git a/Darwin/lib/python3.4/distutils/tests/test_bdist_dumb.py b/Darwin/lib/python3.5/distutils/tests/test_bdist_dumb.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_bdist_dumb.py rename to Darwin/lib/python3.5/distutils/tests/test_bdist_dumb.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_bdist_msi.py b/Darwin/lib/python3.5/distutils/tests/test_bdist_msi.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_bdist_msi.py rename to Darwin/lib/python3.5/distutils/tests/test_bdist_msi.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_bdist_rpm.py b/Darwin/lib/python3.5/distutils/tests/test_bdist_rpm.py similarity index 96% rename from Darwin/lib/python3.4/distutils/tests/test_bdist_rpm.py rename to Darwin/lib/python3.5/distutils/tests/test_bdist_rpm.py index bcbb563..25c14ab 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_bdist_rpm.py +++ b/Darwin/lib/python3.5/distutils/tests/test_bdist_rpm.py @@ -24,6 +24,7 @@ setup(name='foo', version='0.1', py_modules=['foo'], """ class BuildRpmTestCase(support.TempdirManager, + support.EnvironGuard, support.LoggingSilencer, unittest.TestCase): @@ -54,6 +55,7 @@ class BuildRpmTestCase(support.TempdirManager, def test_quiet(self): # let's create a package tmp_dir = self.mkdtemp() + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) @@ -96,6 +98,7 @@ class BuildRpmTestCase(support.TempdirManager, def test_no_optimize_flag(self): # let's create a package that brakes bdist_rpm tmp_dir = self.mkdtemp() + os.environ['HOME'] = tmp_dir # to confine dir '.rpmdb' creation pkg_dir = os.path.join(tmp_dir, 'foo') os.mkdir(pkg_dir) self.write_file((pkg_dir, 'setup.py'), SETUP_PY) diff --git a/Darwin/lib/python3.4/distutils/tests/test_bdist_wininst.py b/Darwin/lib/python3.5/distutils/tests/test_bdist_wininst.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_bdist_wininst.py rename to Darwin/lib/python3.5/distutils/tests/test_bdist_wininst.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_build.py b/Darwin/lib/python3.5/distutils/tests/test_build.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_build.py rename to Darwin/lib/python3.5/distutils/tests/test_build.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_build_clib.py b/Darwin/lib/python3.5/distutils/tests/test_build_clib.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_build_clib.py rename to Darwin/lib/python3.5/distutils/tests/test_build_clib.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_build_ext.py b/Darwin/lib/python3.5/distutils/tests/test_build_ext.py similarity index 89% rename from Darwin/lib/python3.4/distutils/tests/test_build_ext.py rename to Darwin/lib/python3.5/distutils/tests/test_build_ext.py index 9853abd..366ffbe 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_build_ext.py +++ b/Darwin/lib/python3.5/distutils/tests/test_build_ext.py @@ -31,12 +31,14 @@ class BuildExtTestCase(TempdirManager, self.tmp_dir = self.mkdtemp() self.sys_path = sys.path, sys.path[:] sys.path.append(self.tmp_dir) - if sys.version > "2.6": - import site - self.old_user_base = site.USER_BASE - site.USER_BASE = 
self.mkdtemp() - from distutils.command import build_ext - build_ext.USER_BASE = site.USER_BASE + import site + self.old_user_base = site.USER_BASE + site.USER_BASE = self.mkdtemp() + from distutils.command import build_ext + build_ext.USER_BASE = site.USER_BASE + + def build_ext(self, *args, **kwargs): + return build_ext(*args, **kwargs) def test_build_ext(self): global ALREADY_TESTED @@ -45,7 +47,7 @@ class BuildExtTestCase(TempdirManager, xx_ext = Extension('xx', [xx_c]) dist = Distribution({'name': 'xx', 'ext_modules': [xx_ext]}) dist.package_dir = self.tmp_dir - cmd = build_ext(dist) + cmd = self.build_ext(dist) fixup_build_ext(cmd) cmd.build_lib = self.tmp_dir cmd.build_temp = self.tmp_dir @@ -84,16 +86,15 @@ class BuildExtTestCase(TempdirManager, support.unload('xx') sys.path = self.sys_path[0] sys.path[:] = self.sys_path[1] - if sys.version > "2.6": - import site - site.USER_BASE = self.old_user_base - from distutils.command import build_ext - build_ext.USER_BASE = self.old_user_base + import site + site.USER_BASE = self.old_user_base + from distutils.command import build_ext + build_ext.USER_BASE = self.old_user_base super(BuildExtTestCase, self).tearDown() def test_solaris_enable_shared(self): dist = Distribution({'name': 'xx'}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) old = sys.platform sys.platform = 'sunos' # fooling finalize_options @@ -115,7 +116,7 @@ class BuildExtTestCase(TempdirManager, def test_user_site(self): import site dist = Distribution({'name': 'xx'}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) # making sure the user option is there options = [name for name, short, lable in @@ -146,14 +147,14 @@ class BuildExtTestCase(TempdirManager, # with the optional argument. modules = [Extension('foo', ['xxx'], optional=False)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.ensure_finalized() self.assertRaises((UnknownFileError, CompileError), cmd.run) # should raise an error modules = [Extension('foo', ['xxx'], optional=True)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.ensure_finalized() cmd.run() # should pass @@ -162,7 +163,7 @@ class BuildExtTestCase(TempdirManager, # etc.) are in the include search path. 
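Aside (illustrative, not part of the patch): the test that follows checks that a freshly finalized build_ext command picks up the interpreter's include directory. Roughly the same check as a standalone sketch (the distribution name is arbitrary):

    from distutils.core import Distribution
    from distutils.command.build_ext import build_ext
    from distutils import sysconfig

    cmd = build_ext(Distribution({'name': 'demo'}))
    cmd.finalize_options()
    assert sysconfig.get_python_inc() in cmd.include_dirs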
modules = [Extension('foo', ['xxx'], optional=False)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.finalize_options() from distutils import sysconfig @@ -174,14 +175,14 @@ class BuildExtTestCase(TempdirManager, # make sure cmd.libraries is turned into a list # if it's a string - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.libraries = 'my_lib, other_lib lastlib' cmd.finalize_options() self.assertEqual(cmd.libraries, ['my_lib', 'other_lib', 'lastlib']) # make sure cmd.library_dirs is turned into a list # if it's a string - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.library_dirs = 'my_lib_dir%sother_lib_dir' % os.pathsep cmd.finalize_options() self.assertIn('my_lib_dir', cmd.library_dirs) @@ -189,7 +190,7 @@ class BuildExtTestCase(TempdirManager, # make sure rpath is turned into a list # if it's a string - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.rpath = 'one%stwo' % os.pathsep cmd.finalize_options() self.assertEqual(cmd.rpath, ['one', 'two']) @@ -198,32 +199,32 @@ class BuildExtTestCase(TempdirManager, # make sure define is turned into 2-tuples # strings if they are ','-separated strings - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.define = 'one,two' cmd.finalize_options() self.assertEqual(cmd.define, [('one', '1'), ('two', '1')]) # make sure undef is turned into a list of # strings if they are ','-separated strings - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.undef = 'one,two' cmd.finalize_options() self.assertEqual(cmd.undef, ['one', 'two']) # make sure swig_opts is turned into a list - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.swig_opts = None cmd.finalize_options() self.assertEqual(cmd.swig_opts, []) - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.swig_opts = '1 2' cmd.finalize_options() self.assertEqual(cmd.swig_opts, ['1', '2']) def test_check_extensions_list(self): dist = Distribution() - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.finalize_options() #'extensions' option must be a list of Extension instances @@ -272,7 +273,7 @@ class BuildExtTestCase(TempdirManager, def test_get_source_files(self): modules = [Extension('foo', ['xxx'], optional=False)] dist = Distribution({'name': 'xx', 'ext_modules': modules}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.ensure_finalized() self.assertEqual(cmd.get_source_files(), ['xxx']) @@ -281,7 +282,7 @@ class BuildExtTestCase(TempdirManager, # should not be overriden by a compiler instance # when the command is run dist = Distribution() - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.compiler = 'unix' cmd.ensure_finalized() cmd.run() @@ -294,7 +295,7 @@ class BuildExtTestCase(TempdirManager, ext = Extension('foo', [c_file], optional=False) dist = Distribution({'name': 'xx', 'ext_modules': [ext]}) - cmd = build_ext(dist) + cmd = self.build_ext(dist) fixup_build_ext(cmd) cmd.ensure_finalized() self.assertEqual(len(cmd.get_outputs()), 1) @@ -357,7 +358,7 @@ class BuildExtTestCase(TempdirManager, #etree_ext = Extension('lxml.etree', [etree_c]) #dist = Distribution({'name': 'lxml', 'ext_modules': [etree_ext]}) dist = Distribution() - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.inplace = 1 cmd.distribution.package_dir = {'': 'src'} cmd.distribution.packages = ['lxml', 'lxml.html'] @@ -444,8 +445,16 @@ class BuildExtTestCase(TempdirManager, # get the deployment target that the interpreter was built with target = 
sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - target = tuple(map(int, target.split('.'))) - target = '%02d%01d0' % target + target = tuple(map(int, target.split('.')[0:2])) + # format the target value as defined in the Apple + # Availability Macros. We can't use the macro names since + # at least one value we test with will not exist yet. + if target[1] < 10: + # for 10.1 through 10.9.x -> "10n0" + target = '%02d%01d0' % target + else: + # for 10.10 and beyond -> "10nn00" + target = '%02d%02d00' % target deptarget_ext = Extension( 'deptarget', [deptarget_c], @@ -456,7 +465,7 @@ class BuildExtTestCase(TempdirManager, 'ext_modules': [deptarget_ext] }) dist.package_dir = self.tmp_dir - cmd = build_ext(dist) + cmd = self.build_ext(dist) cmd.build_lib = self.tmp_dir cmd.build_temp = self.tmp_dir @@ -475,8 +484,19 @@ class BuildExtTestCase(TempdirManager, self.fail("Wrong deployment target during compilation") +class ParallelBuildExtTestCase(BuildExtTestCase): + + def build_ext(self, *args, **kwargs): + build_ext = super().build_ext(*args, **kwargs) + build_ext.parallel = True + return build_ext + + def test_suite(): - return unittest.makeSuite(BuildExtTestCase) + suite = unittest.TestSuite() + suite.addTest(unittest.makeSuite(BuildExtTestCase)) + suite.addTest(unittest.makeSuite(ParallelBuildExtTestCase)) + return suite if __name__ == '__main__': - support.run_unittest(test_suite()) + support.run_unittest(__name__) diff --git a/Darwin/lib/python3.4/distutils/tests/test_build_py.py b/Darwin/lib/python3.5/distutils/tests/test_build_py.py similarity index 97% rename from Darwin/lib/python3.4/distutils/tests/test_build_py.py rename to Darwin/lib/python3.5/distutils/tests/test_build_py.py index c8f6b89..18283dc 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_build_py.py +++ b/Darwin/lib/python3.5/distutils/tests/test_build_py.py @@ -120,8 +120,8 @@ class BuildPyTestCase(support.TempdirManager, found = os.listdir(cmd.build_lib) self.assertEqual(sorted(found), ['__pycache__', 'boiledeggs.py']) found = os.listdir(os.path.join(cmd.build_lib, '__pycache__')) - self.assertEqual(sorted(found), - ['boiledeggs.%s.pyo' % sys.implementation.cache_tag]) + expect = 'boiledeggs.{}.opt-1.pyc'.format(sys.implementation.cache_tag) + self.assertEqual(sorted(found), [expect]) def test_dir_in_package_data(self): """ diff --git a/Darwin/lib/python3.4/distutils/tests/test_build_scripts.py b/Darwin/lib/python3.5/distutils/tests/test_build_scripts.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_build_scripts.py rename to Darwin/lib/python3.5/distutils/tests/test_build_scripts.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_check.py b/Darwin/lib/python3.5/distutils/tests/test_check.py similarity index 80% rename from Darwin/lib/python3.4/distutils/tests/test_check.py rename to Darwin/lib/python3.5/distutils/tests/test_check.py index 601b686..959fa90 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_check.py +++ b/Darwin/lib/python3.5/distutils/tests/test_check.py @@ -1,4 +1,5 @@ """Tests for distutils.command.check.""" +import textwrap import unittest from test.support import run_unittest @@ -92,6 +93,36 @@ class CheckTestCase(support.LoggingSilencer, cmd = self._run(metadata, strict=1, restructuredtext=1) self.assertEqual(cmd._warnings, 0) + @unittest.skipUnless(HAS_DOCUTILS, "won't test without docutils") + def test_check_restructuredtext_with_syntax_highlight(self): + # Don't fail if there is a `code` or `code-block` directive + + example_rst_docs = [] + 
example_rst_docs.append(textwrap.dedent("""\ + Here's some code: + + .. code:: python + + def foo(): + pass + """)) + example_rst_docs.append(textwrap.dedent("""\ + Here's some code: + + .. code-block:: python + + def foo(): + pass + """)) + + for rest_with_code in example_rst_docs: + pkg_info, dist = self.create_dist(long_description=rest_with_code) + cmd = check(dist) + cmd.check_restructuredtext() + self.assertEqual(cmd._warnings, 0) + msgs = cmd._check_rst_data(rest_with_code) + self.assertEqual(len(msgs), 0) + def test_check_all(self): metadata = {'url': 'xxx', 'author': 'xxx'} diff --git a/Darwin/lib/python3.4/distutils/tests/test_clean.py b/Darwin/lib/python3.5/distutils/tests/test_clean.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_clean.py rename to Darwin/lib/python3.5/distutils/tests/test_clean.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_cmd.py b/Darwin/lib/python3.5/distutils/tests/test_cmd.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_cmd.py rename to Darwin/lib/python3.5/distutils/tests/test_cmd.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_config.py b/Darwin/lib/python3.5/distutils/tests/test_config.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_config.py rename to Darwin/lib/python3.5/distutils/tests/test_config.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_config_cmd.py b/Darwin/lib/python3.5/distutils/tests/test_config_cmd.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_config_cmd.py rename to Darwin/lib/python3.5/distutils/tests/test_config_cmd.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_core.py b/Darwin/lib/python3.5/distutils/tests/test_core.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_core.py rename to Darwin/lib/python3.5/distutils/tests/test_core.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_cygwinccompiler.py b/Darwin/lib/python3.5/distutils/tests/test_cygwinccompiler.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_cygwinccompiler.py rename to Darwin/lib/python3.5/distutils/tests/test_cygwinccompiler.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_dep_util.py b/Darwin/lib/python3.5/distutils/tests/test_dep_util.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_dep_util.py rename to Darwin/lib/python3.5/distutils/tests/test_dep_util.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_dir_util.py b/Darwin/lib/python3.5/distutils/tests/test_dir_util.py similarity index 89% rename from Darwin/lib/python3.4/distutils/tests/test_dir_util.py rename to Darwin/lib/python3.5/distutils/tests/test_dir_util.py index 1589f12..d436cf8 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_dir_util.py +++ b/Darwin/lib/python3.5/distutils/tests/test_dir_util.py @@ -2,9 +2,10 @@ import unittest import os import stat -import shutil import sys +from unittest.mock import patch +from distutils import dir_util, errors from distutils.dir_util import (mkpath, remove_tree, create_tree, copy_tree, ensure_relative) @@ -12,6 +13,7 @@ from distutils import log from distutils.tests import support from test.support import run_unittest + class DirUtilTestCase(support.TempdirManager, unittest.TestCase): def _log(self, msg, *args): @@ -52,7 +54,7 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): self.assertEqual(self._logs, wanted) @unittest.skipIf(sys.platform.startswith('win'), - 
"This test is only appropriate for POSIX-like systems.") + "This test is only appropriate for POSIX-like systems.") def test_mkpath_with_custom_mode(self): # Get and set the current umask value for testing mode bits. umask = os.umask(0o002) @@ -120,6 +122,16 @@ class DirUtilTestCase(support.TempdirManager, unittest.TestCase): self.assertEqual(ensure_relative('c:\\home\\foo'), 'c:home\\foo') self.assertEqual(ensure_relative('home\\foo'), 'home\\foo') + def test_copy_tree_exception_in_listdir(self): + """ + An exception in listdir should raise a DistutilsFileError + """ + with patch("os.listdir", side_effect=OSError()), \ + self.assertRaises(errors.DistutilsFileError): + src = self.tempdirs[-1] + dir_util.copy_tree(src, None) + + def test_suite(): return unittest.makeSuite(DirUtilTestCase) diff --git a/Darwin/lib/python3.4/distutils/tests/test_dist.py b/Darwin/lib/python3.5/distutils/tests/test_dist.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_dist.py rename to Darwin/lib/python3.5/distutils/tests/test_dist.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_extension.py b/Darwin/lib/python3.5/distutils/tests/test_extension.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_extension.py rename to Darwin/lib/python3.5/distutils/tests/test_extension.py diff --git a/Darwin/lib/python3.5/distutils/tests/test_file_util.py b/Darwin/lib/python3.5/distutils/tests/test_file_util.py new file mode 100644 index 0000000..a6d04f0 --- /dev/null +++ b/Darwin/lib/python3.5/distutils/tests/test_file_util.py @@ -0,0 +1,115 @@ +"""Tests for distutils.file_util.""" +import unittest +import os +import shutil +import errno +from unittest.mock import patch + +from distutils.file_util import move_file, copy_file +from distutils import log +from distutils.tests import support +from distutils.errors import DistutilsFileError +from test.support import run_unittest + +class FileUtilTestCase(support.TempdirManager, unittest.TestCase): + + def _log(self, msg, *args): + if len(args) > 0: + self._logs.append(msg % args) + else: + self._logs.append(msg) + + def setUp(self): + super(FileUtilTestCase, self).setUp() + self._logs = [] + self.old_log = log.info + log.info = self._log + tmp_dir = self.mkdtemp() + self.source = os.path.join(tmp_dir, 'f1') + self.target = os.path.join(tmp_dir, 'f2') + self.target_dir = os.path.join(tmp_dir, 'd1') + + def tearDown(self): + log.info = self.old_log + super(FileUtilTestCase, self).tearDown() + + def test_move_file_verbosity(self): + f = open(self.source, 'w') + try: + f.write('some content') + finally: + f.close() + + move_file(self.source, self.target, verbose=0) + wanted = [] + self.assertEqual(self._logs, wanted) + + # back to original state + move_file(self.target, self.source, verbose=0) + + move_file(self.source, self.target, verbose=1) + wanted = ['moving %s -> %s' % (self.source, self.target)] + self.assertEqual(self._logs, wanted) + + # back to original state + move_file(self.target, self.source, verbose=0) + + self._logs = [] + # now the target is a dir + os.mkdir(self.target_dir) + move_file(self.source, self.target_dir, verbose=1) + wanted = ['moving %s -> %s' % (self.source, self.target_dir)] + self.assertEqual(self._logs, wanted) + + def test_move_file_exception_unpacking_rename(self): + # see issue 22182 + with patch("os.rename", side_effect=OSError("wrong", 1)), \ + self.assertRaises(DistutilsFileError): + with open(self.source, 'w') as fobj: + fobj.write('spam eggs') + move_file(self.source, 
self.target, verbose=0) + + def test_move_file_exception_unpacking_unlink(self): + # see issue 22182 + with patch("os.rename", side_effect=OSError(errno.EXDEV, "wrong")), \ + patch("os.unlink", side_effect=OSError("wrong", 1)), \ + self.assertRaises(DistutilsFileError): + with open(self.source, 'w') as fobj: + fobj.write('spam eggs') + move_file(self.source, self.target, verbose=0) + + def test_copy_file_hard_link(self): + with open(self.source, 'w') as f: + f.write('some content') + st = os.stat(self.source) + copy_file(self.source, self.target, link='hard') + st2 = os.stat(self.source) + st3 = os.stat(self.target) + self.assertTrue(os.path.samestat(st, st2), (st, st2)) + self.assertTrue(os.path.samestat(st2, st3), (st2, st3)) + with open(self.source, 'r') as f: + self.assertEqual(f.read(), 'some content') + + def test_copy_file_hard_link_failure(self): + # If hard linking fails, copy_file() falls back on copying file + # (some special filesystems don't support hard linking even under + # Unix, see issue #8876). + with open(self.source, 'w') as f: + f.write('some content') + st = os.stat(self.source) + with patch("os.link", side_effect=OSError(0, "linking unsupported")): + copy_file(self.source, self.target, link='hard') + st2 = os.stat(self.source) + st3 = os.stat(self.target) + self.assertTrue(os.path.samestat(st, st2), (st, st2)) + self.assertFalse(os.path.samestat(st2, st3), (st2, st3)) + for fn in (self.source, self.target): + with open(fn, 'r') as f: + self.assertEqual(f.read(), 'some content') + + +def test_suite(): + return unittest.makeSuite(FileUtilTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff --git a/Darwin/lib/python3.4/distutils/tests/test_filelist.py b/Darwin/lib/python3.5/distutils/tests/test_filelist.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_filelist.py rename to Darwin/lib/python3.5/distutils/tests/test_filelist.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_install.py b/Darwin/lib/python3.5/distutils/tests/test_install.py similarity index 98% rename from Darwin/lib/python3.4/distutils/tests/test_install.py rename to Darwin/lib/python3.5/distutils/tests/test_install.py index 18e1e57..9313330 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_install.py +++ b/Darwin/lib/python3.5/distutils/tests/test_install.py @@ -20,8 +20,6 @@ from distutils.tests import support def _make_ext_name(modname): - if os.name == 'nt' and sys.executable.endswith('_d.exe'): - modname += '_d' return modname + sysconfig.get_config_var('EXT_SUFFIX') diff --git a/Darwin/lib/python3.4/distutils/tests/test_install_data.py b/Darwin/lib/python3.5/distutils/tests/test_install_data.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_install_data.py rename to Darwin/lib/python3.5/distutils/tests/test_install_data.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_install_headers.py b/Darwin/lib/python3.5/distutils/tests/test_install_headers.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_install_headers.py rename to Darwin/lib/python3.5/distutils/tests/test_install_headers.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_install_lib.py b/Darwin/lib/python3.5/distutils/tests/test_install_lib.py similarity index 90% rename from Darwin/lib/python3.4/distutils/tests/test_install_lib.py rename to Darwin/lib/python3.5/distutils/tests/test_install_lib.py index 40dd1a9..5378aa8 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_install_lib.py +++ 
b/Darwin/lib/python3.5/distutils/tests/test_install_lib.py @@ -44,12 +44,11 @@ class InstallLibTestCase(support.TempdirManager, f = os.path.join(project_dir, 'foo.py') self.write_file(f, '# python file') cmd.byte_compile([f]) - pyc_file = importlib.util.cache_from_source('foo.py', - debug_override=True) - pyo_file = importlib.util.cache_from_source('foo.py', - debug_override=False) + pyc_file = importlib.util.cache_from_source('foo.py', optimization='') + pyc_opt_file = importlib.util.cache_from_source('foo.py', + optimization=cmd.optimize) self.assertTrue(os.path.exists(pyc_file)) - self.assertTrue(os.path.exists(pyo_file)) + self.assertTrue(os.path.exists(pyc_opt_file)) def test_get_outputs(self): project_dir, dist = self.create_dist() @@ -66,8 +65,8 @@ class InstallLibTestCase(support.TempdirManager, cmd.distribution.packages = ['spam'] cmd.distribution.script_name = 'setup.py' - # get_outputs should return 4 elements: spam/__init__.py, .pyc and - # .pyo, foo.import-tag-abiflags.so / foo.pyd + # get_outputs should return 4 elements: spam/__init__.py and .pyc, + # foo.import-tag-abiflags.so / foo.pyd outputs = cmd.get_outputs() self.assertEqual(len(outputs), 4, outputs) diff --git a/Darwin/lib/python3.4/distutils/tests/test_install_scripts.py b/Darwin/lib/python3.5/distutils/tests/test_install_scripts.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_install_scripts.py rename to Darwin/lib/python3.5/distutils/tests/test_install_scripts.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_log.py b/Darwin/lib/python3.5/distutils/tests/test_log.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_log.py rename to Darwin/lib/python3.5/distutils/tests/test_log.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_msvc9compiler.py b/Darwin/lib/python3.5/distutils/tests/test_msvc9compiler.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_msvc9compiler.py rename to Darwin/lib/python3.5/distutils/tests/test_msvc9compiler.py diff --git a/Darwin/lib/python3.5/distutils/tests/test_msvccompiler.py b/Darwin/lib/python3.5/distutils/tests/test_msvccompiler.py new file mode 100644 index 0000000..874d603 --- /dev/null +++ b/Darwin/lib/python3.5/distutils/tests/test_msvccompiler.py @@ -0,0 +1,90 @@ +"""Tests for distutils._msvccompiler.""" +import sys +import unittest +import os + +from distutils.errors import DistutilsPlatformError +from distutils.tests import support +from test.support import run_unittest + + +SKIP_MESSAGE = (None if sys.platform == "win32" else + "These tests are only for win32") + +@unittest.skipUnless(SKIP_MESSAGE is None, SKIP_MESSAGE) +class msvccompilerTestCase(support.TempdirManager, + unittest.TestCase): + + def test_no_compiler(self): + import distutils._msvccompiler as _msvccompiler + # makes sure query_vcvarsall raises + # a DistutilsPlatformError if the compiler + # is not found + def _find_vcvarsall(plat_spec): + return None, None + + old_find_vcvarsall = _msvccompiler._find_vcvarsall + _msvccompiler._find_vcvarsall = _find_vcvarsall + try: + self.assertRaises(DistutilsPlatformError, + _msvccompiler._get_vc_env, + 'wont find this version') + finally: + _msvccompiler._find_vcvarsall = old_find_vcvarsall + + def test_compiler_options(self): + import distutils._msvccompiler as _msvccompiler + # suppress path to vcruntime from _find_vcvarsall to + # check that /MT is added to compile options + old_find_vcvarsall = _msvccompiler._find_vcvarsall + def _find_vcvarsall(plat_spec): + return 
old_find_vcvarsall(plat_spec)[0], None + _msvccompiler._find_vcvarsall = _find_vcvarsall + try: + compiler = _msvccompiler.MSVCCompiler() + compiler.initialize() + + self.assertIn('/MT', compiler.compile_options) + self.assertNotIn('/MD', compiler.compile_options) + finally: + _msvccompiler._find_vcvarsall = old_find_vcvarsall + + def test_vcruntime_copy(self): + import distutils._msvccompiler as _msvccompiler + # force path to a known file - it doesn't matter + # what we copy as long as its name is not in + # _msvccompiler._BUNDLED_DLLS + old_find_vcvarsall = _msvccompiler._find_vcvarsall + def _find_vcvarsall(plat_spec): + return old_find_vcvarsall(plat_spec)[0], __file__ + _msvccompiler._find_vcvarsall = _find_vcvarsall + try: + tempdir = self.mkdtemp() + compiler = _msvccompiler.MSVCCompiler() + compiler.initialize() + compiler._copy_vcruntime(tempdir) + + self.assertTrue(os.path.isfile(os.path.join( + tempdir, os.path.basename(__file__)))) + finally: + _msvccompiler._find_vcvarsall = old_find_vcvarsall + + def test_vcruntime_skip_copy(self): + import distutils._msvccompiler as _msvccompiler + + tempdir = self.mkdtemp() + compiler = _msvccompiler.MSVCCompiler() + compiler.initialize() + dll = compiler._vcruntime_redist + self.assertTrue(os.path.isfile(dll)) + + compiler._copy_vcruntime(tempdir) + + self.assertFalse(os.path.isfile(os.path.join( + tempdir, os.path.basename(dll)))) + +def test_suite(): + return unittest.makeSuite(msvccompilerTestCase) + +if __name__ == "__main__": + run_unittest(test_suite()) diff --git a/Darwin/lib/python3.4/distutils/tests/test_register.py b/Darwin/lib/python3.5/distutils/tests/test_register.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_register.py rename to Darwin/lib/python3.5/distutils/tests/test_register.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_sdist.py b/Darwin/lib/python3.5/distutils/tests/test_sdist.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_sdist.py rename to Darwin/lib/python3.5/distutils/tests/test_sdist.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_spawn.py b/Darwin/lib/python3.5/distutils/tests/test_spawn.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_spawn.py rename to Darwin/lib/python3.5/distutils/tests/test_spawn.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_sysconfig.py b/Darwin/lib/python3.5/distutils/tests/test_sysconfig.py similarity index 88% rename from Darwin/lib/python3.4/distutils/tests/test_sysconfig.py rename to Darwin/lib/python3.5/distutils/tests/test_sysconfig.py index 95fa9dc..fc4d1de 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_sysconfig.py +++ b/Darwin/lib/python3.5/distutils/tests/test_sysconfig.py @@ -1,6 +1,9 @@ """Tests for distutils.sysconfig.""" import os import shutil +import subprocess +import sys +import textwrap import unittest from distutils import sysconfig @@ -174,6 +177,25 @@ class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): self.assertIsNotNone(vars['SO']) self.assertEqual(vars['SO'], vars['EXT_SUFFIX']) + def test_customize_compiler_before_get_config_vars(self): + # Issue #21923: test that a Distribution compiler + # instance can be called without an explicit call to + # get_config_vars(). 
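Aside (illustrative, not part of the patch): the dir_util/file_util tests above stub out os.listdir, os.rename and os.link with unittest.mock.patch(side_effect=...) to force the error paths. A generic, self-contained sketch of that pattern; the patched target here is arbitrary:

    import os
    import unittest
    from unittest.mock import patch

    class SideEffectExample(unittest.TestCase):
        def test_listdir_failure_is_surfaced(self):
            # Every call to os.listdir inside this block raises OSError.
            with patch('os.listdir', side_effect=OSError('boom')):
                with self.assertRaises(OSError):
                    os.listdir('/tmp')

    if __name__ == '__main__':
        unittest.main()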
+ with open(TESTFN, 'w') as f: + f.writelines(textwrap.dedent('''\ + from distutils.core import Distribution + config = Distribution().get_command_obj('config') + # try_compile may pass or it may fail if no compiler + # is found but it should not raise an exception. + rc = config.try_compile('int x;') + ''')) + p = subprocess.Popen([str(sys.executable), TESTFN], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True) + outs, errs = p.communicate() + self.assertEqual(0, p.returncode, "Subprocess failed: " + outs) + def test_suite(): suite = unittest.TestSuite() diff --git a/Darwin/lib/python3.4/distutils/tests/test_text_file.py b/Darwin/lib/python3.5/distutils/tests/test_text_file.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_text_file.py rename to Darwin/lib/python3.5/distutils/tests/test_text_file.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_unixccompiler.py b/Darwin/lib/python3.5/distutils/tests/test_unixccompiler.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_unixccompiler.py rename to Darwin/lib/python3.5/distutils/tests/test_unixccompiler.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_upload.py b/Darwin/lib/python3.5/distutils/tests/test_upload.py similarity index 87% rename from Darwin/lib/python3.4/distutils/tests/test_upload.py rename to Darwin/lib/python3.5/distutils/tests/test_upload.py index f53eb26..dccaf77 100644 --- a/Darwin/lib/python3.4/distutils/tests/test_upload.py +++ b/Darwin/lib/python3.5/distutils/tests/test_upload.py @@ -6,6 +6,7 @@ from test.support import run_unittest from distutils.command import upload as upload_mod from distutils.command.upload import upload from distutils.core import Distribution +from distutils.errors import DistutilsError from distutils.log import INFO from distutils.tests.test_config import PYPIRC, PyPIRCCommandTestCase @@ -41,13 +42,14 @@ username:me class FakeOpen(object): - def __init__(self, url): + def __init__(self, url, msg=None, code=None): self.url = url if not isinstance(url, str): self.req = url else: self.req = None - self.msg = 'OK' + self.msg = msg or 'OK' + self.code = code or 200 def getheader(self, name, default=None): return { @@ -58,7 +60,7 @@ class FakeOpen(object): return b'xyzzy' def getcode(self): - return 200 + return self.code class uploadTestCase(PyPIRCCommandTestCase): @@ -68,13 +70,15 @@ class uploadTestCase(PyPIRCCommandTestCase): self.old_open = upload_mod.urlopen upload_mod.urlopen = self._urlopen self.last_open = None + self.next_msg = None + self.next_code = None def tearDown(self): upload_mod.urlopen = self.old_open super(uploadTestCase, self).tearDown() def _urlopen(self, url): - self.last_open = FakeOpen(url) + self.last_open = FakeOpen(url, msg=self.next_msg, code=self.next_code) return self.last_open def test_finalize_options(self): @@ -123,7 +127,7 @@ class uploadTestCase(PyPIRCCommandTestCase): # what did we send ? 
headers = dict(self.last_open.req.headers) - self.assertEqual(headers['Content-length'], '2087') + self.assertEqual(headers['Content-length'], '2161') content_type = headers['Content-type'] self.assertTrue(content_type.startswith('multipart/form-data')) self.assertEqual(self.last_open.req.get_method(), 'POST') @@ -135,6 +139,10 @@ class uploadTestCase(PyPIRCCommandTestCase): results = self.get_logs(INFO) self.assertIn('xyzzy\n', results[-1]) + def test_upload_fails(self): + self.next_msg = "Not Found" + self.next_code = 404 + self.assertRaises(DistutilsError, self.test_upload) def test_suite(): return unittest.makeSuite(uploadTestCase) diff --git a/Darwin/lib/python3.4/distutils/tests/test_util.py b/Darwin/lib/python3.5/distutils/tests/test_util.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_util.py rename to Darwin/lib/python3.5/distutils/tests/test_util.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_version.py b/Darwin/lib/python3.5/distutils/tests/test_version.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_version.py rename to Darwin/lib/python3.5/distutils/tests/test_version.py diff --git a/Darwin/lib/python3.4/distutils/tests/test_versionpredicate.py b/Darwin/lib/python3.5/distutils/tests/test_versionpredicate.py similarity index 100% rename from Darwin/lib/python3.4/distutils/tests/test_versionpredicate.py rename to Darwin/lib/python3.5/distutils/tests/test_versionpredicate.py diff --git a/Darwin/lib/python3.4/distutils/tests/xxmodule.c b/Darwin/lib/python3.5/distutils/tests/xxmodule.c similarity index 97% rename from Darwin/lib/python3.4/distutils/tests/xxmodule.c rename to Darwin/lib/python3.5/distutils/tests/xxmodule.c index 0feff66..85230d9 100644 --- a/Darwin/lib/python3.4/distutils/tests/xxmodule.c +++ b/Darwin/lib/python3.5/distutils/tests/xxmodule.c @@ -334,26 +334,10 @@ static PyMethodDef xx_methods[] = { PyDoc_STRVAR(module_doc, "This is a template module just for instruction."); -/* Initialization function for the module (*must* be called PyInit_xx) */ - -static struct PyModuleDef xxmodule = { - PyModuleDef_HEAD_INIT, - "xx", - module_doc, - -1, - xx_methods, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC -PyInit_xx(void) +static int +xx_exec(PyObject *m) { - PyObject *m = NULL; - /* Due to cross platform compiler issues the slots must be filled * here. It's required for portability to Windows without requiring * C++. 
*/ @@ -366,11 +350,6 @@ PyInit_xx(void) if (PyType_Ready(&Xxo_Type) < 0) goto fail; - /* Create the module and add the functions */ - m = PyModule_Create(&xxmodule); - if (m == NULL) - goto fail; - /* Add some symbolic constants to the module */ if (ErrorObject == NULL) { ErrorObject = PyErr_NewException("xx.error", NULL, NULL); @@ -389,8 +368,33 @@ PyInit_xx(void) if (PyType_Ready(&Null_Type) < 0) goto fail; PyModule_AddObject(m, "Null", (PyObject *)&Null_Type); - return m; + return 0; fail: Py_XDECREF(m); - return NULL; + return -1; +} + +static struct PyModuleDef_Slot xx_slots[] = { + {Py_mod_exec, xx_exec}, + {0, NULL}, +}; + +static struct PyModuleDef xxmodule = { + PyModuleDef_HEAD_INIT, + "xx", + module_doc, + 0, + xx_methods, + xx_slots, + NULL, + NULL, + NULL +}; + +/* Export function for the module (*must* be called PyInit_xx) */ + +PyMODINIT_FUNC +PyInit_xx(void) +{ + return PyModuleDef_Init(&xxmodule); } diff --git a/Darwin/lib/python3.4/distutils/text_file.py b/Darwin/lib/python3.5/distutils/text_file.py similarity index 99% rename from Darwin/lib/python3.4/distutils/text_file.py rename to Darwin/lib/python3.5/distutils/text_file.py index 40b8484..478336f 100644 --- a/Darwin/lib/python3.4/distutils/text_file.py +++ b/Darwin/lib/python3.5/distutils/text_file.py @@ -118,10 +118,11 @@ class TextFile: def close(self): """Close the current file and forget everything we know about it (filename, current line number).""" - self.file.close() + file = self.file self.file = None self.filename = None self.current_line = None + file.close() def gen_error(self, msg, line=None): outmsg = [] diff --git a/Darwin/lib/python3.4/distutils/unixccompiler.py b/Darwin/lib/python3.5/distutils/unixccompiler.py similarity index 97% rename from Darwin/lib/python3.4/distutils/unixccompiler.py rename to Darwin/lib/python3.5/distutils/unixccompiler.py index 094a2f0..a814c5d 100644 --- a/Darwin/lib/python3.4/distutils/unixccompiler.py +++ b/Darwin/lib/python3.5/distutils/unixccompiler.py @@ -76,7 +76,9 @@ class UnixCCompiler(CCompiler): static_lib_extension = ".a" shared_lib_extension = ".so" dylib_lib_extension = ".dylib" + xcode_stub_lib_extension = ".tbd" static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" + xcode_stub_lib_format = dylib_lib_format if sys.platform == "cygwin": exe_extension = ".exe" @@ -255,6 +257,7 @@ class UnixCCompiler(CCompiler): def find_library_file(self, dirs, lib, debug=0): shared_f = self.library_filename(lib, lib_type='shared') dylib_f = self.library_filename(lib, lib_type='dylib') + xcode_stub_f = self.library_filename(lib, lib_type='xcode_stub') static_f = self.library_filename(lib, lib_type='static') if sys.platform == 'darwin': @@ -274,6 +277,7 @@ class UnixCCompiler(CCompiler): shared = os.path.join(dir, shared_f) dylib = os.path.join(dir, dylib_f) static = os.path.join(dir, static_f) + xcode_stub = os.path.join(dir, xcode_stub_f) if sys.platform == 'darwin' and ( dir.startswith('/System/') or ( @@ -282,6 +286,7 @@ class UnixCCompiler(CCompiler): shared = os.path.join(sysroot, dir[1:], shared_f) dylib = os.path.join(sysroot, dir[1:], dylib_f) static = os.path.join(sysroot, dir[1:], static_f) + xcode_stub = os.path.join(sysroot, dir[1:], xcode_stub_f) # We're second-guessing the linker here, with not much hard # data to go on: GCC seems to prefer the shared library, so I'm @@ -289,6 +294,8 @@ class UnixCCompiler(CCompiler): # ignoring even GCC's "-static" option. So sue me. 
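Aside (illustrative, not part of the patch): the new xcode_stub branch below lets find_library_file() fall back to Apple's text-based stub libraries (.tbd). Assuming the Python 3.5 distutils shipped in this patch, library_filename() maps the new lib_type as sketched here; the library name is arbitrary:

    from distutils.unixccompiler import UnixCCompiler

    cc = UnixCCompiler()
    print(cc.library_filename('ssl', lib_type='dylib'))       # libssl.dylib
    print(cc.library_filename('ssl', lib_type='xcode_stub'))  # libssl.tbd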
if os.path.exists(dylib): return dylib + elif os.path.exists(xcode_stub): + return xcode_stub elif os.path.exists(shared): return shared elif os.path.exists(static): diff --git a/Darwin/lib/python3.4/distutils/util.py b/Darwin/lib/python3.5/distutils/util.py similarity index 98% rename from Darwin/lib/python3.4/distutils/util.py rename to Darwin/lib/python3.5/distutils/util.py index 5adcac5..e423325 100644 --- a/Darwin/lib/python3.4/distutils/util.py +++ b/Darwin/lib/python3.5/distutils/util.py @@ -322,11 +322,11 @@ def byte_compile (py_files, prefix=None, base_dir=None, verbose=1, dry_run=0, direct=None): - """Byte-compile a collection of Python source files to either .pyc - or .pyo files in a __pycache__ subdirectory. 'py_files' is a list + """Byte-compile a collection of Python source files to .pyc + files in a __pycache__ subdirectory. 'py_files' is a list of files to compile; any files that don't end in ".py" are silently skipped. 'optimize' must be one of the following: - 0 - don't optimize (generate .pyc) + 0 - don't optimize 1 - normal optimization (like "python -O") 2 - extra optimization (like "python -OO") If 'force' is true, all files are recompiled regardless of @@ -438,8 +438,9 @@ byte_compile(files, optimize=%r, force=%r, # cfile - byte-compiled file # dfile - purported source filename (same as 'file' by default) if optimize >= 0: + opt = '' if optimize == 0 else optimize cfile = importlib.util.cache_from_source( - file, debug_override=not optimize) + file, optimization=opt) else: cfile = importlib.util.cache_from_source(file) dfile = file diff --git a/Darwin/lib/python3.4/distutils/version.py b/Darwin/lib/python3.5/distutils/version.py similarity index 98% rename from Darwin/lib/python3.4/distutils/version.py rename to Darwin/lib/python3.5/distutils/version.py index ebcab84..af14cc1 100644 --- a/Darwin/lib/python3.4/distutils/version.py +++ b/Darwin/lib/python3.5/distutils/version.py @@ -48,12 +48,6 @@ class Version: return c return c == 0 - def __ne__(self, other): - c = self._cmp(other) - if c is NotImplemented: - return c - return c != 0 - def __lt__(self, other): c = self._cmp(other) if c is NotImplemented: diff --git a/Darwin/lib/python3.4/distutils/versionpredicate.py b/Darwin/lib/python3.5/distutils/versionpredicate.py similarity index 100% rename from Darwin/lib/python3.4/distutils/versionpredicate.py rename to Darwin/lib/python3.5/distutils/versionpredicate.py diff --git a/Darwin/lib/python3.4/doctest.py b/Darwin/lib/python3.5/doctest.py similarity index 99% rename from Darwin/lib/python3.4/doctest.py rename to Darwin/lib/python3.5/doctest.py index d212ad6..96ab0c4 100644 --- a/Darwin/lib/python3.4/doctest.py +++ b/Darwin/lib/python3.5/doctest.py @@ -481,9 +481,6 @@ class Example: self.options == other.options and \ self.exc_msg == other.exc_msg - def __ne__(self, other): - return not self == other - def __hash__(self): return hash((self.source, self.want, self.lineno, self.indent, self.exc_msg)) @@ -533,8 +530,9 @@ class DocTest: examples = '1 example' else: examples = '%d examples' % len(self.examples) - return ('' % - (self.name, self.filename, self.lineno, examples)) + return ('<%s %s from %s:%s (%s)>' % + (self.__class__.__name__, + self.name, self.filename, self.lineno, examples)) def __eq__(self, other): if type(self) is not type(other): @@ -547,9 +545,6 @@ class DocTest: self.filename == other.filename and \ self.lineno == other.lineno - def __ne__(self, other): - return not self == other - def __hash__(self): return hash((self.docstring, self.name, 
self.filename, self.lineno)) @@ -984,7 +979,8 @@ class DocTestFinder: for valname, val in obj.__dict__.items(): valname = '%s.%s' % (name, valname) # Recurse to functions & classes. - if ((inspect.isroutine(val) or inspect.isclass(val)) and + if ((inspect.isroutine(inspect.unwrap(val)) + or inspect.isclass(val)) and self._from_module(module, val)): self._find(tests, val, valname, module, source_lines, globs, seen) @@ -1055,7 +1051,7 @@ class DocTestFinder: filename = None else: filename = getattr(module, '__file__', module.__name__) - if filename[-4:] in (".pyc", ".pyo"): + if filename[-4:] == ".pyc": filename = filename[:-1] return self._parser.get_doctest(docstring, globs, name, filename, lineno) @@ -2289,9 +2285,6 @@ class DocTestCase(unittest.TestCase): self._dt_tearDown == other._dt_tearDown and \ self._dt_checker == other._dt_checker - def __ne__(self, other): - return not self == other - def __hash__(self): return hash((self._dt_optionflags, self._dt_setUp, self._dt_tearDown, self._dt_checker)) @@ -2376,15 +2369,6 @@ def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, suite = _DocTestSuite() suite.addTest(SkipDocTestCase(module)) return suite - elif not tests: - # Why do we want to do this? Because it reveals a bug that might - # otherwise be hidden. - # It is probably a bug that this exception is not also raised if the - # number of doctest examples in tests is zero (i.e. if no doctest - # examples were found). However, we should probably not be raising - # an exception at all here, though it is too late to make this change - # for a maintenance release. See also issue #14649. - raise ValueError(module, "has no docstrings") tests.sort() suite = _DocTestSuite() @@ -2394,7 +2378,7 @@ def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, continue if not test.filename: filename = module.__file__ - if filename[-4:] in (".pyc", ".pyo"): + if filename[-4:] == ".pyc": filename = filename[:-1] test.filename = filename suite.addTest(DocTestCase(test, **options)) diff --git a/Darwin/lib/python3.4/dummy_threading.py b/Darwin/lib/python3.5/dummy_threading.py similarity index 100% rename from Darwin/lib/python3.4/dummy_threading.py rename to Darwin/lib/python3.5/dummy_threading.py diff --git a/Darwin/lib/python3.4/email/__init__.py b/Darwin/lib/python3.5/email/__init__.py similarity index 98% rename from Darwin/lib/python3.4/email/__init__.py rename to Darwin/lib/python3.5/email/__init__.py index ff16f6a..fae8724 100644 --- a/Darwin/lib/python3.4/email/__init__.py +++ b/Darwin/lib/python3.5/email/__init__.py @@ -4,8 +4,6 @@ """A package for parsing, handling, and generating email messages.""" -__version__ = '5.1.0' - __all__ = [ 'base64mime', 'charset', diff --git a/Darwin/lib/python3.4/email/_encoded_words.py b/Darwin/lib/python3.5/email/_encoded_words.py similarity index 99% rename from Darwin/lib/python3.4/email/_encoded_words.py rename to Darwin/lib/python3.5/email/_encoded_words.py index 9e0cc75..5eaab36 100644 --- a/Darwin/lib/python3.4/email/_encoded_words.py +++ b/Darwin/lib/python3.5/email/_encoded_words.py @@ -152,7 +152,7 @@ def decode(ew): then from the resulting bytes into unicode using the specified charset. If the cte-decoded string does not successfully decode using the specified character set, a defect is added to the defects list and the unknown octets - are replaced by the unicode 'unknown' character \uFDFF. + are replaced by the unicode 'unknown' character \\uFDFF. The specified charset and language are returned. 
The default for language, which is rarely if ever encountered, is the empty string. diff --git a/Darwin/lib/python3.4/email/_header_value_parser.py b/Darwin/lib/python3.5/email/_header_value_parser.py similarity index 97% rename from Darwin/lib/python3.4/email/_header_value_parser.py rename to Darwin/lib/python3.5/email/_header_value_parser.py index 3dc5502..f264191 100644 --- a/Darwin/lib/python3.4/email/_header_value_parser.py +++ b/Darwin/lib/python3.5/email/_header_value_parser.py @@ -71,6 +71,7 @@ import re import urllib # For urllib.parse.unquote from string import hexdigits from collections import OrderedDict +from operator import itemgetter from email import _encoded_words as _ew from email import errors from email import utils @@ -319,17 +320,18 @@ class TokenList(list): return ''.join(res) def _fold(self, folded): + encoding = 'utf-8' if folded.policy.utf8 else 'ascii' for part in self.parts: tstr = str(part) tlen = len(tstr) try: - str(part).encode('us-ascii') + str(part).encode(encoding) except UnicodeEncodeError: if any(isinstance(x, errors.UndecodableBytesDefect) for x in part.all_defects): charset = 'unknown-8bit' else: - # XXX: this should be a policy setting + # XXX: this should be a policy setting when utf8 is False. charset = 'utf-8' tstr = part.cte_encode(charset, folded.policy) tlen = len(tstr) @@ -393,11 +395,12 @@ class UnstructuredTokenList(TokenList): def _fold(self, folded): last_ew = None + encoding = 'utf-8' if folded.policy.utf8 else 'ascii' for part in self.parts: tstr = str(part) is_ew = False try: - str(part).encode('us-ascii') + str(part).encode(encoding) except UnicodeEncodeError: if any(isinstance(x, errors.UndecodableBytesDefect) for x in part.all_defects): @@ -474,12 +477,13 @@ class Phrase(TokenList): # comment that becomes a barrier across which we can't compose encoded # words. last_ew = None + encoding = 'utf-8' if folded.policy.utf8 else 'ascii' for part in self.parts: tstr = str(part) tlen = len(tstr) has_ew = False try: - str(part).encode('us-ascii') + str(part).encode(encoding) except UnicodeEncodeError: if any(isinstance(x, errors.UndecodableBytesDefect) for x in part.all_defects): @@ -1098,15 +1102,34 @@ class MimeParameters(TokenList): params[name] = [] params[name].append((token.section_number, token)) for name, parts in params.items(): - parts = sorted(parts) - # XXX: there might be more recovery we could do here if, for - # example, this is really a case of a duplicate attribute name. + parts = sorted(parts, key=itemgetter(0)) + first_param = parts[0][1] + charset = first_param.charset + # Our arbitrary error recovery is to ignore duplicate parameters, + # to use appearance order if there are duplicate rfc 2231 parts, + # and to ignore gaps. This mimics the error recovery of get_param. + if not first_param.extended and len(parts) > 1: + if parts[1][0] == 0: + parts[1][1].defects.append(errors.InvalidHeaderDefect( + 'duplicate parameter name; duplicate(s) ignored')) + parts = parts[:1] + # Else assume the *0* was missing...note that this is different + # from get_param, but we registered a defect for this earlier. value_parts = [] - charset = parts[0][1].charset - for i, (section_number, param) in enumerate(parts): + i = 0 + for section_number, param in parts: if section_number != i: - param.defects.append(errors.InvalidHeaderDefect( - "inconsistent multipart parameter numbering")) + # We could get fancier here and look for a complete + # duplicate extended parameter and ignore the second one + # seen. But we're not doing that. 
The old code didn't. + if not param.extended: + param.defects.append(errors.InvalidHeaderDefect( + 'duplicate parameter name; duplicate ignored')) + continue + else: + param.defects.append(errors.InvalidHeaderDefect( + "inconsistent RFC2231 parameter numbering")) + i += 1 value = param.param_value if param.extended: try: @@ -2897,7 +2920,7 @@ def parse_content_disposition_header(value): try: token, value = get_token(value) except errors.HeaderParseError: - ctype.defects.append(errors.InvalidHeaderDefect( + disp_header.defects.append(errors.InvalidHeaderDefect( "Expected content disposition but found {!r}".format(value))) _find_mime_parameters(disp_header, value) return disp_header @@ -2928,8 +2951,8 @@ def parse_content_transfer_encoding_header(value): try: token, value = get_token(value) except errors.HeaderParseError: - ctype.defects.append(errors.InvalidHeaderDefect( - "Expected content trnasfer encoding but found {!r}".format(value))) + cte_header.defects.append(errors.InvalidHeaderDefect( + "Expected content transfer encoding but found {!r}".format(value))) else: cte_header.append(token) cte_header.cte = token.value.strip().lower() diff --git a/Darwin/lib/python3.4/email/_parseaddr.py b/Darwin/lib/python3.5/email/_parseaddr.py similarity index 100% rename from Darwin/lib/python3.4/email/_parseaddr.py rename to Darwin/lib/python3.5/email/_parseaddr.py diff --git a/Darwin/lib/python3.4/email/_policybase.py b/Darwin/lib/python3.5/email/_policybase.py similarity index 97% rename from Darwin/lib/python3.4/email/_policybase.py rename to Darwin/lib/python3.5/email/_policybase.py index 8106114..c0d98a4 100644 --- a/Darwin/lib/python3.4/email/_policybase.py +++ b/Darwin/lib/python3.5/email/_policybase.py @@ -149,12 +149,18 @@ class Policy(_PolicyBase, metaclass=abc.ABCMeta): during serialization. None or 0 means no line wrapping is done. Default is 78. + mangle_from_ -- a flag that, when True escapes From_ lines in the + body of the message by putting a `>' in front of + them. This is used when the message is being + serialized by a generator. Default: True. + """ raise_on_defect = False linesep = '\n' cte_type = '8bit' max_line_length = 78 + mangle_from_ = False def handle_defect(self, obj, defect): """Based on policy, either raise defect or call register_defect. @@ -266,6 +272,8 @@ class Compat32(Policy): replicates the behavior of the email package version 5.1. """ + mangle_from_ = True + def _sanitize_header(self, name, value): # If the header value contains surrogates, return a Header using # the unknown-8bit charset to encode the bytes as encoded words. 
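
The _policybase.py hunk above adds a mangle_from_ knob to the policy framework: the new Policy base defaults it to False, while Compat32 keeps the historical True so legacy callers still get mbox-style ">From " escaping. Below is a minimal sketch of how the setting is expected to surface when flattening a message, assuming the Python 3.5 email package this patch produces (the Generator change that falls back to policy.mangle_from_ appears further down in this diff); the sample message is invented for illustration only.

import io
from email.generator import Generator
from email.message import Message
from email.policy import compat32, default

# A body line beginning with "From " is the case mangle_from_ exists for:
# written verbatim into an mbox file, it would look like the start of a
# new message, so the legacy behaviour escapes it with a leading ">".
msg = Message()
msg['Subject'] = 'demo'
msg.set_payload('From here on, everything is body text.\n')

def flatten(policy):
    buf = io.StringIO()
    # mangle_from_ is left unset so the Generator falls back to policy.mangle_from_
    Generator(buf, policy=policy).flatten(msg)
    return buf.getvalue()

print(flatten(compat32).splitlines()[-1])   # '>From here on, ...' (escaped, mangle_from_=True)
print(flatten(default).splitlines()[-1])    # 'From here on, ...'  (left alone, mangle_from_=False)
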
diff --git a/Darwin/lib/python3.4/email/architecture.rst b/Darwin/lib/python3.5/email/architecture.rst similarity index 100% rename from Darwin/lib/python3.4/email/architecture.rst rename to Darwin/lib/python3.5/email/architecture.rst diff --git a/Darwin/lib/python3.4/email/base64mime.py b/Darwin/lib/python3.5/email/base64mime.py similarity index 100% rename from Darwin/lib/python3.4/email/base64mime.py rename to Darwin/lib/python3.5/email/base64mime.py diff --git a/Darwin/lib/python3.4/email/charset.py b/Darwin/lib/python3.5/email/charset.py similarity index 99% rename from Darwin/lib/python3.4/email/charset.py rename to Darwin/lib/python3.5/email/charset.py index e999472..ee56404 100644 --- a/Darwin/lib/python3.4/email/charset.py +++ b/Darwin/lib/python3.5/email/charset.py @@ -249,9 +249,6 @@ class Charset: def __eq__(self, other): return str(self) == str(other).lower() - def __ne__(self, other): - return not self.__eq__(other) - def get_body_encoding(self): """Return the content-transfer-encoding used for body encoding. diff --git a/Darwin/lib/python3.4/email/contentmanager.py b/Darwin/lib/python3.5/email/contentmanager.py similarity index 100% rename from Darwin/lib/python3.4/email/contentmanager.py rename to Darwin/lib/python3.5/email/contentmanager.py diff --git a/Darwin/lib/python3.4/email/encoders.py b/Darwin/lib/python3.5/email/encoders.py similarity index 100% rename from Darwin/lib/python3.4/email/encoders.py rename to Darwin/lib/python3.5/email/encoders.py diff --git a/Darwin/lib/python3.4/email/errors.py b/Darwin/lib/python3.5/email/errors.py similarity index 100% rename from Darwin/lib/python3.4/email/errors.py rename to Darwin/lib/python3.5/email/errors.py diff --git a/Darwin/lib/python3.4/email/feedparser.py b/Darwin/lib/python3.5/email/feedparser.py similarity index 93% rename from Darwin/lib/python3.4/email/feedparser.py rename to Darwin/lib/python3.5/email/feedparser.py index 6cf9b91..e2e3e96 100644 --- a/Darwin/lib/python3.4/email/feedparser.py +++ b/Darwin/lib/python3.5/email/feedparser.py @@ -26,6 +26,7 @@ import re from email import errors from email import message from email._policybase import compat32 +from collections import deque NLCRE = re.compile('\r\n|\r|\n') NLCRE_bol = re.compile('(\r\n|\r|\n)') @@ -33,7 +34,7 @@ NLCRE_eol = re.compile('(\r\n|\r|\n)\Z') NLCRE_crack = re.compile('(\r\n|\r|\n)') # RFC 2822 $3.6.8 Optional fields. ftext is %d33-57 / %d59-126, Any character # except controls, SP, and ":". -headerRE = re.compile(r'^(From |[\041-\071\073-\176]{1,}:|[\t ])') +headerRE = re.compile(r'^(From |[\041-\071\073-\176]*:|[\t ])') EMPTYSTRING = '' NL = '\n' @@ -50,10 +51,10 @@ class BufferedSubFile(object): simple abstraction -- it parses until EOF closes the current message. """ def __init__(self): - # The last partial line pushed into this object. - self._partial = '' - # The list of full, pushed lines, in reverse order - self._lines = [] + # Chunks of the last partial line pushed into this object. + self._partial = [] + # A deque of full, pushed lines + self._lines = deque() # The stack of false-EOF checking predicates. self._eofstack = [] # A flag indicating whether the file has been closed or not. @@ -67,8 +68,8 @@ class BufferedSubFile(object): def close(self): # Don't forget any trailing partial line. 
- self._lines.append(self._partial) - self._partial = '' + self.pushlines(''.join(self._partial).splitlines(True)) + self._partial = [] self._closed = True def readline(self): @@ -78,39 +79,48 @@ class BufferedSubFile(object): return NeedMoreData # Pop the line off the stack and see if it matches the current # false-EOF predicate. - line = self._lines.pop() + line = self._lines.popleft() # RFC 2046, section 5.1.2 requires us to recognize outer level # boundaries at any level of inner nesting. Do this, but be sure it's # in the order of most to least nested. - for ateof in self._eofstack[::-1]: + for ateof in reversed(self._eofstack): if ateof(line): # We're at the false EOF. But push the last line back first. - self._lines.append(line) + self._lines.appendleft(line) return '' return line def unreadline(self, line): # Let the consumer push a line back into the buffer. assert line is not NeedMoreData - self._lines.append(line) + self._lines.appendleft(line) def push(self, data): """Push some new data into this object.""" - # Handle any previous leftovers - data, self._partial = self._partial + data, '' # Crack into lines, but preserve the linesep characters on the end of each parts = data.splitlines(True) + + if not parts or not parts[0].endswith(('\n', '\r')): + # No new complete lines, so just accumulate partials + self._partial += parts + return + + if self._partial: + # If there are previous leftovers, complete them now + self._partial.append(parts[0]) + parts[0:1] = ''.join(self._partial).splitlines(True) + del self._partial[:] + # If the last element of the list does not end in a newline, then treat # it as a partial line. We only check for '\n' here because a line # ending with '\r' might be a line that was split in the middle of a # '\r\n' sequence (see bugs 1555570 and 1721862). - if parts and not parts[-1].endswith('\n'): - self._partial = parts.pop() + if not parts[-1].endswith('\n'): + self._partial = [parts.pop()] self.pushlines(parts) def pushlines(self, lines): - # Reverse and insert at the front of the lines. - self._lines[:0] = lines[::-1] + self._lines.extend(lines) def __iter__(self): return self @@ -501,6 +511,15 @@ class FeedParser: # There will always be a colon, because if there wasn't the part of # the parser that calls us would have started parsing the body. i = line.find(':') + + # If the colon is on the start of the line the header is clearly + # malformed, but we might be able to salvage the rest of the + # message. Track the error but keep going. + if i == 0: + defect = errors.InvalidHeaderDefect("Missing header name.") + self._cur.defects.append(defect) + continue + assert i>0, "_parse_headers fed line with no : and no leading WS" lastheader = line[:i] lastvalue = [line] diff --git a/Darwin/lib/python3.4/email/generator.py b/Darwin/lib/python3.5/email/generator.py similarity index 98% rename from Darwin/lib/python3.4/email/generator.py rename to Darwin/lib/python3.5/email/generator.py index 4735721..11ff16d 100644 --- a/Darwin/lib/python3.4/email/generator.py +++ b/Darwin/lib/python3.5/email/generator.py @@ -32,16 +32,16 @@ class Generator: # Public interface # - def __init__(self, outfp, mangle_from_=True, maxheaderlen=None, *, + def __init__(self, outfp, mangle_from_=None, maxheaderlen=None, *, policy=None): """Create the generator for message flattening. outfp is the output file-like object for writing the message to. It must have a write() method. 
- Optional mangle_from_ is a flag that, when True (the default), escapes - From_ lines in the body of the message by putting a `>' in front of - them. + Optional mangle_from_ is a flag that, when True (the default if policy + is not set), escapes From_ lines in the body of the message by putting + a `>' in front of them. Optional maxheaderlen specifies the longest length for a non-continued header. When a header line is longer (in characters, with tabs @@ -56,6 +56,9 @@ class Generator: flatten method is used. """ + + if mangle_from_ is None: + mangle_from_ = True if policy is None else policy.mangle_from_ self._fp = outfp self._mangle_from_ = mangle_from_ self.maxheaderlen = maxheaderlen @@ -449,7 +452,7 @@ class DecodedGenerator(Generator): Like the Generator base class, except that non-text parts are substituted with a format string representing the part. """ - def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None): + def __init__(self, outfp, mangle_from_=None, maxheaderlen=78, fmt=None): """Like Generator.__init__() except that an additional optional argument is allowed. diff --git a/Darwin/lib/python3.4/email/header.py b/Darwin/lib/python3.5/email/header.py similarity index 99% rename from Darwin/lib/python3.4/email/header.py rename to Darwin/lib/python3.5/email/header.py index 9c89589..6820ea1 100644 --- a/Darwin/lib/python3.4/email/header.py +++ b/Darwin/lib/python3.5/email/header.py @@ -262,9 +262,6 @@ class Header: # args and do another comparison. return other == str(self) - def __ne__(self, other): - return not self == other - def append(self, s, charset=None, errors='strict'): """Append a string to the MIME header. diff --git a/Darwin/lib/python3.4/email/headerregistry.py b/Darwin/lib/python3.5/email/headerregistry.py similarity index 98% rename from Darwin/lib/python3.4/email/headerregistry.py rename to Darwin/lib/python3.5/email/headerregistry.py index 1fae950..468ca9e 100644 --- a/Darwin/lib/python3.4/email/headerregistry.py +++ b/Darwin/lib/python3.5/email/headerregistry.py @@ -7,6 +7,7 @@ Eventually HeaderRegistry will be a public API, but it isn't yet, and will probably change some before that happens. 
""" +from types import MappingProxyType from email import utils from email import errors @@ -80,7 +81,8 @@ class Address: return lp def __repr__(self): - return "Address(display_name={!r}, username={!r}, domain={!r})".format( + return "{}(display_name={!r}, username={!r}, domain={!r})".format( + self.__class__.__name__, self.display_name, self.username, self.domain) def __str__(self): @@ -131,7 +133,8 @@ class Group: return self._addresses def __repr__(self): - return "Group(display_name={!r}, addresses={!r}".format( + return "{}(display_name={!r}, addresses={!r}".format( + self.__class__.__name__, self.display_name, self.addresses) def __str__(self): @@ -454,7 +457,7 @@ class ParameterizedMIMEHeader: @property def params(self): - return self._params.copy() + return MappingProxyType(self._params) class ContentTypeHeader(ParameterizedMIMEHeader): diff --git a/Darwin/lib/python3.4/email/iterators.py b/Darwin/lib/python3.5/email/iterators.py similarity index 100% rename from Darwin/lib/python3.4/email/iterators.py rename to Darwin/lib/python3.5/email/iterators.py diff --git a/Darwin/lib/python3.4/email/message.py b/Darwin/lib/python3.5/email/message.py similarity index 98% rename from Darwin/lib/python3.4/email/message.py rename to Darwin/lib/python3.5/email/message.py index aa46deb..a892012 100644 --- a/Darwin/lib/python3.4/email/message.py +++ b/Darwin/lib/python3.5/email/message.py @@ -9,6 +9,7 @@ __all__ = ['Message'] import re import uu import quopri +import warnings from io import BytesIO, StringIO # Intrapackage imports @@ -272,7 +273,7 @@ class Message: bpayload = payload.encode('ascii') except UnicodeError: # This won't happen for RFC compliant messages (messages - # containing only ASCII codepoints in the unicode input). + # containing only ASCII code points in the unicode input). # If it does happen, turn the string into bytes in a way # guaranteed not to fail. bpayload = payload.encode('raw-unicode-escape') @@ -926,6 +927,18 @@ class Message: """ return [part.get_content_charset(failobj) for part in self.walk()] + def get_content_disposition(self): + """Return the message's content-disposition if it exists, or None. + + The return values can be either 'inline', 'attachment' or None + according to the rfc2183. + """ + value = self.get('content-disposition') + if value is None: + return None + c_d = _splitparam(value)[0].lower() + return c_d + # I.e. def walk(self): ... 
from email.iterators import walk @@ -938,15 +951,12 @@ class MIMEPart(Message): policy = default Message.__init__(self, policy) - @property def is_attachment(self): c_d = self.get('content-disposition') - if c_d is None: - return False - return c_d.lower() == 'attachment' + return False if c_d is None else c_d.content_disposition == 'attachment' def _find_body(self, part, preferencelist): - if part.is_attachment: + if part.is_attachment(): return maintype, subtype = part.get_content_type().split('/') if maintype == 'text': @@ -1039,7 +1049,7 @@ class MIMEPart(Message): for part in parts: maintype, subtype = part.get_content_type().split('/') if ((maintype, subtype) in self._body_types and - not part.is_attachment and subtype not in seen): + not part.is_attachment() and subtype not in seen): seen.append(subtype) continue yield part diff --git a/Darwin/lib/python3.4/email/mime/__init__.py b/Darwin/lib/python3.5/email/mime/__init__.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/__init__.py rename to Darwin/lib/python3.5/email/mime/__init__.py diff --git a/Darwin/lib/python3.4/email/mime/application.py b/Darwin/lib/python3.5/email/mime/application.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/application.py rename to Darwin/lib/python3.5/email/mime/application.py diff --git a/Darwin/lib/python3.4/email/mime/audio.py b/Darwin/lib/python3.5/email/mime/audio.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/audio.py rename to Darwin/lib/python3.5/email/mime/audio.py diff --git a/Darwin/lib/python3.4/email/mime/base.py b/Darwin/lib/python3.5/email/mime/base.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/base.py rename to Darwin/lib/python3.5/email/mime/base.py diff --git a/Darwin/lib/python3.4/email/mime/image.py b/Darwin/lib/python3.5/email/mime/image.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/image.py rename to Darwin/lib/python3.5/email/mime/image.py diff --git a/Darwin/lib/python3.4/email/mime/message.py b/Darwin/lib/python3.5/email/mime/message.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/message.py rename to Darwin/lib/python3.5/email/mime/message.py diff --git a/Darwin/lib/python3.4/email/mime/multipart.py b/Darwin/lib/python3.5/email/mime/multipart.py similarity index 100% rename from Darwin/lib/python3.4/email/mime/multipart.py rename to Darwin/lib/python3.5/email/mime/multipart.py diff --git a/Darwin/lib/python3.4/email/mime/nonmultipart.py b/Darwin/lib/python3.5/email/mime/nonmultipart.py similarity index 91% rename from Darwin/lib/python3.4/email/mime/nonmultipart.py rename to Darwin/lib/python3.5/email/mime/nonmultipart.py index fc3b9eb..e1f5196 100644 --- a/Darwin/lib/python3.4/email/mime/nonmultipart.py +++ b/Darwin/lib/python3.5/email/mime/nonmultipart.py @@ -12,7 +12,7 @@ from email.mime.base import MIMEBase class MIMENonMultipart(MIMEBase): - """Base class for MIME multipart/* type messages.""" + """Base class for MIME non-multipart type messages.""" def attach(self, payload): # The public API prohibits attaching multiple subparts to MIMEBase diff --git a/Darwin/lib/python3.4/email/mime/text.py b/Darwin/lib/python3.5/email/mime/text.py similarity index 92% rename from Darwin/lib/python3.4/email/mime/text.py rename to Darwin/lib/python3.5/email/mime/text.py index ec18b85..479928e 100644 --- a/Darwin/lib/python3.4/email/mime/text.py +++ b/Darwin/lib/python3.5/email/mime/text.py @@ -6,6 +6,7 @@ __all__ = ['MIMEText'] +from email.charset import 
Charset from email.mime.nonmultipart import MIMENonMultipart @@ -34,6 +35,8 @@ class MIMEText(MIMENonMultipart): _charset = 'us-ascii' except UnicodeEncodeError: _charset = 'utf-8' + if isinstance(_charset, Charset): + _charset = str(_charset) MIMENonMultipart.__init__(self, 'text', _subtype, **{'charset': _charset}) diff --git a/Darwin/lib/python3.4/email/parser.py b/Darwin/lib/python3.5/email/parser.py similarity index 98% rename from Darwin/lib/python3.4/email/parser.py rename to Darwin/lib/python3.5/email/parser.py index 9f5f95d..8c9bc9e 100644 --- a/Darwin/lib/python3.4/email/parser.py +++ b/Darwin/lib/python3.5/email/parser.py @@ -106,8 +106,10 @@ class BytesParser: meaning it parses the entire contents of the file. """ fp = TextIOWrapper(fp, encoding='ascii', errors='surrogateescape') - with fp: + try: return self.parser.parse(fp, headersonly) + finally: + fp.detach() def parsebytes(self, text, headersonly=False): diff --git a/Darwin/lib/python3.4/email/policy.py b/Darwin/lib/python3.5/email/policy.py similarity index 92% rename from Darwin/lib/python3.4/email/policy.py rename to Darwin/lib/python3.5/email/policy.py index f0b20f4..6ac64a5 100644 --- a/Darwin/lib/python3.4/email/policy.py +++ b/Darwin/lib/python3.5/email/policy.py @@ -35,6 +35,13 @@ class EmailPolicy(Policy): In addition to the settable attributes listed above that apply to all Policies, this policy adds the following additional attributes: + utf8 -- if False (the default) message headers will be + serialized as ASCII, using encoded words to encode + any non-ASCII characters in the source strings. If + True, the message headers will be serialized using + utf8 and will not contain encoded words (see RFC + 6532 for more on this serialization format). + refold_source -- if the value for a header in the Message object came from the parsing of some source, this attribute indicates whether or not a generator should refold @@ -72,6 +79,7 @@ class EmailPolicy(Policy): """ + utf8 = False refold_source = 'long' header_factory = HeaderRegistry() content_manager = raw_data_manager @@ -175,9 +183,13 @@ class EmailPolicy(Policy): refold_header setting, since there is no way to know whether the binary data consists of single byte characters or multibyte characters. + If utf8 is true, headers are encoded to utf8, otherwise to ascii with + non-ASCII unicode rendered as encoded words. 
+ """ folded = self._fold(name, value, refold_binary=self.cte_type=='7bit') - return folded.encode('ascii', 'surrogateescape') + charset = 'utf8' if self.utf8 else 'ascii' + return folded.encode(charset, 'surrogateescape') def _fold(self, name, value, refold_binary=False): if hasattr(value, 'name'): @@ -199,3 +211,4 @@ del default.header_factory strict = default.clone(raise_on_defect=True) SMTP = default.clone(linesep='\r\n') HTTP = default.clone(linesep='\r\n', max_line_length=None) +SMTPUTF8 = SMTP.clone(utf8=True) diff --git a/Darwin/lib/python3.4/email/quoprimime.py b/Darwin/lib/python3.5/email/quoprimime.py similarity index 100% rename from Darwin/lib/python3.4/email/quoprimime.py rename to Darwin/lib/python3.5/email/quoprimime.py diff --git a/Darwin/lib/python3.4/email/utils.py b/Darwin/lib/python3.5/email/utils.py similarity index 92% rename from Darwin/lib/python3.4/email/utils.py rename to Darwin/lib/python3.5/email/utils.py index cacb9b1..5080d81 100644 --- a/Darwin/lib/python3.4/email/utils.py +++ b/Darwin/lib/python3.5/email/utils.py @@ -155,30 +155,14 @@ def formatdate(timeval=None, localtime=False, usegmt=False): # 2822 requires that day and month names be the English abbreviations. if timeval is None: timeval = time.time() - if localtime: - now = time.localtime(timeval) - # Calculate timezone offset, based on whether the local zone has - # daylight savings time, and whether DST is in effect. - if time.daylight and now[-1]: - offset = time.altzone - else: - offset = time.timezone - hours, minutes = divmod(abs(offset), 3600) - # Remember offset is in seconds west of UTC, but the timezone is in - # minutes east of UTC, so the signs differ. - if offset > 0: - sign = '-' - else: - sign = '+' - zone = '%s%02d%02d' % (sign, hours, minutes // 60) + if localtime or usegmt: + dt = datetime.datetime.fromtimestamp(timeval, datetime.timezone.utc) else: - now = time.gmtime(timeval) - # Timezone offset is always -0000 - if usegmt: - zone = 'GMT' - else: - zone = '-0000' - return _format_timetuple_and_zone(now, zone) + dt = datetime.datetime.utcfromtimestamp(timeval) + if localtime: + dt = dt.astimezone() + usegmt = False + return format_datetime(dt, usegmt) def format_datetime(dt, usegmt=False): """Turn a datetime into a date string as specified in RFC 2822. @@ -202,24 +186,23 @@ def format_datetime(dt, usegmt=False): def make_msgid(idstring=None, domain=None): """Returns a string suitable for RFC 2822 compliant Message-ID, e.g: - <20020201195627.33539.96671@nightshade.la.mastaler.com> + <142480216486.20800.16526388040877946887@nightshade.la.mastaler.com> Optional idstring if given is a string used to strengthen the uniqueness of the message id. Optional domain if given provides the portion of the message id after the '@'. It defaults to the locally defined hostname. """ - timeval = time.time() - utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval)) + timeval = int(time.time()*100) pid = os.getpid() - randint = random.randrange(100000) + randint = random.getrandbits(64) if idstring is None: idstring = '' else: idstring = '.' 
+ idstring if domain is None: domain = socket.getfqdn() - msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, domain) + msgid = '<%d.%d.%d%s@%s>' % (timeval, pid, randint, idstring, domain) return msgid diff --git a/Darwin/lib/python3.4/encodings/__init__.py b/Darwin/lib/python3.5/encodings/__init__.py similarity index 100% rename from Darwin/lib/python3.4/encodings/__init__.py rename to Darwin/lib/python3.5/encodings/__init__.py diff --git a/Darwin/lib/python3.4/encodings/aliases.py b/Darwin/lib/python3.5/encodings/aliases.py similarity index 99% rename from Darwin/lib/python3.4/encodings/aliases.py rename to Darwin/lib/python3.5/encodings/aliases.py index 4cbaade..67c828d 100644 --- a/Darwin/lib/python3.4/encodings/aliases.py +++ b/Darwin/lib/python3.5/encodings/aliases.py @@ -412,6 +412,11 @@ aliases = { # koi8_r codec 'cskoi8r' : 'koi8_r', + # kz1048 codec + 'kz_1048' : 'kz1048', + 'rk1048' : 'kz1048', + 'strk1048_2002' : 'kz1048', + # latin_1 codec # # Note that the latin_1 codec is implemented internally in C and a diff --git a/Darwin/lib/python3.4/encodings/ascii.py b/Darwin/lib/python3.5/encodings/ascii.py similarity index 100% rename from Darwin/lib/python3.4/encodings/ascii.py rename to Darwin/lib/python3.5/encodings/ascii.py diff --git a/Darwin/lib/python3.4/encodings/base64_codec.py b/Darwin/lib/python3.5/encodings/base64_codec.py similarity index 100% rename from Darwin/lib/python3.4/encodings/base64_codec.py rename to Darwin/lib/python3.5/encodings/base64_codec.py diff --git a/Darwin/lib/python3.4/encodings/big5.py b/Darwin/lib/python3.5/encodings/big5.py similarity index 100% rename from Darwin/lib/python3.4/encodings/big5.py rename to Darwin/lib/python3.5/encodings/big5.py diff --git a/Darwin/lib/python3.4/encodings/big5hkscs.py b/Darwin/lib/python3.5/encodings/big5hkscs.py similarity index 100% rename from Darwin/lib/python3.4/encodings/big5hkscs.py rename to Darwin/lib/python3.5/encodings/big5hkscs.py diff --git a/Darwin/lib/python3.4/encodings/bz2_codec.py b/Darwin/lib/python3.5/encodings/bz2_codec.py similarity index 100% rename from Darwin/lib/python3.4/encodings/bz2_codec.py rename to Darwin/lib/python3.5/encodings/bz2_codec.py diff --git a/Darwin/lib/python3.4/encodings/charmap.py b/Darwin/lib/python3.5/encodings/charmap.py similarity index 100% rename from Darwin/lib/python3.4/encodings/charmap.py rename to Darwin/lib/python3.5/encodings/charmap.py diff --git a/Darwin/lib/python3.4/encodings/cp037.py b/Darwin/lib/python3.5/encodings/cp037.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp037.py rename to Darwin/lib/python3.5/encodings/cp037.py diff --git a/Darwin/lib/python3.4/encodings/cp1006.py b/Darwin/lib/python3.5/encodings/cp1006.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1006.py rename to Darwin/lib/python3.5/encodings/cp1006.py diff --git a/Darwin/lib/python3.4/encodings/cp1026.py b/Darwin/lib/python3.5/encodings/cp1026.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1026.py rename to Darwin/lib/python3.5/encodings/cp1026.py diff --git a/Darwin/lib/python3.4/encodings/cp1125.py b/Darwin/lib/python3.5/encodings/cp1125.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1125.py rename to Darwin/lib/python3.5/encodings/cp1125.py diff --git a/Darwin/lib/python3.4/encodings/cp1140.py b/Darwin/lib/python3.5/encodings/cp1140.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1140.py rename to Darwin/lib/python3.5/encodings/cp1140.py diff --git 
a/Darwin/lib/python3.4/encodings/cp1250.py b/Darwin/lib/python3.5/encodings/cp1250.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1250.py rename to Darwin/lib/python3.5/encodings/cp1250.py diff --git a/Darwin/lib/python3.4/encodings/cp1251.py b/Darwin/lib/python3.5/encodings/cp1251.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1251.py rename to Darwin/lib/python3.5/encodings/cp1251.py diff --git a/Darwin/lib/python3.4/encodings/cp1252.py b/Darwin/lib/python3.5/encodings/cp1252.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1252.py rename to Darwin/lib/python3.5/encodings/cp1252.py diff --git a/Darwin/lib/python3.4/encodings/cp1253.py b/Darwin/lib/python3.5/encodings/cp1253.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1253.py rename to Darwin/lib/python3.5/encodings/cp1253.py diff --git a/Darwin/lib/python3.4/encodings/cp1254.py b/Darwin/lib/python3.5/encodings/cp1254.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1254.py rename to Darwin/lib/python3.5/encodings/cp1254.py diff --git a/Darwin/lib/python3.4/encodings/cp1255.py b/Darwin/lib/python3.5/encodings/cp1255.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1255.py rename to Darwin/lib/python3.5/encodings/cp1255.py diff --git a/Darwin/lib/python3.4/encodings/cp1256.py b/Darwin/lib/python3.5/encodings/cp1256.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1256.py rename to Darwin/lib/python3.5/encodings/cp1256.py diff --git a/Darwin/lib/python3.4/encodings/cp1257.py b/Darwin/lib/python3.5/encodings/cp1257.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1257.py rename to Darwin/lib/python3.5/encodings/cp1257.py diff --git a/Darwin/lib/python3.4/encodings/cp1258.py b/Darwin/lib/python3.5/encodings/cp1258.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp1258.py rename to Darwin/lib/python3.5/encodings/cp1258.py diff --git a/Darwin/lib/python3.4/encodings/cp273.py b/Darwin/lib/python3.5/encodings/cp273.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp273.py rename to Darwin/lib/python3.5/encodings/cp273.py diff --git a/Darwin/lib/python3.4/encodings/cp424.py b/Darwin/lib/python3.5/encodings/cp424.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp424.py rename to Darwin/lib/python3.5/encodings/cp424.py diff --git a/Darwin/lib/python3.4/encodings/cp437.py b/Darwin/lib/python3.5/encodings/cp437.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp437.py rename to Darwin/lib/python3.5/encodings/cp437.py diff --git a/Darwin/lib/python3.4/encodings/cp500.py b/Darwin/lib/python3.5/encodings/cp500.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp500.py rename to Darwin/lib/python3.5/encodings/cp500.py diff --git a/Darwin/lib/python3.4/encodings/cp65001.py b/Darwin/lib/python3.5/encodings/cp65001.py similarity index 81% rename from Darwin/lib/python3.4/encodings/cp65001.py rename to Darwin/lib/python3.5/encodings/cp65001.py index 287eb87..95cb2ae 100644 --- a/Darwin/lib/python3.4/encodings/cp65001.py +++ b/Darwin/lib/python3.5/encodings/cp65001.py @@ -11,20 +11,23 @@ if not hasattr(codecs, 'code_page_encode'): ### Codec APIs encode = functools.partial(codecs.code_page_encode, 65001) -decode = functools.partial(codecs.code_page_decode, 65001) +_decode = functools.partial(codecs.code_page_decode, 65001) + +def decode(input, errors='strict'): + return 
codecs.code_page_decode(65001, input, errors, True) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return encode(input, self.errors)[0] class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - _buffer_decode = decode + _buffer_decode = _decode class StreamWriter(codecs.StreamWriter): encode = encode class StreamReader(codecs.StreamReader): - decode = decode + decode = _decode ### encodings module API diff --git a/Darwin/lib/python3.4/encodings/cp720.py b/Darwin/lib/python3.5/encodings/cp720.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp720.py rename to Darwin/lib/python3.5/encodings/cp720.py diff --git a/Darwin/lib/python3.4/encodings/cp737.py b/Darwin/lib/python3.5/encodings/cp737.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp737.py rename to Darwin/lib/python3.5/encodings/cp737.py diff --git a/Darwin/lib/python3.4/encodings/cp775.py b/Darwin/lib/python3.5/encodings/cp775.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp775.py rename to Darwin/lib/python3.5/encodings/cp775.py diff --git a/Darwin/lib/python3.4/encodings/cp850.py b/Darwin/lib/python3.5/encodings/cp850.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp850.py rename to Darwin/lib/python3.5/encodings/cp850.py diff --git a/Darwin/lib/python3.4/encodings/cp852.py b/Darwin/lib/python3.5/encodings/cp852.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp852.py rename to Darwin/lib/python3.5/encodings/cp852.py diff --git a/Darwin/lib/python3.4/encodings/cp855.py b/Darwin/lib/python3.5/encodings/cp855.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp855.py rename to Darwin/lib/python3.5/encodings/cp855.py diff --git a/Darwin/lib/python3.4/encodings/cp856.py b/Darwin/lib/python3.5/encodings/cp856.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp856.py rename to Darwin/lib/python3.5/encodings/cp856.py diff --git a/Darwin/lib/python3.4/encodings/cp857.py b/Darwin/lib/python3.5/encodings/cp857.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp857.py rename to Darwin/lib/python3.5/encodings/cp857.py diff --git a/Darwin/lib/python3.4/encodings/cp858.py b/Darwin/lib/python3.5/encodings/cp858.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp858.py rename to Darwin/lib/python3.5/encodings/cp858.py diff --git a/Darwin/lib/python3.4/encodings/cp860.py b/Darwin/lib/python3.5/encodings/cp860.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp860.py rename to Darwin/lib/python3.5/encodings/cp860.py diff --git a/Darwin/lib/python3.4/encodings/cp861.py b/Darwin/lib/python3.5/encodings/cp861.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp861.py rename to Darwin/lib/python3.5/encodings/cp861.py diff --git a/Darwin/lib/python3.4/encodings/cp862.py b/Darwin/lib/python3.5/encodings/cp862.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp862.py rename to Darwin/lib/python3.5/encodings/cp862.py diff --git a/Darwin/lib/python3.4/encodings/cp863.py b/Darwin/lib/python3.5/encodings/cp863.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp863.py rename to Darwin/lib/python3.5/encodings/cp863.py diff --git a/Darwin/lib/python3.4/encodings/cp864.py b/Darwin/lib/python3.5/encodings/cp864.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp864.py rename to Darwin/lib/python3.5/encodings/cp864.py diff --git 
a/Darwin/lib/python3.4/encodings/cp865.py b/Darwin/lib/python3.5/encodings/cp865.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp865.py rename to Darwin/lib/python3.5/encodings/cp865.py diff --git a/Darwin/lib/python3.4/encodings/cp866.py b/Darwin/lib/python3.5/encodings/cp866.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp866.py rename to Darwin/lib/python3.5/encodings/cp866.py diff --git a/Darwin/lib/python3.4/encodings/cp869.py b/Darwin/lib/python3.5/encodings/cp869.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp869.py rename to Darwin/lib/python3.5/encodings/cp869.py diff --git a/Darwin/lib/python3.4/encodings/cp874.py b/Darwin/lib/python3.5/encodings/cp874.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp874.py rename to Darwin/lib/python3.5/encodings/cp874.py diff --git a/Darwin/lib/python3.4/encodings/cp875.py b/Darwin/lib/python3.5/encodings/cp875.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp875.py rename to Darwin/lib/python3.5/encodings/cp875.py diff --git a/Darwin/lib/python3.4/encodings/cp932.py b/Darwin/lib/python3.5/encodings/cp932.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp932.py rename to Darwin/lib/python3.5/encodings/cp932.py diff --git a/Darwin/lib/python3.4/encodings/cp949.py b/Darwin/lib/python3.5/encodings/cp949.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp949.py rename to Darwin/lib/python3.5/encodings/cp949.py diff --git a/Darwin/lib/python3.4/encodings/cp950.py b/Darwin/lib/python3.5/encodings/cp950.py similarity index 100% rename from Darwin/lib/python3.4/encodings/cp950.py rename to Darwin/lib/python3.5/encodings/cp950.py diff --git a/Darwin/lib/python3.4/encodings/euc_jis_2004.py b/Darwin/lib/python3.5/encodings/euc_jis_2004.py similarity index 100% rename from Darwin/lib/python3.4/encodings/euc_jis_2004.py rename to Darwin/lib/python3.5/encodings/euc_jis_2004.py diff --git a/Darwin/lib/python3.4/encodings/euc_jisx0213.py b/Darwin/lib/python3.5/encodings/euc_jisx0213.py similarity index 100% rename from Darwin/lib/python3.4/encodings/euc_jisx0213.py rename to Darwin/lib/python3.5/encodings/euc_jisx0213.py diff --git a/Darwin/lib/python3.4/encodings/euc_jp.py b/Darwin/lib/python3.5/encodings/euc_jp.py similarity index 100% rename from Darwin/lib/python3.4/encodings/euc_jp.py rename to Darwin/lib/python3.5/encodings/euc_jp.py diff --git a/Darwin/lib/python3.4/encodings/euc_kr.py b/Darwin/lib/python3.5/encodings/euc_kr.py similarity index 100% rename from Darwin/lib/python3.4/encodings/euc_kr.py rename to Darwin/lib/python3.5/encodings/euc_kr.py diff --git a/Darwin/lib/python3.4/encodings/gb18030.py b/Darwin/lib/python3.5/encodings/gb18030.py similarity index 100% rename from Darwin/lib/python3.4/encodings/gb18030.py rename to Darwin/lib/python3.5/encodings/gb18030.py diff --git a/Darwin/lib/python3.4/encodings/gb2312.py b/Darwin/lib/python3.5/encodings/gb2312.py similarity index 100% rename from Darwin/lib/python3.4/encodings/gb2312.py rename to Darwin/lib/python3.5/encodings/gb2312.py diff --git a/Darwin/lib/python3.4/encodings/gbk.py b/Darwin/lib/python3.5/encodings/gbk.py similarity index 100% rename from Darwin/lib/python3.4/encodings/gbk.py rename to Darwin/lib/python3.5/encodings/gbk.py diff --git a/Darwin/lib/python3.4/encodings/hex_codec.py b/Darwin/lib/python3.5/encodings/hex_codec.py similarity index 100% rename from Darwin/lib/python3.4/encodings/hex_codec.py rename to 
Darwin/lib/python3.5/encodings/hex_codec.py diff --git a/Darwin/lib/python3.4/encodings/hp_roman8.py b/Darwin/lib/python3.5/encodings/hp_roman8.py similarity index 100% rename from Darwin/lib/python3.4/encodings/hp_roman8.py rename to Darwin/lib/python3.5/encodings/hp_roman8.py diff --git a/Darwin/lib/python3.4/encodings/hz.py b/Darwin/lib/python3.5/encodings/hz.py similarity index 100% rename from Darwin/lib/python3.4/encodings/hz.py rename to Darwin/lib/python3.5/encodings/hz.py diff --git a/Darwin/lib/python3.4/encodings/idna.py b/Darwin/lib/python3.5/encodings/idna.py similarity index 100% rename from Darwin/lib/python3.4/encodings/idna.py rename to Darwin/lib/python3.5/encodings/idna.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp.py b/Darwin/lib/python3.5/encodings/iso2022_jp.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp.py rename to Darwin/lib/python3.5/encodings/iso2022_jp.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp_1.py b/Darwin/lib/python3.5/encodings/iso2022_jp_1.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp_1.py rename to Darwin/lib/python3.5/encodings/iso2022_jp_1.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp_2.py b/Darwin/lib/python3.5/encodings/iso2022_jp_2.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp_2.py rename to Darwin/lib/python3.5/encodings/iso2022_jp_2.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp_2004.py b/Darwin/lib/python3.5/encodings/iso2022_jp_2004.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp_2004.py rename to Darwin/lib/python3.5/encodings/iso2022_jp_2004.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp_3.py b/Darwin/lib/python3.5/encodings/iso2022_jp_3.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp_3.py rename to Darwin/lib/python3.5/encodings/iso2022_jp_3.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_jp_ext.py b/Darwin/lib/python3.5/encodings/iso2022_jp_ext.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_jp_ext.py rename to Darwin/lib/python3.5/encodings/iso2022_jp_ext.py diff --git a/Darwin/lib/python3.4/encodings/iso2022_kr.py b/Darwin/lib/python3.5/encodings/iso2022_kr.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso2022_kr.py rename to Darwin/lib/python3.5/encodings/iso2022_kr.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_1.py b/Darwin/lib/python3.5/encodings/iso8859_1.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_1.py rename to Darwin/lib/python3.5/encodings/iso8859_1.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_10.py b/Darwin/lib/python3.5/encodings/iso8859_10.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_10.py rename to Darwin/lib/python3.5/encodings/iso8859_10.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_11.py b/Darwin/lib/python3.5/encodings/iso8859_11.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_11.py rename to Darwin/lib/python3.5/encodings/iso8859_11.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_13.py b/Darwin/lib/python3.5/encodings/iso8859_13.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_13.py rename to Darwin/lib/python3.5/encodings/iso8859_13.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_14.py b/Darwin/lib/python3.5/encodings/iso8859_14.py similarity index 100% rename from 
Darwin/lib/python3.4/encodings/iso8859_14.py rename to Darwin/lib/python3.5/encodings/iso8859_14.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_15.py b/Darwin/lib/python3.5/encodings/iso8859_15.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_15.py rename to Darwin/lib/python3.5/encodings/iso8859_15.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_16.py b/Darwin/lib/python3.5/encodings/iso8859_16.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_16.py rename to Darwin/lib/python3.5/encodings/iso8859_16.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_2.py b/Darwin/lib/python3.5/encodings/iso8859_2.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_2.py rename to Darwin/lib/python3.5/encodings/iso8859_2.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_3.py b/Darwin/lib/python3.5/encodings/iso8859_3.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_3.py rename to Darwin/lib/python3.5/encodings/iso8859_3.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_4.py b/Darwin/lib/python3.5/encodings/iso8859_4.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_4.py rename to Darwin/lib/python3.5/encodings/iso8859_4.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_5.py b/Darwin/lib/python3.5/encodings/iso8859_5.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_5.py rename to Darwin/lib/python3.5/encodings/iso8859_5.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_6.py b/Darwin/lib/python3.5/encodings/iso8859_6.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_6.py rename to Darwin/lib/python3.5/encodings/iso8859_6.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_7.py b/Darwin/lib/python3.5/encodings/iso8859_7.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_7.py rename to Darwin/lib/python3.5/encodings/iso8859_7.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_8.py b/Darwin/lib/python3.5/encodings/iso8859_8.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_8.py rename to Darwin/lib/python3.5/encodings/iso8859_8.py diff --git a/Darwin/lib/python3.4/encodings/iso8859_9.py b/Darwin/lib/python3.5/encodings/iso8859_9.py similarity index 100% rename from Darwin/lib/python3.4/encodings/iso8859_9.py rename to Darwin/lib/python3.5/encodings/iso8859_9.py diff --git a/Darwin/lib/python3.4/encodings/johab.py b/Darwin/lib/python3.5/encodings/johab.py similarity index 100% rename from Darwin/lib/python3.4/encodings/johab.py rename to Darwin/lib/python3.5/encodings/johab.py diff --git a/Darwin/lib/python3.4/encodings/koi8_r.py b/Darwin/lib/python3.5/encodings/koi8_r.py similarity index 100% rename from Darwin/lib/python3.4/encodings/koi8_r.py rename to Darwin/lib/python3.5/encodings/koi8_r.py diff --git a/Darwin/lib/python3.5/encodings/koi8_t.py b/Darwin/lib/python3.5/encodings/koi8_t.py new file mode 100644 index 0000000..b5415ba --- /dev/null +++ b/Darwin/lib/python3.5/encodings/koi8_t.py @@ -0,0 +1,308 @@ +""" Python Character Mapping Codec koi8_t +""" +# http://ru.wikipedia.org/wiki/КОИ-8 +# http://www.opensource.apple.com/source/libiconv/libiconv-4/libiconv/tests/KOI8-T.TXT + +import codecs + +### Codec APIs + +class Codec(codecs.Codec): + + def encode(self,input,errors='strict'): + return codecs.charmap_encode(input,errors,encoding_table) + + def decode(self,input,errors='strict'): + return 
codecs.charmap_decode(input,errors,decoding_table) + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return codecs.charmap_encode(input,self.errors,encoding_table)[0] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input, final=False): + return codecs.charmap_decode(input,self.errors,decoding_table)[0] + +class StreamWriter(Codec,codecs.StreamWriter): + pass + +class StreamReader(Codec,codecs.StreamReader): + pass + +### encodings module API + +def getregentry(): + return codecs.CodecInfo( + name='koi8-t', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) + + +### Decoding Table + +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' # 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' 
# 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE + '\u049b' # 0x80 -> CYRILLIC SMALL LETTER KA WITH DESCENDER + '\u0493' # 0x81 -> CYRILLIC SMALL LETTER GHE WITH STROKE + '\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK + '\u0492' # 0x83 -> CYRILLIC CAPITAL LETTER GHE WITH STROKE + '\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK + '\u2026' # 0x85 -> HORIZONTAL ELLIPSIS + '\u2020' # 0x86 -> DAGGER + '\u2021' # 0x87 -> DOUBLE DAGGER + '\ufffe' # 0x88 -> UNDEFINED + '\u2030' # 0x89 -> PER MILLE SIGN + '\u04b3' # 0x8A -> CYRILLIC SMALL LETTER HA WITH DESCENDER + '\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK + '\u04b2' # 0x8C -> CYRILLIC CAPITAL LETTER HA WITH DESCENDER + '\u04b7' # 0x8D -> CYRILLIC SMALL LETTER CHE WITH DESCENDER + '\u04b6' # 0x8E -> CYRILLIC CAPITAL LETTER CHE WITH DESCENDER + '\ufffe' # 0x8F -> UNDEFINED + '\u049a' # 0x90 -> CYRILLIC CAPITAL LETTER KA WITH DESCENDER + '\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK + '\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK + '\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK + '\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK + '\u2022' # 0x95 -> BULLET + '\u2013' # 0x96 -> EN DASH + '\u2014' # 0x97 -> EM DASH + '\ufffe' # 0x98 -> UNDEFINED + '\u2122' # 
0x99 -> TRADE MARK SIGN + '\ufffe' # 0x9A -> UNDEFINED + '\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + '\ufffe' # 0x9C -> UNDEFINED + '\ufffe' # 0x9D -> UNDEFINED + '\ufffe' # 0x9E -> UNDEFINED + '\ufffe' # 0x9F -> UNDEFINED + '\ufffe' # 0xA0 -> UNDEFINED + '\u04ef' # 0xA1 -> CYRILLIC SMALL LETTER U WITH MACRON + '\u04ee' # 0xA2 -> CYRILLIC CAPITAL LETTER U WITH MACRON + '\u0451' # 0xA3 -> CYRILLIC SMALL LETTER IO + '\xa4' # 0xA4 -> CURRENCY SIGN + '\u04e3' # 0xA5 -> CYRILLIC SMALL LETTER I WITH MACRON + '\xa6' # 0xA6 -> BROKEN BAR + '\xa7' # 0xA7 -> SECTION SIGN + '\ufffe' # 0xA8 -> UNDEFINED + '\ufffe' # 0xA9 -> UNDEFINED + '\ufffe' # 0xAA -> UNDEFINED + '\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xac' # 0xAC -> NOT SIGN + '\xad' # 0xAD -> SOFT HYPHEN + '\xae' # 0xAE -> REGISTERED SIGN + '\ufffe' # 0xAF -> UNDEFINED + '\xb0' # 0xB0 -> DEGREE SIGN + '\xb1' # 0xB1 -> PLUS-MINUS SIGN + '\xb2' # 0xB2 -> SUPERSCRIPT TWO + '\u0401' # 0xB3 -> CYRILLIC CAPITAL LETTER IO + '\ufffe' # 0xB4 -> UNDEFINED + '\u04e2' # 0xB5 -> CYRILLIC CAPITAL LETTER I WITH MACRON + '\xb6' # 0xB6 -> PILCROW SIGN + '\xb7' # 0xB7 -> MIDDLE DOT + '\ufffe' # 0xB8 -> UNDEFINED + '\u2116' # 0xB9 -> NUMERO SIGN + '\ufffe' # 0xBA -> UNDEFINED + '\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\ufffe' # 0xBC -> UNDEFINED + '\ufffe' # 0xBD -> UNDEFINED + '\ufffe' # 0xBE -> UNDEFINED + '\xa9' # 0xBF -> COPYRIGHT SIGN + '\u044e' # 0xC0 -> CYRILLIC SMALL LETTER YU + '\u0430' # 0xC1 -> CYRILLIC SMALL LETTER A + '\u0431' # 0xC2 -> CYRILLIC SMALL LETTER BE + '\u0446' # 0xC3 -> CYRILLIC SMALL LETTER TSE + '\u0434' # 0xC4 -> CYRILLIC SMALL LETTER DE + '\u0435' # 0xC5 -> CYRILLIC SMALL LETTER IE + '\u0444' # 0xC6 -> CYRILLIC SMALL LETTER EF + '\u0433' # 0xC7 -> CYRILLIC SMALL LETTER GHE + '\u0445' # 0xC8 -> CYRILLIC SMALL LETTER HA + '\u0438' # 0xC9 -> CYRILLIC SMALL LETTER I + '\u0439' # 0xCA -> CYRILLIC SMALL LETTER SHORT I + '\u043a' # 0xCB -> CYRILLIC SMALL LETTER KA + '\u043b' # 0xCC -> CYRILLIC SMALL LETTER EL + '\u043c' # 0xCD -> CYRILLIC SMALL LETTER EM + '\u043d' # 0xCE -> CYRILLIC SMALL LETTER EN + '\u043e' # 0xCF -> CYRILLIC SMALL LETTER O + '\u043f' # 0xD0 -> CYRILLIC SMALL LETTER PE + '\u044f' # 0xD1 -> CYRILLIC SMALL LETTER YA + '\u0440' # 0xD2 -> CYRILLIC SMALL LETTER ER + '\u0441' # 0xD3 -> CYRILLIC SMALL LETTER ES + '\u0442' # 0xD4 -> CYRILLIC SMALL LETTER TE + '\u0443' # 0xD5 -> CYRILLIC SMALL LETTER U + '\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE + '\u0432' # 0xD7 -> CYRILLIC SMALL LETTER VE + '\u044c' # 0xD8 -> CYRILLIC SMALL LETTER SOFT SIGN + '\u044b' # 0xD9 -> CYRILLIC SMALL LETTER YERU + '\u0437' # 0xDA -> CYRILLIC SMALL LETTER ZE + '\u0448' # 0xDB -> CYRILLIC SMALL LETTER SHA + '\u044d' # 0xDC -> CYRILLIC SMALL LETTER E + '\u0449' # 0xDD -> CYRILLIC SMALL LETTER SHCHA + '\u0447' # 0xDE -> CYRILLIC SMALL LETTER CHE + '\u044a' # 0xDF -> CYRILLIC SMALL LETTER HARD SIGN + '\u042e' # 0xE0 -> CYRILLIC CAPITAL LETTER YU + '\u0410' # 0xE1 -> CYRILLIC CAPITAL LETTER A + '\u0411' # 0xE2 -> CYRILLIC CAPITAL LETTER BE + '\u0426' # 0xE3 -> CYRILLIC CAPITAL LETTER TSE + '\u0414' # 0xE4 -> CYRILLIC CAPITAL LETTER DE + '\u0415' # 0xE5 -> CYRILLIC CAPITAL LETTER IE + '\u0424' # 0xE6 -> CYRILLIC CAPITAL LETTER EF + '\u0413' # 0xE7 -> CYRILLIC CAPITAL LETTER GHE + '\u0425' # 0xE8 -> CYRILLIC CAPITAL LETTER HA + '\u0418' # 0xE9 -> CYRILLIC CAPITAL LETTER I + '\u0419' # 0xEA -> CYRILLIC CAPITAL LETTER SHORT I + '\u041a' # 0xEB -> CYRILLIC CAPITAL LETTER KA + '\u041b' # 0xEC -> 
CYRILLIC CAPITAL LETTER EL + '\u041c' # 0xED -> CYRILLIC CAPITAL LETTER EM + '\u041d' # 0xEE -> CYRILLIC CAPITAL LETTER EN + '\u041e' # 0xEF -> CYRILLIC CAPITAL LETTER O + '\u041f' # 0xF0 -> CYRILLIC CAPITAL LETTER PE + '\u042f' # 0xF1 -> CYRILLIC CAPITAL LETTER YA + '\u0420' # 0xF2 -> CYRILLIC CAPITAL LETTER ER + '\u0421' # 0xF3 -> CYRILLIC CAPITAL LETTER ES + '\u0422' # 0xF4 -> CYRILLIC CAPITAL LETTER TE + '\u0423' # 0xF5 -> CYRILLIC CAPITAL LETTER U + '\u0416' # 0xF6 -> CYRILLIC CAPITAL LETTER ZHE + '\u0412' # 0xF7 -> CYRILLIC CAPITAL LETTER VE + '\u042c' # 0xF8 -> CYRILLIC CAPITAL LETTER SOFT SIGN + '\u042b' # 0xF9 -> CYRILLIC CAPITAL LETTER YERU + '\u0417' # 0xFA -> CYRILLIC CAPITAL LETTER ZE + '\u0428' # 0xFB -> CYRILLIC CAPITAL LETTER SHA + '\u042d' # 0xFC -> CYRILLIC CAPITAL LETTER E + '\u0429' # 0xFD -> CYRILLIC CAPITAL LETTER SHCHA + '\u0427' # 0xFE -> CYRILLIC CAPITAL LETTER CHE + '\u042a' # 0xFF -> CYRILLIC CAPITAL LETTER HARD SIGN +) + +### Encoding table +encoding_table=codecs.charmap_build(decoding_table) diff --git a/Darwin/lib/python3.4/encodings/koi8_u.py b/Darwin/lib/python3.5/encodings/koi8_u.py similarity index 100% rename from Darwin/lib/python3.4/encodings/koi8_u.py rename to Darwin/lib/python3.5/encodings/koi8_u.py diff --git a/Darwin/lib/python3.5/encodings/kz1048.py b/Darwin/lib/python3.5/encodings/kz1048.py new file mode 100644 index 0000000..712aee6 --- /dev/null +++ b/Darwin/lib/python3.5/encodings/kz1048.py @@ -0,0 +1,307 @@ +""" Python Character Mapping Codec kz1048 generated from 'MAPPINGS/VENDORS/MISC/KZ1048.TXT' with gencodec.py. + +"""#" + +import codecs + +### Codec APIs + +class Codec(codecs.Codec): + + def encode(self, input, errors='strict'): + return codecs.charmap_encode(input, errors, encoding_table) + + def decode(self, input, errors='strict'): + return codecs.charmap_decode(input, errors, decoding_table) + +class IncrementalEncoder(codecs.IncrementalEncoder): + def encode(self, input, final=False): + return codecs.charmap_encode(input, self.errors, encoding_table)[0] + +class IncrementalDecoder(codecs.IncrementalDecoder): + def decode(self, input, final=False): + return codecs.charmap_decode(input, self.errors, decoding_table)[0] + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +### encodings module API + +def getregentry(): + return codecs.CodecInfo( + name='kz1048', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamreader=StreamReader, + streamwriter=StreamWriter, + ) + + +### Decoding Table + +decoding_table = ( + '\x00' # 0x00 -> NULL + '\x01' # 0x01 -> START OF HEADING + '\x02' # 0x02 -> START OF TEXT + '\x03' # 0x03 -> END OF TEXT + '\x04' # 0x04 -> END OF TRANSMISSION + '\x05' # 0x05 -> ENQUIRY + '\x06' # 0x06 -> ACKNOWLEDGE + '\x07' # 0x07 -> BELL + '\x08' # 0x08 -> BACKSPACE + '\t' # 0x09 -> HORIZONTAL TABULATION + '\n' # 0x0A -> LINE FEED + '\x0b' # 0x0B -> VERTICAL TABULATION + '\x0c' # 0x0C -> FORM FEED + '\r' # 0x0D -> CARRIAGE RETURN + '\x0e' # 0x0E -> SHIFT OUT + '\x0f' # 0x0F -> SHIFT IN + '\x10' # 0x10 -> DATA LINK ESCAPE + '\x11' # 0x11 -> DEVICE CONTROL ONE + '\x12' # 0x12 -> DEVICE CONTROL TWO + '\x13' # 0x13 -> DEVICE CONTROL THREE + '\x14' # 0x14 -> DEVICE CONTROL FOUR + '\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE + '\x16' # 0x16 -> SYNCHRONOUS IDLE + '\x17' # 0x17 -> END OF TRANSMISSION BLOCK + '\x18' # 0x18 -> CANCEL + '\x19' # 0x19 -> END OF MEDIUM + '\x1a' # 0x1A -> 
SUBSTITUTE + '\x1b' # 0x1B -> ESCAPE + '\x1c' # 0x1C -> FILE SEPARATOR + '\x1d' # 0x1D -> GROUP SEPARATOR + '\x1e' # 0x1E -> RECORD SEPARATOR + '\x1f' # 0x1F -> UNIT SEPARATOR + ' ' # 0x20 -> SPACE + '!' # 0x21 -> EXCLAMATION MARK + '"' # 0x22 -> QUOTATION MARK + '#' # 0x23 -> NUMBER SIGN + '$' # 0x24 -> DOLLAR SIGN + '%' # 0x25 -> PERCENT SIGN + '&' # 0x26 -> AMPERSAND + "'" # 0x27 -> APOSTROPHE + '(' # 0x28 -> LEFT PARENTHESIS + ')' # 0x29 -> RIGHT PARENTHESIS + '*' # 0x2A -> ASTERISK + '+' # 0x2B -> PLUS SIGN + ',' # 0x2C -> COMMA + '-' # 0x2D -> HYPHEN-MINUS + '.' # 0x2E -> FULL STOP + '/' # 0x2F -> SOLIDUS + '0' # 0x30 -> DIGIT ZERO + '1' # 0x31 -> DIGIT ONE + '2' # 0x32 -> DIGIT TWO + '3' # 0x33 -> DIGIT THREE + '4' # 0x34 -> DIGIT FOUR + '5' # 0x35 -> DIGIT FIVE + '6' # 0x36 -> DIGIT SIX + '7' # 0x37 -> DIGIT SEVEN + '8' # 0x38 -> DIGIT EIGHT + '9' # 0x39 -> DIGIT NINE + ':' # 0x3A -> COLON + ';' # 0x3B -> SEMICOLON + '<' # 0x3C -> LESS-THAN SIGN + '=' # 0x3D -> EQUALS SIGN + '>' # 0x3E -> GREATER-THAN SIGN + '?' # 0x3F -> QUESTION MARK + '@' # 0x40 -> COMMERCIAL AT + 'A' # 0x41 -> LATIN CAPITAL LETTER A + 'B' # 0x42 -> LATIN CAPITAL LETTER B + 'C' # 0x43 -> LATIN CAPITAL LETTER C + 'D' # 0x44 -> LATIN CAPITAL LETTER D + 'E' # 0x45 -> LATIN CAPITAL LETTER E + 'F' # 0x46 -> LATIN CAPITAL LETTER F + 'G' # 0x47 -> LATIN CAPITAL LETTER G + 'H' # 0x48 -> LATIN CAPITAL LETTER H + 'I' # 0x49 -> LATIN CAPITAL LETTER I + 'J' # 0x4A -> LATIN CAPITAL LETTER J + 'K' # 0x4B -> LATIN CAPITAL LETTER K + 'L' # 0x4C -> LATIN CAPITAL LETTER L + 'M' # 0x4D -> LATIN CAPITAL LETTER M + 'N' # 0x4E -> LATIN CAPITAL LETTER N + 'O' # 0x4F -> LATIN CAPITAL LETTER O + 'P' # 0x50 -> LATIN CAPITAL LETTER P + 'Q' # 0x51 -> LATIN CAPITAL LETTER Q + 'R' # 0x52 -> LATIN CAPITAL LETTER R + 'S' # 0x53 -> LATIN CAPITAL LETTER S + 'T' # 0x54 -> LATIN CAPITAL LETTER T + 'U' # 0x55 -> LATIN CAPITAL LETTER U + 'V' # 0x56 -> LATIN CAPITAL LETTER V + 'W' # 0x57 -> LATIN CAPITAL LETTER W + 'X' # 0x58 -> LATIN CAPITAL LETTER X + 'Y' # 0x59 -> LATIN CAPITAL LETTER Y + 'Z' # 0x5A -> LATIN CAPITAL LETTER Z + '[' # 0x5B -> LEFT SQUARE BRACKET + '\\' # 0x5C -> REVERSE SOLIDUS + ']' # 0x5D -> RIGHT SQUARE BRACKET + '^' # 0x5E -> CIRCUMFLEX ACCENT + '_' # 0x5F -> LOW LINE + '`' # 0x60 -> GRAVE ACCENT + 'a' # 0x61 -> LATIN SMALL LETTER A + 'b' # 0x62 -> LATIN SMALL LETTER B + 'c' # 0x63 -> LATIN SMALL LETTER C + 'd' # 0x64 -> LATIN SMALL LETTER D + 'e' # 0x65 -> LATIN SMALL LETTER E + 'f' # 0x66 -> LATIN SMALL LETTER F + 'g' # 0x67 -> LATIN SMALL LETTER G + 'h' # 0x68 -> LATIN SMALL LETTER H + 'i' # 0x69 -> LATIN SMALL LETTER I + 'j' # 0x6A -> LATIN SMALL LETTER J + 'k' # 0x6B -> LATIN SMALL LETTER K + 'l' # 0x6C -> LATIN SMALL LETTER L + 'm' # 0x6D -> LATIN SMALL LETTER M + 'n' # 0x6E -> LATIN SMALL LETTER N + 'o' # 0x6F -> LATIN SMALL LETTER O + 'p' # 0x70 -> LATIN SMALL LETTER P + 'q' # 0x71 -> LATIN SMALL LETTER Q + 'r' # 0x72 -> LATIN SMALL LETTER R + 's' # 0x73 -> LATIN SMALL LETTER S + 't' # 0x74 -> LATIN SMALL LETTER T + 'u' # 0x75 -> LATIN SMALL LETTER U + 'v' # 0x76 -> LATIN SMALL LETTER V + 'w' # 0x77 -> LATIN SMALL LETTER W + 'x' # 0x78 -> LATIN SMALL LETTER X + 'y' # 0x79 -> LATIN SMALL LETTER Y + 'z' # 0x7A -> LATIN SMALL LETTER Z + '{' # 0x7B -> LEFT CURLY BRACKET + '|' # 0x7C -> VERTICAL LINE + '}' # 0x7D -> RIGHT CURLY BRACKET + '~' # 0x7E -> TILDE + '\x7f' # 0x7F -> DELETE + '\u0402' # 0x80 -> CYRILLIC CAPITAL LETTER DJE + '\u0403' # 0x81 -> CYRILLIC CAPITAL LETTER GJE + '\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION 
MARK + '\u0453' # 0x83 -> CYRILLIC SMALL LETTER GJE + '\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK + '\u2026' # 0x85 -> HORIZONTAL ELLIPSIS + '\u2020' # 0x86 -> DAGGER + '\u2021' # 0x87 -> DOUBLE DAGGER + '\u20ac' # 0x88 -> EURO SIGN + '\u2030' # 0x89 -> PER MILLE SIGN + '\u0409' # 0x8A -> CYRILLIC CAPITAL LETTER LJE + '\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK + '\u040a' # 0x8C -> CYRILLIC CAPITAL LETTER NJE + '\u049a' # 0x8D -> CYRILLIC CAPITAL LETTER KA WITH DESCENDER + '\u04ba' # 0x8E -> CYRILLIC CAPITAL LETTER SHHA + '\u040f' # 0x8F -> CYRILLIC CAPITAL LETTER DZHE + '\u0452' # 0x90 -> CYRILLIC SMALL LETTER DJE + '\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK + '\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK + '\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK + '\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK + '\u2022' # 0x95 -> BULLET + '\u2013' # 0x96 -> EN DASH + '\u2014' # 0x97 -> EM DASH + '\ufffe' # 0x98 -> UNDEFINED + '\u2122' # 0x99 -> TRADE MARK SIGN + '\u0459' # 0x9A -> CYRILLIC SMALL LETTER LJE + '\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + '\u045a' # 0x9C -> CYRILLIC SMALL LETTER NJE + '\u049b' # 0x9D -> CYRILLIC SMALL LETTER KA WITH DESCENDER + '\u04bb' # 0x9E -> CYRILLIC SMALL LETTER SHHA + '\u045f' # 0x9F -> CYRILLIC SMALL LETTER DZHE + '\xa0' # 0xA0 -> NO-BREAK SPACE + '\u04b0' # 0xA1 -> CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE + '\u04b1' # 0xA2 -> CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE + '\u04d8' # 0xA3 -> CYRILLIC CAPITAL LETTER SCHWA + '\xa4' # 0xA4 -> CURRENCY SIGN + '\u04e8' # 0xA5 -> CYRILLIC CAPITAL LETTER BARRED O + '\xa6' # 0xA6 -> BROKEN BAR + '\xa7' # 0xA7 -> SECTION SIGN + '\u0401' # 0xA8 -> CYRILLIC CAPITAL LETTER IO + '\xa9' # 0xA9 -> COPYRIGHT SIGN + '\u0492' # 0xAA -> CYRILLIC CAPITAL LETTER GHE WITH STROKE + '\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK + '\xac' # 0xAC -> NOT SIGN + '\xad' # 0xAD -> SOFT HYPHEN + '\xae' # 0xAE -> REGISTERED SIGN + '\u04ae' # 0xAF -> CYRILLIC CAPITAL LETTER STRAIGHT U + '\xb0' # 0xB0 -> DEGREE SIGN + '\xb1' # 0xB1 -> PLUS-MINUS SIGN + '\u0406' # 0xB2 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I + '\u0456' # 0xB3 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I + '\u04e9' # 0xB4 -> CYRILLIC SMALL LETTER BARRED O + '\xb5' # 0xB5 -> MICRO SIGN + '\xb6' # 0xB6 -> PILCROW SIGN + '\xb7' # 0xB7 -> MIDDLE DOT + '\u0451' # 0xB8 -> CYRILLIC SMALL LETTER IO + '\u2116' # 0xB9 -> NUMERO SIGN + '\u0493' # 0xBA -> CYRILLIC SMALL LETTER GHE WITH STROKE + '\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK + '\u04d9' # 0xBC -> CYRILLIC SMALL LETTER SCHWA + '\u04a2' # 0xBD -> CYRILLIC CAPITAL LETTER EN WITH DESCENDER + '\u04a3' # 0xBE -> CYRILLIC SMALL LETTER EN WITH DESCENDER + '\u04af' # 0xBF -> CYRILLIC SMALL LETTER STRAIGHT U + '\u0410' # 0xC0 -> CYRILLIC CAPITAL LETTER A + '\u0411' # 0xC1 -> CYRILLIC CAPITAL LETTER BE + '\u0412' # 0xC2 -> CYRILLIC CAPITAL LETTER VE + '\u0413' # 0xC3 -> CYRILLIC CAPITAL LETTER GHE + '\u0414' # 0xC4 -> CYRILLIC CAPITAL LETTER DE + '\u0415' # 0xC5 -> CYRILLIC CAPITAL LETTER IE + '\u0416' # 0xC6 -> CYRILLIC CAPITAL LETTER ZHE + '\u0417' # 0xC7 -> CYRILLIC CAPITAL LETTER ZE + '\u0418' # 0xC8 -> CYRILLIC CAPITAL LETTER I + '\u0419' # 0xC9 -> CYRILLIC CAPITAL LETTER SHORT I + '\u041a' # 0xCA -> CYRILLIC CAPITAL LETTER KA + '\u041b' # 0xCB -> CYRILLIC CAPITAL LETTER EL + '\u041c' # 0xCC -> CYRILLIC CAPITAL LETTER EM + '\u041d' # 0xCD -> CYRILLIC CAPITAL LETTER EN + '\u041e' # 0xCE -> CYRILLIC CAPITAL LETTER O + '\u041f' # 0xCF -> 
CYRILLIC CAPITAL LETTER PE + '\u0420' # 0xD0 -> CYRILLIC CAPITAL LETTER ER + '\u0421' # 0xD1 -> CYRILLIC CAPITAL LETTER ES + '\u0422' # 0xD2 -> CYRILLIC CAPITAL LETTER TE + '\u0423' # 0xD3 -> CYRILLIC CAPITAL LETTER U + '\u0424' # 0xD4 -> CYRILLIC CAPITAL LETTER EF + '\u0425' # 0xD5 -> CYRILLIC CAPITAL LETTER HA + '\u0426' # 0xD6 -> CYRILLIC CAPITAL LETTER TSE + '\u0427' # 0xD7 -> CYRILLIC CAPITAL LETTER CHE + '\u0428' # 0xD8 -> CYRILLIC CAPITAL LETTER SHA + '\u0429' # 0xD9 -> CYRILLIC CAPITAL LETTER SHCHA + '\u042a' # 0xDA -> CYRILLIC CAPITAL LETTER HARD SIGN + '\u042b' # 0xDB -> CYRILLIC CAPITAL LETTER YERU + '\u042c' # 0xDC -> CYRILLIC CAPITAL LETTER SOFT SIGN + '\u042d' # 0xDD -> CYRILLIC CAPITAL LETTER E + '\u042e' # 0xDE -> CYRILLIC CAPITAL LETTER YU + '\u042f' # 0xDF -> CYRILLIC CAPITAL LETTER YA + '\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A + '\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE + '\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE + '\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE + '\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE + '\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE + '\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE + '\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE + '\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I + '\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I + '\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA + '\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL + '\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM + '\u043d' # 0xED -> CYRILLIC SMALL LETTER EN + '\u043e' # 0xEE -> CYRILLIC SMALL LETTER O + '\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE + '\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER + '\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES + '\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE + '\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U + '\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF + '\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA + '\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE + '\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE + '\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA + '\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA + '\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN + '\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU + '\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN + '\u044d' # 0xFD -> CYRILLIC SMALL LETTER E + '\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU + '\u044f' # 0xFF -> CYRILLIC SMALL LETTER YA +) + +### Encoding table +encoding_table = codecs.charmap_build(decoding_table) diff --git a/Darwin/lib/python3.4/encodings/latin_1.py b/Darwin/lib/python3.5/encodings/latin_1.py similarity index 100% rename from Darwin/lib/python3.4/encodings/latin_1.py rename to Darwin/lib/python3.5/encodings/latin_1.py diff --git a/Darwin/lib/python3.4/encodings/mac_arabic.py b/Darwin/lib/python3.5/encodings/mac_arabic.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_arabic.py rename to Darwin/lib/python3.5/encodings/mac_arabic.py diff --git a/Darwin/lib/python3.4/encodings/mac_centeuro.py b/Darwin/lib/python3.5/encodings/mac_centeuro.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_centeuro.py rename to Darwin/lib/python3.5/encodings/mac_centeuro.py diff --git a/Darwin/lib/python3.4/encodings/mac_croatian.py b/Darwin/lib/python3.5/encodings/mac_croatian.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_croatian.py rename to Darwin/lib/python3.5/encodings/mac_croatian.py diff --git a/Darwin/lib/python3.4/encodings/mac_cyrillic.py b/Darwin/lib/python3.5/encodings/mac_cyrillic.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_cyrillic.py rename to 
Darwin/lib/python3.5/encodings/mac_cyrillic.py diff --git a/Darwin/lib/python3.4/encodings/mac_farsi.py b/Darwin/lib/python3.5/encodings/mac_farsi.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_farsi.py rename to Darwin/lib/python3.5/encodings/mac_farsi.py diff --git a/Darwin/lib/python3.4/encodings/mac_greek.py b/Darwin/lib/python3.5/encodings/mac_greek.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_greek.py rename to Darwin/lib/python3.5/encodings/mac_greek.py diff --git a/Darwin/lib/python3.4/encodings/mac_iceland.py b/Darwin/lib/python3.5/encodings/mac_iceland.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_iceland.py rename to Darwin/lib/python3.5/encodings/mac_iceland.py diff --git a/Darwin/lib/python3.4/encodings/mac_latin2.py b/Darwin/lib/python3.5/encodings/mac_latin2.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_latin2.py rename to Darwin/lib/python3.5/encodings/mac_latin2.py diff --git a/Darwin/lib/python3.4/encodings/mac_roman.py b/Darwin/lib/python3.5/encodings/mac_roman.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_roman.py rename to Darwin/lib/python3.5/encodings/mac_roman.py diff --git a/Darwin/lib/python3.4/encodings/mac_romanian.py b/Darwin/lib/python3.5/encodings/mac_romanian.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_romanian.py rename to Darwin/lib/python3.5/encodings/mac_romanian.py diff --git a/Darwin/lib/python3.4/encodings/mac_turkish.py b/Darwin/lib/python3.5/encodings/mac_turkish.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mac_turkish.py rename to Darwin/lib/python3.5/encodings/mac_turkish.py diff --git a/Darwin/lib/python3.4/encodings/mbcs.py b/Darwin/lib/python3.5/encodings/mbcs.py similarity index 100% rename from Darwin/lib/python3.4/encodings/mbcs.py rename to Darwin/lib/python3.5/encodings/mbcs.py diff --git a/Darwin/lib/python3.4/encodings/palmos.py b/Darwin/lib/python3.5/encodings/palmos.py similarity index 100% rename from Darwin/lib/python3.4/encodings/palmos.py rename to Darwin/lib/python3.5/encodings/palmos.py diff --git a/Darwin/lib/python3.4/encodings/ptcp154.py b/Darwin/lib/python3.5/encodings/ptcp154.py similarity index 100% rename from Darwin/lib/python3.4/encodings/ptcp154.py rename to Darwin/lib/python3.5/encodings/ptcp154.py diff --git a/Darwin/lib/python3.4/encodings/punycode.py b/Darwin/lib/python3.5/encodings/punycode.py similarity index 100% rename from Darwin/lib/python3.4/encodings/punycode.py rename to Darwin/lib/python3.5/encodings/punycode.py diff --git a/Darwin/lib/python3.4/encodings/quopri_codec.py b/Darwin/lib/python3.5/encodings/quopri_codec.py similarity index 100% rename from Darwin/lib/python3.4/encodings/quopri_codec.py rename to Darwin/lib/python3.5/encodings/quopri_codec.py diff --git a/Darwin/lib/python3.4/encodings/raw_unicode_escape.py b/Darwin/lib/python3.5/encodings/raw_unicode_escape.py similarity index 100% rename from Darwin/lib/python3.4/encodings/raw_unicode_escape.py rename to Darwin/lib/python3.5/encodings/raw_unicode_escape.py diff --git a/Darwin/lib/python3.4/encodings/rot_13.py b/Darwin/lib/python3.5/encodings/rot_13.py similarity index 100% rename from Darwin/lib/python3.4/encodings/rot_13.py rename to Darwin/lib/python3.5/encodings/rot_13.py diff --git a/Darwin/lib/python3.4/encodings/shift_jis.py b/Darwin/lib/python3.5/encodings/shift_jis.py similarity index 100% rename from Darwin/lib/python3.4/encodings/shift_jis.py rename to 
Darwin/lib/python3.5/encodings/shift_jis.py diff --git a/Darwin/lib/python3.4/encodings/shift_jis_2004.py b/Darwin/lib/python3.5/encodings/shift_jis_2004.py similarity index 100% rename from Darwin/lib/python3.4/encodings/shift_jis_2004.py rename to Darwin/lib/python3.5/encodings/shift_jis_2004.py diff --git a/Darwin/lib/python3.4/encodings/shift_jisx0213.py b/Darwin/lib/python3.5/encodings/shift_jisx0213.py similarity index 100% rename from Darwin/lib/python3.4/encodings/shift_jisx0213.py rename to Darwin/lib/python3.5/encodings/shift_jisx0213.py diff --git a/Darwin/lib/python3.4/encodings/tis_620.py b/Darwin/lib/python3.5/encodings/tis_620.py similarity index 100% rename from Darwin/lib/python3.4/encodings/tis_620.py rename to Darwin/lib/python3.5/encodings/tis_620.py diff --git a/Darwin/lib/python3.4/encodings/undefined.py b/Darwin/lib/python3.5/encodings/undefined.py similarity index 100% rename from Darwin/lib/python3.4/encodings/undefined.py rename to Darwin/lib/python3.5/encodings/undefined.py diff --git a/Darwin/lib/python3.4/encodings/unicode_escape.py b/Darwin/lib/python3.5/encodings/unicode_escape.py similarity index 100% rename from Darwin/lib/python3.4/encodings/unicode_escape.py rename to Darwin/lib/python3.5/encodings/unicode_escape.py diff --git a/Darwin/lib/python3.4/encodings/unicode_internal.py b/Darwin/lib/python3.5/encodings/unicode_internal.py similarity index 100% rename from Darwin/lib/python3.4/encodings/unicode_internal.py rename to Darwin/lib/python3.5/encodings/unicode_internal.py diff --git a/Darwin/lib/python3.4/encodings/utf_16.py b/Darwin/lib/python3.5/encodings/utf_16.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_16.py rename to Darwin/lib/python3.5/encodings/utf_16.py diff --git a/Darwin/lib/python3.4/encodings/utf_16_be.py b/Darwin/lib/python3.5/encodings/utf_16_be.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_16_be.py rename to Darwin/lib/python3.5/encodings/utf_16_be.py diff --git a/Darwin/lib/python3.4/encodings/utf_16_le.py b/Darwin/lib/python3.5/encodings/utf_16_le.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_16_le.py rename to Darwin/lib/python3.5/encodings/utf_16_le.py diff --git a/Darwin/lib/python3.4/encodings/utf_32.py b/Darwin/lib/python3.5/encodings/utf_32.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_32.py rename to Darwin/lib/python3.5/encodings/utf_32.py diff --git a/Darwin/lib/python3.4/encodings/utf_32_be.py b/Darwin/lib/python3.5/encodings/utf_32_be.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_32_be.py rename to Darwin/lib/python3.5/encodings/utf_32_be.py diff --git a/Darwin/lib/python3.4/encodings/utf_32_le.py b/Darwin/lib/python3.5/encodings/utf_32_le.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_32_le.py rename to Darwin/lib/python3.5/encodings/utf_32_le.py diff --git a/Darwin/lib/python3.4/encodings/utf_7.py b/Darwin/lib/python3.5/encodings/utf_7.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_7.py rename to Darwin/lib/python3.5/encodings/utf_7.py diff --git a/Darwin/lib/python3.4/encodings/utf_8.py b/Darwin/lib/python3.5/encodings/utf_8.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_8.py rename to Darwin/lib/python3.5/encodings/utf_8.py diff --git a/Darwin/lib/python3.4/encodings/utf_8_sig.py b/Darwin/lib/python3.5/encodings/utf_8_sig.py similarity index 100% rename from Darwin/lib/python3.4/encodings/utf_8_sig.py rename to 
Darwin/lib/python3.5/encodings/utf_8_sig.py diff --git a/Darwin/lib/python3.4/encodings/uu_codec.py b/Darwin/lib/python3.5/encodings/uu_codec.py similarity index 97% rename from Darwin/lib/python3.4/encodings/uu_codec.py rename to Darwin/lib/python3.5/encodings/uu_codec.py index 1454095..2a5728f 100644 --- a/Darwin/lib/python3.4/encodings/uu_codec.py +++ b/Darwin/lib/python3.5/encodings/uu_codec.py @@ -54,7 +54,7 @@ def uu_decode(input, errors='strict'): data = binascii.a2b_uu(s) except binascii.Error as v: # Workaround for broken uuencoders by /Fredrik Lundh - nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3 + nbytes = (((s[0]-32) & 63) * 4 + 5) // 3 data = binascii.a2b_uu(s[:nbytes]) #sys.stderr.write("Warning: %s\n" % str(v)) write(data) diff --git a/Darwin/lib/python3.4/encodings/zlib_codec.py b/Darwin/lib/python3.5/encodings/zlib_codec.py similarity index 100% rename from Darwin/lib/python3.4/encodings/zlib_codec.py rename to Darwin/lib/python3.5/encodings/zlib_codec.py diff --git a/Darwin/lib/python3.4/ensurepip/__init__.py b/Darwin/lib/python3.5/ensurepip/__init__.py similarity index 98% rename from Darwin/lib/python3.4/ensurepip/__init__.py rename to Darwin/lib/python3.5/ensurepip/__init__.py index 84c2125..1258833 100644 --- a/Darwin/lib/python3.4/ensurepip/__init__.py +++ b/Darwin/lib/python3.5/ensurepip/__init__.py @@ -8,9 +8,9 @@ import tempfile __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "2.1" +_SETUPTOOLS_VERSION = "18.2" -_PIP_VERSION = "1.5.6" +_PIP_VERSION = "7.1.2" # pip currently requires ssl support, so we try to provide a nicer # error message when that is missing (http://bugs.python.org/issue19744) @@ -137,7 +137,7 @@ def _uninstall_helper(*, verbosity=0): _disable_pip_configuration_settings() # Construct the arguments to be passed to the pip command - args = ["uninstall", "-y"] + args = ["uninstall", "-y", "--disable-pip-version-check"] if verbosity: args += ["-" + "v" * verbosity] diff --git a/Darwin/lib/python3.4/ensurepip/__main__.py b/Darwin/lib/python3.5/ensurepip/__main__.py similarity index 100% rename from Darwin/lib/python3.4/ensurepip/__main__.py rename to Darwin/lib/python3.5/ensurepip/__main__.py diff --git a/Darwin/lib/python3.5/ensurepip/_bundled/pip-7.1.2-py2.py3-none-any.whl b/Darwin/lib/python3.5/ensurepip/_bundled/pip-7.1.2-py2.py3-none-any.whl new file mode 100644 index 0000000..5e49015 Binary files /dev/null and b/Darwin/lib/python3.5/ensurepip/_bundled/pip-7.1.2-py2.py3-none-any.whl differ diff --git a/Darwin/lib/python3.4/ensurepip/_bundled/setuptools-2.1-py2.py3-none-any.whl b/Darwin/lib/python3.5/ensurepip/_bundled/setuptools-18.2-py2.py3-none-any.whl similarity index 56% rename from Darwin/lib/python3.4/ensurepip/_bundled/setuptools-2.1-py2.py3-none-any.whl rename to Darwin/lib/python3.5/ensurepip/_bundled/setuptools-18.2-py2.py3-none-any.whl index ed77b59..f4288d6 100644 Binary files a/Darwin/lib/python3.4/ensurepip/_bundled/setuptools-2.1-py2.py3-none-any.whl and b/Darwin/lib/python3.5/ensurepip/_bundled/setuptools-18.2-py2.py3-none-any.whl differ diff --git a/Darwin/lib/python3.4/ensurepip/_uninstall.py b/Darwin/lib/python3.5/ensurepip/_uninstall.py similarity index 100% rename from Darwin/lib/python3.4/ensurepip/_uninstall.py rename to Darwin/lib/python3.5/ensurepip/_uninstall.py diff --git a/Darwin/lib/python3.4/enum.py b/Darwin/lib/python3.5/enum.py similarity index 88% rename from Darwin/lib/python3.4/enum.py rename to Darwin/lib/python3.5/enum.py index 844a956..c28f345 100644 --- a/Darwin/lib/python3.4/enum.py +++ 
b/Darwin/lib/python3.5/enum.py @@ -106,12 +106,20 @@ class EnumMeta(type): raise ValueError('Invalid enum member name: {0}'.format( ','.join(invalid_names))) + # create a default docstring if one has not been provided + if '__doc__' not in classdict: + classdict['__doc__'] = 'An enumeration.' + # create our new Enum type enum_class = super().__new__(metacls, cls, bases, classdict) enum_class._member_names_ = [] # names in definition order enum_class._member_map_ = OrderedDict() # name->value map enum_class._member_type_ = member_type + # save attributes from super classes so we know if we can take + # the shortcut of storing members in the class dict + base_attributes = {a for b in bases for a in b.__dict__} + # Reverse value->name map for hashable values. enum_class._value2member_map_ = {} @@ -159,12 +167,17 @@ class EnumMeta(type): # If another member with the same value was already defined, the # new member becomes an alias to the existing one. for name, canonical_member in enum_class._member_map_.items(): - if canonical_member.value == enum_member._value_: + if canonical_member._value_ == enum_member._value_: enum_member = canonical_member break else: # Aliases don't appear in member names (only in __members__). enum_class._member_names_.append(member_name) + # performance boost for any member that would not shadow + # a DynamicClassAttribute + if member_name not in base_attributes: + setattr(enum_class, member_name, enum_member) + # now add to _member_map_ enum_class._member_map_[member_name] = enum_member try: # This may fail if value is not hashable. We can't add the value @@ -193,7 +206,7 @@ class EnumMeta(type): enum_class.__new__ = Enum.__new__ return enum_class - def __call__(cls, value, names=None, *, module=None, qualname=None, type=None): + def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1): """Either returns an existing member, or creates a new enum class. This method is used both when an enum class is given a value to match @@ -205,7 +218,7 @@ class EnumMeta(type): `value` will be the name of the new class. `names` should be either a string of white-space/comma delimited names - (values will start at 1), or an iterator/mapping of name, value pairs. + (values will start at `start`), or an iterator/mapping of name, value pairs. `module` should be set to the module this class is being created in; if it is not set, an attempt to find that module will be made, but if @@ -221,10 +234,10 @@ class EnumMeta(type): if names is None: # simple value lookup return cls.__new__(cls, value) # otherwise, functional API: we're creating a new Enum type - return cls._create_(value, names, module=module, qualname=qualname, type=type) + return cls._create_(value, names, module=module, qualname=qualname, type=type, start=start) def __contains__(cls, member): - return isinstance(member, cls) and member.name in cls._member_map_ + return isinstance(member, cls) and member._name_ in cls._member_map_ def __delattr__(cls, attr): # nicer error message when someone tries to delete an attribute @@ -292,16 +305,16 @@ class EnumMeta(type): raise AttributeError('Cannot reassign members.') super().__setattr__(name, value) - def _create_(cls, class_name, names=None, *, module=None, qualname=None, type=None): + def _create_(cls, class_name, names=None, *, module=None, qualname=None, type=None, start=1): """Convenience method to create a new Enum class. `names` can be: * A string containing member names, separated either with spaces or - commas. 
Values are auto-numbered from 1. - * An iterable of member names. Values are auto-numbered from 1. + commas. Values are incremented by 1 from `start`. + * An iterable of member names. Values are incremented by 1 from `start`. * An iterable of (member name, value) pairs. - * A mapping of member name -> value. + * A mapping of member name -> value pairs. """ metacls = cls.__class__ @@ -312,7 +325,7 @@ class EnumMeta(type): if isinstance(names, str): names = names.replace(',', ' ').split() if isinstance(names, (tuple, list)) and isinstance(names[0], str): - names = [(e, i) for (i, e) in enumerate(names, 1)] + names = [(e, i) for (i, e) in enumerate(names, start)] # Here, names is either an iterable of (name, value) or a mapping. for item in names: @@ -452,9 +465,9 @@ class Enum(metaclass=EnumMeta): except TypeError: # not there, now do long search -- O(n) behavior for member in cls._member_map_.values(): - if member.value == value: + if member._value_ == value: return member - raise ValueError("%s is not a valid %s" % (value, cls.__name__)) + raise ValueError("%r is not a valid %s" % (value, cls.__name__)) def __repr__(self): return "<%s.%s: %r>" % ( @@ -464,9 +477,13 @@ class Enum(metaclass=EnumMeta): return "%s.%s" % (self.__class__.__name__, self._name_) def __dir__(self): - added_behavior = [m for m in self.__class__.__dict__ if m[0] != '_'] - return (['__class__', '__doc__', '__module__', 'name', 'value'] + - added_behavior) + added_behavior = [ + m + for cls in self.__class__.mro() + for m in cls.__dict__ + if m[0] != '_' and m not in self._member_map_ + ] + return (['__class__', '__doc__', '__module__'] + added_behavior) def __format__(self, format_spec): # mixed-in Enums should use the mixed-in type's __format__, otherwise @@ -480,7 +497,7 @@ class Enum(metaclass=EnumMeta): # mix-in branch else: cls = self._member_type_ - val = self.value + val = self._value_ return cls.__format__(val, format_spec) def __hash__(self): @@ -506,11 +523,37 @@ class Enum(metaclass=EnumMeta): """The value of the Enum member.""" return self._value_ + @classmethod + def _convert(cls, name, module, filter, source=None): + """ + Create a new Enum subclass that replaces a collection of global constants + """ + # convert all constants from source (or module) that pass filter() to + # a new Enum called name, and export the enum and its members back to + # module; + # also, replace the __reduce_ex__ method so unpickling works in + # previous Python versions + module_globals = vars(sys.modules[module]) + if source: + source = vars(source) + else: + source = module_globals + members = {name: value for name, value in source.items() + if filter(name)} + cls = cls(name, members, module=module) + cls.__reduce_ex__ = _reduce_ex_by_name + module_globals.update(cls.__members__) + module_globals[name] = cls + return cls + class IntEnum(int, Enum): """Enum where members are also (and must be) ints""" +def _reduce_ex_by_name(self, proto): + return self.name + def unique(enumeration): """Class decorator for enumerations ensuring unique member values.""" duplicates = [] diff --git a/Darwin/lib/python3.4/filecmp.py b/Darwin/lib/python3.5/filecmp.py similarity index 100% rename from Darwin/lib/python3.4/filecmp.py rename to Darwin/lib/python3.5/filecmp.py diff --git a/Darwin/lib/python3.4/fileinput.py b/Darwin/lib/python3.5/fileinput.py similarity index 94% rename from Darwin/lib/python3.4/fileinput.py rename to Darwin/lib/python3.5/fileinput.py index 87758ad..af810d1 100644 --- a/Darwin/lib/python3.4/fileinput.py +++ 
b/Darwin/lib/python3.5/fileinput.py @@ -238,8 +238,10 @@ class FileInput: self.close() def close(self): - self.nextfile() - self._files = () + try: + self.nextfile() + finally: + self._files = () def __enter__(self): return self @@ -275,29 +277,31 @@ class FileInput: def nextfile(self): savestdout = self._savestdout - self._savestdout = 0 + self._savestdout = None if savestdout: sys.stdout = savestdout output = self._output - self._output = 0 - if output: - output.close() + self._output = None + try: + if output: + output.close() + finally: + file = self._file + self._file = None + try: + if file and not self._isstdin: + file.close() + finally: + backupfilename = self._backupfilename + self._backupfilename = None + if backupfilename and not self._backup: + try: os.unlink(backupfilename) + except OSError: pass - file = self._file - self._file = 0 - if file and not self._isstdin: - file.close() - - backupfilename = self._backupfilename - self._backupfilename = 0 - if backupfilename and not self._backup: - try: os.unlink(backupfilename) - except OSError: pass - - self._isstdin = False - self._buffer = [] - self._bufindex = 0 + self._isstdin = False + self._buffer = [] + self._bufindex = 0 def readline(self): try: diff --git a/Darwin/lib/python3.4/fnmatch.py b/Darwin/lib/python3.5/fnmatch.py similarity index 100% rename from Darwin/lib/python3.4/fnmatch.py rename to Darwin/lib/python3.5/fnmatch.py diff --git a/Darwin/lib/python3.4/formatter.py b/Darwin/lib/python3.5/formatter.py similarity index 98% rename from Darwin/lib/python3.4/formatter.py rename to Darwin/lib/python3.5/formatter.py index d8cca52..5e8e2ff 100644 --- a/Darwin/lib/python3.4/formatter.py +++ b/Darwin/lib/python3.5/formatter.py @@ -21,7 +21,7 @@ manage and inserting data into the output. import sys import warnings warnings.warn('the formatter module is deprecated and will be removed in ' - 'Python 3.6', PendingDeprecationWarning) + 'Python 3.6', DeprecationWarning, stacklevel=2) AS_IS = None @@ -436,11 +436,15 @@ def test(file = None): fp = open(sys.argv[1]) else: fp = sys.stdin - for line in fp: - if line == '\n': - f.end_paragraph(1) - else: - f.add_flowing_data(line) + try: + for line in fp: + if line == '\n': + f.end_paragraph(1) + else: + f.add_flowing_data(line) + finally: + if fp is not sys.stdin: + fp.close() f.end_paragraph(0) diff --git a/Darwin/lib/python3.4/fractions.py b/Darwin/lib/python3.5/fractions.py similarity index 91% rename from Darwin/lib/python3.4/fractions.py rename to Darwin/lib/python3.5/fractions.py index 79e83ff..60b0728 100644 --- a/Darwin/lib/python3.4/fractions.py +++ b/Darwin/lib/python3.5/fractions.py @@ -20,6 +20,17 @@ def gcd(a, b): Unless b==0, the result will have the same sign as b (so that when b is divided by it, the result comes out positive). """ + import warnings + warnings.warn('fractions.gcd() is deprecated. Use math.gcd() instead.', + DeprecationWarning, 2) + if type(a) is int is type(b): + if (b or a) < 0: + return -math.gcd(a, b) + return math.gcd(a, b) + return _gcd(a, b) + +def _gcd(a, b): + # Supports non-integers for backward compatibility. while b: a, b = b, a%b return a @@ -70,7 +81,7 @@ class Fraction(numbers.Rational): __slots__ = ('_numerator', '_denominator') # We're immutable, so use __new__ not __init__ - def __new__(cls, numerator=0, denominator=None): + def __new__(cls, numerator=0, denominator=None, _normalize=True): """Constructs a Rational. 
Takes a string like '3/2' or '1.5', another Rational instance, a @@ -104,7 +115,12 @@ class Fraction(numbers.Rational): self = super(Fraction, cls).__new__(cls) if denominator is None: - if isinstance(numerator, numbers.Rational): + if type(numerator) is int: + self._numerator = numerator + self._denominator = 1 + return self + + elif isinstance(numerator, numbers.Rational): self._numerator = numerator.numerator self._denominator = numerator.denominator return self @@ -153,6 +169,9 @@ class Fraction(numbers.Rational): raise TypeError("argument should be a string " "or a Rational instance") + elif type(numerator) is int is type(denominator): + pass # *very* normal case + elif (isinstance(numerator, numbers.Rational) and isinstance(denominator, numbers.Rational)): numerator, denominator = ( @@ -165,9 +184,18 @@ class Fraction(numbers.Rational): if denominator == 0: raise ZeroDivisionError('Fraction(%s, 0)' % numerator) - g = gcd(numerator, denominator) - self._numerator = numerator // g - self._denominator = denominator // g + if _normalize: + if type(numerator) is int is type(denominator): + # *very* normal case + g = math.gcd(numerator, denominator) + if denominator < 0: + g = -g + else: + g = _gcd(numerator, denominator) + numerator //= g + denominator //= g + self._numerator = numerator + self._denominator = denominator return self @classmethod @@ -277,7 +305,8 @@ class Fraction(numbers.Rational): def __repr__(self): """repr(self)""" - return ('Fraction(%s, %s)' % (self._numerator, self._denominator)) + return '%s(%s, %s)' % (self.__class__.__name__, + self._numerator, self._denominator) def __str__(self): """str(self)""" @@ -395,17 +424,17 @@ class Fraction(numbers.Rational): def _add(a, b): """a + b""" - return Fraction(a.numerator * b.denominator + - b.numerator * a.denominator, - a.denominator * b.denominator) + da, db = a.denominator, b.denominator + return Fraction(a.numerator * db + b.numerator * da, + da * db) __add__, __radd__ = _operator_fallbacks(_add, operator.add) def _sub(a, b): """a - b""" - return Fraction(a.numerator * b.denominator - - b.numerator * a.denominator, - a.denominator * b.denominator) + da, db = a.denominator, b.denominator + return Fraction(a.numerator * db - b.numerator * da, + da * db) __sub__, __rsub__ = _operator_fallbacks(_sub, operator.sub) @@ -453,10 +482,12 @@ class Fraction(numbers.Rational): power = b.numerator if power >= 0: return Fraction(a._numerator ** power, - a._denominator ** power) + a._denominator ** power, + _normalize=False) else: return Fraction(a._denominator ** -power, - a._numerator ** -power) + a._numerator ** -power, + _normalize=False) else: # A fractional power will generally produce an # irrational number. 
@@ -480,15 +511,15 @@ class Fraction(numbers.Rational): def __pos__(a): """+a: Coerces a subclass instance to Fraction""" - return Fraction(a._numerator, a._denominator) + return Fraction(a._numerator, a._denominator, _normalize=False) def __neg__(a): """-a""" - return Fraction(-a._numerator, a._denominator) + return Fraction(-a._numerator, a._denominator, _normalize=False) def __abs__(a): """abs(a)""" - return Fraction(abs(a._numerator), a._denominator) + return Fraction(abs(a._numerator), a._denominator, _normalize=False) def __trunc__(a): """trunc(a)""" @@ -555,6 +586,8 @@ class Fraction(numbers.Rational): def __eq__(a, b): """a == b""" + if type(b) is int: + return a._numerator == b and a._denominator == 1 if isinstance(b, numbers.Rational): return (a._numerator == b.numerator and a._denominator == b.denominator) diff --git a/Darwin/lib/python3.4/ftplib.py b/Darwin/lib/python3.5/ftplib.py similarity index 89% rename from Darwin/lib/python3.4/ftplib.py rename to Darwin/lib/python3.5/ftplib.py index c83be2b..54b0e2c 100644 --- a/Darwin/lib/python3.4/ftplib.py +++ b/Darwin/lib/python3.5/ftplib.py @@ -42,7 +42,7 @@ import socket import warnings from socket import _GLOBAL_DEFAULT_TIMEOUT -__all__ = ["FTP", "Netrc"] +__all__ = ["FTP"] # Magic number from MSG_OOB = 0x1 # Process data out of band @@ -667,11 +667,16 @@ class FTP: def close(self): '''Close the connection without assuming anything about it.''' - if self.file is not None: - self.file.close() - if self.sock is not None: - self.sock.close() - self.file = self.sock = None + try: + file = self.file + self.file = None + if file is not None: + file.close() + finally: + sock = self.sock + self.sock = None + if sock is not None: + sock.close() try: import ssl @@ -713,7 +718,7 @@ else: '221 Goodbye.' >>> ''' - ssl_version = ssl.PROTOCOL_TLSv1 + ssl_version = ssl.PROTOCOL_SSLv23 def __init__(self, host='', user='', passwd='', acct='', keyfile=None, certfile=None, context=None, @@ -743,13 +748,12 @@ else: '''Set up secure control connection by using TLS/SSL.''' if isinstance(self.sock, ssl.SSLSocket): raise ValueError("Already using TLS") - if self.ssl_version == ssl.PROTOCOL_TLSv1: + if self.ssl_version >= ssl.PROTOCOL_SSLv23: resp = self.voidcmd('AUTH TLS') else: resp = self.voidcmd('AUTH SSL') - server_hostname = self.host if ssl.HAS_SNI else None self.sock = self.context.wrap_socket(self.sock, - server_hostname=server_hostname) + server_hostname=self.host) self.file = self.sock.makefile(mode='r', encoding=self.encoding) return resp @@ -788,9 +792,8 @@ else: def ntransfercmd(self, cmd, rest=None): conn, size = FTP.ntransfercmd(self, cmd, rest) if self._prot_p: - server_hostname = self.host if ssl.HAS_SNI else None conn = self.context.wrap_socket(conn, - server_hostname=server_hostname) + server_hostname=self.host) return conn, size def abort(self): @@ -920,115 +923,6 @@ def ftpcp(source, sourcename, target, targetname = '', type = 'I'): target.voidresp() -class Netrc: - """Class to parse & provide access to 'netrc' format files. - - See the netrc(4) man page for information on the file format. - - WARNING: This class is obsolete -- use module netrc instead. 
- - """ - __defuser = None - __defpasswd = None - __defacct = None - - def __init__(self, filename=None): - warnings.warn("This class is deprecated, use the netrc module instead", - DeprecationWarning, 2) - if filename is None: - if "HOME" in os.environ: - filename = os.path.join(os.environ["HOME"], - ".netrc") - else: - raise OSError("specify file to load or set $HOME") - self.__hosts = {} - self.__macros = {} - fp = open(filename, "r") - in_macro = 0 - while 1: - line = fp.readline() - if not line: - break - if in_macro and line.strip(): - macro_lines.append(line) - continue - elif in_macro: - self.__macros[macro_name] = tuple(macro_lines) - in_macro = 0 - words = line.split() - host = user = passwd = acct = None - default = 0 - i = 0 - while i < len(words): - w1 = words[i] - if i+1 < len(words): - w2 = words[i + 1] - else: - w2 = None - if w1 == 'default': - default = 1 - elif w1 == 'machine' and w2: - host = w2.lower() - i = i + 1 - elif w1 == 'login' and w2: - user = w2 - i = i + 1 - elif w1 == 'password' and w2: - passwd = w2 - i = i + 1 - elif w1 == 'account' and w2: - acct = w2 - i = i + 1 - elif w1 == 'macdef' and w2: - macro_name = w2 - macro_lines = [] - in_macro = 1 - break - i = i + 1 - if default: - self.__defuser = user or self.__defuser - self.__defpasswd = passwd or self.__defpasswd - self.__defacct = acct or self.__defacct - if host: - if host in self.__hosts: - ouser, opasswd, oacct = \ - self.__hosts[host] - user = user or ouser - passwd = passwd or opasswd - acct = acct or oacct - self.__hosts[host] = user, passwd, acct - fp.close() - - def get_hosts(self): - """Return a list of hosts mentioned in the .netrc file.""" - return self.__hosts.keys() - - def get_account(self, host): - """Returns login information for the named host. - - The return value is a triple containing userid, - password, and the accounting field. - - """ - host = host.lower() - user = passwd = acct = None - if host in self.__hosts: - user, passwd, acct = self.__hosts[host] - user = user or self.__defuser - passwd = passwd or self.__defpasswd - acct = acct or self.__defacct - return user, passwd, acct - - def get_macros(self): - """Return a list of all defined macro names.""" - return self.__macros.keys() - - def get_macro(self, macro): - """Return a sequence of lines which define a named macro.""" - return self.__macros[macro] - - - def test(): '''Test program. Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ... 
@@ -1042,6 +936,8 @@ def test(): print(test.__doc__) sys.exit(0) + import netrc + debugging = 0 rcfile = None while sys.argv[1] == '-d': @@ -1056,14 +952,14 @@ def test(): ftp.set_debuglevel(debugging) userid = passwd = acct = '' try: - netrc = Netrc(rcfile) + netrcobj = netrc.netrc(rcfile) except OSError: if rcfile is not None: sys.stderr.write("Could not open account file" " -- using anonymous login.") else: try: - userid, passwd, acct = netrc.get_account(host) + userid, acct, passwd = netrcobj.authenticators(host) except KeyError: # no account for host sys.stderr.write( diff --git a/Darwin/lib/python3.4/functools.py b/Darwin/lib/python3.5/functools.py similarity index 70% rename from Darwin/lib/python3.4/functools.py rename to Darwin/lib/python3.5/functools.py index b8463ad..09df068 100644 --- a/Darwin/lib/python3.4/functools.py +++ b/Darwin/lib/python3.5/functools.py @@ -23,7 +23,7 @@ from types import MappingProxyType from weakref import WeakKeyDictionary try: from _thread import RLock -except: +except ImportError: class RLock: 'Dummy reentrant lock for builds without threads' def __enter__(self): pass @@ -89,101 +89,116 @@ def wraps(wrapped, ### total_ordering class decorator ################################################################################ -# The correct way to indicate that a comparison operation doesn't -# recognise the other type is to return NotImplemented and let the -# interpreter handle raising TypeError if both operands return -# NotImplemented from their respective comparison methods -# -# This makes the implementation of total_ordering more complicated, since -# we need to be careful not to trigger infinite recursion when two -# different types that both use this decorator encounter each other. -# -# For example, if a type implements __lt__, it's natural to define -# __gt__ as something like: -# -# lambda self, other: not self < other and not self == other -# -# However, using the operator syntax like that ends up invoking the full -# type checking machinery again and means we can end up bouncing back and -# forth between the two operands until we run out of stack space. -# -# The solution is to define helper functions that invoke the appropriate -# magic methods directly, ensuring we only try each operand once, and -# return NotImplemented immediately if it is returned from the -# underlying user provided method. Using this scheme, the __gt__ derived -# from a user provided __lt__ becomes: -# -# lambda self, other: _not_op_and_not_eq(self.__lt__, self, other)) +# The total ordering functions all invoke the root magic method directly +# rather than using the corresponding operator. This avoids possible +# infinite recursion that could occur when the operator dispatch logic +# detects a NotImplemented result and then calls a reflected method. -def _not_op(op, other): - # "not a < b" handles "a >= b" - # "not a <= b" handles "a > b" - # "not a >= b" handles "a < b" - # "not a > b" handles "a <= b" - op_result = op(other) +def _gt_from_lt(self, other, NotImplemented=NotImplemented): + 'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).' 
+ op_result = self.__lt__(other) if op_result is NotImplemented: - return NotImplemented - return not op_result - -def _op_or_eq(op, self, other): - # "a < b or a == b" handles "a <= b" - # "a > b or a == b" handles "a >= b" - op_result = op(other) - if op_result is NotImplemented: - return NotImplemented - return op_result or self == other - -def _not_op_and_not_eq(op, self, other): - # "not (a < b or a == b)" handles "a > b" - # "not a < b and a != b" is equivalent - # "not (a > b or a == b)" handles "a < b" - # "not a > b and a != b" is equivalent - op_result = op(other) - if op_result is NotImplemented: - return NotImplemented + return op_result return not op_result and self != other -def _not_op_or_eq(op, self, other): - # "not a <= b or a == b" handles "a >= b" - # "not a >= b or a == b" handles "a <= b" - op_result = op(other) +def _le_from_lt(self, other, NotImplemented=NotImplemented): + 'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).' + op_result = self.__lt__(other) + return op_result or self == other + +def _ge_from_lt(self, other, NotImplemented=NotImplemented): + 'Return a >= b. Computed by @total_ordering from (not a < b).' + op_result = self.__lt__(other) if op_result is NotImplemented: - return NotImplemented + return op_result + return not op_result + +def _ge_from_le(self, other, NotImplemented=NotImplemented): + 'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).' + op_result = self.__le__(other) + if op_result is NotImplemented: + return op_result return not op_result or self == other -def _op_and_not_eq(op, self, other): - # "a <= b and not a == b" handles "a < b" - # "a >= b and not a == b" handles "a > b" - op_result = op(other) +def _lt_from_le(self, other, NotImplemented=NotImplemented): + 'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).' + op_result = self.__le__(other) if op_result is NotImplemented: - return NotImplemented + return op_result return op_result and self != other +def _gt_from_le(self, other, NotImplemented=NotImplemented): + 'Return a > b. Computed by @total_ordering from (not a <= b).' + op_result = self.__le__(other) + if op_result is NotImplemented: + return op_result + return not op_result + +def _lt_from_gt(self, other, NotImplemented=NotImplemented): + 'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).' + op_result = self.__gt__(other) + if op_result is NotImplemented: + return op_result + return not op_result and self != other + +def _ge_from_gt(self, other, NotImplemented=NotImplemented): + 'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).' + op_result = self.__gt__(other) + return op_result or self == other + +def _le_from_gt(self, other, NotImplemented=NotImplemented): + 'Return a <= b. Computed by @total_ordering from (not a > b).' + op_result = self.__gt__(other) + if op_result is NotImplemented: + return op_result + return not op_result + +def _le_from_ge(self, other, NotImplemented=NotImplemented): + 'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).' + op_result = self.__ge__(other) + if op_result is NotImplemented: + return op_result + return not op_result or self == other + +def _gt_from_ge(self, other, NotImplemented=NotImplemented): + 'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).' 
+ op_result = self.__ge__(other) + if op_result is NotImplemented: + return op_result + return op_result and self != other + +def _lt_from_ge(self, other, NotImplemented=NotImplemented): + 'Return a < b. Computed by @total_ordering from (not a >= b).' + op_result = self.__ge__(other) + if op_result is NotImplemented: + return op_result + return not op_result + +_convert = { + '__lt__': [('__gt__', _gt_from_lt), + ('__le__', _le_from_lt), + ('__ge__', _ge_from_lt)], + '__le__': [('__ge__', _ge_from_le), + ('__lt__', _lt_from_le), + ('__gt__', _gt_from_le)], + '__gt__': [('__lt__', _lt_from_gt), + ('__ge__', _ge_from_gt), + ('__le__', _le_from_gt)], + '__ge__': [('__le__', _le_from_ge), + ('__gt__', _gt_from_ge), + ('__lt__', _lt_from_ge)] +} + def total_ordering(cls): """Class decorator that fills in missing ordering methods""" - convert = { - '__lt__': [('__gt__', lambda self, other: _not_op_and_not_eq(self.__lt__, self, other)), - ('__le__', lambda self, other: _op_or_eq(self.__lt__, self, other)), - ('__ge__', lambda self, other: _not_op(self.__lt__, other))], - '__le__': [('__ge__', lambda self, other: _not_op_or_eq(self.__le__, self, other)), - ('__lt__', lambda self, other: _op_and_not_eq(self.__le__, self, other)), - ('__gt__', lambda self, other: _not_op(self.__le__, other))], - '__gt__': [('__lt__', lambda self, other: _not_op_and_not_eq(self.__gt__, self, other)), - ('__ge__', lambda self, other: _op_or_eq(self.__gt__, self, other)), - ('__le__', lambda self, other: _not_op(self.__gt__, other))], - '__ge__': [('__le__', lambda self, other: _not_op_or_eq(self.__ge__, self, other)), - ('__gt__', lambda self, other: _op_and_not_eq(self.__ge__, self, other)), - ('__lt__', lambda self, other: _not_op(self.__ge__, other))] - } # Find user-defined comparisons (not those inherited from object). - roots = [op for op in convert if getattr(cls, op, None) is not getattr(object, op, None)] + roots = [op for op in _convert if getattr(cls, op, None) is not getattr(object, op, None)] if not roots: raise ValueError('must define at least one ordering operation: < > <= >=') root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__ - for opname, opfunc in convert[root]: + for opname, opfunc in _convert[root]: if opname not in roots: opfunc.__name__ = opname - opfunc.__doc__ = getattr(int, opname).__doc__ setattr(cls, opname, opfunc) return cls @@ -208,8 +223,6 @@ def cmp_to_key(mycmp): return mycmp(self.obj, other.obj) <= 0 def __ge__(self, other): return mycmp(self.obj, other.obj) >= 0 - def __ne__(self, other): - return mycmp(self.obj, other.obj) != 0 __hash__ = None return K @@ -228,6 +241,14 @@ def partial(func, *args, **keywords): """New function with partial application of the given arguments and keywords. """ + if hasattr(func, 'func'): + args = func.args + args + tmpkw = func.keywords.copy() + tmpkw.update(keywords) + keywords = tmpkw + del tmpkw + func = func.func + def newfunc(*fargs, **fkeywords): newkeywords = keywords.copy() newkeywords.update(fkeywords) @@ -277,7 +298,7 @@ class partialmethod(object): for k, v in self.keywords.items()) format_string = "{module}.{cls}({func}, {args}, {keywords})" return format_string.format(module=self.__class__.__module__, - cls=self.__class__.__name__, + cls=self.__class__.__qualname__, func=self.func, args=args, keywords=keywords) @@ -392,120 +413,135 @@ def lru_cache(maxsize=128, typed=False): # The internals of the lru_cache are encapsulated for thread safety and # to allow the implementation to change (including a possible C version). 
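The lru_cache hunk continuing below adds an early type check on maxsize, so that applying the decorator bare (without parentheses) fails with a TypeError instead of silently passing the decorated function in as the maxsize argument. A minimal sketch of the two call forms against this functools (not part of the patch itself):

from functools import lru_cache

@lru_cache(maxsize=32)              # decorator factory called with an int maxsize
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

fib(30)
print(fib.cache_info())             # CacheInfo(hits=..., misses=..., maxsize=32, currsize=...)

# Applying the decorator bare would pass fib itself as maxsize, which the
# isinstance check added just below rejects with
# TypeError('Expected maxsize to be an integer or None'):
#
#     @lru_cache
#     def fib(n): ...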
+ # Early detection of an erroneous call to @lru_cache without any arguments + # resulting in the inner function being passed to maxsize instead of an + # integer or None. + if maxsize is not None and not isinstance(maxsize, int): + raise TypeError('Expected maxsize to be an integer or None') + + def decorating_function(user_function): + wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo) + return update_wrapper(wrapper, user_function) + + return decorating_function + +def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo): # Constants shared by all lru cache instances: sentinel = object() # unique object used to signal cache misses make_key = _make_key # build a key from the function arguments PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields - def decorating_function(user_function): - cache = {} - hits = misses = 0 - full = False - cache_get = cache.get # bound method to lookup a key or return None - lock = RLock() # because linkedlist updates aren't threadsafe - root = [] # root of the circular doubly linked list - root[:] = [root, root, None, None] # initialize by pointing to self + cache = {} + hits = misses = 0 + full = False + cache_get = cache.get # bound method to lookup a key or return None + lock = RLock() # because linkedlist updates aren't threadsafe + root = [] # root of the circular doubly linked list + root[:] = [root, root, None, None] # initialize by pointing to self - if maxsize == 0: + if maxsize == 0: - def wrapper(*args, **kwds): - # No caching -- just a statistics update after a successful call - nonlocal misses - result = user_function(*args, **kwds) - misses += 1 + def wrapper(*args, **kwds): + # No caching -- just a statistics update after a successful call + nonlocal misses + result = user_function(*args, **kwds) + misses += 1 + return result + + elif maxsize is None: + + def wrapper(*args, **kwds): + # Simple caching without ordering or size limit + nonlocal hits, misses + key = make_key(args, kwds, typed) + result = cache_get(key, sentinel) + if result is not sentinel: + hits += 1 return result + result = user_function(*args, **kwds) + cache[key] = result + misses += 1 + return result - elif maxsize is None: + else: - def wrapper(*args, **kwds): - # Simple caching without ordering or size limit - nonlocal hits, misses - key = make_key(args, kwds, typed) - result = cache_get(key, sentinel) - if result is not sentinel: + def wrapper(*args, **kwds): + # Size limited caching that tracks accesses by recency + nonlocal root, hits, misses, full + key = make_key(args, kwds, typed) + with lock: + link = cache_get(key) + if link is not None: + # Move the link to the front of the circular queue + link_prev, link_next, _key, result = link + link_prev[NEXT] = link_next + link_next[PREV] = link_prev + last = root[PREV] + last[NEXT] = root[PREV] = link + link[PREV] = last + link[NEXT] = root hits += 1 return result - result = user_function(*args, **kwds) - cache[key] = result + result = user_function(*args, **kwds) + with lock: + if key in cache: + # Getting here means that this same key was added to the + # cache while the lock was released. Since the link + # update is already done, we need only return the + # computed result and update the count of misses. + pass + elif full: + # Use the old root to store the new key and result. + oldroot = root + oldroot[KEY] = key + oldroot[RESULT] = result + # Empty the oldest link and make it the new root. 
+ # Keep a reference to the old key and old result to + # prevent their ref counts from going to zero during the + # update. That will prevent potentially arbitrary object + # clean-up code (i.e. __del__) from running while we're + # still adjusting the links. + root = oldroot[NEXT] + oldkey = root[KEY] + oldresult = root[RESULT] + root[KEY] = root[RESULT] = None + # Now update the cache dictionary. + del cache[oldkey] + # Save the potentially reentrant cache[key] assignment + # for last, after the root and links have been put in + # a consistent state. + cache[key] = oldroot + else: + # Put result in a new link at the front of the queue. + last = root[PREV] + link = [last, root, key, result] + last[NEXT] = root[PREV] = cache[key] = link + full = (len(cache) >= maxsize) misses += 1 - return result + return result - else: + def cache_info(): + """Report cache statistics""" + with lock: + return _CacheInfo(hits, misses, maxsize, len(cache)) - def wrapper(*args, **kwds): - # Size limited caching that tracks accesses by recency - nonlocal root, hits, misses, full - key = make_key(args, kwds, typed) - with lock: - link = cache_get(key) - if link is not None: - # Move the link to the front of the circular queue - link_prev, link_next, _key, result = link - link_prev[NEXT] = link_next - link_next[PREV] = link_prev - last = root[PREV] - last[NEXT] = root[PREV] = link - link[PREV] = last - link[NEXT] = root - hits += 1 - return result - result = user_function(*args, **kwds) - with lock: - if key in cache: - # Getting here means that this same key was added to the - # cache while the lock was released. Since the link - # update is already done, we need only return the - # computed result and update the count of misses. - pass - elif full: - # Use the old root to store the new key and result. - oldroot = root - oldroot[KEY] = key - oldroot[RESULT] = result - # Empty the oldest link and make it the new root. - # Keep a reference to the old key and old result to - # prevent their ref counts from going to zero during the - # update. That will prevent potentially arbitrary object - # clean-up code (i.e. __del__) from running while we're - # still adjusting the links. - root = oldroot[NEXT] - oldkey = root[KEY] - oldresult = root[RESULT] - root[KEY] = root[RESULT] = None - # Now update the cache dictionary. - del cache[oldkey] - # Save the potentially reentrant cache[key] assignment - # for last, after the root and links have been put in - # a consistent state. - cache[key] = oldroot - else: - # Put result in a new link at the front of the queue. 
- last = root[PREV] - link = [last, root, key, result] - last[NEXT] = root[PREV] = cache[key] = link - full = (len(cache) >= maxsize) - misses += 1 - return result + def cache_clear(): + """Clear the cache and cache statistics""" + nonlocal hits, misses, full + with lock: + cache.clear() + root[:] = [root, root, None, None] + hits = misses = 0 + full = False - def cache_info(): - """Report cache statistics""" - with lock: - return _CacheInfo(hits, misses, maxsize, len(cache)) + wrapper.cache_info = cache_info + wrapper.cache_clear = cache_clear + return update_wrapper(wrapper, user_function) - def cache_clear(): - """Clear the cache and cache statistics""" - nonlocal hits, misses, full - with lock: - cache.clear() - root[:] = [root, root, None, None] - hits = misses = 0 - full = False - - wrapper.cache_info = cache_info - wrapper.cache_clear = cache_clear - return update_wrapper(wrapper, user_function) - - return decorating_function +try: + from _functools import _lru_cache_wrapper +except ImportError: + pass ################################################################################ diff --git a/Darwin/lib/python3.4/genericpath.py b/Darwin/lib/python3.5/genericpath.py similarity index 88% rename from Darwin/lib/python3.4/genericpath.py rename to Darwin/lib/python3.5/genericpath.py index ca4a510..6714061 100644 --- a/Darwin/lib/python3.4/genericpath.py +++ b/Darwin/lib/python3.5/genericpath.py @@ -130,3 +130,16 @@ def _splitext(p, sep, altsep, extsep): filenameIndex += 1 return p, p[:0] + +def _check_arg_types(funcname, *args): + hasstr = hasbytes = False + for s in args: + if isinstance(s, str): + hasstr = True + elif isinstance(s, bytes): + hasbytes = True + else: + raise TypeError('%s() argument must be str or bytes, not %r' % + (funcname, s.__class__.__name__)) from None + if hasstr and hasbytes: + raise TypeError("Can't mix strings and bytes in path components") from None diff --git a/Darwin/lib/python3.4/getopt.py b/Darwin/lib/python3.5/getopt.py similarity index 100% rename from Darwin/lib/python3.4/getopt.py rename to Darwin/lib/python3.5/getopt.py diff --git a/Darwin/lib/python3.4/getpass.py b/Darwin/lib/python3.5/getpass.py similarity index 100% rename from Darwin/lib/python3.4/getpass.py rename to Darwin/lib/python3.5/getpass.py diff --git a/Darwin/lib/python3.4/gettext.py b/Darwin/lib/python3.5/gettext.py similarity index 96% rename from Darwin/lib/python3.4/gettext.py rename to Darwin/lib/python3.5/gettext.py index 05d9c1e..101378f 100644 --- a/Darwin/lib/python3.4/gettext.py +++ b/Darwin/lib/python3.5/gettext.py @@ -52,7 +52,9 @@ from errno import ENOENT __all__ = ['NullTranslations', 'GNUTranslations', 'Catalog', 'find', 'translation', 'install', 'textdomain', 'bindtextdomain', - 'dgettext', 'dngettext', 'gettext', 'ngettext', + 'bind_textdomain_codeset', + 'dgettext', 'dngettext', 'gettext', 'lgettext', 'ldgettext', + 'ldngettext', 'lngettext', 'ngettext', ] _default_localedir = os.path.join(sys.base_prefix, 'share', 'locale') @@ -225,6 +227,13 @@ class GNUTranslations(NullTranslations): LE_MAGIC = 0x950412de BE_MAGIC = 0xde120495 + # Acceptable .mo versions + VERSIONS = (0, 1) + + def _get_versions(self, version): + """Returns a tuple of major version, minor version""" + return (version >> 16, version & 0xffff) + def _parse(self, fp): """Override this method to support alternative .mo formats.""" unpack = struct.unpack @@ -245,6 +254,12 @@ class GNUTranslations(NullTranslations): ii = '>II' else: raise OSError(0, 'Bad magic number', filename) + + major_version, 
minor_version = self._get_versions(version) + + if major_version not in self.VERSIONS: + raise OSError(0, 'Bad version number ' + str(major_version), filename) + # Now put all messages from the .mo file buffer into the catalog # dictionary. for i in range(0, msgcount): @@ -260,11 +275,12 @@ class GNUTranslations(NullTranslations): # See if we're looking at GNU .mo conventions for metadata if mlen == 0: # Catalog description - lastk = k = None + lastk = None for b_item in tmsg.split('\n'.encode("ascii")): item = b_item.decode().strip() if not item: continue + k = v = None if ':' in item: k, v = item.split(':', 1) k = k.strip().lower() diff --git a/Darwin/lib/python3.4/glob.py b/Darwin/lib/python3.5/glob.py similarity index 61% rename from Darwin/lib/python3.4/glob.py rename to Darwin/lib/python3.5/glob.py index e388b5f..56d6704 100644 --- a/Darwin/lib/python3.4/glob.py +++ b/Darwin/lib/python3.5/glob.py @@ -6,7 +6,7 @@ import fnmatch __all__ = ["glob", "iglob"] -def glob(pathname): +def glob(pathname, *, recursive=False): """Return a list of paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la @@ -14,10 +14,12 @@ def glob(pathname): dot are special cases that are not matched by '*' and '?' patterns. + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. """ - return list(iglob(pathname)) + return list(iglob(pathname, recursive=recursive)) -def iglob(pathname): +def iglob(pathname, *, recursive=False): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la @@ -25,24 +27,37 @@ def iglob(pathname): dot are special cases that are not matched by '*' and '?' patterns. + If recursive is true, the pattern '**' will match any files and + zero or more directories and subdirectories. """ - if not has_magic(pathname): - if os.path.lexists(pathname): - yield pathname - return dirname, basename = os.path.split(pathname) + if not has_magic(pathname): + if basename: + if os.path.lexists(pathname): + yield pathname + else: + # Patterns ending with a slash should match only directories + if os.path.isdir(dirname): + yield pathname + return if not dirname: - yield from glob1(None, basename) + if recursive and _isrecursive(basename): + yield from glob2(dirname, basename) + else: + yield from glob1(dirname, basename) return # `os.path.split()` returns the argument itself as a dirname if it is a # drive or UNC path. Prevent an infinite recursion if a drive or UNC path # contains magic characters (i.e. r'\\?\C:'). if dirname != pathname and has_magic(dirname): - dirs = iglob(dirname) + dirs = iglob(dirname, recursive=recursive) else: dirs = [dirname] if has_magic(basename): - glob_in_dir = glob1 + if recursive and _isrecursive(basename): + glob_in_dir = glob2 + else: + glob_in_dir = glob1 else: glob_in_dir = glob0 for dirname in dirs: @@ -78,6 +93,34 @@ def glob0(dirname, basename): return [basename] return [] +# This helper function recursively yields relative pathnames inside a literal +# directory. + +def glob2(dirname, pattern): + assert _isrecursive(pattern) + if dirname: + yield pattern[:0] + yield from _rlistdir(dirname) + +# Recursively yields relative pathnames inside a literal directory. 
+ +def _rlistdir(dirname): + if not dirname: + if isinstance(dirname, bytes): + dirname = bytes(os.curdir, 'ASCII') + else: + dirname = os.curdir + try: + names = os.listdir(dirname) + except os.error: + return + for x in names: + if not _ishidden(x): + yield x + path = os.path.join(dirname, x) if dirname else x + for y in _rlistdir(path): + yield os.path.join(x, y) + magic_check = re.compile('([*?[])') magic_check_bytes = re.compile(b'([*?[])') @@ -92,6 +135,12 @@ def has_magic(s): def _ishidden(path): return path[0] in ('.', b'.'[0]) +def _isrecursive(pattern): + if isinstance(pattern, bytes): + return pattern == b'**' + else: + return pattern == '**' + def escape(pathname): """Escape all special characters. """ diff --git a/Darwin/lib/python3.4/gzip.py b/Darwin/lib/python3.5/gzip.py similarity index 58% rename from Darwin/lib/python3.4/gzip.py rename to Darwin/lib/python3.5/gzip.py index f934d4f..45152e4 100644 --- a/Darwin/lib/python3.4/gzip.py +++ b/Darwin/lib/python3.5/gzip.py @@ -9,6 +9,7 @@ import struct, sys, time, os import zlib import builtins import io +import _compression __all__ = ["GzipFile", "open", "compress", "decompress"] @@ -89,49 +90,35 @@ class _PaddedFile: return self._buffer[read:] + \ self.file.read(size-self._length+read) - def prepend(self, prepend=b'', readprevious=False): + def prepend(self, prepend=b''): if self._read is None: self._buffer = prepend - elif readprevious and len(prepend) <= self._read: + else: # Assume data was read since the last prepend() call self._read -= len(prepend) return - else: - self._buffer = self._buffer[self._read:] + prepend self._length = len(self._buffer) self._read = 0 - def unused(self): - if self._read is None: - return b'' - return self._buffer[self._read:] - - def seek(self, offset, whence=0): - # This is only ever called with offset=whence=0 - if whence == 1 and self._read is not None: - if 0 <= offset + self._read <= self._length: - self._read += offset - return - else: - offset += self._length - self._read + def seek(self, off): self._read = None self._buffer = None - return self.file.seek(offset, whence) + return self.file.seek(off) - def __getattr__(self, name): - return getattr(self.file, name) + def seekable(self): + return True # Allows fast-forwarding even in unseekable streams - -class GzipFile(io.BufferedIOBase): +class GzipFile(_compression.BaseStream): """The GzipFile class simulates most of the methods of a file object with - the exception of the readinto() and truncate() methods. + the exception of the truncate() method. This class only supports opening files in binary mode. If you need to open a compressed file in text mode, use the gzip.open() function. """ + # Overridden with internal file object to be closed, if only a filename + # is passed in myfileobj = None - max_read_chunk = 10 * 1024 * 1024 # 10Mb def __init__(self, filename=None, mode=None, compresslevel=9, fileobj=None, mtime=None): @@ -163,13 +150,8 @@ class GzipFile(io.BufferedIOBase): at all. The default is 9. The mtime argument is an optional numeric timestamp to be written - to the stream when compressing. All gzip compressed streams - are required to contain a timestamp. If omitted or None, the - current time is used. This module ignores the timestamp when - decompressing; however, some programs, such as gunzip, make use - of it. The format of the timestamp is the same as that of the - return value of time.time() and of the st_mtime member of the - object returned by os.stat(). 
+ to the last modification time field in the stream when compressing. + If omitted or None, the current time is used. """ @@ -188,18 +170,9 @@ class GzipFile(io.BufferedIOBase): if mode.startswith('r'): self.mode = READ - # Set flag indicating start of a new member - self._new_member = True - # Buffer data read from gzip file. extrastart is offset in - # stream where buffer starts. extrasize is number of - # bytes remaining in buffer from current stream position. - self.extrabuf = b"" - self.extrasize = 0 - self.extrastart = 0 + raw = _GzipReader(fileobj) + self._buffer = io.BufferedReader(raw) self.name = filename - # Starts small, scales exponentially - self.min_readsize = 100 - fileobj = _PaddedFile(fileobj) elif mode.startswith(('w', 'a', 'x')): self.mode = WRITE @@ -209,12 +182,11 @@ class GzipFile(io.BufferedIOBase): -zlib.MAX_WBITS, zlib.DEF_MEM_LEVEL, 0) + self._write_mtime = mtime else: raise ValueError("Invalid mode: {!r}".format(mode)) self.fileobj = fileobj - self.offset = 0 - self.mtime = mtime if self.mode == WRITE: self._write_gzip_header() @@ -227,26 +199,22 @@ class GzipFile(io.BufferedIOBase): return self.name + ".gz" return self.name + @property + def mtime(self): + """Last modification time read from stream, or None""" + return self._buffer.raw._last_mtime + def __repr__(self): - fileobj = self.fileobj - if isinstance(fileobj, _PaddedFile): - fileobj = fileobj.file - s = repr(fileobj) + s = repr(self.fileobj) return '' - def _check_closed(self): - """Raises a ValueError if the underlying file object has been closed. - - """ - if self.closed: - raise ValueError('I/O operation on closed file.') - def _init_write(self, filename): self.name = filename self.crc = zlib.crc32(b"") & 0xffffffff self.size = 0 self.writebuf = [] self.bufsize = 0 + self.offset = 0 # Current file offset for seek(), tell(), etc def _write_gzip_header(self): self.fileobj.write(b'\037\213') # magic header @@ -265,7 +233,7 @@ class GzipFile(io.BufferedIOBase): if fname: flags = FNAME self.fileobj.write(chr(flags).encode('latin-1')) - mtime = self.mtime + mtime = self._write_mtime if mtime is None: mtime = time.time() write32u(self.fileobj, int(mtime)) @@ -274,59 +242,8 @@ class GzipFile(io.BufferedIOBase): if fname: self.fileobj.write(fname + b'\000') - def _init_read(self): - self.crc = zlib.crc32(b"") & 0xffffffff - self.size = 0 - - def _read_exact(self, n): - data = self.fileobj.read(n) - while len(data) < n: - b = self.fileobj.read(n - len(data)) - if not b: - raise EOFError("Compressed file ended before the " - "end-of-stream marker was reached") - data += b - return data - - def _read_gzip_header(self): - magic = self.fileobj.read(2) - if magic == b'': - return False - - if magic != b'\037\213': - raise OSError('Not a gzipped file') - - method, flag, self.mtime = struct.unpack(" 0: - self.size = self.size + len(data) + if length > 0: + self.fileobj.write(self.compress.compress(data)) + self.size += length self.crc = zlib.crc32(data, self.crc) & 0xffffffff - self.fileobj.write( self.compress.compress(data) ) - self.offset += len(data) + self.offset += length - return len(data) + return length def read(self, size=-1): - self._check_closed() + self._check_not_closed() if self.mode != READ: import errno raise OSError(errno.EBADF, "read() on write-only GzipFile object") - - if self.extrasize <= 0 and self.fileobj is None: - return b'' - - readsize = 1024 - if size < 0: # get the whole thing - while self._read(readsize): - readsize = min(self.max_read_chunk, readsize * 2) - size = self.extrasize - 
else: # just get some more of it - while size > self.extrasize: - if not self._read(readsize): - if size > self.extrasize: - size = self.extrasize - break - readsize = min(self.max_read_chunk, readsize * 2) - - offset = self.offset - self.extrastart - chunk = self.extrabuf[offset: offset + size] - self.extrasize = self.extrasize - size - - self.offset += size - return chunk + return self._buffer.read(size) def read1(self, size=-1): - self._check_closed() + """Implements BufferedIOBase.read1() + + Reads up to a buffer's worth of data is size is negative.""" + self._check_not_closed() if self.mode != READ: import errno raise OSError(errno.EBADF, "read1() on write-only GzipFile object") - if self.extrasize <= 0 and self.fileobj is None: - return b'' - - # For certain input data, a single call to _read() may not return - # any data. In this case, retry until we get some data or reach EOF. - while self.extrasize <= 0 and self._read(): - pass - if size < 0 or size > self.extrasize: - size = self.extrasize - - offset = self.offset - self.extrastart - chunk = self.extrabuf[offset: offset + size] - self.extrasize -= size - self.offset += size - return chunk + if size < 0: + size = io.DEFAULT_BUFFER_SIZE + return self._buffer.read1(size) def peek(self, n): + self._check_not_closed() if self.mode != READ: import errno raise OSError(errno.EBADF, "peek() on write-only GzipFile object") - - # Do not return ridiculously small buffers, for one common idiom - # is to call peek(1) and expect more bytes in return. - if n < 100: - n = 100 - if self.extrasize == 0: - if self.fileobj is None: - return b'' - # Ensure that we don't return b"" if we haven't reached EOF. - # 1024 is the same buffering heuristic used in read() - while self.extrasize == 0 and self._read(max(n, 1024)): - pass - offset = self.offset - self.extrastart - remaining = self.extrasize - assert remaining == len(self.extrabuf) - offset - return self.extrabuf[offset:offset + n] - - def _unread(self, buf): - self.extrasize = len(buf) + self.extrasize - self.offset -= len(buf) - - def _read(self, size=1024): - if self.fileobj is None: - return False - - if self._new_member: - # If the _new_member flag is set, we have to - # jump to the next member, if there is one. - self._init_read() - if not self._read_gzip_header(): - return False - self.decompress = zlib.decompressobj(-zlib.MAX_WBITS) - self._new_member = False - - # Read a chunk of data from the file - buf = self.fileobj.read(size) - - # If the EOF has been reached, flush the decompression object - # and mark this object as finished. - - if buf == b"": - uncompress = self.decompress.flush() - # Prepend the already read bytes to the fileobj to they can be - # seen by _read_eof() - self.fileobj.prepend(self.decompress.unused_data, True) - self._read_eof() - self._add_read_data( uncompress ) - return False - - uncompress = self.decompress.decompress(buf) - self._add_read_data( uncompress ) - - if self.decompress.unused_data != b"": - # Ending case: we've come to the end of a member in the file, - # so seek back to the start of the unused data, finish up - # this member, and read a new gzip header. 
- # Prepend the already read bytes to the fileobj to they can be - # seen by _read_eof() and _read_gzip_header() - self.fileobj.prepend(self.decompress.unused_data, True) - # Check the CRC and file size, and set the flag so we read - # a new member on the next call - self._read_eof() - self._new_member = True - return True - - def _add_read_data(self, data): - self.crc = zlib.crc32(data, self.crc) & 0xffffffff - offset = self.offset - self.extrastart - self.extrabuf = self.extrabuf[offset:] + data - self.extrasize = self.extrasize + len(data) - self.extrastart = self.offset - self.size = self.size + len(data) - - def _read_eof(self): - # We've read to the end of the file - # We check the that the computed CRC and size of the - # uncompressed data matches the stored values. Note that the size - # stored is the true file size mod 2**32. - crc32, isize = struct.unpack(" 0: - self.extrasize -= i - offset - self.offset += i - offset - return self.extrabuf[offset: i] + return self.readall() + # size=0 is special because decompress(max_length=0) is not supported + if not size: + return b"" - size = sys.maxsize - readsize = self.min_readsize - else: - readsize = size - bufs = [] - while size != 0: - c = self.read(readsize) - i = c.find(b'\n') + # For certain input data, a single + # call to decompress() may not return + # any data. In this case, retry until we get some data or reach EOF. + while True: + if self._decompressor.eof: + # Ending case: we've come to the end of a member in the file, + # so finish up this member, and read a new gzip header. + # Check the CRC and file size, and set the flag so we read + # a new member + self._read_eof() + self._new_member = True + self._decompressor = self._decomp_factory( + **self._decomp_args) - # We set i=size to break out of the loop under two - # conditions: 1) there's no newline, and the chunk is - # larger than size, or 2) there is a newline, but the - # resulting line would be longer than 'size'. - if (size <= i) or (i == -1 and len(c) > size): - i = size - 1 + if self._new_member: + # If the _new_member flag is set, we have to + # jump to the next member, if there is one. 
+ self._init_read() + if not self._read_gzip_header(): + self._size = self._pos + return b"" + self._new_member = False - if i >= 0 or c == b'': - bufs.append(c[:i + 1]) # Add portion of last chunk - self._unread(c[i + 1:]) # Push back rest of chunk + # Read a chunk of data from the file + buf = self._fp.read(io.DEFAULT_BUFFER_SIZE) + + uncompress = self._decompressor.decompress(buf, size) + if self._decompressor.unconsumed_tail != b"": + self._fp.prepend(self._decompressor.unconsumed_tail) + elif self._decompressor.unused_data != b"": + # Prepend the already read bytes to the fileobj so they can + # be seen by _read_eof() and _read_gzip_header() + self._fp.prepend(self._decompressor.unused_data) + + if uncompress != b"": break + if buf == b"": + raise EOFError("Compressed file ended before the " + "end-of-stream marker was reached") - # Append chunk to list, decrease 'size', - bufs.append(c) - size = size - len(c) - readsize = min(size, readsize * 2) - if readsize > self.min_readsize: - self.min_readsize = min(readsize, self.min_readsize * 2, 512) - return b''.join(bufs) # Return resulting line + self._add_read_data( uncompress ) + self._pos += len(uncompress) + return uncompress + def _add_read_data(self, data): + self._crc = zlib.crc32(data, self._crc) & 0xffffffff + self._stream_size = self._stream_size + len(data) + + def _read_eof(self): + # We've read to the end of the file + # We check the that the computed CRC and size of the + # uncompressed data matches the stored values. Note that the size + # stored is the true file size mod 2**32. + crc32, isize = struct.unpack("= startpos, except possibly for pos. pos # is the index of a leaf with a possibly out-of-order value. Restore the # heap invariant. @@ -340,13 +311,7 @@ def _siftup_max(heap, pos): heap[pos] = newitem _siftdown_max(heap, startpos, pos) -# If available, use C implementation -try: - from _heapq import * -except ImportError: - pass - -def merge(*iterables): +def merge(*iterables, key=None, reverse=False): '''Merge multiple sorted inputs into a single sorted output. Similar to sorted(itertools.chain(*iterables)) but returns a generator, @@ -356,51 +321,158 @@ def merge(*iterables): >>> list(merge([1,3,5,7], [0,2,4,8], [5,10,15,20], [], [25])) [0, 1, 2, 3, 4, 5, 5, 7, 8, 10, 15, 20, 25] + If *key* is not None, applies a key function to each element to determine + its sort order. 
+ + >>> list(merge(['dog', 'horse'], ['cat', 'fish', 'kangaroo'], key=len)) + ['dog', 'cat', 'fish', 'horse', 'kangaroo'] + ''' - _heappop, _heapreplace, _StopIteration = heappop, heapreplace, StopIteration - _len = len h = [] h_append = h.append - for itnum, it in enumerate(map(iter, iterables)): + + if reverse: + _heapify = _heapify_max + _heappop = _heappop_max + _heapreplace = _heapreplace_max + direction = -1 + else: + _heapify = heapify + _heappop = heappop + _heapreplace = heapreplace + direction = 1 + + if key is None: + for order, it in enumerate(map(iter, iterables)): + try: + next = it.__next__ + h_append([next(), order * direction, next]) + except StopIteration: + pass + _heapify(h) + while len(h) > 1: + try: + while True: + value, order, next = s = h[0] + yield value + s[0] = next() # raises StopIteration when exhausted + _heapreplace(h, s) # restore heap condition + except StopIteration: + _heappop(h) # remove empty iterator + if h: + # fast case when only a single iterator remains + value, order, next = h[0] + yield value + yield from next.__self__ + return + + for order, it in enumerate(map(iter, iterables)): try: next = it.__next__ - h_append([next(), itnum, next]) - except _StopIteration: + value = next() + h_append([key(value), order * direction, value, next]) + except StopIteration: pass - heapify(h) - - while _len(h) > 1: + _heapify(h) + while len(h) > 1: try: while True: - v, itnum, next = s = h[0] - yield v - s[0] = next() # raises StopIteration when exhausted - _heapreplace(h, s) # restore heap condition - except _StopIteration: - _heappop(h) # remove empty iterator + key_value, order, value, next = s = h[0] + yield value + value = next() + s[0] = key(value) + s[2] = value + _heapreplace(h, s) + except StopIteration: + _heappop(h) if h: - # fast case when only a single iterator remains - v, itnum, next = h[0] - yield v + key_value, order, value, next = h[0] + yield value yield from next.__self__ -# Extend the implementations of nsmallest and nlargest to use a key= argument -_nsmallest = nsmallest + +# Algorithm notes for nlargest() and nsmallest() +# ============================================== +# +# Make a single pass over the data while keeping the k most extreme values +# in a heap. Memory consumption is limited to keeping k values in a list. +# +# Measured performance for random inputs: +# +# number of comparisons +# n inputs k-extreme values (average of 5 trials) % more than min() +# ------------- ---------------- --------------------- ----------------- +# 1,000 100 3,317 231.7% +# 10,000 100 14,046 40.5% +# 100,000 100 105,749 5.7% +# 1,000,000 100 1,007,751 0.8% +# 10,000,000 100 10,009,401 0.1% +# +# Theoretical number of comparisons for k smallest of n random inputs: +# +# Step Comparisons Action +# ---- -------------------------- --------------------------- +# 1 1.66 * k heapify the first k-inputs +# 2 n - k compare remaining elements to top of heap +# 3 k * (1 + lg2(k)) * ln(n/k) replace the topmost value on the heap +# 4 k * lg2(k) - (k/2) final sort of the k most extreme values +# +# Combining and simplifying for a rough estimate gives: +# +# comparisons = n + k * (log(k, 2) * log(n/k) + log(k, 2) + log(n/k)) +# +# Computing the number of comparisons for step 3: +# ----------------------------------------------- +# * For the i-th new value from the iterable, the probability of being in the +# k most extreme values is k/i. For example, the probability of the 101st +# value seen being in the 100 most extreme values is 100/101. 
+# * If the value is a new extreme value, the cost of inserting it into the +# heap is 1 + log(k, 2). +# * The probability times the cost gives: +# (k/i) * (1 + log(k, 2)) +# * Summing across the remaining n-k elements gives: +# sum((k/i) * (1 + log(k, 2)) for i in range(k+1, n+1)) +# * This reduces to: +# (H(n) - H(k)) * k * (1 + log(k, 2)) +# * Where H(n) is the n-th harmonic number estimated by: +# gamma = 0.5772156649 +# H(n) = log(n, e) + gamma + 1 / (2 * n) +# http://en.wikipedia.org/wiki/Harmonic_series_(mathematics)#Rate_of_divergence +# * Substituting the H(n) formula: +# comparisons = k * (1 + log(k, 2)) * (log(n/k, e) + (1/n - 1/k) / 2) +# +# Worst-case for step 3: +# ---------------------- +# In the worst case, the input data is reversed sorted so that every new element +# must be inserted in the heap: +# +# comparisons = 1.66 * k + log(k, 2) * (n - k) +# +# Alternative Algorithms +# ---------------------- +# Other algorithms were not used because they: +# 1) Took much more auxiliary memory, +# 2) Made multiple passes over the data. +# 3) Made more comparisons in common cases (small k, large n, semi-random input). +# See the more detailed comparison of approach at: +# http://code.activestate.com/recipes/577573-compare-algorithms-for-heapqsmallest + def nsmallest(n, iterable, key=None): """Find the n smallest elements in a dataset. Equivalent to: sorted(iterable, key=key)[:n] """ - # Short-cut for n==1 is to use min() when len(iterable)>0 + + # Short-cut for n==1 is to use min() if n == 1: it = iter(iterable) - head = list(islice(it, 1)) - if not head: - return [] + sentinel = object() if key is None: - return [min(chain(head, it))] - return [min(chain(head, it), key=key)] + result = min(it, default=sentinel) + else: + result = min(it, default=sentinel, key=key) + return [] if result is sentinel else [result] # When n>=size, it's faster to use sorted() try: @@ -413,32 +485,57 @@ def nsmallest(n, iterable, key=None): # When key is none, use simpler decoration if key is None: - it = zip(iterable, count()) # decorate - result = _nsmallest(n, it) - return [r[0] for r in result] # undecorate + it = iter(iterable) + # put the range(n) first so that zip() doesn't + # consume one too many elements from the iterator + result = [(elem, i) for i, elem in zip(range(n), it)] + if not result: + return result + _heapify_max(result) + top = result[0][0] + order = n + _heapreplace = _heapreplace_max + for elem in it: + if elem < top: + _heapreplace(result, (elem, order)) + top = result[0][0] + order += 1 + result.sort() + return [r[0] for r in result] # General case, slowest method - in1, in2 = tee(iterable) - it = zip(map(key, in1), count(), in2) # decorate - result = _nsmallest(n, it) - return [r[2] for r in result] # undecorate + it = iter(iterable) + result = [(key(elem), i, elem) for i, elem in zip(range(n), it)] + if not result: + return result + _heapify_max(result) + top = result[0][0] + order = n + _heapreplace = _heapreplace_max + for elem in it: + k = key(elem) + if k < top: + _heapreplace(result, (k, order, elem)) + top = result[0][0] + order += 1 + result.sort() + return [r[2] for r in result] -_nlargest = nlargest def nlargest(n, iterable, key=None): """Find the n largest elements in a dataset. 
Equivalent to: sorted(iterable, key=key, reverse=True)[:n] """ - # Short-cut for n==1 is to use max() when len(iterable)>0 + # Short-cut for n==1 is to use max() if n == 1: it = iter(iterable) - head = list(islice(it, 1)) - if not head: - return [] + sentinel = object() if key is None: - return [max(chain(head, it))] - return [max(chain(head, it), key=key)] + result = max(it, default=sentinel) + else: + result = max(it, default=sentinel, key=key) + return [] if result is sentinel else [result] # When n>=size, it's faster to use sorted() try: @@ -451,26 +548,60 @@ def nlargest(n, iterable, key=None): # When key is none, use simpler decoration if key is None: - it = zip(iterable, count(0,-1)) # decorate - result = _nlargest(n, it) - return [r[0] for r in result] # undecorate + it = iter(iterable) + result = [(elem, i) for i, elem in zip(range(0, -n, -1), it)] + if not result: + return result + heapify(result) + top = result[0][0] + order = -n + _heapreplace = heapreplace + for elem in it: + if top < elem: + _heapreplace(result, (elem, order)) + top = result[0][0] + order -= 1 + result.sort(reverse=True) + return [r[0] for r in result] # General case, slowest method - in1, in2 = tee(iterable) - it = zip(map(key, in1), count(0,-1), in2) # decorate - result = _nlargest(n, it) - return [r[2] for r in result] # undecorate + it = iter(iterable) + result = [(key(elem), i, elem) for i, elem in zip(range(0, -n, -1), it)] + if not result: + return result + heapify(result) + top = result[0][0] + order = -n + _heapreplace = heapreplace + for elem in it: + k = key(elem) + if top < k: + _heapreplace(result, (k, order, elem)) + top = result[0][0] + order -= 1 + result.sort(reverse=True) + return [r[2] for r in result] + +# If available, use C implementation +try: + from _heapq import * +except ImportError: + pass +try: + from _heapq import _heapreplace_max +except ImportError: + pass +try: + from _heapq import _heapify_max +except ImportError: + pass +try: + from _heapq import _heappop_max +except ImportError: + pass + if __name__ == "__main__": - # Simple sanity test - heap = [] - data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0] - for item in data: - heappush(heap, item) - sort = [] - while heap: - sort.append(heappop(heap)) - print(sort) import doctest - doctest.testmod() + print(doctest.testmod()) diff --git a/Darwin/lib/python3.4/hmac.py b/Darwin/lib/python3.5/hmac.py similarity index 100% rename from Darwin/lib/python3.4/hmac.py rename to Darwin/lib/python3.5/hmac.py diff --git a/Darwin/lib/python3.4/html/__init__.py b/Darwin/lib/python3.5/html/__init__.py similarity index 100% rename from Darwin/lib/python3.4/html/__init__.py rename to Darwin/lib/python3.5/html/__init__.py diff --git a/Darwin/lib/python3.4/html/entities.py b/Darwin/lib/python3.5/html/entities.py similarity index 99% rename from Darwin/lib/python3.4/html/entities.py rename to Darwin/lib/python3.5/html/entities.py index e891ad6..3e1778b 100644 --- a/Darwin/lib/python3.4/html/entities.py +++ b/Darwin/lib/python3.5/html/entities.py @@ -1,6 +1,9 @@ """HTML character entity references.""" -# maps the HTML entity name to the Unicode codepoint +__all__ = ['html5', 'name2codepoint', 'codepoint2name', 'entitydefs'] + + +# maps the HTML entity name to the Unicode code point name2codepoint = { 'AElig': 0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1 'Aacute': 0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1 @@ -2492,7 +2495,7 @@ html5 = { 'zwnj;': '\u200c', } -# maps the Unicode codepoint to the HTML entity name +# 
maps the Unicode code point to the HTML entity name codepoint2name = {} # maps the HTML entity name to the character diff --git a/Darwin/lib/python3.4/html/parser.py b/Darwin/lib/python3.5/html/parser.py similarity index 79% rename from Darwin/lib/python3.4/html/parser.py rename to Darwin/lib/python3.5/html/parser.py index a650d5e..390d4cc 100644 --- a/Darwin/lib/python3.4/html/parser.py +++ b/Darwin/lib/python3.5/html/parser.py @@ -29,35 +29,15 @@ starttagopen = re.compile('<[a-zA-Z]') piclose = re.compile('>') commentclose = re.compile(r'--\s*>') # Note: -# 1) the strict attrfind isn't really strict, but we can't make it -# correctly strict without breaking backward compatibility; -# 2) if you change tagfind/attrfind remember to update locatestarttagend too; -# 3) if you change tagfind/attrfind and/or locatestarttagend the parser will +# 1) if you change tagfind/attrfind remember to update locatestarttagend too; +# 2) if you change tagfind/attrfind and/or locatestarttagend the parser will # explode, so don't do it. -tagfind = re.compile('([a-zA-Z][-.a-zA-Z0-9:_]*)(?:\s|/(?!>))*') # see http://www.w3.org/TR/html5/tokenization.html#tag-open-state # and http://www.w3.org/TR/html5/tokenization.html#tag-name-state tagfind_tolerant = re.compile('([a-zA-Z][^\t\n\r\f />\x00]*)(?:\s|/(?!>))*') -attrfind = re.compile( - r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*' - r'(\'[^\']*\'|"[^"]*"|[^\s"\'=<>`]*))?') attrfind_tolerant = re.compile( r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*' r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*') -locatestarttagend = re.compile(r""" - <[a-zA-Z][-.a-zA-Z0-9:_]* # tag name - (?:\s+ # whitespace before attribute name - (?:[a-zA-Z_][-.:a-zA-Z0-9_]* # attribute name - (?:\s*=\s* # value indicator - (?:'[^']*' # LITA-enclosed value - |\"[^\"]*\" # LIT-enclosed value - |[^'\">\s]+ # bare value - ) - )? - ) - )* - \s* # trailing whitespace -""", re.VERBOSE) locatestarttagend_tolerant = re.compile(r""" <[a-zA-Z][^\t\n\r\f />\x00]* # tag name (?:[\s/]* # optional whitespace before attribute name @@ -79,25 +59,6 @@ endendtag = re.compile('>') endtagfind = re.compile('') -class HTMLParseError(Exception): - """Exception raised for all parse errors.""" - - def __init__(self, msg, position=(None, None)): - assert msg - self.msg = msg - self.lineno = position[0] - self.offset = position[1] - - def __str__(self): - result = self.msg - if self.lineno is not None: - result = result + ", at line %d" % self.lineno - if self.offset is not None: - result = result + ", column %d" % (self.offset + 1) - return result - - -_default_sentinel = object() class HTMLParser(_markupbase.ParserBase): """Find tags and other markup and call handler functions. @@ -123,27 +84,12 @@ class HTMLParser(_markupbase.ParserBase): CDATA_CONTENT_ELEMENTS = ("script", "style") - def __init__(self, strict=_default_sentinel, *, - convert_charrefs=_default_sentinel): + def __init__(self, *, convert_charrefs=True): """Initialize and reset this instance. - If convert_charrefs is True (default: False), all character references + If convert_charrefs is True (the default), all character references are automatically converted to the corresponding Unicode characters. - If strict is set to False (the default) the parser will parse invalid - markup, otherwise it will raise an error. Note that the strict mode - and argument are deprecated. 
""" - if strict is not _default_sentinel: - warnings.warn("The strict argument and mode are deprecated.", - DeprecationWarning, stacklevel=2) - else: - strict = False # default - self.strict = strict - if convert_charrefs is _default_sentinel: - convert_charrefs = False # default - warnings.warn("The value of convert_charrefs will become True in " - "3.5. You are encouraged to set the value explicitly.", - DeprecationWarning, stacklevel=2) self.convert_charrefs = convert_charrefs self.reset() @@ -168,11 +114,6 @@ class HTMLParser(_markupbase.ParserBase): """Handle any buffered data.""" self.goahead(1) - def error(self, message): - warnings.warn("The 'error' method is deprecated.", - DeprecationWarning, stacklevel=2) - raise HTMLParseError(message, self.getpos()) - __starttag_text = None def get_starttag_text(self): @@ -227,10 +168,7 @@ class HTMLParser(_markupbase.ParserBase): elif startswith("', i + 1) if k < 0: k = rawdata.find('<', i + 1) @@ -282,13 +218,10 @@ class HTMLParser(_markupbase.ParserBase): if match: # match.group() will contain at least 2 chars if end and match.group() == rawdata[i:]: - if self.strict: - self.error("EOF in middle of entity or char ref") - else: - k = match.end() - if k <= i: - k = n - i = self.updatepos(i, i + 1) + k = match.end() + if k <= i: + k = n + i = self.updatepos(i, i + 1) # incomplete break elif (i + 1) < n: @@ -367,18 +300,12 @@ class HTMLParser(_markupbase.ParserBase): # Now parse the data between i+1 and j into a tag and attrs attrs = [] - if self.strict: - match = tagfind.match(rawdata, i+1) - else: - match = tagfind_tolerant.match(rawdata, i+1) + match = tagfind_tolerant.match(rawdata, i+1) assert match, 'unexpected call to parse_starttag()' k = match.end() self.lasttag = tag = match.group(1).lower() while k < endpos: - if self.strict: - m = attrfind.match(rawdata, k) - else: - m = attrfind_tolerant.match(rawdata, k) + m = attrfind_tolerant.match(rawdata, k) if not m: break attrname, rest, attrvalue = m.group(1, 2, 3) @@ -401,9 +328,6 @@ class HTMLParser(_markupbase.ParserBase): - self.__starttag_text.rfind("\n") else: offset = offset + len(self.__starttag_text) - if self.strict: - self.error("junk characters in start tag: %r" - % (rawdata[k:endpos][:20],)) self.handle_data(rawdata[i:endpos]) return endpos if end.endswith('/>'): @@ -419,10 +343,7 @@ class HTMLParser(_markupbase.ParserBase): # or -1 if incomplete. 
def check_for_whole_start_tag(self, i): rawdata = self.rawdata - if self.strict: - m = locatestarttagend.match(rawdata, i) - else: - m = locatestarttagend_tolerant.match(rawdata, i) + m = locatestarttagend_tolerant.match(rawdata, i) if m: j = m.end() next = rawdata[j:j+1] @@ -435,9 +356,6 @@ class HTMLParser(_markupbase.ParserBase): # buffer boundary return -1 # else bogus input - if self.strict: - self.updatepos(i, j + 1) - self.error("malformed empty start tag") if j > i: return j else: @@ -450,9 +368,6 @@ class HTMLParser(_markupbase.ParserBase): # end of input in or before attribute value, or we have the # '/' from a '/>' ending return -1 - if self.strict: - self.updatepos(i, j) - self.error("malformed start tag") if j > i: return j else: @@ -472,8 +387,6 @@ class HTMLParser(_markupbase.ParserBase): if self.cdata_elem is not None: self.handle_data(rawdata[i:gtpos]) return gtpos - if self.strict: - self.error("bad end tag: %r" % (rawdata[i:gtpos],)) # find the name: w3.org/TR/html5/tokenization.html#tag-name-state namematch = tagfind_tolerant.match(rawdata, i+2) if not namematch: @@ -539,8 +452,7 @@ class HTMLParser(_markupbase.ParserBase): pass def unknown_decl(self, data): - if self.strict: - self.error("unknown declaration: %r" % (data,)) + pass # Internal -- helper to remove special character quoting def unescape(self, s): diff --git a/Darwin/lib/python3.5/http/__init__.py b/Darwin/lib/python3.5/http/__init__.py new file mode 100644 index 0000000..d4334cc --- /dev/null +++ b/Darwin/lib/python3.5/http/__init__.py @@ -0,0 +1,134 @@ +from enum import IntEnum + +__all__ = ['HTTPStatus'] + +class HTTPStatus(IntEnum): + """HTTP status codes and reason phrases + + Status codes from the following RFCs are all observed: + + * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 + * RFC 6585: Additional HTTP Status Codes + * RFC 3229: Delta encoding in HTTP + * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 + * RFC 5842: Binding Extensions to WebDAV + * RFC 7238: Permanent Redirect + * RFC 2295: Transparent Content Negotiation in HTTP + * RFC 2774: An HTTP Extension Framework + """ + def __new__(cls, value, phrase, description=''): + obj = int.__new__(cls, value) + obj._value_ = value + + obj.phrase = phrase + obj.description = description + return obj + + # informational + CONTINUE = 100, 'Continue', 'Request received, please continue' + SWITCHING_PROTOCOLS = (101, 'Switching Protocols', + 'Switching to new protocol; obey Upgrade header') + PROCESSING = 102, 'Processing' + + # success + OK = 200, 'OK', 'Request fulfilled, document follows' + CREATED = 201, 'Created', 'Document created, URL follows' + ACCEPTED = (202, 'Accepted', + 'Request accepted, processing continues off-line') + NON_AUTHORITATIVE_INFORMATION = (203, + 'Non-Authoritative Information', 'Request fulfilled from cache') + NO_CONTENT = 204, 'No Content', 'Request fulfilled, nothing follows' + RESET_CONTENT = 205, 'Reset Content', 'Clear input form for further input' + PARTIAL_CONTENT = 206, 'Partial Content', 'Partial content follows' + MULTI_STATUS = 207, 'Multi-Status' + ALREADY_REPORTED = 208, 'Already Reported' + IM_USED = 226, 'IM Used' + + # redirection + MULTIPLE_CHOICES = (300, 'Multiple Choices', + 'Object has several resources -- see URI list') + MOVED_PERMANENTLY = (301, 'Moved Permanently', + 'Object moved permanently -- see URI list') + FOUND = 302, 'Found', 'Object moved temporarily -- see URI list' + SEE_OTHER = 303, 'See Other', 'Object moved -- see Method and URL list' + NOT_MODIFIED = (304, 
'Not Modified', + 'Document has not changed since given time') + USE_PROXY = (305, 'Use Proxy', + 'You must use proxy specified in Location to access this resource') + TEMPORARY_REDIRECT = (307, 'Temporary Redirect', + 'Object moved temporarily -- see URI list') + PERMANENT_REDIRECT = (308, 'Permanent Redirect', + 'Object moved temporarily -- see URI list') + + # client error + BAD_REQUEST = (400, 'Bad Request', + 'Bad request syntax or unsupported method') + UNAUTHORIZED = (401, 'Unauthorized', + 'No permission -- see authorization schemes') + PAYMENT_REQUIRED = (402, 'Payment Required', + 'No payment -- see charging schemes') + FORBIDDEN = (403, 'Forbidden', + 'Request forbidden -- authorization will not help') + NOT_FOUND = (404, 'Not Found', + 'Nothing matches the given URI') + METHOD_NOT_ALLOWED = (405, 'Method Not Allowed', + 'Specified method is invalid for this resource') + NOT_ACCEPTABLE = (406, 'Not Acceptable', + 'URI not available in preferred format') + PROXY_AUTHENTICATION_REQUIRED = (407, + 'Proxy Authentication Required', + 'You must authenticate with this proxy before proceeding') + REQUEST_TIMEOUT = (408, 'Request Timeout', + 'Request timed out; try again later') + CONFLICT = 409, 'Conflict', 'Request conflict' + GONE = (410, 'Gone', + 'URI no longer exists and has been permanently removed') + LENGTH_REQUIRED = (411, 'Length Required', + 'Client must specify Content-Length') + PRECONDITION_FAILED = (412, 'Precondition Failed', + 'Precondition in headers is false') + REQUEST_ENTITY_TOO_LARGE = (413, 'Request Entity Too Large', + 'Entity is too large') + REQUEST_URI_TOO_LONG = (414, 'Request-URI Too Long', + 'URI is too long') + UNSUPPORTED_MEDIA_TYPE = (415, 'Unsupported Media Type', + 'Entity body in unsupported format') + REQUESTED_RANGE_NOT_SATISFIABLE = (416, + 'Requested Range Not Satisfiable', + 'Cannot satisfy request range') + EXPECTATION_FAILED = (417, 'Expectation Failed', + 'Expect condition could not be satisfied') + UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity' + LOCKED = 423, 'Locked' + FAILED_DEPENDENCY = 424, 'Failed Dependency' + UPGRADE_REQUIRED = 426, 'Upgrade Required' + PRECONDITION_REQUIRED = (428, 'Precondition Required', + 'The origin server requires the request to be conditional') + TOO_MANY_REQUESTS = (429, 'Too Many Requests', + 'The user has sent too many requests in ' + 'a given amount of time ("rate limiting")') + REQUEST_HEADER_FIELDS_TOO_LARGE = (431, + 'Request Header Fields Too Large', + 'The server is unwilling to process the request because its header ' + 'fields are too large') + + # server errors + INTERNAL_SERVER_ERROR = (500, 'Internal Server Error', + 'Server got itself in trouble') + NOT_IMPLEMENTED = (501, 'Not Implemented', + 'Server does not support this operation') + BAD_GATEWAY = (502, 'Bad Gateway', + 'Invalid responses from another server/proxy') + SERVICE_UNAVAILABLE = (503, 'Service Unavailable', + 'The server cannot process the request due to a high load') + GATEWAY_TIMEOUT = (504, 'Gateway Timeout', + 'The gateway server did not receive a timely response') + HTTP_VERSION_NOT_SUPPORTED = (505, 'HTTP Version Not Supported', + 'Cannot fulfill request') + VARIANT_ALSO_NEGOTIATES = 506, 'Variant Also Negotiates' + INSUFFICIENT_STORAGE = 507, 'Insufficient Storage' + LOOP_DETECTED = 508, 'Loop Detected' + NOT_EXTENDED = 510, 'Not Extended' + NETWORK_AUTHENTICATION_REQUIRED = (511, + 'Network Authentication Required', + 'The client needs to authenticate to gain network access') diff --git 
a/Darwin/lib/python3.4/http/client.py b/Darwin/lib/python3.5/http/client.py similarity index 79% rename from Darwin/lib/python3.4/http/client.py rename to Darwin/lib/python3.5/http/client.py index d2013f2..80c80cf 100644 --- a/Darwin/lib/python3.4/http/client.py +++ b/Darwin/lib/python3.5/http/client.py @@ -20,10 +20,12 @@ request. This diagram details these state transitions: | ( putheader() )* endheaders() v Request-sent - | - | response = getresponse() - v - Unread-response [Response-headers-read] + |\_____________________________ + | | getresponse() raises + | response = getresponse() | ConnectionError + v v + Unread-response Idle + [Response-headers-read] |\____________________ | | | response.read() | putrequest() @@ -68,18 +70,23 @@ Req-sent-unread-response _CS_REQ_SENT import email.parser import email.message +import http import io import os +import re import socket import collections from urllib.parse import urlsplit +# HTTPMessage, parse_headers(), and the HTTP status code constants are +# intentionally omitted for simplicity __all__ = ["HTTPResponse", "HTTPConnection", "HTTPException", "NotConnected", "UnknownProtocol", "UnknownTransferEncoding", "UnimplementedFileMode", "IncompleteRead", "InvalidURL", "ImproperConnectionState", "CannotSendRequest", "CannotSendHeader", "ResponseNotReady", - "BadStatusLine", "error", "responses"] + "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error", + "responses"] HTTP_PORT = 80 HTTPS_PORT = 443 @@ -91,122 +98,13 @@ _CS_IDLE = 'Idle' _CS_REQ_STARTED = 'Request-started' _CS_REQ_SENT = 'Request-sent' -# status codes -# informational -CONTINUE = 100 -SWITCHING_PROTOCOLS = 101 -PROCESSING = 102 -# successful -OK = 200 -CREATED = 201 -ACCEPTED = 202 -NON_AUTHORITATIVE_INFORMATION = 203 -NO_CONTENT = 204 -RESET_CONTENT = 205 -PARTIAL_CONTENT = 206 -MULTI_STATUS = 207 -IM_USED = 226 - -# redirection -MULTIPLE_CHOICES = 300 -MOVED_PERMANENTLY = 301 -FOUND = 302 -SEE_OTHER = 303 -NOT_MODIFIED = 304 -USE_PROXY = 305 -TEMPORARY_REDIRECT = 307 - -# client error -BAD_REQUEST = 400 -UNAUTHORIZED = 401 -PAYMENT_REQUIRED = 402 -FORBIDDEN = 403 -NOT_FOUND = 404 -METHOD_NOT_ALLOWED = 405 -NOT_ACCEPTABLE = 406 -PROXY_AUTHENTICATION_REQUIRED = 407 -REQUEST_TIMEOUT = 408 -CONFLICT = 409 -GONE = 410 -LENGTH_REQUIRED = 411 -PRECONDITION_FAILED = 412 -REQUEST_ENTITY_TOO_LARGE = 413 -REQUEST_URI_TOO_LONG = 414 -UNSUPPORTED_MEDIA_TYPE = 415 -REQUESTED_RANGE_NOT_SATISFIABLE = 416 -EXPECTATION_FAILED = 417 -UNPROCESSABLE_ENTITY = 422 -LOCKED = 423 -FAILED_DEPENDENCY = 424 -UPGRADE_REQUIRED = 426 -PRECONDITION_REQUIRED = 428 -TOO_MANY_REQUESTS = 429 -REQUEST_HEADER_FIELDS_TOO_LARGE = 431 - -# server error -INTERNAL_SERVER_ERROR = 500 -NOT_IMPLEMENTED = 501 -BAD_GATEWAY = 502 -SERVICE_UNAVAILABLE = 503 -GATEWAY_TIMEOUT = 504 -HTTP_VERSION_NOT_SUPPORTED = 505 -INSUFFICIENT_STORAGE = 507 -NOT_EXTENDED = 510 -NETWORK_AUTHENTICATION_REQUIRED = 511 +# hack to maintain backwards compatibility +globals().update(http.HTTPStatus.__members__) +# another hack to maintain backwards compatibility # Mapping status codes to official W3C names -responses = { - 100: 'Continue', - 101: 'Switching Protocols', - - 200: 'OK', - 201: 'Created', - 202: 'Accepted', - 203: 'Non-Authoritative Information', - 204: 'No Content', - 205: 'Reset Content', - 206: 'Partial Content', - - 300: 'Multiple Choices', - 301: 'Moved Permanently', - 302: 'Found', - 303: 'See Other', - 304: 'Not Modified', - 305: 'Use Proxy', - 306: '(Unused)', - 307: 'Temporary Redirect', - - 400: 'Bad Request', - 401: 
'Unauthorized', - 402: 'Payment Required', - 403: 'Forbidden', - 404: 'Not Found', - 405: 'Method Not Allowed', - 406: 'Not Acceptable', - 407: 'Proxy Authentication Required', - 408: 'Request Timeout', - 409: 'Conflict', - 410: 'Gone', - 411: 'Length Required', - 412: 'Precondition Failed', - 413: 'Request Entity Too Large', - 414: 'Request-URI Too Long', - 415: 'Unsupported Media Type', - 416: 'Requested Range Not Satisfiable', - 417: 'Expectation Failed', - 428: 'Precondition Required', - 429: 'Too Many Requests', - 431: 'Request Header Fields Too Large', - - 500: 'Internal Server Error', - 501: 'Not Implemented', - 502: 'Bad Gateway', - 503: 'Service Unavailable', - 504: 'Gateway Timeout', - 505: 'HTTP Version Not Supported', - 511: 'Network Authentication Required', -} +responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()} # maximal amount of data to read at one time in _safe_read MAXAMOUNT = 1048576 @@ -215,6 +113,38 @@ MAXAMOUNT = 1048576 _MAXLINE = 65536 _MAXHEADERS = 100 +# Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2) +# +# VCHAR = %x21-7E +# obs-text = %x80-FF +# header-field = field-name ":" OWS field-value OWS +# field-name = token +# field-value = *( field-content / obs-fold ) +# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] +# field-vchar = VCHAR / obs-text +# +# obs-fold = CRLF 1*( SP / HTAB ) +# ; obsolete line folding +# ; see Section 3.2.4 + +# token = 1*tchar +# +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" +# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" +# / DIGIT / ALPHA +# ; any VCHAR, except delimiters +# +# VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1 + +# the patterns for both name and value are more leniant than RFC +# definitions to allow for backwards compatibility +_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch +_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search + +# We always set the Content-Length header for these methods because some +# servers will otherwise respond with a 411 +_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'} + class HTTPMessage(email.message.Message): # XXX The only usage of this method is in @@ -270,7 +200,7 @@ def parse_headers(fp, _class=HTTPMessage): return email.parser.Parser(_class=_class).parsestr(hstring) -class HTTPResponse(io.RawIOBase): +class HTTPResponse(io.BufferedIOBase): # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details. @@ -318,7 +248,8 @@ class HTTPResponse(io.RawIOBase): if not line: # Presumably, the server closed the connection before # sending a valid response. - raise BadStatusLine(line) + raise RemoteDisconnected("Remote end closed connection without" + " response") try: version, status, reason = line.split(None, 2) except ValueError: @@ -457,9 +388,11 @@ class HTTPResponse(io.RawIOBase): fp.close() def close(self): - super().close() # set "closed" flag - if self.fp: - self._close_conn() + try: + super().close() # set "closed" flag + finally: + if self.fp: + self._close_conn() # These implementations are for the benefit of io.BufferedReader. 
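As a rough, stand-alone illustration of the backwards-compatibility shims above (it assumes Python 3.5+, where http.HTTPStatus exists): the enum members compare equal to the old integer constants, carry the phrase and description text that used to live in hand-written tables, and the regenerated responses mapping keeps its old shape.

    import http
    import http.client

    # HTTPStatus is an IntEnum: members compare equal to the bare codes and
    # expose the phrase/description strings that were previously duplicated
    # in hard-coded tables.
    assert http.HTTPStatus.NOT_FOUND == 404
    assert http.HTTPStatus.NOT_FOUND.phrase == 'Not Found'
    assert http.HTTPStatus.NOT_FOUND.description == 'Nothing matches the given URI'

    # The globals().update(...) shim keeps the old module-level constants alive,
    # and the regenerated mapping still answers the old question:
    assert http.client.NOT_FOUND == 404
    assert http.client.responses[http.client.NOT_FOUND] == 'Not Found'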
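The two byte patterns defined above are what putheader() later uses to reject header injection. A small sketch of how they classify names and values; the sample headers are invented for illustration:

    import re

    # Same patterns as in the patched client.py, repeated so the snippet runs on its own.
    _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
    _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search

    assert _is_legal_header_name(b'Content-Length')           # ordinary token: accepted
    assert not _is_legal_header_name(b':authority')           # leading colon: rejected
    assert not _is_legal_header_name(b'X-Evil\r\nInjected')   # embedded CR/LF: rejected

    assert _is_illegal_header_value(b'ok\r\nInjected: 1')     # bare CRLF would start a new header
    assert not _is_illegal_header_value(b'line one\r\n\ttwo') # obs-fold continuation is tolerated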
@@ -495,9 +428,10 @@ class HTTPResponse(io.RawIOBase): return b"" if amt is not None: - # Amount is given, so call base class version - # (which is implemented in terms of self.readinto) - return super(HTTPResponse, self).read(amt) + # Amount is given, implement using readinto + b = bytearray(amt) + n = self.readinto(b) + return memoryview(b)[:n].tobytes() else: # Amount is not given (unbounded read) so we must check self.length # and self.chunked @@ -577,71 +511,67 @@ class HTTPResponse(io.RawIOBase): if line in (b'\r\n', b'\n', b''): break + def _get_chunk_left(self): + # return self.chunk_left, reading a new chunk if necessary. + # chunk_left == 0: at the end of the current chunk, need to close it + # chunk_left == None: No current chunk, should read next. + # This function returns non-zero or None if the last chunk has + # been read. + chunk_left = self.chunk_left + if not chunk_left: # Can be 0 or None + if chunk_left is not None: + # We are at the end of chunk. dicard chunk end + self._safe_read(2) # toss the CRLF at the end of the chunk + try: + chunk_left = self._read_next_chunk_size() + except ValueError: + raise IncompleteRead(b'') + if chunk_left == 0: + # last chunk: 1*("0") [ chunk-extension ] CRLF + self._read_and_discard_trailer() + # we read everything; close the "file" + self._close_conn() + chunk_left = None + self.chunk_left = chunk_left + return chunk_left + def _readall_chunked(self): assert self.chunked != _UNKNOWN - chunk_left = self.chunk_left value = [] - while True: - if chunk_left is None: - try: - chunk_left = self._read_next_chunk_size() - if chunk_left == 0: - break - except ValueError: - raise IncompleteRead(b''.join(value)) - value.append(self._safe_read(chunk_left)) - - # we read the whole chunk, get another - self._safe_read(2) # toss the CRLF at the end of the chunk - chunk_left = None - - self._read_and_discard_trailer() - - # we read everything; close the "file" - self._close_conn() - - return b''.join(value) + try: + while True: + chunk_left = self._get_chunk_left() + if chunk_left is None: + break + value.append(self._safe_read(chunk_left)) + self.chunk_left = 0 + return b''.join(value) + except IncompleteRead: + raise IncompleteRead(b''.join(value)) def _readinto_chunked(self, b): assert self.chunked != _UNKNOWN - chunk_left = self.chunk_left - total_bytes = 0 mvb = memoryview(b) - while True: - if chunk_left is None: - try: - chunk_left = self._read_next_chunk_size() - if chunk_left == 0: - break - except ValueError: - raise IncompleteRead(bytes(b[0:total_bytes])) + try: + while True: + chunk_left = self._get_chunk_left() + if chunk_left is None: + return total_bytes - if len(mvb) < chunk_left: - n = self._safe_readinto(mvb) - self.chunk_left = chunk_left - n - return total_bytes + n - elif len(mvb) == chunk_left: - n = self._safe_readinto(mvb) - self._safe_read(2) # toss the CRLF at the end of the chunk - self.chunk_left = None - return total_bytes + n - else: - temp_mvb = mvb[0:chunk_left] + if len(mvb) <= chunk_left: + n = self._safe_readinto(mvb) + self.chunk_left = chunk_left - n + return total_bytes + n + + temp_mvb = mvb[:chunk_left] n = self._safe_readinto(temp_mvb) mvb = mvb[n:] total_bytes += n + self.chunk_left = 0 - # we read the whole chunk, get another - self._safe_read(2) # toss the CRLF at the end of the chunk - chunk_left = None - - self._read_and_discard_trailer() - - # we read everything; close the "file" - self._close_conn() - - return total_bytes + except IncompleteRead: + raise IncompleteRead(bytes(b[0:total_bytes])) def 
_safe_read(self, amt): """Read the number of bytes requested, compensating for partial reads. @@ -682,6 +612,73 @@ class HTTPResponse(io.RawIOBase): total_bytes += n return total_bytes + def read1(self, n=-1): + """Read with at most one underlying system call. If at least one + byte is buffered, return that instead. + """ + if self.fp is None or self._method == "HEAD": + return b"" + if self.chunked: + return self._read1_chunked(n) + try: + result = self.fp.read1(n) + except ValueError: + if n >= 0: + raise + # some implementations, like BufferedReader, don't support -1 + # Read an arbitrarily selected largeish chunk. + result = self.fp.read1(16*1024) + if not result and n: + self._close_conn() + return result + + def peek(self, n=-1): + # Having this enables IOBase.readline() to read more than one + # byte at a time + if self.fp is None or self._method == "HEAD": + return b"" + if self.chunked: + return self._peek_chunked(n) + return self.fp.peek(n) + + def readline(self, limit=-1): + if self.fp is None or self._method == "HEAD": + return b"" + if self.chunked: + # Fallback to IOBase readline which uses peek() and read() + return super().readline(limit) + result = self.fp.readline(limit) + if not result and limit: + self._close_conn() + return result + + def _read1_chunked(self, n): + # Strictly speaking, _get_chunk_left() may cause more than one read, + # but that is ok, since that is to satisfy the chunked protocol. + chunk_left = self._get_chunk_left() + if chunk_left is None or n == 0: + return b'' + if not (0 <= n <= chunk_left): + n = chunk_left # if n is negative or larger than chunk_left + read = self.fp.read1(n) + self.chunk_left -= len(read) + if not read: + raise IncompleteRead(b"") + return read + + def _peek_chunked(self, n): + # Strictly speaking, _get_chunk_left() may cause more than one read, + # but that is ok, since that is to satisfy the chunked protocol. + try: + chunk_left = self._get_chunk_left() + except IncompleteRead: + return b'' # peek doesn't worry about protocol + if chunk_left is None: + return b'' # eof + # peek is allowed to return more than requested. Just request the + # entire chunk, and truncate what we get. + return self.fp.peek(chunk_left)[:chunk_left] + def fileno(self): return self.fp.fileno() @@ -725,14 +722,6 @@ class HTTPConnection: default_port = HTTP_PORT auto_open = 1 debuglevel = 0 - # TCP Maximum Segment Size (MSS) is determined by the TCP stack on - # a per-connection basis. There is no simple and efficient - # platform independent mechanism for determining the MSS, so - # instead a reasonable estimate is chosen. The getsockopt() - # interface using the TCP_MAXSEG parameter may be a suitable - # approach on some operating systems. A value of 16KiB is chosen - # as a reasonable estimate of the maximum MSS. 
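The bounded read() shown a little earlier now pre-sizes a bytearray, fills it with a single readinto() call, and trims the result through a memoryview. A minimal sketch of that pattern, with io.BytesIO standing in for the response's socket file and _read_amt as a made-up helper name:

    import io

    def _read_amt(fp, amt):
        # mirrors the patched HTTPResponse.read(amt): pre-size, fill, trim
        b = bytearray(amt)
        n = fp.readinto(b)
        return memoryview(b)[:n].tobytes()

    fp = io.BytesIO(b'hello world')
    assert _read_amt(fp, 5) == b'hello'
    assert _read_amt(fp, 100) == b' world'   # short read: only the bytes actually read are kept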
- mss = 16384 def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, source_address=None): @@ -771,8 +760,7 @@ class HTTPConnection: if self.sock: raise RuntimeError("Can't set up tunnel for established connection") - self._tunnel_host = host - self._tunnel_port = port + self._tunnel_host, self._tunnel_port = self._get_hostport(host, port) if headers: self._tunnel_headers = headers else: @@ -802,9 +790,8 @@ class HTTPConnection: self.debuglevel = level def _tunnel(self): - (host, port) = self._get_hostport(self._tunnel_host, - self._tunnel_port) - connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port) + connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host, + self._tunnel_port) connect_bytes = connect_str.encode("ascii") self.send(connect_bytes) for header, value in self._tunnel_headers.items(): @@ -816,7 +803,7 @@ class HTTPConnection: response = self.response_class(self.sock, method=self._method) (version, code, message) = response._read_status() - if code != 200: + if code != http.HTTPStatus.OK: self.close() raise OSError("Tunnel connection failed: %d %s" % (code, message.strip())) @@ -830,23 +817,31 @@ class HTTPConnection: if line in (b'\r\n', b'\n', b''): break + if self.debuglevel > 0: + print('header:', line.decode()) + def connect(self): """Connect to the host and port specified in __init__.""" - self.sock = self._create_connection((self.host,self.port), - self.timeout, self.source_address) + self.sock = self._create_connection( + (self.host,self.port), self.timeout, self.source_address) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) if self._tunnel_host: self._tunnel() def close(self): """Close the connection to the HTTP server.""" - if self.sock: - self.sock.close() # close it manually... there may be other refs - self.sock = None - if self.__response: - self.__response.close() - self.__response = None self.__state = _CS_IDLE + try: + sock = self.sock + if sock: + self.sock = None + sock.close() # close it manually... there may be other refs + finally: + response = self.__response + if response: + self.__response = None + response.close() def send(self, data): """Send `data' to the server. @@ -912,19 +907,9 @@ class HTTPConnection: self._buffer.extend((b"", b"")) msg = b"\r\n".join(self._buffer) del self._buffer[:] - # If msg and message_body are sent in a single send() call, - # it will avoid performance problems caused by the interaction - # between delayed ack and the Nagle algorithm. However, - # there is no performance gain if the message is larger - # than MSS (and there is a memory penalty for the message - # copy). - if isinstance(message_body, bytes) and len(message_body) < self.mss: - msg += message_body - message_body = None + self.send(msg) if message_body is not None: - # message_body was not a string (i.e. it is a file), and - # we must run the risk of Nagle. 
self.send(message_body) def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0): @@ -1060,12 +1045,20 @@ class HTTPConnection: if hasattr(header, 'encode'): header = header.encode('ascii') + + if not _is_legal_header_name(header): + raise ValueError('Invalid header name %r' % (header,)) + values = list(values) for i, one_value in enumerate(values): if hasattr(one_value, 'encode'): values[i] = one_value.encode('latin-1') elif isinstance(one_value, int): values[i] = str(one_value).encode('ascii') + + if _is_illegal_header_value(values[i]): + raise ValueError('Invalid header value %r' % (values[i],)) + value = b'\r\n\t'.join(values) header = header + b': ' + value self._output(header) @@ -1089,19 +1082,26 @@ class HTTPConnection: """Send a complete request to the server.""" self._send_request(method, url, body, headers) - def _set_content_length(self, body): - # Set the content-length based on the body. + def _set_content_length(self, body, method): + # Set the content-length based on the body. If the body is "empty", we + # set Content-Length: 0 for methods that expect a body (RFC 7230, + # Section 3.3.2). If the body is set for other methods, we set the + # header provided we can figure out what the length is. thelen = None - try: - thelen = str(len(body)) - except TypeError as te: - # If this is a file-like object, try to - # fstat its file descriptor + method_expects_body = method.upper() in _METHODS_EXPECTING_BODY + if body is None and method_expects_body: + thelen = '0' + elif body is not None: try: - thelen = str(os.fstat(body.fileno()).st_size) - except (AttributeError, OSError): - # Don't send a length if this failed - if self.debuglevel > 0: print("Cannot stat!!") + thelen = str(len(body)) + except TypeError: + # If this is a file-like object, try to + # fstat its file descriptor + try: + thelen = str(os.fstat(body.fileno()).st_size) + except (AttributeError, OSError): + # Don't send a length if this failed + if self.debuglevel > 0: print("Cannot stat!!") if thelen is not None: self.putheader('Content-Length', thelen) @@ -1117,8 +1117,8 @@ class HTTPConnection: self.putrequest(method, url, **skips) - if body is not None and ('content-length' not in header_names): - self._set_content_length(body) + if 'content-length' not in header_names: + self._set_content_length(body, method) for hdr, value in headers.items(): self.putheader(hdr, value) if isinstance(body, str): @@ -1169,18 +1169,26 @@ class HTTPConnection: else: response = self.response_class(self.sock, method=self._method) - response.begin() - assert response.will_close != _UNKNOWN - self.__state = _CS_IDLE + try: + try: + response.begin() + except ConnectionError: + self.close() + raise + assert response.will_close != _UNKNOWN + self.__state = _CS_IDLE - if response.will_close: - # this effectively passes the connection to the response - self.close() - else: - # remember this, so we can tell when it is complete - self.__response = response + if response.will_close: + # this effectively passes the connection to the response + self.close() + else: + # remember this, so we can tell when it is complete + self.__response = response - return response + return response + except: + response.close() + raise try: import ssl @@ -1203,11 +1211,11 @@ else: self.key_file = key_file self.cert_file = cert_file if context is None: - context = ssl._create_stdlib_context() + context = ssl._create_default_https_context() will_verify = context.verify_mode != ssl.CERT_NONE if check_hostname is None: - check_hostname = will_verify - 
elif check_hostname and not will_verify: + check_hostname = context.check_hostname + if check_hostname and not will_verify: raise ValueError("check_hostname needs a SSL context with " "either CERT_OPTIONAL or CERT_REQUIRED") if key_file or cert_file: @@ -1224,10 +1232,9 @@ else: server_hostname = self._tunnel_host else: server_hostname = self.host - sni_hostname = server_hostname if ssl.HAS_SNI else None self.sock = self._context.wrap_socket(self.sock, - server_hostname=sni_hostname) + server_hostname=server_hostname) if not self._context.check_hostname and self._check_hostname: try: ssl.match_hostname(self.sock.getpeercert(), server_hostname) @@ -1270,7 +1277,8 @@ class IncompleteRead(HTTPException): e = ', %i more expected' % self.expected else: e = '' - return 'IncompleteRead(%i bytes read%s)' % (len(self.partial), e) + return '%s(%i bytes read%s)' % (self.__class__.__name__, + len(self.partial), e) def __str__(self): return repr(self) @@ -1298,5 +1306,10 @@ class LineTooLong(HTTPException): HTTPException.__init__(self, "got more than %d bytes when reading %s" % (_MAXLINE, line_type)) +class RemoteDisconnected(ConnectionResetError, BadStatusLine): + def __init__(self, *pos, **kw): + BadStatusLine.__init__(self, "") + ConnectionResetError.__init__(self, *pos, **kw) + # for backwards compatibility error = HTTPException diff --git a/Darwin/lib/python3.4/http/cookiejar.py b/Darwin/lib/python3.5/http/cookiejar.py similarity index 98% rename from Darwin/lib/python3.4/http/cookiejar.py rename to Darwin/lib/python3.5/http/cookiejar.py index 4dc468b..b1ba72e 100644 --- a/Darwin/lib/python3.4/http/cookiejar.py +++ b/Darwin/lib/python3.5/http/cookiejar.py @@ -472,26 +472,42 @@ def parse_ns_headers(ns_headers): for ns_header in ns_headers: pairs = [] version_set = False - for ii, param in enumerate(re.split(r";\s*", ns_header)): - param = param.rstrip() - if param == "": continue - if "=" not in param: - k, v = param, None - else: - k, v = re.split(r"\s*=\s*", param, 1) - k = k.lstrip() + + # XXX: The following does not strictly adhere to RFCs in that empty + # names and values are legal (the former will only appear once and will + # be overwritten if multiple occurrences are present). This is + # mostly to deal with backwards compatibility. + for ii, param in enumerate(ns_header.split(';')): + param = param.strip() + + key, sep, val = param.partition('=') + key = key.strip() + + if not key: + if ii == 0: + break + else: + continue + + # allow for a distinction between present and empty and missing + # altogether + val = val.strip() if sep else None + if ii != 0: - lc = k.lower() + lc = key.lower() if lc in known_attrs: - k = lc - if k == "version": + key = lc + + if key == "version": # This is an RFC 2109 cookie. 
- v = strip_quotes(v) + if val is not None: + val = strip_quotes(val) version_set = True - if k == "expires": + elif key == "expires": # convert expires date to seconds since epoch - v = http2time(strip_quotes(v)) # None if invalid - pairs.append((k, v)) + if val is not None: + val = http2time(strip_quotes(val)) # None if invalid + pairs.append((key, val)) if pairs: if not version_set: @@ -742,7 +758,7 @@ class Cookie: ): if version is not None: version = int(version) - if expires is not None: expires = int(expires) + if expires is not None: expires = int(float(expires)) if port is None and port_specified is True: raise ValueError("if port is None, port_specified must be false") @@ -805,7 +821,7 @@ class Cookie: args.append("%s=%s" % (name, repr(attr))) args.append("rest=%s" % repr(self._rest)) args.append("rfc2109=%s" % repr(self.rfc2109)) - return "Cookie(%s)" % ", ".join(args) + return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) class CookiePolicy: @@ -1722,12 +1738,12 @@ class CookieJar: def __repr__(self): r = [] for cookie in self: r.append(repr(cookie)) - return "<%s[%s]>" % (self.__class__, ", ".join(r)) + return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) def __str__(self): r = [] for cookie in self: r.append(str(cookie)) - return "<%s[%s]>" % (self.__class__, ", ".join(r)) + return "<%s[%s]>" % (self.__class__.__name__, ", ".join(r)) # derives from OSError for backwards-compatibility with Python 2.4.0 @@ -1792,7 +1808,7 @@ class FileCookieJar(CookieJar): def lwp_cookie_str(cookie): - """Return string representation of Cookie in an the LWP cookie file format. + """Return string representation of Cookie in the LWP cookie file format. Actually, the format is extended a bit -- see module docstring. @@ -1983,7 +1999,6 @@ class MozillaCookieJar(FileCookieJar): magic = f.readline() if not self.magic_re.search(magic): - f.close() raise LoadError( "%r does not look like a Netscape format cookies file" % filename) diff --git a/Darwin/lib/python3.4/http/cookies.py b/Darwin/lib/python3.5/http/cookies.py similarity index 71% rename from Darwin/lib/python3.4/http/cookies.py rename to Darwin/lib/python3.5/http/cookies.py index 24da5f4..fda02b7 100644 --- a/Darwin/lib/python3.4/http/cookies.py +++ b/Darwin/lib/python3.5/http/cookies.py @@ -138,6 +138,12 @@ _nulljoin = ''.join _semispacejoin = '; '.join _spacejoin = ' '.join +def _warn_deprecated_setter(setter): + import warnings + msg = ('The .%s setter is deprecated. The attribute will be read-only in ' + 'future releases. Please use the set() method instead.' % setter) + warnings.warn(msg, DeprecationWarning, stacklevel=3) + # # Define an exception visible to External modules # @@ -151,88 +157,36 @@ class CookieError(Exception): # into a 4 character sequence: a forward-slash followed by the # three-digit octal equivalent of the character. Any '\' or '"' is # quoted with a preceeding '\' slash. +# Because of the way browsers really handle cookies (as opposed to what +# the RFC says) we also encode "," and ";". # # These are taken from RFC2068 and RFC2109. 
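The parse_ns_headers() rewrite above (str.partition() plus explicit stripping) can be exercised directly. The Set-Cookie header below is invented, and the commented result is only approximate:

    from http.cookiejar import parse_ns_headers

    header = 'sid=abc123; Path=/; secure; expires=Wed, 01 Jan 2020 00:00:00 GMT'
    print(parse_ns_headers([header]))
    # Roughly: [[('sid', 'abc123'), ('path', '/'), ('secure', None),
    #            ('expires', <seconds since the epoch>), ('version', '0')]]
    # Known attribute names are lower-cased, a bare flag such as "secure" is
    # recorded with a None value (an explicit empty "secure=" would give ''),
    # "expires" is converted with http2time(), and a default version of "0"
    # is appended when the header carried none.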
# _LegalChars is the list of chars which don't require "'s # _Translator hash-table for fast quoting # -_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" -_Translator = { - '\000' : '\\000', '\001' : '\\001', '\002' : '\\002', - '\003' : '\\003', '\004' : '\\004', '\005' : '\\005', - '\006' : '\\006', '\007' : '\\007', '\010' : '\\010', - '\011' : '\\011', '\012' : '\\012', '\013' : '\\013', - '\014' : '\\014', '\015' : '\\015', '\016' : '\\016', - '\017' : '\\017', '\020' : '\\020', '\021' : '\\021', - '\022' : '\\022', '\023' : '\\023', '\024' : '\\024', - '\025' : '\\025', '\026' : '\\026', '\027' : '\\027', - '\030' : '\\030', '\031' : '\\031', '\032' : '\\032', - '\033' : '\\033', '\034' : '\\034', '\035' : '\\035', - '\036' : '\\036', '\037' : '\\037', +_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~:" +_UnescapedChars = _LegalChars + ' ()/<=>?@[]{}' - # Because of the way browsers really handle cookies (as opposed - # to what the RFC says) we also encode , and ; +_Translator = {n: '\\%03o' % n + for n in set(range(256)) - set(map(ord, _UnescapedChars))} +_Translator.update({ + ord('"'): '\\"', + ord('\\'): '\\\\', +}) - ',' : '\\054', ';' : '\\073', +_is_legal_key = re.compile('[%s]+' % _LegalChars).fullmatch - '"' : '\\"', '\\' : '\\\\', - - '\177' : '\\177', '\200' : '\\200', '\201' : '\\201', - '\202' : '\\202', '\203' : '\\203', '\204' : '\\204', - '\205' : '\\205', '\206' : '\\206', '\207' : '\\207', - '\210' : '\\210', '\211' : '\\211', '\212' : '\\212', - '\213' : '\\213', '\214' : '\\214', '\215' : '\\215', - '\216' : '\\216', '\217' : '\\217', '\220' : '\\220', - '\221' : '\\221', '\222' : '\\222', '\223' : '\\223', - '\224' : '\\224', '\225' : '\\225', '\226' : '\\226', - '\227' : '\\227', '\230' : '\\230', '\231' : '\\231', - '\232' : '\\232', '\233' : '\\233', '\234' : '\\234', - '\235' : '\\235', '\236' : '\\236', '\237' : '\\237', - '\240' : '\\240', '\241' : '\\241', '\242' : '\\242', - '\243' : '\\243', '\244' : '\\244', '\245' : '\\245', - '\246' : '\\246', '\247' : '\\247', '\250' : '\\250', - '\251' : '\\251', '\252' : '\\252', '\253' : '\\253', - '\254' : '\\254', '\255' : '\\255', '\256' : '\\256', - '\257' : '\\257', '\260' : '\\260', '\261' : '\\261', - '\262' : '\\262', '\263' : '\\263', '\264' : '\\264', - '\265' : '\\265', '\266' : '\\266', '\267' : '\\267', - '\270' : '\\270', '\271' : '\\271', '\272' : '\\272', - '\273' : '\\273', '\274' : '\\274', '\275' : '\\275', - '\276' : '\\276', '\277' : '\\277', '\300' : '\\300', - '\301' : '\\301', '\302' : '\\302', '\303' : '\\303', - '\304' : '\\304', '\305' : '\\305', '\306' : '\\306', - '\307' : '\\307', '\310' : '\\310', '\311' : '\\311', - '\312' : '\\312', '\313' : '\\313', '\314' : '\\314', - '\315' : '\\315', '\316' : '\\316', '\317' : '\\317', - '\320' : '\\320', '\321' : '\\321', '\322' : '\\322', - '\323' : '\\323', '\324' : '\\324', '\325' : '\\325', - '\326' : '\\326', '\327' : '\\327', '\330' : '\\330', - '\331' : '\\331', '\332' : '\\332', '\333' : '\\333', - '\334' : '\\334', '\335' : '\\335', '\336' : '\\336', - '\337' : '\\337', '\340' : '\\340', '\341' : '\\341', - '\342' : '\\342', '\343' : '\\343', '\344' : '\\344', - '\345' : '\\345', '\346' : '\\346', '\347' : '\\347', - '\350' : '\\350', '\351' : '\\351', '\352' : '\\352', - '\353' : '\\353', '\354' : '\\354', '\355' : '\\355', - '\356' : '\\356', '\357' : '\\357', '\360' : '\\360', - '\361' : '\\361', '\362' : '\\362', '\363' : '\\363', - '\364' : '\\364', '\365' : '\\365', '\366' : '\\366', - 
'\367' : '\\367', '\370' : '\\370', '\371' : '\\371', - '\372' : '\\372', '\373' : '\\373', '\374' : '\\374', - '\375' : '\\375', '\376' : '\\376', '\377' : '\\377' - } - -def _quote(str, LegalChars=_LegalChars): +def _quote(str): r"""Quote a string for use in a cookie header. If the string does not need to be double-quoted, then just return the string. Otherwise, surround the string in doublequotes and quote (with a \) special characters. """ - if all(c in LegalChars for c in str): + if str is None or _is_legal_key(str): return str else: - return '"' + _nulljoin(_Translator.get(s, s) for s in str) + '"' + return '"' + str.translate(_Translator) + '"' _OctalPatt = re.compile(r"\\[0-3][0-7][0-7]") @@ -241,7 +195,7 @@ _QuotePatt = re.compile(r"[\\].") def _unquote(str): # If there aren't any doublequotes, # then there can't be any special characters. See RFC 2109. - if len(str) < 2: + if str is None or len(str) < 2: return str if str[0] != '"' or str[-1] != '"': return str @@ -330,8 +284,8 @@ class Morsel(dict): "comment" : "Comment", "domain" : "Domain", "max-age" : "Max-Age", - "secure" : "secure", - "httponly" : "httponly", + "secure" : "Secure", + "httponly" : "HttpOnly", "version" : "Version", } @@ -339,33 +293,108 @@ class Morsel(dict): def __init__(self): # Set defaults - self.key = self.value = self.coded_value = None + self._key = self._value = self._coded_value = None # Set default attributes for key in self._reserved: dict.__setitem__(self, key, "") + @property + def key(self): + return self._key + + @key.setter + def key(self, key): + _warn_deprecated_setter('key') + self._key = key + + @property + def value(self): + return self._value + + @value.setter + def value(self, value): + _warn_deprecated_setter('value') + self._value = value + + @property + def coded_value(self): + return self._coded_value + + @coded_value.setter + def coded_value(self, coded_value): + _warn_deprecated_setter('coded_value') + self._coded_value = coded_value + def __setitem__(self, K, V): K = K.lower() if not K in self._reserved: - raise CookieError("Invalid Attribute %s" % K) + raise CookieError("Invalid attribute %r" % (K,)) dict.__setitem__(self, K, V) + def setdefault(self, key, val=None): + key = key.lower() + if key not in self._reserved: + raise CookieError("Invalid attribute %r" % (key,)) + return dict.setdefault(self, key, val) + + def __eq__(self, morsel): + if not isinstance(morsel, Morsel): + return NotImplemented + return (dict.__eq__(self, morsel) and + self._value == morsel._value and + self._key == morsel._key and + self._coded_value == morsel._coded_value) + + __ne__ = object.__ne__ + + def copy(self): + morsel = Morsel() + dict.update(morsel, self) + morsel.__dict__.update(self.__dict__) + return morsel + + def update(self, values): + data = {} + for key, val in dict(values).items(): + key = key.lower() + if key not in self._reserved: + raise CookieError("Invalid attribute %r" % (key,)) + data[key] = val + dict.update(self, data) + def isReservedKey(self, K): return K.lower() in self._reserved def set(self, key, val, coded_val, LegalChars=_LegalChars): - # First we verify that the key isn't a reserved word - # Second we make sure it only contains legal characters + if LegalChars != _LegalChars: + import warnings + warnings.warn( + 'LegalChars parameter is deprecated, ignored and will ' + 'be removed in future versions.', DeprecationWarning, + stacklevel=2) + if key.lower() in self._reserved: - raise CookieError("Attempt to set a reserved key: %s" % key) - if any(c not in LegalChars for 
c in key): - raise CookieError("Illegal key value: %s" % key) + raise CookieError('Attempt to set a reserved key %r' % (key,)) + if not _is_legal_key(key): + raise CookieError('Illegal key %r' % (key,)) # It's a good key, so save it. - self.key = key - self.value = val - self.coded_value = coded_val + self._key = key + self._value = val + self._coded_value = coded_val + + def __getstate__(self): + return { + 'key': self._key, + 'value': self._value, + 'coded_value': self._coded_value, + } + + def __setstate__(self, state): + self._key = state['key'] + self._value = state['value'] + self._coded_value = state['coded_value'] def output(self, attrs=None, header="Set-Cookie:"): return "%s %s" % (header, self.OutputString(attrs)) @@ -373,8 +402,7 @@ class Morsel(dict): __str__ = output def __repr__(self): - return '<%s: %s=%s>' % (self.__class__.__name__, - self.key, repr(self.value)) + return '<%s: %s>' % (self.__class__.__name__, self.OutputString()) def js_output(self, attrs=None): # Print javascript @@ -408,10 +436,9 @@ class Morsel(dict): append("%s=%s" % (self._reserved[key], _getdate(value))) elif key == "max-age" and isinstance(value, int): append("%s=%d" % (self._reserved[key], value)) - elif key == "secure": - append(str(self._reserved[key])) - elif key == "httponly": - append(str(self._reserved[key])) + elif key in self._flags: + if value: + append(str(self._reserved[key])) else: append("%s=%s" % (self._reserved[key], value)) @@ -428,11 +455,13 @@ class Morsel(dict): # result, the parsing rules here are less strict. # -_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]" +_LegalKeyChars = r"\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=" +_LegalValueChars = _LegalKeyChars + '\[\]' _CookiePattern = re.compile(r""" (?x) # This is a verbose pattern + \s* # Optional whitespace at start of cookie (?P # Start of group 'key' - """ + _LegalCharsPatt + r"""+? # Any word of at least one letter + [""" + _LegalKeyChars + r"""]+? # Any word of at least one letter ) # End of group 'key' ( # Optional group: there may not be a value. \s*=\s* # Equal Sign @@ -441,7 +470,7 @@ _CookiePattern = re.compile(r""" | # or \w{3},\s[\w\d\s-]{9,11}\s[\d:]{8}\sGMT # Special case for "expires" attr | # or - """ + _LegalCharsPatt + r"""* # Any word or empty string + [""" + _LegalValueChars + r"""]* # Any word or empty string ) # End of group 'val' )? # End of optional value group \s* # Any number of spaces. @@ -485,8 +514,12 @@ class BaseCookie(dict): def __setitem__(self, key, value): """Dictionary style assignment.""" - rval, cval = self.value_encode(value) - self.__set(key, rval, cval) + if isinstance(value, Morsel): + # allow assignment of constructed Morsels (e.g. for pickling) + dict.__setitem__(self, key, value) + else: + rval, cval = self.value_encode(value) + self.__set(key, rval, cval) def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"): """Return a string suitable for HTTP.""" @@ -528,13 +561,20 @@ class BaseCookie(dict): return def __parse_string(self, str, patt=_CookiePattern): - i = 0 # Our starting point - n = len(str) # Length of string - M = None # current morsel + i = 0 # Our starting point + n = len(str) # Length of string + parsed_items = [] # Parsed (type, key, value) triples + morsel_seen = False # A key=value pair was previously encountered + TYPE_ATTRIBUTE = 1 + TYPE_KEYVALUE = 2 + + # We first parse the whole cookie string and reject it if it's + # syntactically invalid (this helps avoid some classes of injection + # attacks). 
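The Morsel additions above (a real __eq__, copy(), and the __getstate__/__setstate__ pair, plus BaseCookie.__setitem__ now accepting ready-made Morsel objects) make morsels comparable and picklable. A small sketch with invented cookie data:

    import pickle
    from http.cookies import Morsel

    m = Morsel()
    m.set('token', 'abc123', 'abc123')   # key, value, coded_value
    m['path'] = '/'
    m['httponly'] = True

    clone = m.copy()
    assert clone == m                    # compares attributes *and* key/value/coded_value

    restored = pickle.loads(pickle.dumps(m))
    assert restored == m                 # round-trips via __getstate__/__setstate__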
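The comment above introduces the two-phase parse that __parse_string() carries out in the loop continued just below: the whole cookie string is validated first and morsels are only created once it passes, so a suspect string is now dropped entirely instead of being applied in part. An illustrative sketch, with invented cookie values:

    from http.cookies import SimpleCookie

    jar = SimpleCookie()
    jar.load('session=38afes7a8; Path=/; HttpOnly')
    assert jar['session'].value == '38afes7a8'
    assert jar['session']['path'] == '/'
    assert jar['session']['httponly'] is True

    # An attribute with no preceding name=value pair makes the whole string
    # invalid, so nothing at all is stored (previously the stray attribute was
    # ignored and the rest of the string was still loaded).
    jar = SimpleCookie()
    jar.load('Path=/; session=38afes7a8')
    assert len(jar) == 0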
while 0 <= i < n: # Start looking for a cookie - match = patt.search(str, i) + match = patt.match(str, i) if not match: # No more cookies break @@ -542,22 +582,41 @@ class BaseCookie(dict): key, value = match.group("key"), match.group("val") i = match.end(0) - # Parse the key, value in case it's metainfo if key[0] == "$": - # We ignore attributes which pertain to the cookie - # mechanism as a whole. See RFC 2109. - # (Does anyone care?) - if M: - M[key[1:]] = value + if not morsel_seen: + # We ignore attributes which pertain to the cookie + # mechanism as a whole, such as "$Version". + # See RFC 2965. (Does anyone care?) + continue + parsed_items.append((TYPE_ATTRIBUTE, key[1:], value)) elif key.lower() in Morsel._reserved: - if M: - if value is None: - if key.lower() in Morsel._flags: - M[key] = True + if not morsel_seen: + # Invalid cookie string + return + if value is None: + if key.lower() in Morsel._flags: + parsed_items.append((TYPE_ATTRIBUTE, key, True)) else: - M[key] = _unquote(value) + # Invalid cookie string + return + else: + parsed_items.append((TYPE_ATTRIBUTE, key, _unquote(value))) elif value is not None: - rval, cval = self.value_decode(value) + parsed_items.append((TYPE_KEYVALUE, key, self.value_decode(value))) + morsel_seen = True + else: + # Invalid cookie string + return + + # The cookie string is valid, apply it. + M = None # current morsel + for tp, key, value in parsed_items: + if tp == TYPE_ATTRIBUTE: + assert M is not None + M[key] = value + else: + assert tp == TYPE_KEYVALUE + rval, cval = value self.__set(key, rval, cval) M = self[key] diff --git a/Darwin/lib/python3.4/http/server.py b/Darwin/lib/python3.5/http/server.py similarity index 86% rename from Darwin/lib/python3.4/http/server.py rename to Darwin/lib/python3.5/http/server.py index 6ce6bda..fd13be3 100644 --- a/Darwin/lib/python3.4/http/server.py +++ b/Darwin/lib/python3.5/http/server.py @@ -82,7 +82,10 @@ XXX To do: __version__ = "0.6" -__all__ = ["HTTPServer", "BaseHTTPRequestHandler"] +__all__ = [ + "HTTPServer", "BaseHTTPRequestHandler", + "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler", +] import html import http.client @@ -100,6 +103,8 @@ import urllib.parse import copy import argparse +from http import HTTPStatus + # Default error message template DEFAULT_ERROR_MESSAGE = """\ @@ -270,7 +275,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): """ self.command = None # set in case of error on the first line self.request_version = version = self.default_request_version - self.close_connection = 1 + self.close_connection = True requestline = str(self.raw_requestline, 'iso-8859-1') requestline = requestline.rstrip('\r\n') self.requestline = requestline @@ -278,7 +283,9 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): if len(words) == 3: command, path, version = words if version[:5] != 'HTTP/': - self.send_error(400, "Bad request version (%r)" % version) + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad request version (%r)" % version) return False try: base_version_number = version.split('/', 1)[1] @@ -293,25 +300,31 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): raise ValueError version_number = int(version_number[0]), int(version_number[1]) except (ValueError, IndexError): - self.send_error(400, "Bad request version (%r)" % version) + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad request version (%r)" % version) return False if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1": - self.close_connection = 0 + 
self.close_connection = False if version_number >= (2, 0): - self.send_error(505, - "Invalid HTTP Version (%s)" % base_version_number) + self.send_error( + HTTPStatus.HTTP_VERSION_NOT_SUPPORTED, + "Invalid HTTP Version (%s)" % base_version_number) return False elif len(words) == 2: command, path = words - self.close_connection = 1 + self.close_connection = True if command != 'GET': - self.send_error(400, - "Bad HTTP/0.9 request type (%r)" % command) + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad HTTP/0.9 request type (%r)" % command) return False elif not words: return False else: - self.send_error(400, "Bad request syntax (%r)" % requestline) + self.send_error( + HTTPStatus.BAD_REQUEST, + "Bad request syntax (%r)" % requestline) return False self.command, self.path, self.request_version = command, path, version @@ -320,15 +333,17 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): self.headers = http.client.parse_headers(self.rfile, _class=self.MessageClass) except http.client.LineTooLong: - self.send_error(400, "Line too long") + self.send_error( + HTTPStatus.BAD_REQUEST, + "Line too long") return False conntype = self.headers.get('Connection', "") if conntype.lower() == 'close': - self.close_connection = 1 + self.close_connection = True elif (conntype.lower() == 'keep-alive' and self.protocol_version >= "HTTP/1.1"): - self.close_connection = 0 + self.close_connection = False # Examine the headers and look for an Expect directive expect = self.headers.get('Expect', "") if (expect.lower() == "100-continue" and @@ -352,7 +367,7 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): False. """ - self.send_response_only(100) + self.send_response_only(HTTPStatus.CONTINUE) self.end_headers() return True @@ -370,17 +385,19 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): self.requestline = '' self.request_version = '' self.command = '' - self.send_error(414) + self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG) return if not self.raw_requestline: - self.close_connection = 1 + self.close_connection = True return if not self.parse_request(): # An error code has been sent, just exit return mname = 'do_' + self.command if not hasattr(self, mname): - self.send_error(501, "Unsupported method (%r)" % self.command) + self.send_error( + HTTPStatus.NOT_IMPLEMENTED, + "Unsupported method (%r)" % self.command) return method = getattr(self, mname) method() @@ -388,12 +405,12 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): except socket.timeout as e: #a read or a write timed out. 
Discard this connection self.log_error("Request timed out: %r", e) - self.close_connection = 1 + self.close_connection = True return def handle(self): """Handle multiple requests if necessary.""" - self.close_connection = 1 + self.close_connection = True self.handle_one_request() while not self.close_connection: @@ -435,7 +452,11 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): self.send_header('Connection', 'close') self.send_header('Content-Length', int(len(body))) self.end_headers() - if self.command != 'HEAD' and code >= 200 and code not in (204, 304): + + if (self.command != 'HEAD' and + code >= 200 and + code not in ( + HTTPStatus.NO_CONTENT, HTTPStatus.NOT_MODIFIED)): self.wfile.write(body) def send_response(self, code, message=None): @@ -475,9 +496,9 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): if keyword.lower() == 'connection': if value.lower() == 'close': - self.close_connection = 1 + self.close_connection = True elif value.lower() == 'keep-alive': - self.close_connection = 0 + self.close_connection = False def end_headers(self): """Send the blank line ending the MIME headers.""" @@ -496,7 +517,8 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): This is called by send_response(). """ - + if isinstance(code, HTTPStatus): + code = code.value self.log_message('"%s" %s %s', self.requestline, str(code), str(size)) @@ -579,82 +601,11 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): # MessageClass used to parse headers MessageClass = http.client.HTTPMessage - # Table mapping response codes to messages; entries have the - # form {code: (shortmessage, longmessage)}. - # See RFC 2616 and 6585. + # hack to maintain backwards compatibility responses = { - 100: ('Continue', 'Request received, please continue'), - 101: ('Switching Protocols', - 'Switching to new protocol; obey Upgrade header'), - - 200: ('OK', 'Request fulfilled, document follows'), - 201: ('Created', 'Document created, URL follows'), - 202: ('Accepted', - 'Request accepted, processing continues off-line'), - 203: ('Non-Authoritative Information', 'Request fulfilled from cache'), - 204: ('No Content', 'Request fulfilled, nothing follows'), - 205: ('Reset Content', 'Clear input form for further input.'), - 206: ('Partial Content', 'Partial content follows.'), - - 300: ('Multiple Choices', - 'Object has several resources -- see URI list'), - 301: ('Moved Permanently', 'Object moved permanently -- see URI list'), - 302: ('Found', 'Object moved temporarily -- see URI list'), - 303: ('See Other', 'Object moved -- see Method and URL list'), - 304: ('Not Modified', - 'Document has not changed since given time'), - 305: ('Use Proxy', - 'You must use proxy specified in Location to access this ' - 'resource.'), - 307: ('Temporary Redirect', - 'Object moved temporarily -- see URI list'), - - 400: ('Bad Request', - 'Bad request syntax or unsupported method'), - 401: ('Unauthorized', - 'No permission -- see authorization schemes'), - 402: ('Payment Required', - 'No payment -- see charging schemes'), - 403: ('Forbidden', - 'Request forbidden -- authorization will not help'), - 404: ('Not Found', 'Nothing matches the given URI'), - 405: ('Method Not Allowed', - 'Specified method is invalid for this resource.'), - 406: ('Not Acceptable', 'URI not available in preferred format.'), - 407: ('Proxy Authentication Required', 'You must authenticate with ' - 'this proxy before proceeding.'), - 408: ('Request Timeout', 'Request timed out; try again later.'), - 409: 
('Conflict', 'Request conflict.'), - 410: ('Gone', - 'URI no longer exists and has been permanently removed.'), - 411: ('Length Required', 'Client must specify Content-Length.'), - 412: ('Precondition Failed', 'Precondition in headers is false.'), - 413: ('Request Entity Too Large', 'Entity is too large.'), - 414: ('Request-URI Too Long', 'URI is too long.'), - 415: ('Unsupported Media Type', 'Entity body in unsupported format.'), - 416: ('Requested Range Not Satisfiable', - 'Cannot satisfy request range.'), - 417: ('Expectation Failed', - 'Expect condition could not be satisfied.'), - 428: ('Precondition Required', - 'The origin server requires the request to be conditional.'), - 429: ('Too Many Requests', 'The user has sent too many requests ' - 'in a given amount of time ("rate limiting").'), - 431: ('Request Header Fields Too Large', 'The server is unwilling to ' - 'process the request because its header fields are too large.'), - - 500: ('Internal Server Error', 'Server got itself in trouble'), - 501: ('Not Implemented', - 'Server does not support this operation'), - 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'), - 503: ('Service Unavailable', - 'The server cannot process the request due to a high load'), - 504: ('Gateway Timeout', - 'The gateway server did not receive a timely response'), - 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'), - 511: ('Network Authentication Required', - 'The client needs to authenticate to gain network access.'), - } + v: (v.phrase, v.description) + for v in HTTPStatus.__members__.values() + } class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): @@ -701,10 +652,14 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): path = self.translate_path(self.path) f = None if os.path.isdir(path): - if not self.path.endswith('/'): + parts = urllib.parse.urlsplit(self.path) + if not parts.path.endswith('/'): # redirect browser - doing basically what apache does - self.send_response(301) - self.send_header("Location", self.path + "/") + self.send_response(HTTPStatus.MOVED_PERMANENTLY) + new_parts = (parts[0], parts[1], parts[2] + '/', + parts[3], parts[4]) + new_url = urllib.parse.urlunsplit(new_parts) + self.send_header("Location", new_url) self.end_headers() return None for index in "index.html", "index.htm": @@ -718,10 +673,10 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): try: f = open(path, 'rb') except OSError: - self.send_error(404, "File not found") + self.send_error(HTTPStatus.NOT_FOUND, "File not found") return None try: - self.send_response(200) + self.send_response(HTTPStatus.OK) self.send_header("Content-type", ctype) fs = os.fstat(f.fileno()) self.send_header("Content-Length", str(fs[6])) @@ -743,11 +698,18 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): try: list = os.listdir(path) except OSError: - self.send_error(404, "No permission to list directory") + self.send_error( + HTTPStatus.NOT_FOUND, + "No permission to list directory") return None list.sort(key=lambda a: a.lower()) r = [] - displaypath = html.escape(urllib.parse.unquote(self.path)) + try: + displaypath = urllib.parse.unquote(self.path, + errors='surrogatepass') + except UnicodeDecodeError: + displaypath = urllib.parse.unquote(path) + displaypath = html.escape(displaypath) enc = sys.getfilesystemencoding() title = 'Directory listing for %s' % displaypath r.append('
%s' - % (urllib.parse.quote(linkname), html.escape(displayname))) + % (urllib.parse.quote(linkname, + errors='surrogatepass'), + html.escape(displayname))) r.append('\n
\n\n\n') - encoded = '\n'.join(r).encode(enc) + encoded = '\n'.join(r).encode(enc, 'surrogateescape') f = io.BytesIO() f.write(encoded) f.seek(0) - self.send_response(200) + self.send_response(HTTPStatus.OK) self.send_header("Content-type", "text/html; charset=%s" % enc) self.send_header("Content-Length", str(len(encoded))) self.end_headers() @@ -794,7 +758,11 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): path = path.split('#',1)[0] # Don't forget explicit trailing slash when normalizing. Issue17324 trailing_slash = path.rstrip().endswith('/') - path = posixpath.normpath(urllib.parse.unquote(path)) + try: + path = urllib.parse.unquote(path, errors='surrogatepass') + except UnicodeDecodeError: + path = urllib.parse.unquote(path) + path = posixpath.normpath(path) words = path.split('/') words = filter(None, words) path = os.getcwd() @@ -953,7 +921,9 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): if self.is_cgi(): self.run_cgi() else: - self.send_error(501, "Can only POST to CGI scripts") + self.send_error( + HTTPStatus.NOT_IMPLEMENTED, + "Can only POST to CGI scripts") def send_head(self): """Version of send_head that support CGI scripts""" @@ -977,7 +947,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): (and the next character is a '/' or the end of the string). """ - collapsed_path = _url_collapse_path(self.path) + collapsed_path = _url_collapse_path(urllib.parse.unquote(self.path)) dir_sep = collapsed_path.find('/', 1) head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] if head in self.cgi_directories: @@ -1000,16 +970,16 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): def run_cgi(self): """Execute a CGI script.""" dir, rest = self.cgi_info - - i = rest.find('/') + path = dir + '/' + rest + i = path.find('/', len(dir)+1) while i >= 0: - nextdir = rest[:i] - nextrest = rest[i+1:] + nextdir = path[:i] + nextrest = path[i+1:] scriptdir = self.translate_path(nextdir) if os.path.isdir(scriptdir): dir, rest = nextdir, nextrest - i = rest.find('/') + i = path.find('/', len(dir)+1) else: break @@ -1031,17 +1001,21 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): scriptname = dir + '/' + script scriptfile = self.translate_path(scriptname) if not os.path.exists(scriptfile): - self.send_error(404, "No such CGI script (%r)" % scriptname) + self.send_error( + HTTPStatus.NOT_FOUND, + "No such CGI script (%r)" % scriptname) return if not os.path.isfile(scriptfile): - self.send_error(403, "CGI script is not a plain file (%r)" % - scriptname) + self.send_error( + HTTPStatus.FORBIDDEN, + "CGI script is not a plain file (%r)" % scriptname) return ispy = self.is_python(scriptname) if self.have_fork or not ispy: if not self.is_executable(scriptfile): - self.send_error(403, "CGI script is not executable (%r)" % - scriptname) + self.send_error( + HTTPStatus.FORBIDDEN, + "CGI script is not executable (%r)" % scriptname) return # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html @@ -1109,7 +1083,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): env.setdefault(k, "") - self.send_response(200, "Script output follows") + self.send_response(HTTPStatus.OK, "Script output follows") self.flush_headers() decoded_query = query.replace('+', ' ') diff --git a/Darwin/lib/python3.4/idlelib/AutoComplete.py b/Darwin/lib/python3.5/idlelib/AutoComplete.py similarity index 98% rename from Darwin/lib/python3.4/idlelib/AutoComplete.py rename to Darwin/lib/python3.5/idlelib/AutoComplete.py index 
f366030..b20512d 100644 --- a/Darwin/lib/python3.4/idlelib/AutoComplete.py +++ b/Darwin/lib/python3.5/idlelib/AutoComplete.py @@ -226,3 +226,8 @@ class AutoComplete: namespace = sys.modules.copy() namespace.update(__main__.__dict__) return eval(name, namespace) + + +if __name__ == '__main__': + from unittest import main + main('idlelib.idle_test.test_autocomplete', verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/AutoCompleteWindow.py b/Darwin/lib/python3.5/idlelib/AutoCompleteWindow.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/AutoCompleteWindow.py rename to Darwin/lib/python3.5/idlelib/AutoCompleteWindow.py diff --git a/Darwin/lib/python3.4/idlelib/AutoExpand.py b/Darwin/lib/python3.5/idlelib/AutoExpand.py similarity index 73% rename from Darwin/lib/python3.4/idlelib/AutoExpand.py rename to Darwin/lib/python3.5/idlelib/AutoExpand.py index 9e93d57..7059054 100644 --- a/Darwin/lib/python3.4/idlelib/AutoExpand.py +++ b/Darwin/lib/python3.5/idlelib/AutoExpand.py @@ -1,3 +1,17 @@ +'''Complete the current word before the cursor with words in the editor. + +Each menu selection or shortcut key selection replaces the word with a +different word with the same prefix. The search for matches begins +before the target and moves toward the top of the editor. It then starts +after the cursor and moves down. It then returns to the original word and +the cycle starts again. + +Changing the current text line or leaving the cursor in a different +place before requesting the next selection causes AutoExpand to reset +its state. + +This is an extension file and there is only one instance of AutoExpand. +''' import string import re @@ -20,6 +34,7 @@ class AutoExpand: self.state = None def expand_word_event(self, event): + "Replace the current word with the next expansion." curinsert = self.text.index("insert") curline = self.text.get("insert linestart", "insert lineend") if not self.state: @@ -46,6 +61,7 @@ class AutoExpand: return "break" def getwords(self): + "Return a list of words that match the prefix before the cursor." word = self.getprevword() if not word: return [] @@ -76,8 +92,13 @@ class AutoExpand: return words def getprevword(self): + "Return the word prefix before the cursor." line = self.text.get("insert linestart", "insert") i = len(line) while i > 0 and line[i-1] in self.wordchars: i = i-1 return line[i:] + +if __name__ == '__main__': + import unittest + unittest.main('idlelib.idle_test.test_autoexpand', verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/Bindings.py b/Darwin/lib/python3.5/idlelib/Bindings.py similarity index 91% rename from Darwin/lib/python3.4/idlelib/Bindings.py rename to Darwin/lib/python3.5/idlelib/Bindings.py index df2b251..226671c 100644 --- a/Darwin/lib/python3.4/idlelib/Bindings.py +++ b/Darwin/lib/python3.5/idlelib/Bindings.py @@ -8,6 +8,8 @@ the PythonShell window, and a Format menu which is only present in the Editor windows. 
""" +from importlib.util import find_spec + from idlelib.configHandler import idleConf # Warning: menudefs is altered in macosxSupport.overrideRootMenu() @@ -75,7 +77,8 @@ menudefs = [ ('!_Auto-open Stack Viewer', '<>'), ]), ('options', [ - ('_Configure IDLE...', '<>'), + ('Configure _IDLE', '<>'), + ('Configure _Extensions', '<>'), None, ]), ('help', [ @@ -86,4 +89,7 @@ menudefs = [ ]), ] +if find_spec('turtledemo'): + menudefs[-1][1].append(('Turtle Demo', '<>')) + default_keydefs = idleConf.GetCurrentKeySet() diff --git a/Darwin/lib/python3.4/idlelib/CREDITS.txt b/Darwin/lib/python3.5/idlelib/CREDITS.txt similarity index 100% rename from Darwin/lib/python3.4/idlelib/CREDITS.txt rename to Darwin/lib/python3.5/idlelib/CREDITS.txt diff --git a/Darwin/lib/python3.4/idlelib/CallTipWindow.py b/Darwin/lib/python3.5/idlelib/CallTipWindow.py similarity index 85% rename from Darwin/lib/python3.4/idlelib/CallTipWindow.py rename to Darwin/lib/python3.5/idlelib/CallTipWindow.py index 8e29dab..170d146 100644 --- a/Darwin/lib/python3.4/idlelib/CallTipWindow.py +++ b/Darwin/lib/python3.5/idlelib/CallTipWindow.py @@ -2,9 +2,8 @@ After ToolTip.py, which uses ideas gleaned from PySol Used by the CallTips IDLE extension. - """ -from tkinter import * +from tkinter import Toplevel, Label, LEFT, SOLID, TclError HIDE_VIRTUAL_EVENT_NAME = "<>" HIDE_SEQUENCES = ("", "") @@ -133,37 +132,29 @@ class CallTip: return bool(self.tipwindow) +def _calltip_window(parent): # htest # + from tkinter import Toplevel, Text, LEFT, BOTH -############################### -# -# Test Code -# -class container: # Conceptually an editor_window - def __init__(self): - root = Tk() - text = self.text = Text(root) - text.pack(side=LEFT, fill=BOTH, expand=1) - text.insert("insert", "string.split") - root.update() - self.calltip = CallTip(text) + top = Toplevel(parent) + top.title("Test calltips") + top.geometry("200x100+%d+%d" % (parent.winfo_rootx() + 200, + parent.winfo_rooty() + 150)) + text = Text(top) + text.pack(side=LEFT, fill=BOTH, expand=1) + text.insert("insert", "string.split") + top.update() + calltip = CallTip(text) - text.event_add("<>", "(") - text.event_add("<>", ")") - text.bind("<>", self.calltip_show) - text.bind("<>", self.calltip_hide) - - text.focus_set() - root.mainloop() - - def calltip_show(self, event): - self.calltip.showtip("Hello world") - - def calltip_hide(self, event): - self.calltip.hidetip() - -def main(): - # Test code - c=container() + def calltip_show(event): + calltip.showtip("(s=Hello world)", "insert", "end") + def calltip_hide(event): + calltip.hidetip() + text.event_add("<>", "(") + text.event_add("<>", ")") + text.bind("<>", calltip_show) + text.bind("<>", calltip_hide) + text.focus_set() if __name__=='__main__': - main() + from idlelib.idle_test.htest import run + run(_calltip_window) diff --git a/Darwin/lib/python3.4/idlelib/CallTips.py b/Darwin/lib/python3.5/idlelib/CallTips.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/CallTips.py rename to Darwin/lib/python3.5/idlelib/CallTips.py diff --git a/Darwin/lib/python3.4/idlelib/ChangeLog b/Darwin/lib/python3.5/idlelib/ChangeLog similarity index 99% rename from Darwin/lib/python3.4/idlelib/ChangeLog rename to Darwin/lib/python3.5/idlelib/ChangeLog index 985871b..90e02f6 100644 --- a/Darwin/lib/python3.4/idlelib/ChangeLog +++ b/Darwin/lib/python3.5/idlelib/ChangeLog @@ -20,7 +20,7 @@ IDLEfork ChangeLog 2001-07-19 14:49 elguavas * ChangeLog, EditorWindow.py, INSTALLATION, NEWS.txt, README.txt, - TODO.txt, idlever.py: + TODO.txt, 
idlever.py: minor tidy-ups ready for 0.8.1 alpha tarball release 2001-07-17 15:12 kbk @@ -172,7 +172,7 @@ IDLEfork ChangeLog all this work w/ a future-stmt just looks harder and harder." --tim_one - (From Rel 1.8: "Hack to make this still work with Python 1.5.2. + (From Rel 1.8: "Hack to make this still work with Python 1.5.2. ;-( " --fdrake) 2001-07-14 14:51 kbk @@ -193,7 +193,7 @@ IDLEfork ChangeLog test() to _test()." --GvR This was an interesting merge. The join completely missed removing - goodname(), which was adjacent, but outside of, a small conflict. + goodname(), which was adjacent, but outside of, a small conflict. I only caught it by comparing the 1.1.3.2/1.1.3.3 diff. CVS ain't infallible. @@ -516,12 +516,12 @@ IDLEfork ChangeLog 2000-08-15 22:51 nowonder - * IDLEFORK.html: + * IDLEFORK.html: corrected email address 2000-08-15 22:47 nowonder - * IDLEFORK.html: + * IDLEFORK.html: added .html file for http://idlefork.sourceforge.net 2000-08-15 11:13 dscherer diff --git a/Darwin/lib/python3.4/idlelib/ClassBrowser.py b/Darwin/lib/python3.5/idlelib/ClassBrowser.py similarity index 86% rename from Darwin/lib/python3.4/idlelib/ClassBrowser.py rename to Darwin/lib/python3.5/idlelib/ClassBrowser.py index 71176cd..5be65ef 100644 --- a/Darwin/lib/python3.4/idlelib/ClassBrowser.py +++ b/Darwin/lib/python3.5/idlelib/ClassBrowser.py @@ -19,13 +19,23 @@ from idlelib.WindowList import ListedToplevel from idlelib.TreeWidget import TreeNode, TreeItem, ScrolledCanvas from idlelib.configHandler import idleConf +file_open = None # Method...Item and Class...Item use this. +# Normally PyShell.flist.open, but there is no PyShell.flist for htest. + class ClassBrowser: - def __init__(self, flist, name, path): + def __init__(self, flist, name, path, _htest=False): # XXX This API should change, if the file doesn't end in ".py" # XXX the code here is bogus! + """ + _htest - bool, change box when location running htest. 
+ """ + global file_open + if not _htest: + file_open = PyShell.flist.open self.name = name self.file = os.path.join(path[0], self.name + ".py") + self._htest = _htest self.init(flist) def close(self, event=None): @@ -40,6 +50,9 @@ class ClassBrowser: self.top = top = ListedToplevel(flist.root) top.protocol("WM_DELETE_WINDOW", self.close) top.bind("", self.close) + if self._htest: # place dialog below parent if running htest + top.geometry("+%d+%d" % + (flist.root.winfo_rootx(), flist.root.winfo_rooty() + 200)) self.settitle() top.focus_set() # create scrolled canvas @@ -94,7 +107,7 @@ class ModuleBrowserTreeItem(TreeItem): return [] try: dict = pyclbr.readmodule_ex(name, [dir] + sys.path) - except ImportError as msg: + except ImportError: return [] items = [] self.classes = {} @@ -163,7 +176,7 @@ class ClassBrowserTreeItem(TreeItem): def OnDoubleClick(self): if not os.path.exists(self.file): return - edit = PyShell.flist.open(self.file) + edit = file_open(self.file) if hasattr(self.cl, 'lineno'): lineno = self.cl.lineno edit.gotoline(lineno) @@ -199,10 +212,10 @@ class MethodBrowserTreeItem(TreeItem): def OnDoubleClick(self): if not os.path.exists(self.file): return - edit = PyShell.flist.open(self.file) + edit = file_open(self.file) edit.gotoline(self.cl.methods[self.name]) -def main(): +def _class_browser(parent): #Wrapper for htest try: file = __file__ except NameError: @@ -213,9 +226,11 @@ def main(): file = sys.argv[0] dir, file = os.path.split(file) name = os.path.splitext(file)[0] - ClassBrowser(PyShell.flist, name, [dir]) - if sys.stdin is sys.__stdin__: - mainloop() + flist = PyShell.PyShellFileList(parent) + global file_open + file_open = flist.open + ClassBrowser(flist, name, [dir], _htest=True) if __name__ == "__main__": - main() + from idlelib.idle_test.htest import run + run(_class_browser) diff --git a/Darwin/lib/python3.4/idlelib/CodeContext.py b/Darwin/lib/python3.5/idlelib/CodeContext.py similarity index 95% rename from Darwin/lib/python3.4/idlelib/CodeContext.py rename to Darwin/lib/python3.5/idlelib/CodeContext.py index 84491d5..7d25ada 100644 --- a/Darwin/lib/python3.4/idlelib/CodeContext.py +++ b/Darwin/lib/python3.5/idlelib/CodeContext.py @@ -15,8 +15,8 @@ import re from sys import maxsize as INFINITY from idlelib.configHandler import idleConf -BLOCKOPENERS = set(["class", "def", "elif", "else", "except", "finally", "for", - "if", "try", "while", "with"]) +BLOCKOPENERS = {"class", "def", "elif", "else", "except", "finally", "for", + "if", "try", "while", "with"} UPDATEINTERVAL = 100 # millisec FONTUPDATEINTERVAL = 1000 # millisec @@ -57,18 +57,18 @@ class CodeContext: # Calculate the border width and horizontal padding required to # align the context with the text in the main Text widget. # - # All values are passed through int(str()), since some + # All values are passed through getint(), since some # values may be pixel objects, which can't simply be added to ints. 
widgets = self.editwin.text, self.editwin.text_frame # Calculate the required vertical padding padx = 0 for widget in widgets: - padx += int(str( widget.pack_info()['padx'] )) - padx += int(str( widget.cget('padx') )) + padx += widget.tk.getint(widget.pack_info()['padx']) + padx += widget.tk.getint(widget.cget('padx')) # Calculate the required border width border = 0 for widget in widgets: - border += int(str( widget.cget('border') )) + border += widget.tk.getint(widget.cget('border')) self.label = tkinter.Label(self.editwin.top, text="\n" * (self.context_depth - 1), anchor=W, justify=LEFT, diff --git a/Darwin/lib/python3.4/idlelib/ColorDelegator.py b/Darwin/lib/python3.5/idlelib/ColorDelegator.py similarity index 88% rename from Darwin/lib/python3.4/idlelib/ColorDelegator.py rename to Darwin/lib/python3.5/idlelib/ColorDelegator.py index 61e2be4..13a9010 100644 --- a/Darwin/lib/python3.4/idlelib/ColorDelegator.py +++ b/Darwin/lib/python3.5/idlelib/ColorDelegator.py @@ -2,7 +2,6 @@ import time import re import keyword import builtins -from tkinter import * from idlelib.Delegator import Delegator from idlelib.configHandler import idleConf @@ -32,7 +31,6 @@ def make_pat(): prog = re.compile(make_pat(), re.S) idprog = re.compile(r"\s+(\w+)", re.S) -asprog = re.compile(r".*?\b(as)\b") class ColorDelegator(Delegator): @@ -40,7 +38,6 @@ class ColorDelegator(Delegator): Delegator.__init__(self) self.prog = prog self.idprog = idprog - self.asprog = asprog self.LoadTagDefs() def setdelegate(self, delegate): @@ -72,7 +69,6 @@ class ColorDelegator(Delegator): "DEFINITION": idleConf.GetHighlight(theme, "definition"), "SYNC": {'background':None,'foreground':None}, "TODO": {'background':None,'foreground':None}, - "BREAK": idleConf.GetHighlight(theme, "break"), "ERROR": idleConf.GetHighlight(theme, "error"), # The following is used by ReplaceDialog: "hit": idleConf.GetHighlight(theme, "hit"), @@ -214,22 +210,6 @@ class ColorDelegator(Delegator): self.tag_add("DEFINITION", head + "+%dc" % a, head + "+%dc" % b) - elif value == "import": - # color all the "as" words on same line, except - # if in a comment; cheap approximation to the - # truth - if '#' in chars: - endpos = chars.index('#') - else: - endpos = len(chars) - while True: - m1 = self.asprog.match(chars, b, endpos) - if not m1: - break - a, b = m1.span(1) - self.tag_add("KEYWORD", - head + "+%dc" % a, - head + "+%dc" % b) m = self.prog.search(chars, m.end()) if "SYNC" in self.tag_names(next + "-1c"): head = next @@ -253,17 +233,24 @@ class ColorDelegator(Delegator): for tag in self.tagdefs: self.tag_remove(tag, "1.0", "end") -def main(): +def _color_delegator(parent): # htest # + from tkinter import Toplevel, Text from idlelib.Percolator import Percolator - root = Tk() - root.wm_protocol("WM_DELETE_WINDOW", root.quit) - text = Text(background="white") + + top = Toplevel(parent) + top.title("Test ColorDelegator") + top.geometry("200x100+%d+%d" % (parent.winfo_rootx() + 200, + parent.winfo_rooty() + 150)) + source = "if somename: x = 'abc' # comment\nprint\n" + text = Text(top, background="white") text.pack(expand=1, fill="both") + text.insert("insert", source) text.focus_set() + p = Percolator(text) d = ColorDelegator() p.insertfilter(d) - root.mainloop() if __name__ == "__main__": - main() + from idlelib.idle_test.htest import run + run(_color_delegator) diff --git a/Darwin/lib/python3.4/idlelib/Debugger.py b/Darwin/lib/python3.5/idlelib/Debugger.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/Debugger.py rename to 
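# [editor's sketch, not part of the patch] The recurring "htest" conversion this
# patch applies to CallTipWindow, ClassBrowser, ColorDelegator and others: each
# module's ad-hoc test code becomes a small _name(parent) callable that builds
# its widget under a supplied parent.  _demo_window below is made up; the real
# callables are additionally registered with idlelib.idle_test.htest.run().
from tkinter import Tk, Toplevel, Label

def _demo_window(parent):  # htest-style callable (illustrative only)
    top = Toplevel(parent)
    top.title("Test demo window")
    # place the window near the parent, as the patched modules do
    top.geometry("+%d+%d" % (parent.winfo_rootx() + 200,
                             parent.winfo_rooty() + 150))
    Label(top, text="hello from the human-verified test").pack()

if __name__ == '__main__':
    # minimal stand-in for what htest.run() does with such a callable
    root = Tk()
    _demo_window(root)
    root.mainloop()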
Darwin/lib/python3.5/idlelib/Debugger.py index ca98b10..6875197 100644 --- a/Darwin/lib/python3.4/idlelib/Debugger.py +++ b/Darwin/lib/python3.5/idlelib/Debugger.py @@ -1,6 +1,5 @@ import os import bdb -import types from tkinter import * from idlelib.WindowList import ListedToplevel from idlelib.ScrolledList import ScrolledList diff --git a/Darwin/lib/python3.4/idlelib/Delegator.py b/Darwin/lib/python3.5/idlelib/Delegator.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/Delegator.py rename to Darwin/lib/python3.5/idlelib/Delegator.py diff --git a/Darwin/lib/python3.4/idlelib/EditorWindow.py b/Darwin/lib/python3.5/idlelib/EditorWindow.py similarity index 96% rename from Darwin/lib/python3.4/idlelib/EditorWindow.py rename to Darwin/lib/python3.5/idlelib/EditorWindow.py index 06fb137..3ac68bb 100644 --- a/Darwin/lib/python3.4/idlelib/EditorWindow.py +++ b/Darwin/lib/python3.5/idlelib/EditorWindow.py @@ -2,7 +2,7 @@ import importlib import importlib.abc import importlib.util import os -from platform import python_version +import platform import re import string import sys @@ -13,7 +13,6 @@ import traceback import webbrowser from idlelib.MultiCall import MultiCallCreator -from idlelib import idlever from idlelib import WindowList from idlelib import SearchDialog from idlelib import GrepDialog @@ -26,6 +25,8 @@ from idlelib import macosxSupport # The default tab setting for a Text widget, in average-width characters. TK_TABWIDTH_DEFAULT = 8 +_py_version = ' (%s)' % platform.python_version() + def _sphinx_version(): "Format sys.version_info to produce the Sphinx version string used to install the chm docs" major, minor, micro, level, serial = sys.version_info @@ -79,7 +80,7 @@ class HelpDialog(object): self.parent = None helpDialog = HelpDialog() # singleton instance -def _Help_dialog(parent): # wrapper for htest +def _help_dialog(parent): # wrapper for htest helpDialog.show_dialog(parent) @@ -122,8 +123,7 @@ class EditorWindow(object): # Safari requires real file:-URLs EditorWindow.help_url = 'file://' + EditorWindow.help_url else: - EditorWindow.help_url = "http://docs.python.org/%d.%d" % sys.version_info[:2] - currentTheme=idleConf.CurrentTheme() + EditorWindow.help_url = "https://docs.python.org/%d.%d/" % sys.version_info[:2] self.flist = flist root = root or flist.root self.root = root @@ -187,6 +187,8 @@ class EditorWindow(object): text.bind("<>", self.python_docs) text.bind("<>", self.about_dialog) text.bind("<>", self.config_dialog) + text.bind("<>", + self.config_extensions_dialog) text.bind("<>", self.open_module) text.bind("<>", lambda event: "break") text.bind("<>", self.select_all) @@ -222,18 +224,13 @@ class EditorWindow(object): text.bind("<>", self.flist.close_all_callback) text.bind("<>", self.open_class_browser) text.bind("<>", self.open_path_browser) + text.bind("<>", self.open_turtle_demo) self.set_status_bar() vbar['command'] = text.yview vbar.pack(side=RIGHT, fill=Y) text['yscrollcommand'] = vbar.set - fontWeight = 'normal' - if idleConf.GetOption('main', 'EditorWindow', 'font-bold', type='bool'): - fontWeight='bold' - text.config(font=(idleConf.GetOption('main', 'EditorWindow', 'font'), - idleConf.GetOption('main', 'EditorWindow', - 'font-size', type='int'), - fontWeight)) + text['font'] = idleConf.GetFont(self.root, 'main', 'EditorWindow') text_frame.pack(side=LEFT, fill=BOTH, expand=1) text.pack(side=TOP, fill=BOTH, expand=1) text.focus_set() @@ -347,19 +344,19 @@ class EditorWindow(object): def _filename_to_unicode(self, filename): - """convert filename 
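# [editor's sketch, not part of the patch] The EditorWindow font hunks above
# replace three separate GetOption() calls (family, size, font-bold) with the
# single GetFont() helper added to configHandler for 3.5; the call form is taken
# from the diff itself.  Module path is as laid out in this tree (it is
# idlelib.config on current Pythons).  Requires a display.
from tkinter import Tk, Text
from idlelib.configHandler import idleConf

root = Tk()
text = Text(root)
# GetFont returns a (family, size, weight) tuple read from the user config
text['font'] = idleConf.GetFont(root, 'main', 'EditorWindow')
print(text['font'])
root.destroy()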
to unicode in order to display it in Tk""" - if isinstance(filename, str) or not filename: - return filename - else: + """Return filename as BMP unicode so diplayable in Tk.""" + # Decode bytes to unicode. + if isinstance(filename, bytes): try: - return filename.decode(self.filesystemencoding) + filename = filename.decode(self.filesystemencoding) except UnicodeDecodeError: - # XXX try: - return filename.decode(self.encoding) + filename = filename.decode(self.encoding) except UnicodeDecodeError: # byte-to-byte conversion - return filename.decode('iso8859-1') + filename = filename.decode('iso8859-1') + # Replace non-BMP char with diamond questionmark. + return re.sub('[\U00010000-\U0010FFFF]', '\ufffd', filename) def new_callback(self, event): dirname, basename = self.io.defaultfilename() @@ -434,27 +431,25 @@ class EditorWindow(object): ("format", "F_ormat"), ("run", "_Run"), ("options", "_Options"), - ("windows", "_Windows"), + ("windows", "_Window"), ("help", "_Help"), ] - if sys.platform == "darwin": - menu_specs[-2] = ("windows", "_Window") - def createmenubar(self): mbar = self.menubar self.menudict = menudict = {} for name, label in self.menu_specs: underline, label = prepstr(label) - menudict[name] = menu = Menu(mbar, name=name) + menudict[name] = menu = Menu(mbar, name=name, tearoff=0) mbar.add_cascade(label=label, menu=menu, underline=underline) if macosxSupport.isCarbonTk(): # Insert the application menu - menudict['application'] = menu = Menu(mbar, name='apple') + menudict['application'] = menu = Menu(mbar, name='apple', + tearoff=0) mbar.add_cascade(label='IDLE', menu=menu) self.fill_menus() - self.recent_files_menu = Menu(self.menubar) + self.recent_files_menu = Menu(self.menubar, tearoff=0) self.menudict['file'].insert_cascade(3, label='Recent Files', underline=0, menu=self.recent_files_menu) @@ -540,6 +535,8 @@ class EditorWindow(object): def config_dialog(self, event=None): configDialog.ConfigDialog(self.top,'Settings') + def config_extensions_dialog(self, event=None): + configDialog.ConfigExtensionsDialog(self.top) def help_dialog(self, event=None): if self.root: @@ -686,16 +683,15 @@ class EditorWindow(object): self.flist.open(file_path) else: self.io.loadfile(file_path) + return file_path def open_class_browser(self, event=None): filename = self.io.filename - if not filename: - tkMessageBox.showerror( - "No filename", - "This buffer has no associated filename", - master=self.text) - self.text.focus_set() - return None + if not (self.__class__.__name__ == 'PyShellEditorWindow' + and filename): + filename = self.open_module() + if filename is None: + return head, tail = os.path.split(filename) base, ext = os.path.splitext(tail) from idlelib import ClassBrowser @@ -705,6 +701,14 @@ class EditorWindow(object): from idlelib import PathBrowser PathBrowser.PathBrowser(self.flist) + def open_turtle_demo(self, event = None): + import subprocess + + cmd = [sys.executable, + '-c', + 'from turtledemo.__main__ import main; main()'] + subprocess.Popen(cmd, shell=False) + def gotoline(self, lineno): if lineno is not None and lineno > 0: self.text.mark_set("insert", "%d.0" % lineno) @@ -755,7 +759,7 @@ class EditorWindow(object): self.color = None def ResetColorizer(self): - "Update the colour theme" + "Update the color theme" # Called from self.filename_change_hook and from configDialog.py self._rmcolorizer() self._addcolorizer() @@ -787,13 +791,8 @@ class EditorWindow(object): def ResetFont(self): "Update the text widgets' font if it is changed" # Called from configDialog.py - 
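# [editor's sketch, not part of the patch] The _filename_to_unicode() change in
# isolation: Tk cannot render characters outside the Basic Multilingual Plane,
# so anything above U+FFFF is replaced with U+FFFD before the name reaches a
# window title.  The helper name to_bmp() is illustrative.
import re

def to_bmp(filename, encodings=('utf-8', 'iso8859-1')):
    """Decode bytes if needed, then strip astral-plane characters."""
    if isinstance(filename, bytes):
        for enc in encodings:
            try:
                filename = filename.decode(enc)
                break
            except UnicodeDecodeError:
                continue
    return re.sub('[\U00010000-\U0010FFFF]', '\ufffd', filename)

print(to_bmp('script_\U0001F40D.py'))   # -> 'script_\ufffd.py'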
fontWeight='normal' - if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'): - fontWeight='bold' - self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'), - idleConf.GetOption('main','EditorWindow','font-size', - type='int'), - fontWeight)) + + self.text['font'] = idleConf.GetFont(self.root, 'main','EditorWindow') def RemoveKeybindings(self): "Remove the keybindings before they are changed." @@ -935,7 +934,7 @@ class EditorWindow(object): short = self.short_title() long = self.long_title() if short and long: - title = short + " - " + long + title = short + " - " + long + _py_version elif short: title = short elif long: @@ -959,14 +958,13 @@ class EditorWindow(object): self.undo.reset_undo() def short_title(self): - pyversion = "Python " + python_version() + ": " filename = self.io.filename if filename: filename = os.path.basename(filename) else: filename = "Untitled" # return unicode string to display non-ASCII chars correctly - return pyversion + self._filename_to_unicode(filename) + return self._filename_to_unicode(filename) def long_title(self): # return unicode string to display non-ASCII chars correctly @@ -1702,21 +1700,20 @@ def fixwordbreaks(root): tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]') -def _Editor_window(parent): +def _editor_window(parent): # htest # + # error if close master window first - timer event, after script root = parent fixwordbreaks(root) - root.withdraw() if sys.argv[1:]: filename = sys.argv[1] else: filename = None macosxSupport.setupApp(root, None) edit = EditorWindow(root=root, filename=filename) - edit.set_close_hook(root.quit) edit.text.bind("<>", edit.close_event) + # Does not stop error, neither does following + # edit.text.bind("<>", edit.close_event) if __name__ == '__main__': from idlelib.idle_test.htest import run - if len(sys.argv) <= 1: - run(_Help_dialog) - run(_Editor_window) + run(_help_dialog, _editor_window) diff --git a/Darwin/lib/python3.4/idlelib/FileList.py b/Darwin/lib/python3.5/idlelib/FileList.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/FileList.py rename to Darwin/lib/python3.5/idlelib/FileList.py diff --git a/Darwin/lib/python3.4/idlelib/FormatParagraph.py b/Darwin/lib/python3.5/idlelib/FormatParagraph.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/FormatParagraph.py rename to Darwin/lib/python3.5/idlelib/FormatParagraph.py index 2ac87e4..7a9d185 100644 --- a/Darwin/lib/python3.4/idlelib/FormatParagraph.py +++ b/Darwin/lib/python3.5/idlelib/FormatParagraph.py @@ -44,9 +44,11 @@ class FormatParagraph: The length limit parameter is for testing with a known value. 
""" - if limit == None: + if limit is None: + # The default length limit is that defined by pep8 limit = idleConf.GetOption( - 'main', 'FormatParagraph', 'paragraph', type='int') + 'extensions', 'FormatParagraph', 'max-width', + type='int', default=72) text = self.editwin.text first, last = self.editwin.get_selection_indices() if first and last: @@ -188,7 +190,6 @@ def get_comment_header(line): return m.group(1) if __name__ == "__main__": - from test import support; support.use_resources = ['gui'] import unittest unittest.main('idlelib.idle_test.test_formatparagraph', verbosity=2, exit=False) diff --git a/Darwin/lib/python3.4/idlelib/GrepDialog.py b/Darwin/lib/python3.5/idlelib/GrepDialog.py similarity index 62% rename from Darwin/lib/python3.4/idlelib/GrepDialog.py rename to Darwin/lib/python3.5/idlelib/GrepDialog.py index f73d70a..721b231 100644 --- a/Darwin/lib/python3.4/idlelib/GrepDialog.py +++ b/Darwin/lib/python3.5/idlelib/GrepDialog.py @@ -1,9 +1,13 @@ import os import fnmatch +import re # for htest import sys -from tkinter import * +from tkinter import StringVar, BooleanVar, Checkbutton # for GrepDialog +from tkinter import Tk, Text, Button, SEL, END # for htest from idlelib import SearchEngine from idlelib.SearchDialogBase import SearchDialogBase +# Importing OutputWindow fails due to import loop +# EditorWindow -> GrepDialop -> OutputWindow -> EditorWindow def grep(text, io=None, flist=None): root = text._root() @@ -40,10 +44,10 @@ class GrepDialog(SearchDialogBase): def create_entries(self): SearchDialogBase.create_entries(self) - self.globent = self.make_entry("In files:", self.globvar) + self.globent = self.make_entry("In files:", self.globvar)[0] def create_other_buttons(self): - f = self.make_frame() + f = self.make_frame()[0] btn = Checkbutton(f, anchor="w", variable=self.recvar, @@ -63,7 +67,7 @@ class GrepDialog(SearchDialogBase): if not path: self.top.bell() return - from idlelib.OutputWindow import OutputWindow + from idlelib.OutputWindow import OutputWindow # leave here! save = sys.stdout try: sys.stdout = OutputWindow(self.flist) @@ -79,21 +83,26 @@ class GrepDialog(SearchDialogBase): pat = self.engine.getpat() print("Searching %r in %s ..." % (pat, path)) hits = 0 - for fn in list: - try: - with open(fn, errors='replace') as f: - for lineno, line in enumerate(f, 1): - if line[-1:] == '\n': - line = line[:-1] - if prog.search(line): - sys.stdout.write("%s: %s: %s\n" % - (fn, lineno, line)) - hits += 1 - except OSError as msg: - print(msg) - print(("Hits found: %s\n" - "(Hint: right-click to open locations.)" - % hits) if hits else "No hits.") + try: + for fn in list: + try: + with open(fn, errors='replace') as f: + for lineno, line in enumerate(f, 1): + if line[-1:] == '\n': + line = line[:-1] + if prog.search(line): + sys.stdout.write("%s: %s: %s\n" % + (fn, lineno, line)) + hits += 1 + except OSError as msg: + print(msg) + print(("Hits found: %s\n" + "(Hint: right-click to open locations.)" + % hits) if hits else "No hits.") + except AttributeError: + # Tk window has been closed, OutputWindow.text = None, + # so in OW.write, OW.text.insert fails. 
+ pass def findfiles(self, dir, base, rec): try: @@ -120,8 +129,30 @@ class GrepDialog(SearchDialogBase): self.top.grab_release() self.top.withdraw() + +def _grep_dialog(parent): # htest # + from idlelib.PyShell import PyShellFileList + root = Tk() + root.title("Test GrepDialog") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + + flist = PyShellFileList(root) + text = Text(root, height=5) + text.pack() + + def show_grep_dialog(): + text.tag_add(SEL, "1.0", END) + grep(text, flist=flist) + text.tag_remove(SEL, "1.0", END) + + button = Button(root, text="Show GrepDialog", command=show_grep_dialog) + button.pack() + root.mainloop() + if __name__ == "__main__": - # A human test is a bit tricky since EditorWindow() imports this module. - # Hence Idle must be restarted after editing this file for a live test. import unittest unittest.main('idlelib.idle_test.test_grep', verbosity=2, exit=False) + + from idlelib.idle_test.htest import run + run(_grep_dialog) diff --git a/Darwin/lib/python3.4/idlelib/HISTORY.txt b/Darwin/lib/python3.5/idlelib/HISTORY.txt similarity index 99% rename from Darwin/lib/python3.4/idlelib/HISTORY.txt rename to Darwin/lib/python3.5/idlelib/HISTORY.txt index 01d73ed..731fabd 100644 --- a/Darwin/lib/python3.4/idlelib/HISTORY.txt +++ b/Darwin/lib/python3.5/idlelib/HISTORY.txt @@ -11,7 +11,7 @@ What's New in IDLEfork 0.8.1? *Release date: 22-Jul-2001* - New tarball released as a result of the 'revitalisation' of the IDLEfork - project. + project. - This release requires python 2.1 or better. Compatibility with earlier versions of python (especially ancient ones like 1.5x) is no longer a @@ -26,8 +26,8 @@ What's New in IDLEfork 0.8.1? not working, but I believe this was the case with the previous IDLE fork release (0.7.1) as well. -- This release is being made now to mark the point at which IDLEfork is - launching into a new stage of development. +- This release is being made now to mark the point at which IDLEfork is + launching into a new stage of development. - IDLEfork CVS will now be branched to enable further development and exploration of the two "execution in a remote process" patches submitted by @@ -96,7 +96,7 @@ IDLEfork 0.7.1 - 29 May 2000 instead of the IDLE help; shift-TAB is now a synonym for unindent. - New modules: - + ExecBinding.py Executes program through loader loader.py Bootstraps user program protocol.py RPC protocol diff --git a/Darwin/lib/python3.5/idlelib/HyperParser.py b/Darwin/lib/python3.5/idlelib/HyperParser.py new file mode 100644 index 0000000..77cb057 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/HyperParser.py @@ -0,0 +1,313 @@ +"""Provide advanced parsing abilities for ParenMatch and other extensions. + +HyperParser uses PyParser. PyParser mostly gives information on the +proper indentation of code. HyperParser gives additional information on +the structure of code. 
+""" + +import string +from keyword import iskeyword +from idlelib import PyParse + + +# all ASCII chars that may be in an identifier +_ASCII_ID_CHARS = frozenset(string.ascii_letters + string.digits + "_") +# all ASCII chars that may be the first char of an identifier +_ASCII_ID_FIRST_CHARS = frozenset(string.ascii_letters + "_") + +# lookup table for whether 7-bit ASCII chars are valid in a Python identifier +_IS_ASCII_ID_CHAR = [(chr(x) in _ASCII_ID_CHARS) for x in range(128)] +# lookup table for whether 7-bit ASCII chars are valid as the first +# char in a Python identifier +_IS_ASCII_ID_FIRST_CHAR = \ + [(chr(x) in _ASCII_ID_FIRST_CHARS) for x in range(128)] + + +class HyperParser: + def __init__(self, editwin, index): + "To initialize, analyze the surroundings of the given index." + + self.editwin = editwin + self.text = text = editwin.text + + parser = PyParse.Parser(editwin.indentwidth, editwin.tabwidth) + + def index2line(index): + return int(float(index)) + lno = index2line(text.index(index)) + + if not editwin.context_use_ps1: + for context in editwin.num_context_lines: + startat = max(lno - context, 1) + startatindex = repr(startat) + ".0" + stopatindex = "%d.end" % lno + # We add the newline because PyParse requires a newline + # at end. We add a space so that index won't be at end + # of line, so that its status will be the same as the + # char before it, if should. + parser.set_str(text.get(startatindex, stopatindex)+' \n') + bod = parser.find_good_parse_start( + editwin._build_char_in_string_func(startatindex)) + if bod is not None or startat == 1: + break + parser.set_lo(bod or 0) + else: + r = text.tag_prevrange("console", index) + if r: + startatindex = r[1] + else: + startatindex = "1.0" + stopatindex = "%d.end" % lno + # We add the newline because PyParse requires it. We add a + # space so that index won't be at end of line, so that its + # status will be the same as the char before it, if should. + parser.set_str(text.get(startatindex, stopatindex)+' \n') + parser.set_lo(0) + + # We want what the parser has, minus the last newline and space. + self.rawtext = parser.str[:-2] + # Parser.str apparently preserves the statement we are in, so + # that stopatindex can be used to synchronize the string with + # the text box indices. + self.stopatindex = stopatindex + self.bracketing = parser.get_last_stmt_bracketing() + # find which pairs of bracketing are openers. These always + # correspond to a character of rawtext. + self.isopener = [i>0 and self.bracketing[i][1] > + self.bracketing[i-1][1] + for i in range(len(self.bracketing))] + + self.set_index(index) + + def set_index(self, index): + """Set the index to which the functions relate. + + The index must be in the same statement. + """ + indexinrawtext = (len(self.rawtext) - + len(self.text.get(index, self.stopatindex))) + if indexinrawtext < 0: + raise ValueError("Index %s precedes the analyzed statement" + % index) + self.indexinrawtext = indexinrawtext + # find the rightmost bracket to which index belongs + self.indexbracket = 0 + while (self.indexbracket < len(self.bracketing)-1 and + self.bracketing[self.indexbracket+1][0] < self.indexinrawtext): + self.indexbracket += 1 + if (self.indexbracket < len(self.bracketing)-1 and + self.bracketing[self.indexbracket+1][0] == self.indexinrawtext and + not self.isopener[self.indexbracket+1]): + self.indexbracket += 1 + + def is_in_string(self): + """Is the index given to the HyperParser in a string?""" + # The bracket to which we belong should be an opener. 
+ # If it's an opener, it has to have a character. + return (self.isopener[self.indexbracket] and + self.rawtext[self.bracketing[self.indexbracket][0]] + in ('"', "'")) + + def is_in_code(self): + """Is the index given to the HyperParser in normal code?""" + return (not self.isopener[self.indexbracket] or + self.rawtext[self.bracketing[self.indexbracket][0]] + not in ('#', '"', "'")) + + def get_surrounding_brackets(self, openers='([{', mustclose=False): + """Return bracket indexes or None. + + If the index given to the HyperParser is surrounded by a + bracket defined in openers (or at least has one before it), + return the indices of the opening bracket and the closing + bracket (or the end of line, whichever comes first). + + If it is not surrounded by brackets, or the end of line comes + before the closing bracket and mustclose is True, returns None. + """ + + bracketinglevel = self.bracketing[self.indexbracket][1] + before = self.indexbracket + while (not self.isopener[before] or + self.rawtext[self.bracketing[before][0]] not in openers or + self.bracketing[before][1] > bracketinglevel): + before -= 1 + if before < 0: + return None + bracketinglevel = min(bracketinglevel, self.bracketing[before][1]) + after = self.indexbracket + 1 + while (after < len(self.bracketing) and + self.bracketing[after][1] >= bracketinglevel): + after += 1 + + beforeindex = self.text.index("%s-%dc" % + (self.stopatindex, len(self.rawtext)-self.bracketing[before][0])) + if (after >= len(self.bracketing) or + self.bracketing[after][0] > len(self.rawtext)): + if mustclose: + return None + afterindex = self.stopatindex + else: + # We are after a real char, so it is a ')' and we give the + # index before it. + afterindex = self.text.index( + "%s-%dc" % (self.stopatindex, + len(self.rawtext)-(self.bracketing[after][0]-1))) + + return beforeindex, afterindex + + # the set of built-in identifiers which are also keywords, + # i.e. keyword.iskeyword() returns True for them + _ID_KEYWORDS = frozenset({"True", "False", "None"}) + + @classmethod + def _eat_identifier(cls, str, limit, pos): + """Given a string and pos, return the number of chars in the + identifier which ends at pos, or 0 if there is no such one. + + This ignores non-identifier eywords are not identifiers. + """ + is_ascii_id_char = _IS_ASCII_ID_CHAR + + # Start at the end (pos) and work backwards. + i = pos + + # Go backwards as long as the characters are valid ASCII + # identifier characters. This is an optimization, since it + # is faster in the common case where most of the characters + # are ASCII. + while i > limit and ( + ord(str[i - 1]) < 128 and + is_ascii_id_char[ord(str[i - 1])] + ): + i -= 1 + + # If the above loop ended due to reaching a non-ASCII + # character, continue going backwards using the most generic + # test for whether a string contains only valid identifier + # characters. + if i > limit and ord(str[i - 1]) >= 128: + while i - 4 >= limit and ('a' + str[i - 4:pos]).isidentifier(): + i -= 4 + if i - 2 >= limit and ('a' + str[i - 2:pos]).isidentifier(): + i -= 2 + if i - 1 >= limit and ('a' + str[i - 1:pos]).isidentifier(): + i -= 1 + + # The identifier candidate starts here. If it isn't a valid + # identifier, don't eat anything. At this point that is only + # possible if the first character isn't a valid first + # character for an identifier. 
+ if not str[i:pos].isidentifier(): + return 0 + elif i < pos: + # All characters in str[i:pos] are valid ASCII identifier + # characters, so it is enough to check that the first is + # valid as the first character of an identifier. + if not _IS_ASCII_ID_FIRST_CHAR[ord(str[i])]: + return 0 + + # All keywords are valid identifiers, but should not be + # considered identifiers here, except for True, False and None. + if i < pos and ( + iskeyword(str[i:pos]) and + str[i:pos] not in cls._ID_KEYWORDS + ): + return 0 + + return pos - i + + # This string includes all chars that may be in a white space + _whitespace_chars = " \t\n\\" + + def get_expression(self): + """Return a string with the Python expression which ends at the + given index, which is empty if there is no real one. + """ + if not self.is_in_code(): + raise ValueError("get_expression should only be called" + "if index is inside a code.") + + rawtext = self.rawtext + bracketing = self.bracketing + + brck_index = self.indexbracket + brck_limit = bracketing[brck_index][0] + pos = self.indexinrawtext + + last_identifier_pos = pos + postdot_phase = True + + while 1: + # Eat whitespaces, comments, and if postdot_phase is False - a dot + while 1: + if pos>brck_limit and rawtext[pos-1] in self._whitespace_chars: + # Eat a whitespace + pos -= 1 + elif (not postdot_phase and + pos > brck_limit and rawtext[pos-1] == '.'): + # Eat a dot + pos -= 1 + postdot_phase = True + # The next line will fail if we are *inside* a comment, + # but we shouldn't be. + elif (pos == brck_limit and brck_index > 0 and + rawtext[bracketing[brck_index-1][0]] == '#'): + # Eat a comment + brck_index -= 2 + brck_limit = bracketing[brck_index][0] + pos = bracketing[brck_index+1][0] + else: + # If we didn't eat anything, quit. + break + + if not postdot_phase: + # We didn't find a dot, so the expression end at the + # last identifier pos. + break + + ret = self._eat_identifier(rawtext, brck_limit, pos) + if ret: + # There is an identifier to eat + pos = pos - ret + last_identifier_pos = pos + # Now, to continue the search, we must find a dot. + postdot_phase = False + # (the loop continues now) + + elif pos == brck_limit: + # We are at a bracketing limit. If it is a closing + # bracket, eat the bracket, otherwise, stop the search. + level = bracketing[brck_index][1] + while brck_index > 0 and bracketing[brck_index-1][1] > level: + brck_index -= 1 + if bracketing[brck_index][0] == brck_limit: + # We were not at the end of a closing bracket + break + pos = bracketing[brck_index][0] + brck_index -= 1 + brck_limit = bracketing[brck_index][0] + last_identifier_pos = pos + if rawtext[pos] in "([": + # [] and () may be used after an identifier, so we + # continue. postdot_phase is True, so we don't allow a dot. + pass + else: + # We can't continue after other types of brackets + if rawtext[pos] in "'\"": + # Scan a string prefix + while pos > 0 and rawtext[pos - 1] in "rRbBuU": + pos -= 1 + last_identifier_pos = pos + break + + else: + # We've found an operator or something. 
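# [editor's sketch, not part of the patch] A simplified, ASCII-only version of
# HyperParser._eat_identifier() above: scan backwards from pos and return the
# length of the identifier ending there, or 0.  Keywords other than True, False
# and None are rejected, as in the original.
from keyword import iskeyword
import string

_ID_CHARS = frozenset(string.ascii_letters + string.digits + "_")
_ID_KEYWORDS = frozenset({"True", "False", "None"})

def eat_identifier(s, limit, pos):
    i = pos
    while i > limit and s[i - 1] in _ID_CHARS:
        i -= 1
    candidate = s[i:pos]
    if not candidate.isidentifier():
        return 0
    if iskeyword(candidate) and candidate not in _ID_KEYWORDS:
        return 0
    return pos - i

text = "result = obj.attr"
print(eat_identifier(text, 0, len(text)))   # 4  ('attr')
print(eat_identifier(text, 0, 6))           # 6  ('result')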
+ break + + return rawtext[last_identifier_pos:self.indexinrawtext] + + +if __name__ == '__main__': + import unittest + unittest.main('idlelib.idle_test.test_hyperparser', verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/IOBinding.py b/Darwin/lib/python3.5/idlelib/IOBinding.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/IOBinding.py rename to Darwin/lib/python3.5/idlelib/IOBinding.py index 3cd7a4c..505cc8b 100644 --- a/Darwin/lib/python3.4/idlelib/IOBinding.py +++ b/Darwin/lib/python3.5/idlelib/IOBinding.py @@ -1,5 +1,4 @@ import os -import types import shlex import sys import codecs @@ -525,16 +524,17 @@ class IOBinding: if self.editwin.flist: self.editwin.update_recent_files_list(filename) -def test(): +def _io_binding(parent): # htest # root = Tk() + root.title("Test IOBinding") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) class MyEditWin: def __init__(self, text): self.text = text self.flist = None self.text.bind("", self.open) self.text.bind("", self.save) - self.text.bind("", self.save_as) - self.text.bind("", self.save_a_copy) def get_saved(self): return 0 def set_saved(self, flag): pass def reset_undo(self): pass @@ -542,16 +542,13 @@ def test(): self.text.event_generate("<>") def save(self, event): self.text.event_generate("<>") - def save_as(self, event): - self.text.event_generate("<>") - def save_a_copy(self, event): - self.text.event_generate("<>") + text = Text(root) text.pack() text.focus_set() editwin = MyEditWin(text) - io = IOBinding(editwin) - root.mainloop() + IOBinding(editwin) if __name__ == "__main__": - test() + from idlelib.idle_test.htest import run + run(_io_binding) diff --git a/Darwin/lib/python3.4/idlelib/Icons/folder.gif b/Darwin/lib/python3.5/idlelib/Icons/folder.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/folder.gif rename to Darwin/lib/python3.5/idlelib/Icons/folder.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle.icns b/Darwin/lib/python3.5/idlelib/Icons/idle.icns similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle.icns rename to Darwin/lib/python3.5/idlelib/Icons/idle.icns diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle.ico b/Darwin/lib/python3.5/idlelib/Icons/idle.ico similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle.ico rename to Darwin/lib/python3.5/idlelib/Icons/idle.ico diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_16.gif b/Darwin/lib/python3.5/idlelib/Icons/idle_16.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle_16.gif rename to Darwin/lib/python3.5/idlelib/Icons/idle_16.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_16.png b/Darwin/lib/python3.5/idlelib/Icons/idle_16.png similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle_16.png rename to Darwin/lib/python3.5/idlelib/Icons/idle_16.png diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_32.gif b/Darwin/lib/python3.5/idlelib/Icons/idle_32.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle_32.gif rename to Darwin/lib/python3.5/idlelib/Icons/idle_32.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_32.png b/Darwin/lib/python3.5/idlelib/Icons/idle_32.png similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle_32.png rename to Darwin/lib/python3.5/idlelib/Icons/idle_32.png diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_48.gif b/Darwin/lib/python3.5/idlelib/Icons/idle_48.gif similarity index 100% 
rename from Darwin/lib/python3.4/idlelib/Icons/idle_48.gif rename to Darwin/lib/python3.5/idlelib/Icons/idle_48.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/idle_48.png b/Darwin/lib/python3.5/idlelib/Icons/idle_48.png similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/idle_48.png rename to Darwin/lib/python3.5/idlelib/Icons/idle_48.png diff --git a/Darwin/lib/python3.4/idlelib/Icons/minusnode.gif b/Darwin/lib/python3.5/idlelib/Icons/minusnode.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/minusnode.gif rename to Darwin/lib/python3.5/idlelib/Icons/minusnode.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/openfolder.gif b/Darwin/lib/python3.5/idlelib/Icons/openfolder.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/openfolder.gif rename to Darwin/lib/python3.5/idlelib/Icons/openfolder.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/plusnode.gif b/Darwin/lib/python3.5/idlelib/Icons/plusnode.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/plusnode.gif rename to Darwin/lib/python3.5/idlelib/Icons/plusnode.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/python.gif b/Darwin/lib/python3.5/idlelib/Icons/python.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/python.gif rename to Darwin/lib/python3.5/idlelib/Icons/python.gif diff --git a/Darwin/lib/python3.4/idlelib/Icons/tk.gif b/Darwin/lib/python3.5/idlelib/Icons/tk.gif similarity index 100% rename from Darwin/lib/python3.4/idlelib/Icons/tk.gif rename to Darwin/lib/python3.5/idlelib/Icons/tk.gif diff --git a/Darwin/lib/python3.4/idlelib/IdleHistory.py b/Darwin/lib/python3.5/idlelib/IdleHistory.py similarity index 98% rename from Darwin/lib/python3.4/idlelib/IdleHistory.py rename to Darwin/lib/python3.5/idlelib/IdleHistory.py index d6cb162..078af29 100644 --- a/Darwin/lib/python3.4/idlelib/IdleHistory.py +++ b/Darwin/lib/python3.5/idlelib/IdleHistory.py @@ -100,7 +100,5 @@ class History: self.prefix = None if __name__ == "__main__": - from test import support - support.use_resources = ['gui'] from unittest import main main('idlelib.idle_test.test_idlehistory', verbosity=2, exit=False) diff --git a/Darwin/lib/python3.4/idlelib/MultiCall.py b/Darwin/lib/python3.5/idlelib/MultiCall.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/MultiCall.py rename to Darwin/lib/python3.5/idlelib/MultiCall.py index cc6bffd..251a84d 100644 --- a/Darwin/lib/python3.4/idlelib/MultiCall.py +++ b/Darwin/lib/python3.5/idlelib/MultiCall.py @@ -60,8 +60,7 @@ _modifier_names = dict([(name, number) # destroyed before .__del__ methods here are called. The following # is used to selectively ignore shutdown exceptions to avoid # 'Exception ignored' messages. See http://bugs.python.org/issue20167 -APPLICATION_GONE = '''\ -can't invoke "bind" command: application has been destroyed''' +APPLICATION_GONE = "application has been destroyed" # A binder is a class which binds functions to one type of event. 
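# [editor's sketch, not part of the patch] Why the MultiCall hunks loosen the
# APPLICATION_GONE check from equality to a substring test: Tk's exact wording
# varies by call site, but every shutdown message contains the same phrase, so
# the substring match ignores all of them while still re-raising genuine
# TclErrors.  The helper name unbind_quietly() is illustrative.
import tkinter

APPLICATION_GONE = "application has been destroyed"

def unbind_quietly(widget, sequence, funcid):
    try:
        widget.unbind(sequence, funcid)
    except tkinter.TclError as e:
        if APPLICATION_GONE not in e.args[0]:
            raise   # a real error, not just a torn-down application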
It has two # methods: bind and unbind, which get a function and a parsed sequence, as @@ -108,9 +107,7 @@ class _SimpleBinder: self.widget.unbind(self.widgetinst, self.sequence, self.handlerid) except tkinter.TclError as e: - if e.args[0] == APPLICATION_GONE: - pass - else: + if not APPLICATION_GONE in e.args[0]: raise # An int in range(1 << len(_modifiers)) represents a combination of modifiers @@ -243,9 +240,7 @@ class _ComplexBinder: try: self.widget.unbind(self.widgetinst, seq, id) except tkinter.TclError as e: - if e.args[0] == APPLICATION_GONE: - break - else: + if not APPLICATION_GONE in e.args[0]: raise # define the list of event types to be handled by MultiEvent. the order is @@ -412,17 +407,18 @@ def MultiCallCreator(widget): try: self.__binders[triplet[1]].unbind(triplet, func) except tkinter.TclError as e: - if e.args[0] == APPLICATION_GONE: - break - else: + if not APPLICATION_GONE in e.args[0]: raise _multicall_dict[widget] = MultiCall return MultiCall -if __name__ == "__main__": - # Test + +def _multi_call(parent): root = tkinter.Tk() + root.title("Test MultiCall") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) text = MultiCallCreator(tkinter.Text)(root) text.pack() def bindseq(seq, n=[0]): @@ -438,8 +434,13 @@ if __name__ == "__main__": bindseq("") bindseq("") bindseq("") + bindseq("") bindseq("") bindseq("") bindseq("") bindseq("") root.mainloop() + +if __name__ == "__main__": + from idlelib.idle_test.htest import run + run(_multi_call) diff --git a/Darwin/lib/python3.5/idlelib/MultiStatusBar.py b/Darwin/lib/python3.5/idlelib/MultiStatusBar.py new file mode 100644 index 0000000..f44b6a8 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/MultiStatusBar.py @@ -0,0 +1,45 @@ +from tkinter import * + +class MultiStatusBar(Frame): + + def __init__(self, master=None, **kw): + if master is None: + master = Tk() + Frame.__init__(self, master, **kw) + self.labels = {} + + def set_label(self, name, text='', side=LEFT): + if name not in self.labels: + label = Label(self, bd=1, relief=SUNKEN, anchor=W) + label.pack(side=side) + self.labels[name] = label + else: + label = self.labels[name] + label.config(text=text) + +def _multistatus_bar(parent): + root = Tk() + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d" %(x, y + 150)) + root.title("Test multistatus bar") + frame = Frame(root) + text = Text(frame) + text.pack() + msb = MultiStatusBar(frame) + msb.set_label("one", "hello") + msb.set_label("two", "world") + msb.pack(side=BOTTOM, fill=X) + + def change(): + msb.set_label("one", "foo") + msb.set_label("two", "bar") + + button = Button(root, text="Update status", command=change) + button.pack(side=BOTTOM) + frame.pack() + frame.mainloop() + root.mainloop() + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_multistatus_bar) diff --git a/Darwin/lib/python3.4/idlelib/NEWS.txt b/Darwin/lib/python3.5/idlelib/NEWS.txt similarity index 90% rename from Darwin/lib/python3.4/idlelib/NEWS.txt rename to Darwin/lib/python3.5/idlelib/NEWS.txt index 953a38d..2d8ce54 100644 --- a/Darwin/lib/python3.4/idlelib/NEWS.txt +++ b/Darwin/lib/python3.5/idlelib/NEWS.txt @@ -1,5 +1,90 @@ +What's New in IDLE 3.5.0? +========================= +*Release date: 2015-09-13* ?? + +- Issue #23184: remove unused names and imports in idlelib. + Initial patch by Al Sweigart. 
+ +- Issue #20577: Configuration of the max line length for the FormatParagraph + extension has been moved from the General tab of the Idle preferences dialog + to the FormatParagraph tab of the Config Extensions dialog. + Patch by Tal Einat. + +- Issue #16893: Update Idle doc chapter to match current Idle and add new + information. + +- Issue #3068: Add Idle extension configuration dialog to Options menu. + Changes are written to HOME/.idlerc/config-extensions.cfg. + Original patch by Tal Einat. + +- Issue #16233: A module browser (File : Class Browser, Alt+C) requires a + editor window with a filename. When Class Browser is requested otherwise, + from a shell, output window, or 'Untitled' editor, Idle no longer displays + an error box. It now pops up an Open Module box (Alt+M). If a valid name + is entered and a module is opened, a corresponding browser is also opened. + +- Issue #4832: Save As to type Python files automatically adds .py to the + name you enter (even if your system does not display it). Some systems + automatically add .txt when type is Text files. + +- Issue #21986: Code objects are not normally pickled by the pickle module. + To match this, they are no longer pickled when running under Idle. + +- Issue #23180: Rename IDLE "Windows" menu item to "Window". + Patch by Al Sweigart. + +- Issue #17390: Adjust Editor window title; remove 'Python', + move version to end. + +- Issue #14105: Idle debugger breakpoints no longer disappear + when inseting or deleting lines. + +- Issue #17172: Turtledemo can now be run from Idle. + Currently, the entry is on the Help menu, but it may move to Run. + Patch by Ramchandra Apt and Lita Cho. + +- Issue #21765: Add support for non-ascii identifiers to HyperParser. + +- Issue #21940: Add unittest for WidgetRedirector. Initial patch by Saimadhav + Heblikar. + +- Issue #18592: Add unittest for SearchDialogBase. Patch by Phil Webster. + +- Issue #21694: Add unittest for ParenMatch. Patch by Saimadhav Heblikar. + +- Issue #21686: add unittest for HyperParser. Original patch by Saimadhav + Heblikar. + +- Issue #12387: Add missing upper(lower)case versions of default Windows key + bindings for Idle so Caps Lock does not disable them. Patch by Roger Serwy. + +- Issue #21695: Closing a Find-in-files output window while the search is + still in progress no longer closes Idle. + +- Issue #18910: Add unittest for textView. Patch by Phil Webster. + +- Issue #18292: Add unittest for AutoExpand. Patch by Saihadhav Heblikar. + +- Issue #18409: Add unittest for AutoComplete. Patch by Phil Webster. + +- Issue #21477: htest.py - Improve framework, complete set of tests. + Patches by Saimadhav Heblikar + +- Issue #18104: Add idlelib/idle_test/htest.py with a few sample tests to begin + consolidating and improving human-validated tests of Idle. Change other files + as needed to work with htest. Running the module as __main__ runs all tests. + +- Issue #21139: Change default paragraph width to 72, the PEP 8 recommendation. + +- Issue #21284: Paragraph reformat test passes after user changes reformat width. + +- Issue #17654: Ensure IDLE menus are customized properly on OS X for + non-framework builds and for all variants of Tk. + + What's New in IDLE 3.4.0? ========================= +*Release date: 2014-03-16* - Issue #17390: Display Python version on Idle title bar. Initial patch by Edmond Burnett. @@ -17,6 +102,7 @@ What's New in IDLE 3.4.0? What's New in IDLE 3.3.0? 
========================= +*Release date: 2012-09-29* - Issue #17625: Close the replace dialog after it is used. @@ -59,7 +145,6 @@ What's New in IDLE 3.3.0? What's New in IDLE 3.2.1? ========================= - *Release date: 15-May-11* - Issue #6378: Further adjust idle.bat to start associated Python @@ -77,7 +162,6 @@ What's New in IDLE 3.2.1? What's New in IDLE 3.1b1? ========================= - *Release date: 06-May-09* - Use of 'filter' in keybindingDialog.py was causing custom key assignment to @@ -86,7 +170,6 @@ What's New in IDLE 3.1b1? What's New in IDLE 3.1a1? ========================= - *Release date: 07-Mar-09* - Issue #4815: Offer conversion to UTF-8 if source files have @@ -104,7 +187,6 @@ What's New in IDLE 3.1a1? What's New in IDLE 2.7? (UNRELEASED, but merged into 3.1 releases above.) ======================= - *Release date: XX-XXX-2010* - idle.py modified and simplified to better support developing experimental diff --git a/Darwin/lib/python3.4/idlelib/ObjectBrowser.py b/Darwin/lib/python3.5/idlelib/ObjectBrowser.py similarity index 93% rename from Darwin/lib/python3.4/idlelib/ObjectBrowser.py rename to Darwin/lib/python3.5/idlelib/ObjectBrowser.py index b359efc..7b57aa4 100644 --- a/Darwin/lib/python3.4/idlelib/ObjectBrowser.py +++ b/Darwin/lib/python3.5/idlelib/ObjectBrowser.py @@ -9,6 +9,8 @@ # XXX TO DO: # - for classes/modules, add "open source" to object browser +import re + from idlelib.TreeWidget import TreeItem, TreeNode, ScrolledCanvas from reprlib import Repr @@ -119,12 +121,14 @@ def make_objecttreeitem(labeltext, object, setfunction=None): c = ObjectTreeItem return c(labeltext, object, setfunction) -# Test script -def _test(): +def _object_browser(parent): import sys from tkinter import Tk root = Tk() + root.title("Test ObjectBrowser") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) root.configure(bd=0, bg="yellow") root.focus_set() sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1) @@ -135,4 +139,5 @@ def _test(): root.mainloop() if __name__ == '__main__': - _test() + from idlelib.idle_test.htest import run + run(_object_browser) diff --git a/Darwin/lib/python3.4/idlelib/OutputWindow.py b/Darwin/lib/python3.5/idlelib/OutputWindow.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/OutputWindow.py rename to Darwin/lib/python3.5/idlelib/OutputWindow.py diff --git a/Darwin/lib/python3.4/idlelib/ParenMatch.py b/Darwin/lib/python3.5/idlelib/ParenMatch.py similarity index 94% rename from Darwin/lib/python3.4/idlelib/ParenMatch.py rename to Darwin/lib/python3.5/idlelib/ParenMatch.py index 6d91b39..19bad8c 100644 --- a/Darwin/lib/python3.4/idlelib/ParenMatch.py +++ b/Darwin/lib/python3.5/idlelib/ParenMatch.py @@ -90,7 +90,8 @@ class ParenMatch: self.set_timeout = self.set_timeout_none def flash_paren_event(self, event): - indices = HyperParser(self.editwin, "insert").get_surrounding_brackets() + indices = (HyperParser(self.editwin, "insert") + .get_surrounding_brackets()) if indices is None: self.warn_mismatched() return @@ -167,6 +168,11 @@ class ParenMatch: # associate a counter with an event; only disable the "paren" # tag if the event is for the most recent timer. 
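# [editor's sketch, not part of the patch] The stale-timer pattern ParenMatch
# uses: every flash bumps a counter, and the delayed restore callback captures
# the counter value and only acts if no newer flash has started since.
# threading.Timer stands in for text_frame.after().
from threading import Timer

class Flasher:
    FLASH_DELAY = 0.5           # seconds; ParenMatch uses milliseconds via after()

    def __init__(self):
        self.counter = 0

    def flash(self):
        self.counter += 1
        print("highlight on (event %d)" % self.counter)
        Timer(self.FLASH_DELAY,
              lambda c=self.counter: self.restore(c)).start()

    def restore(self, counter):
        if counter != self.counter:
            return              # a newer flash superseded this timer
        print("highlight off (event %d)" % counter)

f = Flasher()
f.flash()
f.flash()      # only the second timer ends up clearing the highlight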
self.counter += 1 - self.editwin.text_frame.after(self.FLASH_DELAY, - lambda self=self, c=self.counter: \ - self.handle_restore_timer(c)) + self.editwin.text_frame.after( + self.FLASH_DELAY, + lambda self=self, c=self.counter: self.handle_restore_timer(c)) + + +if __name__ == '__main__': + import unittest + unittest.main('idlelib.idle_test.test_parenmatch', verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/PathBrowser.py b/Darwin/lib/python3.5/idlelib/PathBrowser.py similarity index 80% rename from Darwin/lib/python3.4/idlelib/PathBrowser.py rename to Darwin/lib/python3.5/idlelib/PathBrowser.py index 5e5c6be..9ab7632 100644 --- a/Darwin/lib/python3.4/idlelib/PathBrowser.py +++ b/Darwin/lib/python3.5/idlelib/PathBrowser.py @@ -4,13 +4,20 @@ import importlib.machinery from idlelib.TreeWidget import TreeItem from idlelib.ClassBrowser import ClassBrowser, ModuleBrowserTreeItem +from idlelib.PyShell import PyShellFileList + class PathBrowser(ClassBrowser): - def __init__(self, flist): + def __init__(self, flist, _htest=False): + """ + _htest - bool, change box location when running htest + """ + self._htest = _htest self.init(flist) def settitle(self): + "Set window titles." self.top.wm_title("Path Browser") self.top.wm_iconname("Path Browser") @@ -63,16 +70,17 @@ class DirBrowserTreeItem(TreeItem): return sublist def ispackagedir(self, file): + " Return true for directories that are packages." if not os.path.isdir(file): - return 0 + return False init = os.path.join(file, "__init__.py") return os.path.exists(init) def listmodules(self, allnames): modules = {} suffixes = importlib.machinery.EXTENSION_SUFFIXES[:] - suffixes += importlib.machinery.SOURCE_SUFFIXES[:] - suffixes += importlib.machinery.BYTECODE_SUFFIXES[:] + suffixes += importlib.machinery.SOURCE_SUFFIXES + suffixes += importlib.machinery.BYTECODE_SUFFIXES sorted = [] for suff in suffixes: i = -len(suff) @@ -87,12 +95,14 @@ class DirBrowserTreeItem(TreeItem): sorted.sort() return sorted -def main(): - from idlelib import PyShell - PathBrowser(PyShell.flist) - if sys.stdin is sys.__stdin__: - mainloop() +def _path_browser(parent): # htest # + flist = PyShellFileList(parent) + PathBrowser(flist, _htest=True) + parent.mainloop() if __name__ == "__main__": from unittest import main main('idlelib.idle_test.test_pathbrowser', verbosity=2, exit=False) + + from idlelib.idle_test.htest import run + run(_path_browser) diff --git a/Darwin/lib/python3.4/idlelib/Percolator.py b/Darwin/lib/python3.5/idlelib/Percolator.py similarity index 71% rename from Darwin/lib/python3.4/idlelib/Percolator.py rename to Darwin/lib/python3.5/idlelib/Percolator.py index c91de38..9e93319 100644 --- a/Darwin/lib/python3.4/idlelib/Percolator.py +++ b/Darwin/lib/python3.5/idlelib/Percolator.py @@ -51,8 +51,9 @@ class Percolator: f.setdelegate(filter.delegate) filter.setdelegate(None) -def main(): - import tkinter as Tk +def _percolator(parent): + import tkinter as tk + import re class Tracer(Delegator): def __init__(self, name): self.name = name @@ -63,22 +64,41 @@ def main(): def delete(self, *args): print(self.name, ": delete", args) self.delegate.delete(*args) - root = Tk.Tk() - root.wm_protocol("WM_DELETE_WINDOW", root.quit) - text = Tk.Text() - text.pack() - text.focus_set() + root = tk.Tk() + root.title("Test Percolator") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + text = tk.Text(root) p = Percolator(text) t1 = Tracer("t1") t2 = Tracer("t2") - p.insertfilter(t1) - p.insertfilter(t2) - 
root.mainloop() # click close widget to continue... - p.removefilter(t2) - root.mainloop() - p.insertfilter(t2) - p.removefilter(t1) + + def toggle1(): + if var1.get() == 0: + var1.set(1) + p.insertfilter(t1) + elif var1.get() == 1: + var1.set(0) + p.removefilter(t1) + + def toggle2(): + if var2.get() == 0: + var2.set(1) + p.insertfilter(t2) + elif var2.get() == 1: + var2.set(0) + p.removefilter(t2) + + text.pack() + var1 = tk.IntVar() + cb1 = tk.Checkbutton(root, text="Tracer1", command=toggle1, variable=var1) + cb1.pack() + var2 = tk.IntVar() + cb2 = tk.Checkbutton(root, text="Tracer2", command=toggle2, variable=var2) + cb2.pack() + root.mainloop() if __name__ == "__main__": - main() + from idlelib.idle_test.htest import run + run(_percolator) diff --git a/Darwin/lib/python3.4/idlelib/PyParse.py b/Darwin/lib/python3.5/idlelib/PyParse.py similarity index 90% rename from Darwin/lib/python3.4/idlelib/PyParse.py rename to Darwin/lib/python3.5/idlelib/PyParse.py index 61a0003..9ccbb25 100644 --- a/Darwin/lib/python3.4/idlelib/PyParse.py +++ b/Darwin/lib/python3.5/idlelib/PyParse.py @@ -1,5 +1,6 @@ import re import sys +from collections import Mapping # Reason last stmt is continued (or C_NONE if it's not). (C_NONE, C_BACKSLASH, C_STRING_FIRST_LINE, @@ -91,19 +92,48 @@ _chew_ordinaryre = re.compile(r""" [^[\](){}#'"\\]+ """, re.VERBOSE).match -# Build translation table to map uninteresting chars to "x", open -# brackets to "(", and close brackets to ")". -_tran = {} -for i in range(256): - _tran[i] = 'x' -for ch in "({[": - _tran[ord(ch)] = '(' -for ch in ")}]": - _tran[ord(ch)] = ')' -for ch in "\"'\\\n#": - _tran[ord(ch)] = ch -del i, ch +class StringTranslatePseudoMapping(Mapping): + r"""Utility class to be used with str.translate() + + This Mapping class wraps a given dict. When a value for a key is + requested via __getitem__() or get(), the key is looked up in the + given dict. If found there, the value from the dict is returned. + Otherwise, the default value given upon initialization is returned. + + This allows using str.translate() to make some replacements, and to + replace all characters for which no replacement was specified with + a given character instead of leaving them as-is. + + For example, to replace everything except whitespace with 'x': + + >>> whitespace_chars = ' \t\n\r' + >>> preserve_dict = {ord(c): ord(c) for c in whitespace_chars} + >>> mapping = StringTranslatePseudoMapping(preserve_dict, ord('x')) + >>> text = "a + b\tc\nd" + >>> text.translate(mapping) + 'x x x\tx\nx' + """ + def __init__(self, non_defaults, default_value): + self._non_defaults = non_defaults + self._default_value = default_value + + def _get(key, _get=non_defaults.get, _default=default_value): + return _get(key, _default) + self._get = _get + + def __getitem__(self, item): + return self._get(item) + + def __len__(self): + return len(self._non_defaults) + + def __iter__(self): + return iter(self._non_defaults) + + def get(self, key, default=None): + return self._get(key) + class Parser: @@ -113,19 +143,6 @@ class Parser: def set_str(self, s): assert len(s) == 0 or s[-1] == '\n' - if isinstance(s, str): - # The parse functions have no idea what to do with Unicode, so - # replace all Unicode characters with "x". This is "safe" - # so long as the only characters germane to parsing the structure - # of Python are 7-bit ASCII. It's *necessary* because Unicode - # strings don't have a .translate() method that supports - # deletechars. 
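# [editor's sketch, not part of the patch] What the new
# StringTranslatePseudoMapping in PyParse.py is for: str.translate() looks
# characters up by ordinal, and wrapping the table in a mapping with a default
# collapses every unspecified character to 'x' while brackets, quotes, hashes,
# backslashes and newlines survive.  DefaultingMap is a minimal stand-in; note
# the 3.5-era "from collections import Mapping" lives in collections.abc on
# current Pythons.
from collections.abc import Mapping

class DefaultingMap(Mapping):
    """Dict lookups fall back to a fixed default value."""
    def __init__(self, non_defaults, default_value):
        self._non_defaults = non_defaults
        self._default = default_value
    def __getitem__(self, key):
        return self._non_defaults.get(key, self._default)
    def __len__(self):
        return len(self._non_defaults)
    def __iter__(self):
        return iter(self._non_defaults)

tran = {ord(c): ord('(') for c in "({["}
tran.update((ord(c), ord(')')) for c in ")}]")
tran.update((ord(c), ord(c)) for c in "\"'\\\n#")
table = DefaultingMap(tran, ord('x'))

print("if a(b) == {'k': [1]}:".translate(table))   # -> "xxxx(x)xxxx('x'xx(x))x"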
- uniphooey = s - s = [] - push = s.append - for raw in map(ord, uniphooey): - push(raw < 127 and chr(raw) or "x") - s = "".join(s) self.str = s self.study_level = 0 @@ -197,6 +214,16 @@ class Parser: if lo > 0: self.str = self.str[lo:] + # Build a translation table to map uninteresting chars to 'x', open + # brackets to '(', close brackets to ')' while preserving quotes, + # backslashes, newlines and hashes. This is to be passed to + # str.translate() in _study1(). + _tran = {} + _tran.update((ord(c), ord('(')) for c in "({[") + _tran.update((ord(c), ord(')')) for c in ")}]") + _tran.update((ord(c), ord(c)) for c in "\"'\\\n#") + _tran = StringTranslatePseudoMapping(_tran, default_value=ord('x')) + # As quickly as humanly possible , find the line numbers (0- # based) of the non-continuation lines. # Creates self.{goodlines, continuation}. @@ -211,7 +238,7 @@ class Parser: # uninteresting characters. This can cut the number of chars # by a factor of 10-40, and so greatly speed the following loop. str = self.str - str = str.translate(_tran) + str = str.translate(self._tran) str = str.replace('xxxxxxxx', 'x') str = str.replace('xxxx', 'x') str = str.replace('xx', 'x') diff --git a/Darwin/lib/python3.4/idlelib/PyShell.py b/Darwin/lib/python3.5/idlelib/PyShell.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/PyShell.py rename to Darwin/lib/python3.5/idlelib/PyShell.py index c23b62a..5854cf9 100755 --- a/Darwin/lib/python3.4/idlelib/PyShell.py +++ b/Darwin/lib/python3.5/idlelib/PyShell.py @@ -10,8 +10,6 @@ import sys import threading import time import tokenize -import traceback -import types import io import linecache @@ -21,7 +19,7 @@ from platform import python_version, system try: from tkinter import * except ImportError: - print("** IDLE can't import Tkinter. " \ + print("** IDLE can't import Tkinter.\n" "Your Python may not be configured for Tk. **", file=sys.__stderr__) sys.exit(1) import tkinter.messagebox as tkMessageBox @@ -32,7 +30,6 @@ from idlelib.ColorDelegator import ColorDelegator from idlelib.UndoDelegator import UndoDelegator from idlelib.OutputWindow import OutputWindow from idlelib.configHandler import idleConf -from idlelib import idlever from idlelib import rpc from idlelib import Debugger from idlelib import RemoteDebugger @@ -138,6 +135,7 @@ class PyShellEditorWindow(EditorWindow): self.io.set_filename_change_hook(filename_changed_hook) if self.io.filename: self.restore_file_breaks() + self.color_breakpoint_text() rmenu_specs = [ ("Cut", "<>", "rmenu_check_cut"), @@ -148,12 +146,24 @@ class PyShellEditorWindow(EditorWindow): ("Clear Breakpoint", "<>", None) ] + def color_breakpoint_text(self, color=True): + "Turn colorizing of breakpoint text on or off" + if self.io is None: + # possible due to update in restore_file_breaks + return + if color: + theme = idleConf.GetOption('main','Theme','name') + cfg = idleConf.GetHighlight(theme, "break") + else: + cfg = {'foreground': '', 'background': ''} + self.text.tag_config('BREAK', cfg) + def set_breakpoint(self, lineno): text = self.text filename = self.io.filename text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1)) try: - i = self.breakpoints.index(lineno) + self.breakpoints.index(lineno) except ValueError: # only add if missing, i.e. do once self.breakpoints.append(lineno) try: # update the subprocess debugger @@ -217,13 +227,8 @@ class PyShellEditorWindow(EditorWindow): # This is necessary to keep the saved breaks synched with the # saved file. # - # Breakpoints are set as tagged ranges in the text. 
Certain - # kinds of edits cause these ranges to be deleted: Inserting - # or deleting a line just before a breakpoint, and certain - # deletions prior to a breakpoint. These issues need to be - # investigated and understood. It's not clear if they are - # Tk issues or IDLE issues, or whether they can actually - # be fixed. Since a modified file has to be saved before it is + # Breakpoints are set as tagged ranges in the text. + # Since a modified file has to be saved before it is # run, and since self.breakpoints (from which the subprocess # debugger is loaded) is updated during the save, the visible # breaks stay synched with the subprocess even if one of these @@ -419,7 +424,7 @@ class ModifiedInterpreter(InteractiveInterpreter): try: self.rpcclt = MyRPCClient(addr) break - except OSError as err: + except OSError: pass else: self.display_port_binding_error() @@ -440,7 +445,7 @@ class ModifiedInterpreter(InteractiveInterpreter): self.rpcclt.listening_sock.settimeout(10) try: self.rpcclt.accept() - except socket.timeout as err: + except socket.timeout: self.display_no_subprocess_error() return None self.rpcclt.register("console", self.tkconsole) @@ -454,7 +459,7 @@ class ModifiedInterpreter(InteractiveInterpreter): self.poll_subprocess() return self.rpcclt - def restart_subprocess(self, with_cwd=False): + def restart_subprocess(self, with_cwd=False, filename=''): if self.restarting: return self.rpcclt self.restarting = True @@ -475,25 +480,24 @@ class ModifiedInterpreter(InteractiveInterpreter): self.spawn_subprocess() try: self.rpcclt.accept() - except socket.timeout as err: + except socket.timeout: self.display_no_subprocess_error() return None self.transfer_path(with_cwd=with_cwd) console.stop_readline() # annotate restart in shell window and mark it console.text.delete("iomark", "end-1c") - if was_executing: - console.write('\n') - console.showprompt() - halfbar = ((int(console.width) - 16) // 2) * '=' - console.write(halfbar + ' RESTART ' + halfbar) + tag = 'RESTART: ' + (filename if filename else 'Shell') + halfbar = ((int(console.width) -len(tag) - 4) // 2) * '=' + console.write("\n{0} {1} {0}".format(halfbar, tag)) console.text.mark_set("restart", "end-1c") console.text.mark_gravity("restart", "left") - console.showprompt() + if not filename: + console.showprompt() # restart subprocess debugger if debug: # Restarted debugger connects to current instance of debug GUI - gui = RemoteDebugger.restart_subprocess_debugger(self.rpcclt) + RemoteDebugger.restart_subprocess_debugger(self.rpcclt) # reload remote debugger breakpoints for all PyShellEditWindows debug.load_breakpoints() self.compile.compiler.flags = self.original_compiler_flags @@ -641,9 +645,9 @@ class ModifiedInterpreter(InteractiveInterpreter): code = compile(source, filename, "exec") except (OverflowError, SyntaxError): self.tkconsole.resetoutput() - tkerr = self.tkconsole.stderr - print('*** Error in script or command!\n', file=tkerr) - print('Traceback (most recent call last):', file=tkerr) + print('*** Error in script or command!\n' + 'Traceback (most recent call last):', + file=self.tkconsole.stderr) InteractiveInterpreter.showsyntaxerror(self, filename) self.tkconsole.showprompt() else: @@ -840,13 +844,10 @@ class PyShell(OutputWindow): ("edit", "_Edit"), ("debug", "_Debug"), ("options", "_Options"), - ("windows", "_Windows"), + ("windows", "_Window"), ("help", "_Help"), ] - if sys.platform == "darwin": - menu_specs[-2] = ("windows", "_Window") - # New classes from idlelib.IdleHistory import History @@ -1226,7 +1227,7 
@@ class PyShell(OutputWindow): while i > 0 and line[i-1] in " \t": i = i-1 line = line[:i] - more = self.interp.runsource(line) + self.interp.runsource(line) def open_stack_viewer(self, event=None): if self.interp.rpcclt: @@ -1240,7 +1241,7 @@ class PyShell(OutputWindow): master=self.text) return from idlelib.StackViewer import StackBrowser - sv = StackBrowser(self.root, self.flist) + StackBrowser(self.root, self.flist) def view_restart_mark(self, event=None): self.text.see("iomark") @@ -1462,8 +1463,7 @@ def main(): try: opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:") except getopt.error as msg: - sys.stderr.write("Error: %s\n" % str(msg)) - sys.stderr.write(usage_msg) + print("Error: %s\n%s" % (msg, usage_msg), file=sys.stderr) sys.exit(2) for o, a in opts: if o == '-c': diff --git a/Darwin/lib/python3.4/idlelib/README.txt b/Darwin/lib/python3.5/idlelib/README.txt similarity index 99% rename from Darwin/lib/python3.4/idlelib/README.txt rename to Darwin/lib/python3.5/idlelib/README.txt index b2bb73b..7f4a66d 100644 --- a/Darwin/lib/python3.4/idlelib/README.txt +++ b/Darwin/lib/python3.5/idlelib/README.txt @@ -14,7 +14,7 @@ code objects from a top level viewpoint without dealing with code folding. There is a Python Shell window which features colorizing and command recall. IDLE executes Python code in a separate process, which is restarted for each -Run (F5) initiated from an editor window. The environment can also be +Run (F5) initiated from an editor window. The environment can also be restarted from the Shell window without restarting IDLE. This enhancement has often been requested, and is now finally available. The diff --git a/Darwin/lib/python3.4/idlelib/RemoteDebugger.py b/Darwin/lib/python3.5/idlelib/RemoteDebugger.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/RemoteDebugger.py rename to Darwin/lib/python3.5/idlelib/RemoteDebugger.py index d8662bb..be2262f 100644 --- a/Darwin/lib/python3.4/idlelib/RemoteDebugger.py +++ b/Darwin/lib/python3.5/idlelib/RemoteDebugger.py @@ -21,7 +21,6 @@ barrier, in particular frame and traceback objects. 
""" import types -from idlelib import rpc from idlelib import Debugger debugging = 0 @@ -99,7 +98,7 @@ class IdbAdapter: else: tb = tracebacktable[tbid] stack, i = self.idb.get_stack(frame, tb) - stack = [(wrap_frame(frame), k) for frame, k in stack] + stack = [(wrap_frame(frame2), k) for frame2, k in stack] return stack, i def run(self, cmd): diff --git a/Darwin/lib/python3.4/idlelib/RemoteObjectBrowser.py b/Darwin/lib/python3.5/idlelib/RemoteObjectBrowser.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/RemoteObjectBrowser.py rename to Darwin/lib/python3.5/idlelib/RemoteObjectBrowser.py diff --git a/Darwin/lib/python3.4/idlelib/ReplaceDialog.py b/Darwin/lib/python3.5/idlelib/ReplaceDialog.py similarity index 87% rename from Darwin/lib/python3.4/idlelib/ReplaceDialog.py rename to Darwin/lib/python3.5/idlelib/ReplaceDialog.py index e73f2c5..fc8b80f 100644 --- a/Darwin/lib/python3.4/idlelib/ReplaceDialog.py +++ b/Darwin/lib/python3.5/idlelib/ReplaceDialog.py @@ -40,7 +40,7 @@ class ReplaceDialog(SearchDialogBase): def create_entries(self): SearchDialogBase.create_entries(self) - self.replent = self.make_entry("Replace with:", self.replvar) + self.replent = self.make_entry("Replace with:", self.replvar)[0] def create_command_buttons(self): SearchDialogBase.create_command_buttons(self) @@ -188,3 +188,34 @@ class ReplaceDialog(SearchDialogBase): def close(self, event=None): SearchDialogBase.close(self, event) self.text.tag_remove("hit", "1.0", "end") + +def _replace_dialog(parent): + root = Tk() + root.title("Test ReplaceDialog") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + + # mock undo delegator methods + def undo_block_start(): + pass + + def undo_block_stop(): + pass + + text = Text(root) + text.undo_block_start = undo_block_start + text.undo_block_stop = undo_block_stop + text.pack() + text.insert("insert","This is a sample string.\n"*10) + + def show_replace(): + text.tag_add(SEL, "1.0", END) + replace(text) + text.tag_remove(SEL, "1.0", END) + + button = Button(root, text="Replace", command=show_replace) + button.pack() + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_replace_dialog) diff --git a/Darwin/lib/python3.4/idlelib/RstripExtension.py b/Darwin/lib/python3.5/idlelib/RstripExtension.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/RstripExtension.py rename to Darwin/lib/python3.5/idlelib/RstripExtension.py diff --git a/Darwin/lib/python3.4/idlelib/ScriptBinding.py b/Darwin/lib/python3.5/idlelib/ScriptBinding.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/ScriptBinding.py rename to Darwin/lib/python3.5/idlelib/ScriptBinding.py index b783637..e8cb2fc 100644 --- a/Darwin/lib/python3.4/idlelib/ScriptBinding.py +++ b/Darwin/lib/python3.5/idlelib/ScriptBinding.py @@ -18,13 +18,10 @@ XXX GvR Redesign this interface (yet again) as follows: """ import os -import re -import string import tabnanny import tokenize import tkinter.messagebox as tkMessageBox -from idlelib.EditorWindow import EditorWindow -from idlelib import PyShell, IOBinding +from idlelib import PyShell from idlelib.configHandler import idleConf from idlelib import macosxSupport @@ -39,6 +36,7 @@ To fix case 2, change all tabs to spaces by using Edit->Select All followed \ by Format->Untabify Region and specify the number of columns used by each tab. 
""" + class ScriptBinding: menudefs = [ @@ -145,7 +143,8 @@ class ScriptBinding: return 'break' interp = self.shell.interp if PyShell.use_subprocess: - interp.restart_subprocess(with_cwd=False) + interp.restart_subprocess(with_cwd=False, filename= + self.editwin._filename_to_unicode(filename)) dirname = os.path.dirname(filename) # XXX Too often this discards arguments the user just set... interp.runcommand("""if 1: diff --git a/Darwin/lib/python3.4/idlelib/ScrolledList.py b/Darwin/lib/python3.5/idlelib/ScrolledList.py similarity index 90% rename from Darwin/lib/python3.4/idlelib/ScrolledList.py rename to Darwin/lib/python3.5/idlelib/ScrolledList.py index 0255a0a..71ec547 100644 --- a/Darwin/lib/python3.4/idlelib/ScrolledList.py +++ b/Darwin/lib/python3.5/idlelib/ScrolledList.py @@ -119,21 +119,22 @@ class ScrolledList: pass -def test(): +def _scrolled_list(parent): root = Tk() - root.protocol("WM_DELETE_WINDOW", root.destroy) + root.title("Test ScrolledList") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) class MyScrolledList(ScrolledList): - def fill_menu(self): self.menu.add_command(label="pass") + def fill_menu(self): self.menu.add_command(label="right click") def on_select(self, index): print("select", self.get(index)) def on_double(self, index): print("double", self.get(index)) - s = MyScrolledList(root) - for i in range(30): - s.append("item %02d" % i) - return root -def main(): - root = test() + scrolled_list = MyScrolledList(root) + for i in range(30): + scrolled_list.append("Item %02d" % i) + root.mainloop() if __name__ == '__main__': - main() + from idlelib.idle_test.htest import run + run(_scrolled_list) diff --git a/Darwin/lib/python3.4/idlelib/SearchDialog.py b/Darwin/lib/python3.5/idlelib/SearchDialog.py similarity index 74% rename from Darwin/lib/python3.4/idlelib/SearchDialog.py rename to Darwin/lib/python3.5/idlelib/SearchDialog.py index bf76c41..77ef7b9 100644 --- a/Darwin/lib/python3.4/idlelib/SearchDialog.py +++ b/Darwin/lib/python3.5/idlelib/SearchDialog.py @@ -23,7 +23,7 @@ def find_selection(text): class SearchDialog(SearchDialogBase): def create_widgets(self): - f = SearchDialogBase.create_widgets(self) + SearchDialogBase.create_widgets(self) self.make_button("Find Next", self.default_command, 1) def default_command(self, event=None): @@ -65,3 +65,25 @@ class SearchDialog(SearchDialogBase): if pat: self.engine.setcookedpat(pat) return self.find_again(text) + +def _search_dialog(parent): + root = Tk() + root.title("Test SearchDialog") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + text = Text(root) + text.pack() + text.insert("insert","This is a sample string.\n"*10) + + def show_find(): + text.tag_add(SEL, "1.0", END) + s = _setup(text) + s.open(text) + text.tag_remove(SEL, "1.0", END) + + button = Button(root, text="Search", command=show_find) + button.pack() + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_search_dialog) diff --git a/Darwin/lib/python3.5/idlelib/SearchDialogBase.py b/Darwin/lib/python3.5/idlelib/SearchDialogBase.py new file mode 100644 index 0000000..5fa84e2 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/SearchDialogBase.py @@ -0,0 +1,184 @@ +'''Define SearchDialogBase used by Search, Replace, and Grep dialogs.''' + +from tkinter import (Toplevel, Frame, Entry, Label, Button, + Checkbutton, Radiobutton) + +class SearchDialogBase: + '''Create most of a 3 or 4 row, 3 column search 
dialog. + + The left and wide middle column contain: + 1 or 2 labeled text entry lines (make_entry, create_entries); + a row of standard Checkbuttons (make_frame, create_option_buttons), + each of which corresponds to a search engine Variable; + a row of dialog-specific Check/Radiobuttons (create_other_buttons). + + The narrow right column contains command buttons + (make_button, create_command_buttons). + These are bound to functions that execute the command. + + Except for command buttons, this base class is not limited to items + common to all three subclasses. Rather, it is the Find dialog minus + the "Find Next" command, its execution function, and the + default_command attribute needed in create_widgets. The other + dialogs override attributes and methods, the latter to replace and + add widgets. + ''' + + title = "Search Dialog" # replace in subclasses + icon = "Search" + needwrapbutton = 1 # not in Find in Files + + def __init__(self, root, engine): + '''Initialize root, engine, and top attributes. + + top (level widget): set in create_widgets() called from open(). + text (Text searched): set in open(), only used in subclasses(). + ent (ry): created in make_entry() called from create_entry(). + row (of grid): 0 in create_widgets(), +1 in make_entry/frame(). + default_command: set in subclasses, used in create_widgers(). + + title (of dialog): class attribute, override in subclasses. + icon (of dialog): ditto, use unclear if cannot minimize dialog. + ''' + self.root = root + self.engine = engine + self.top = None + + def open(self, text, searchphrase=None): + "Make dialog visible on top of others and ready to use." + self.text = text + if not self.top: + self.create_widgets() + else: + self.top.deiconify() + self.top.tkraise() + if searchphrase: + self.ent.delete(0,"end") + self.ent.insert("end",searchphrase) + self.ent.focus_set() + self.ent.selection_range(0, "end") + self.ent.icursor(0) + self.top.grab_set() + + def close(self, event=None): + "Put dialog away for later use." + if self.top: + self.top.grab_release() + self.top.withdraw() + + def create_widgets(self): + '''Create basic 3 row x 3 col search (find) dialog. + + Other dialogs override subsidiary create_x methods as needed. + Replace and Find-in-Files add another entry row. + ''' + top = Toplevel(self.root) + top.bind("", self.default_command) + top.bind("", self.close) + top.protocol("WM_DELETE_WINDOW", self.close) + top.wm_title(self.title) + top.wm_iconname(self.icon) + self.top = top + + self.row = 0 + self.top.grid_columnconfigure(0, pad=2, weight=0) + self.top.grid_columnconfigure(1, pad=2, minsize=100, weight=100) + + self.create_entries() # row 0 (and maybe 1), cols 0, 1 + self.create_option_buttons() # next row, cols 0, 1 + self.create_other_buttons() # next row, cols 0, 1 + self.create_command_buttons() # col 2, all rows + + def make_entry(self, label_text, var): + '''Return (entry, label), . + + entry - gridded labeled Entry for text entry. + label - Label widget, returned for testing. + ''' + label = Label(self.top, text=label_text) + label.grid(row=self.row, column=0, sticky="nw") + entry = Entry(self.top, textvariable=var, exportselection=0) + entry.grid(row=self.row, column=1, sticky="nwe") + self.row = self.row + 1 + return entry, label + + def create_entries(self): + "Create one or more entry lines with make_entry." + self.ent = self.make_entry("Find:", self.engine.patvar)[0] + + def make_frame(self,labeltext=None): + '''Return (frame, label). 
+ + frame - gridded labeled Frame for option or other buttons. + label - Label widget, returned for testing. + ''' + if labeltext: + label = Label(self.top, text=labeltext) + label.grid(row=self.row, column=0, sticky="nw") + else: + label = '' + frame = Frame(self.top) + frame.grid(row=self.row, column=1, columnspan=1, sticky="nwe") + self.row = self.row + 1 + return frame, label + + def create_option_buttons(self): + '''Return (filled frame, options) for testing. + + Options is a list of SearchEngine booleanvar, label pairs. + A gridded frame from make_frame is filled with a Checkbutton + for each pair, bound to the var, with the corresponding label. + ''' + frame = self.make_frame("Options")[0] + engine = self.engine + options = [(engine.revar, "Regular expression"), + (engine.casevar, "Match case"), + (engine.wordvar, "Whole word")] + if self.needwrapbutton: + options.append((engine.wrapvar, "Wrap around")) + for var, label in options: + btn = Checkbutton(frame, anchor="w", variable=var, text=label) + btn.pack(side="left", fill="both") + if var.get(): + btn.select() + return frame, options + + def create_other_buttons(self): + '''Return (frame, others) for testing. + + Others is a list of value, label pairs. + A gridded frame from make_frame is filled with radio buttons. + ''' + frame = self.make_frame("Direction")[0] + var = self.engine.backvar + others = [(1, 'Up'), (0, 'Down')] + for val, label in others: + btn = Radiobutton(frame, anchor="w", + variable=var, value=val, text=label) + btn.pack(side="left", fill="both") + if var.get() == val: + btn.select() + return frame, others + + def make_button(self, label, command, isdef=0): + "Return command button gridded in command frame." + b = Button(self.buttonframe, + text=label, command=command, + default=isdef and "active" or "normal") + cols,rows=self.buttonframe.grid_size() + b.grid(pady=1,row=rows,column=0,sticky="ew") + self.buttonframe.grid(rowspan=rows+1) + return b + + def create_command_buttons(self): + "Place buttons in vertical command frame gridded on right." + f = self.buttonframe = Frame(self.top) + f.grid(row=0,column=2,padx=2,pady=2,ipadx=2,ipady=2) + + b = self.make_button("close", self.close) + b.lower() + +if __name__ == '__main__': + import unittest + unittest.main( + 'idlelib.idle_test.test_searchdialogbase', verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/SearchEngine.py b/Darwin/lib/python3.5/idlelib/SearchEngine.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/SearchEngine.py rename to Darwin/lib/python3.5/idlelib/SearchEngine.py index 9d3c4cb..37883bf 100644 --- a/Darwin/lib/python3.4/idlelib/SearchEngine.py +++ b/Darwin/lib/python3.5/idlelib/SearchEngine.py @@ -85,7 +85,7 @@ class SearchEngine: except re.error as what: args = what.args msg = args[0] - col = arg[1] if len(args) >= 2 else -1 + col = args[1] if len(args) >= 2 else -1 self.report_error(pat, msg, col) return None return prog @@ -107,7 +107,7 @@ class SearchEngine: It directly return the result of that call. Text is a text widget. Prog is a precompiled pattern. - The ok parameteris a bit complicated as it has two effects. + The ok parameter is a bit complicated as it has two effects. If there is a selection, the search begin at either end, depending on the direction setting and ok, with ok meaning that @@ -191,7 +191,7 @@ def search_reverse(prog, chars, col): This is done by searching forwards until there is no match. Prog: compiled re object with a search method returning a match. - Chars: line of text, without \n. 
+ Chars: line of text, without \\n. Col: stop index for the search; the limit for match.end(). ''' m = prog.search(chars) @@ -229,6 +229,5 @@ def get_line_col(index): return line, col if __name__ == "__main__": - from test import support; support.use_resources = ['gui'] import unittest unittest.main('idlelib.idle_test.test_searchengine', verbosity=2, exit=False) diff --git a/Darwin/lib/python3.4/idlelib/StackViewer.py b/Darwin/lib/python3.5/idlelib/StackViewer.py similarity index 80% rename from Darwin/lib/python3.4/idlelib/StackViewer.py rename to Darwin/lib/python3.5/idlelib/StackViewer.py index 4ef2d31..b1e5e26 100644 --- a/Darwin/lib/python3.4/idlelib/StackViewer.py +++ b/Darwin/lib/python3.5/idlelib/StackViewer.py @@ -1,9 +1,12 @@ import os import sys import linecache +import re +import tkinter as tk from idlelib.TreeWidget import TreeNode, TreeItem, ScrolledCanvas from idlelib.ObjectBrowser import ObjectTreeItem, make_objecttreeitem +from idlelib.PyShell import PyShellFileList def StackBrowser(root, flist=None, tb=None, top=None): if top is None: @@ -120,3 +123,30 @@ class VariablesTreeItem(ObjectTreeItem): item = make_objecttreeitem(key + " =", value, setfunction) sublist.append(item) return sublist + +def _stack_viewer(parent): + root = tk.Tk() + root.title("Test StackViewer") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + flist = PyShellFileList(root) + try: # to obtain a traceback object + intentional_name_error + except NameError: + exc_type, exc_value, exc_tb = sys.exc_info() + + # inject stack trace to sys + sys.last_type = exc_type + sys.last_value = exc_value + sys.last_traceback = exc_tb + + StackBrowser(root, flist=flist, top=root, tb=exc_tb) + + # restore sys to original state + del sys.last_type + del sys.last_value + del sys.last_traceback + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_stack_viewer) diff --git a/Darwin/lib/python3.4/idlelib/TODO.txt b/Darwin/lib/python3.5/idlelib/TODO.txt similarity index 100% rename from Darwin/lib/python3.4/idlelib/TODO.txt rename to Darwin/lib/python3.5/idlelib/TODO.txt diff --git a/Darwin/lib/python3.4/idlelib/ToolTip.py b/Darwin/lib/python3.5/idlelib/ToolTip.py similarity index 80% rename from Darwin/lib/python3.4/idlelib/ToolTip.py rename to Darwin/lib/python3.5/idlelib/ToolTip.py index b178803..964107e 100644 --- a/Darwin/lib/python3.4/idlelib/ToolTip.py +++ b/Darwin/lib/python3.5/idlelib/ToolTip.py @@ -76,14 +76,22 @@ class ListboxToolTip(ToolTipBase): for item in self.items: listbox.insert(END, item) -def main(): - # Test code +def _tooltip(parent): root = Tk() - b = Button(root, text="Hello", command=root.destroy) - b.pack() - root.update() - tip = ListboxToolTip(b, ["Hello", "world"]) + root.title("Test tooltip") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + label = Label(root, text="Place your mouse over buttons") + label.pack() + button1 = Button(root, text="Button 1") + button2 = Button(root, text="Button 2") + button1.pack() + button2.pack() + ToolTip(button1, "This is tooltip text for button1.") + ListboxToolTip(button2, ["This is","multiple line", + "tooltip text","for button2"]) root.mainloop() if __name__ == '__main__': - main() + from idlelib.idle_test.htest import run + run(_tooltip) diff --git a/Darwin/lib/python3.4/idlelib/TreeWidget.py b/Darwin/lib/python3.5/idlelib/TreeWidget.py similarity index 95% rename from 
Darwin/lib/python3.4/idlelib/TreeWidget.py rename to Darwin/lib/python3.5/idlelib/TreeWidget.py index 1f4854d..4844a69 100644 --- a/Darwin/lib/python3.4/idlelib/TreeWidget.py +++ b/Darwin/lib/python3.5/idlelib/TreeWidget.py @@ -173,11 +173,12 @@ class TreeNode: def draw(self, x, y): # XXX This hard-codes too many geometry constants! + dy = 20 self.x, self.y = x, y self.drawicon() self.drawtext() if self.state != 'expanded': - return y+17 + return y + dy # draw children if not self.children: sublist = self.item._GetSubList() @@ -188,7 +189,7 @@ class TreeNode: child = self.__class__(self.canvas, self, item) self.children.append(child) cx = x+20 - cy = y+17 + cy = y + dy cylast = 0 for child in self.children: cylast = cy @@ -227,7 +228,7 @@ class TreeNode: def drawtext(self): textx = self.x+20-1 - texty = self.y-1 + texty = self.y-4 labeltext = self.item.GetLabelText() if labeltext: id = self.canvas.create_text(textx, texty, anchor="nw", @@ -244,7 +245,7 @@ class TreeNode: else: self.edit_finish() try: - label = self.label + self.label except AttributeError: # padding carefully selected (on Windows) to match Entry widget: self.label = Label(self.canvas, text=text, bd=0, padx=2, pady=2) @@ -448,29 +449,18 @@ class ScrolledCanvas: return "break" -# Testing functions - -def test(): - from idlelib import PyShell - root = Toplevel(PyShell.root) - root.configure(bd=0, bg="yellow") - root.focus_set() +def _tree_widget(parent): + root = Tk() + root.title("Test TreeWidget") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1) - sc.frame.pack(expand=1, fill="both") - item = FileTreeItem("C:/windows/desktop") + sc.frame.pack(expand=1, fill="both", side=LEFT) + item = FileTreeItem(os.getcwd()) node = TreeNode(sc.canvas, None, item) node.expand() - -def test2(): - # test w/o scrolling canvas - root = Tk() - root.configure(bd=0) - canvas = Canvas(root, bg="white", highlightthickness=0) - canvas.pack(expand=1, fill="both") - item = FileTreeItem(os.curdir) - node = TreeNode(canvas, None, item) - node.update() - canvas.focus_set() + root.mainloop() if __name__ == '__main__': - test() + from idlelib.idle_test.htest import run + run(_tree_widget) diff --git a/Darwin/lib/python3.4/idlelib/UndoDelegator.py b/Darwin/lib/python3.5/idlelib/UndoDelegator.py similarity index 94% rename from Darwin/lib/python3.4/idlelib/UndoDelegator.py rename to Darwin/lib/python3.5/idlelib/UndoDelegator.py index d2ef638..04c1cf5 100644 --- a/Darwin/lib/python3.4/idlelib/UndoDelegator.py +++ b/Darwin/lib/python3.5/idlelib/UndoDelegator.py @@ -336,17 +336,30 @@ class CommandSequence(Command): self.depth = self.depth + incr return self.depth -def main(): +def _undo_delegator(parent): from idlelib.Percolator import Percolator root = Tk() - root.wm_protocol("WM_DELETE_WINDOW", root.quit) - text = Text() + root.title("Test UndoDelegator") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + + text = Text(root) + text.config(height=10) text.pack() text.focus_set() p = Percolator(text) d = UndoDelegator() p.insertfilter(d) + + undo = Button(root, text="Undo", command=lambda:d.undo_event(None)) + undo.pack(side='left') + redo = Button(root, text="Redo", command=lambda:d.redo_event(None)) + redo.pack(side='left') + dump = Button(root, text="Dump", command=lambda:d.dump_event(None)) + dump.pack(side='left') + root.mainloop() if __name__ == 
"__main__": - main() + from idlelib.idle_test.htest import run + run(_undo_delegator) diff --git a/Darwin/lib/python3.5/idlelib/WidgetRedirector.py b/Darwin/lib/python3.5/idlelib/WidgetRedirector.py new file mode 100644 index 0000000..67d7f61 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/WidgetRedirector.py @@ -0,0 +1,176 @@ +from tkinter import TclError + +class WidgetRedirector: + """Support for redirecting arbitrary widget subcommands. + + Some Tk operations don't normally pass through tkinter. For example, if a + character is inserted into a Text widget by pressing a key, a default Tk + binding to the widget's 'insert' operation is activated, and the Tk library + processes the insert without calling back into tkinter. + + Although a binding to could be made via tkinter, what we really want + to do is to hook the Tk 'insert' operation itself. For one thing, we want + a text.insert call in idle code to have the same effect as a key press. + + When a widget is instantiated, a Tcl command is created whose name is the + same as the pathname widget._w. This command is used to invoke the various + widget operations, e.g. insert (for a Text widget). We are going to hook + this command and provide a facility ('register') to intercept the widget + operation. We will also intercept method calls on the tkinter class + instance that represents the tk widget. + + In IDLE, WidgetRedirector is used in Percolator to intercept Text + commands. The function being registered provides access to the top + of a Percolator chain. At the bottom of the chain is a call to the + original Tk widget operation. + """ + def __init__(self, widget): + '''Initialize attributes and setup redirection. + + _operations: dict mapping operation name to new function. + widget: the widget whose tcl command is to be intercepted. + tk: widget.tk, a convenience attribute, probably not needed. + orig: new name of the original tcl command. + + Since renaming to orig fails with TclError when orig already + exists, only one WidgetDirector can exist for a given widget. + ''' + self._operations = {} + self.widget = widget # widget instance + self.tk = tk = widget.tk # widget's root + w = widget._w # widget's (full) Tk pathname + self.orig = w + "_orig" + # Rename the Tcl command within Tcl: + tk.call("rename", w, self.orig) + # Create a new Tcl command whose name is the widget's pathname, and + # whose action is to dispatch on the operation passed to the widget: + tk.createcommand(w, self.dispatch) + + def __repr__(self): + return "%s(%s<%s>)" % (self.__class__.__name__, + self.widget.__class__.__name__, + self.widget._w) + + def close(self): + "Unregister operations and revert redirection created by .__init__." + for operation in list(self._operations): + self.unregister(operation) + widget = self.widget + tk = widget.tk + w = widget._w + # Restore the original widget Tcl command. + tk.deletecommand(w) + tk.call("rename", self.orig, w) + del self.widget, self.tk # Should not be needed + # if instance is deleted after close, as in Percolator. + + def register(self, operation, function): + '''Return OriginalCommand(operation) after registering function. + + Registration adds an operation: function pair to ._operations. + It also adds an widget function attribute that masks the tkinter + class instance method. Method masking operates independently + from command dispatch. + + If a second function is registered for the same operation, the + first function is replaced in both places. 
+ ''' + self._operations[operation] = function + setattr(self.widget, operation, function) + return OriginalCommand(self, operation) + + def unregister(self, operation): + '''Return the function for the operation, or None. + + Deleting the instance attribute unmasks the class attribute. + ''' + if operation in self._operations: + function = self._operations[operation] + del self._operations[operation] + try: + delattr(self.widget, operation) + except AttributeError: + pass + return function + else: + return None + + def dispatch(self, operation, *args): + '''Callback from Tcl which runs when the widget is referenced. + + If an operation has been registered in self._operations, apply the + associated function to the args passed into Tcl. Otherwise, pass the + operation through to Tk via the original Tcl function. + + Note that if a registered function is called, the operation is not + passed through to Tk. Apply the function returned by self.register() + to *args to accomplish that. For an example, see ColorDelegator.py. + + ''' + m = self._operations.get(operation) + try: + if m: + return m(*args) + else: + return self.tk.call((self.orig, operation) + args) + except TclError: + return "" + + +class OriginalCommand: + '''Callable for original tk command that has been redirected. + + Returned by .register; can be used in the function registered. + redir = WidgetRedirector(text) + def my_insert(*args): + print("insert", args) + original_insert(*args) + original_insert = redir.register("insert", my_insert) + ''' + + def __init__(self, redir, operation): + '''Create .tk_call and .orig_and_operation for .__call__ method. + + .redir and .operation store the input args for __repr__. + .tk and .orig copy attributes of .redir (probably not needed). + ''' + self.redir = redir + self.operation = operation + self.tk = redir.tk # redundant with self.redir + self.orig = redir.orig # redundant with self.redir + # These two could be deleted after checking recipient code. 
+ self.tk_call = redir.tk.call + self.orig_and_operation = (redir.orig, operation) + + def __repr__(self): + return "%s(%r, %r)" % (self.__class__.__name__, + self.redir, self.operation) + + def __call__(self, *args): + return self.tk_call(self.orig_and_operation + args) + + +def _widget_redirector(parent): # htest # + from tkinter import Tk, Text + import re + + root = Tk() + root.title("Test WidgetRedirector") + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 150)) + text = Text(root) + text.pack() + text.focus_set() + redir = WidgetRedirector(text) + def my_insert(*args): + print("insert", args) + original_insert(*args) + original_insert = redir.register("insert", my_insert) + root.mainloop() + +if __name__ == "__main__": + import unittest + unittest.main('idlelib.idle_test.test_widgetredir', + verbosity=2, exit=False) + from idlelib.idle_test.htest import run + run(_widget_redirector) diff --git a/Darwin/lib/python3.4/idlelib/WindowList.py b/Darwin/lib/python3.5/idlelib/WindowList.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/WindowList.py rename to Darwin/lib/python3.5/idlelib/WindowList.py diff --git a/Darwin/lib/python3.4/idlelib/ZoomHeight.py b/Darwin/lib/python3.5/idlelib/ZoomHeight.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/ZoomHeight.py rename to Darwin/lib/python3.5/idlelib/ZoomHeight.py diff --git a/Darwin/lib/python3.4/idlelib/__init__.py b/Darwin/lib/python3.5/idlelib/__init__.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/__init__.py rename to Darwin/lib/python3.5/idlelib/__init__.py diff --git a/Darwin/lib/python3.4/idlelib/__main__.py b/Darwin/lib/python3.5/idlelib/__main__.py similarity index 67% rename from Darwin/lib/python3.4/idlelib/__main__.py rename to Darwin/lib/python3.5/idlelib/__main__.py index 0666f2f..2edf5f7 100644 --- a/Darwin/lib/python3.4/idlelib/__main__.py +++ b/Darwin/lib/python3.5/idlelib/__main__.py @@ -3,7 +3,6 @@ IDLE main entry point Run IDLE as python -m idlelib """ - - import idlelib.PyShell idlelib.PyShell.main() +# This file does not work for 2.7; See issue 24212. 
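As a usage aside, outside the patch: the WidgetRedirector docstrings above describe hooking a widget's Tcl command and masking the corresponding tkinter method, while the bundled `_widget_redirector` htest only exercises `register`. A minimal sketch of the full register/unregister/close life-cycle, assuming the python3.5 idlelib from this tree is importable and a display is available, looks like this:

from tkinter import Tk, Text
from idlelib.WidgetRedirector import WidgetRedirector

root = Tk()
text = Text(root)
redir = WidgetRedirector(text)

def filtered_insert(index, chars, *tags):
    # Sees every Tcl-level 'insert': key presses (via dispatch) as well as
    # text.insert() calls in Python code (via the masking attribute).
    original_insert(index, chars.upper(), *tags)

original_insert = redir.register("insert", filtered_insert)

text.insert("end", "hello")        # stored as "HELLO"
print(text.get("1.0", "end-1c"))   # HELLO

redir.unregister("insert")         # unmask the Text.insert method again
redir.close()                      # rename the original Tcl command back
root.destroy()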
diff --git a/Darwin/lib/python3.4/idlelib/aboutDialog.py b/Darwin/lib/python3.5/idlelib/aboutDialog.py similarity index 89% rename from Darwin/lib/python3.4/idlelib/aboutDialog.py rename to Darwin/lib/python3.5/idlelib/aboutDialog.py index 2b58013..d876a97 100644 --- a/Darwin/lib/python3.4/idlelib/aboutDialog.py +++ b/Darwin/lib/python3.5/idlelib/aboutDialog.py @@ -2,21 +2,25 @@ """ -from tkinter import * import os - +from sys import version +from tkinter import * from idlelib import textView -from idlelib import idlever class AboutDialog(Toplevel): """Modal about dialog for idle """ - def __init__(self, parent, title): + def __init__(self, parent, title, _htest=False): + """ + _htest - bool, change box location when running htest + """ Toplevel.__init__(self, parent) self.configure(borderwidth=5) - self.geometry("+%d+%d" % (parent.winfo_rootx()+30, - parent.winfo_rooty()+30)) + # place dialog below parent if running htest + self.geometry("+%d+%d" % ( + parent.winfo_rootx()+30, + parent.winfo_rooty()+(30 if not _htest else 100))) self.bg = "#707070" self.fg = "#ffffff" self.CreateWidgets() @@ -32,6 +36,7 @@ class AboutDialog(Toplevel): self.wait_window() def CreateWidgets(self): + release = version[:version.index(' ')] frameMain = Frame(self, borderwidth=2, relief=SUNKEN) frameButtons = Frame(self) frameButtons.pack(side=BOTTOM, fill=X) @@ -57,14 +62,15 @@ class AboutDialog(Toplevel): justify=LEFT, fg=self.fg, bg=self.bg) labelEmail.grid(row=6, column=0, columnspan=2, sticky=W, padx=10, pady=0) - labelWWW = Label(frameBg, text='www: http://www.python.org/idle/', + labelWWW = Label(frameBg, text='https://docs.python.org/' + + version[:3] + '/library/idle.html', justify=LEFT, fg=self.fg, bg=self.bg) labelWWW.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0) Frame(frameBg, borderwidth=1, relief=SUNKEN, height=2, bg=self.bg).grid(row=8, column=0, sticky=EW, columnspan=3, padx=5, pady=5) - labelPythonVer = Label(frameBg, text='Python version: ' + \ - sys.version.split()[0], fg=self.fg, bg=self.bg) + labelPythonVer = Label(frameBg, text='Python version: ' + + release, fg=self.fg, bg=self.bg) labelPythonVer.grid(row=9, column=0, sticky=W, padx=10, pady=0) tkVer = self.tk.call('info', 'patchlevel') labelTkVer = Label(frameBg, text='Tk version: '+ @@ -87,7 +93,7 @@ class AboutDialog(Toplevel): Frame(frameBg, borderwidth=1, relief=SUNKEN, height=2, bg=self.bg).grid(row=11, column=0, sticky=EW, columnspan=3, padx=5, pady=5) - idle_v = Label(frameBg, text='IDLE version: ' + idlever.IDLE_VERSION, + idle_v = Label(frameBg, text='IDLE version: ' + release, fg=self.fg, bg=self.bg) idle_v.grid(row=12, column=0, sticky=W, padx=10, pady=0) idle_button_f = Frame(frameBg, bg=self.bg) diff --git a/Darwin/lib/python3.4/idlelib/config-extensions.def b/Darwin/lib/python3.5/idlelib/config-extensions.def similarity index 54% rename from Darwin/lib/python3.4/idlelib/config-extensions.def rename to Darwin/lib/python3.5/idlelib/config-extensions.def index 39e69ce..62c5eaa 100644 --- a/Darwin/lib/python3.4/idlelib/config-extensions.def +++ b/Darwin/lib/python3.5/idlelib/config-extensions.def @@ -3,75 +3,37 @@ # IDLE reads several config files to determine user preferences. This # file is the default configuration file for IDLE extensions settings. # -# Each extension must have at least one section, named after the extension -# module. 
This section must contain an 'enable' item (=1 to enable the -# extension, =0 to disable it), it may contain 'enable_editor' or 'enable_shell' -# items, to apply it only to editor/shell windows, and may also contain any -# other general configuration items for the extension. +# Each extension must have at least one section, named after the +# extension module. This section must contain an 'enable' item (=True to +# enable the extension, =False to disable it), it may contain +# 'enable_editor' or 'enable_shell' items, to apply it only to editor ir +# shell windows, and may also contain any other general configuration +# items for the extension. Other True/False values will also be +# recognized as boolean by the Extension Configuration dialog. # -# Each extension must define at least one section named ExtensionName_bindings -# or ExtensionName_cfgBindings. If present, ExtensionName_bindings defines -# virtual event bindings for the extension that are not user re-configurable. -# If present, ExtensionName_cfgBindings defines virtual event bindings for the +# Each extension must define at least one section named +# ExtensionName_bindings or ExtensionName_cfgBindings. If present, +# ExtensionName_bindings defines virtual event bindings for the +# extension that are not user re-configurable. If present, +# ExtensionName_cfgBindings defines virtual event bindings for the # extension that may be sensibly re-configured. # -# If there are no keybindings for a menus' virtual events, include lines like -# <>= (See [CodeContext], below.) +# If there are no keybindings for a menus' virtual events, include lines +# like <>= (See [CodeContext], below.) # -# Currently it is necessary to manually modify this file to change extension -# key bindings and default values. To customize, create +# Currently it is necessary to manually modify this file to change +# extension key bindings and default values. To customize, create # ~/.idlerc/config-extensions.cfg and append the appropriate customized # section(s). Those sections will override the defaults in this file. # -# Note: If a keybinding is already in use when the extension is -# loaded, the extension's virtual event's keybinding will be set to ''. +# Note: If a keybinding is already in use when the extension is loaded, +# the extension's virtual event's keybinding will be set to ''. # # See config-keys.def for notes on specifying keys and extend.txt for # information on creating IDLE extensions. 
-[FormatParagraph] -enable=1 -[FormatParagraph_cfgBindings] -format-paragraph= - -[AutoExpand] -enable=1 -[AutoExpand_cfgBindings] -expand-word= - -[ZoomHeight] -enable=1 -[ZoomHeight_cfgBindings] -zoom-height= - -[ScriptBinding] -enable=1 -enable_shell=0 -enable_editor=1 -[ScriptBinding_cfgBindings] -run-module= -check-module= - -[CallTips] -enable=1 -[CallTips_cfgBindings] -force-open-calltip= -[CallTips_bindings] -try-open-calltip= -refresh-calltip= - -[ParenMatch] -enable=1 -style= expression -flash-delay= 500 -bell= 1 -[ParenMatch_cfgBindings] -flash-paren= -[ParenMatch_bindings] -paren-closed= - [AutoComplete] -enable=1 +enable=True popupwait=2000 [AutoComplete_cfgBindings] force-open-completions= @@ -79,18 +41,59 @@ force-open-completions= autocomplete= try-open-completions= +[AutoExpand] +enable=True +[AutoExpand_cfgBindings] +expand-word= + +[CallTips] +enable=True +[CallTips_cfgBindings] +force-open-calltip= +[CallTips_bindings] +try-open-calltip= +refresh-calltip= + [CodeContext] -enable=1 -enable_shell=0 +enable=True +enable_shell=False numlines=3 -visible=0 +visible=False bgcolor=LightGray fgcolor=Black [CodeContext_bindings] toggle-code-context= -[RstripExtension] -enable=1 -enable_shell=0 -enable_editor=1 +[FormatParagraph] +enable=True +max-width=72 +[FormatParagraph_cfgBindings] +format-paragraph= +[ParenMatch] +enable=True +style= expression +flash-delay= 500 +bell=True +[ParenMatch_cfgBindings] +flash-paren= +[ParenMatch_bindings] +paren-closed= + +[RstripExtension] +enable=True +enable_shell=False +enable_editor=True + +[ScriptBinding] +enable=True +enable_shell=False +enable_editor=True +[ScriptBinding_cfgBindings] +run-module= +check-module= + +[ZoomHeight] +enable=True +[ZoomHeight_cfgBindings] +zoom-height= diff --git a/Darwin/lib/python3.4/idlelib/config-highlight.def b/Darwin/lib/python3.5/idlelib/config-highlight.def similarity index 100% rename from Darwin/lib/python3.4/idlelib/config-highlight.def rename to Darwin/lib/python3.5/idlelib/config-highlight.def diff --git a/Darwin/lib/python3.4/idlelib/config-keys.def b/Darwin/lib/python3.5/idlelib/config-keys.def similarity index 89% rename from Darwin/lib/python3.4/idlelib/config-keys.def rename to Darwin/lib/python3.5/idlelib/config-keys.def index fdc35ba..3bfcb69 100644 --- a/Darwin/lib/python3.4/idlelib/config-keys.def +++ b/Darwin/lib/python3.5/idlelib/config-keys.def @@ -13,37 +13,37 @@ cut= paste= beginning-of-line= center-insert= -close-all-windows= +close-all-windows= close-window= do-nothing= end-of-file= python-docs= python-context-help= -history-next= -history-previous= +history-next= +history-previous= interrupt-execution= view-restart= restart-shell= -open-class-browser= -open-module= +open-class-browser= +open-module= open-new-window= open-window-from-file= plain-newline-and-indent= print-window= -redo= +redo= remove-selection= -save-copy-of-window-as-file= -save-window-as-file= -save-window= -select-all= +save-copy-of-window-as-file= +save-window-as-file= +save-window= +select-all= toggle-auto-coloring= undo= find= -find-again= +find-again= find-in-files= find-selection= replace= -goto-line= +goto-line= smart-backspace= newline-and-indent= smart-indent= @@ -53,8 +53,8 @@ comment-region= uncomment-region= tabify-region= untabify-region= -toggle-tabs= -change-indentwidth= +toggle-tabs= +change-indentwidth= del-word-left= del-word-right= diff --git a/Darwin/lib/python3.4/idlelib/config-main.def b/Darwin/lib/python3.5/idlelib/config-main.def similarity index 97% rename from 
Darwin/lib/python3.4/idlelib/config-main.def rename to Darwin/lib/python3.5/idlelib/config-main.def index 8f0fe76..3cf2cf0 100644 --- a/Darwin/lib/python3.4/idlelib/config-main.def +++ b/Darwin/lib/python3.5/idlelib/config-main.def @@ -53,14 +53,11 @@ delete-exitfunc= 1 [EditorWindow] width= 80 height= 40 -font= courier +font= TkFixedFont font-size= 10 font-bold= 0 encoding= none -[FormatParagraph] -paragraph=72 - [Indent] use-spaces= 1 num-spaces= 4 @@ -71,7 +68,7 @@ name= IDLE Classic [Keys] default= 1 -name= IDLE Classic Windows +name= IDLE Classic OSX [History] cyclic=1 diff --git a/Darwin/lib/python3.5/idlelib/configDialog.py b/Darwin/lib/python3.5/idlelib/configDialog.py new file mode 100644 index 0000000..9ed6336 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/configDialog.py @@ -0,0 +1,1420 @@ +"""IDLE Configuration Dialog: support user customization of IDLE by GUI + +Customize font faces, sizes, and colorization attributes. Set indentation +defaults. Customize keybindings. Colorization and keybindings can be +saved as user defined sets. Select startup options including shell/editor +and default window size. Define additional help sources. + +Note that tab width in IDLE is currently fixed at eight due to Tk issues. +Refer to comments in EditorWindow autoindent code for details. + +""" +from tkinter import * +import tkinter.messagebox as tkMessageBox +import tkinter.colorchooser as tkColorChooser +import tkinter.font as tkFont + +from idlelib.configHandler import idleConf +from idlelib.dynOptionMenuWidget import DynOptionMenu +from idlelib.keybindingDialog import GetKeysDialog +from idlelib.configSectionNameDialog import GetCfgSectionNameDialog +from idlelib.configHelpSourceEdit import GetHelpSourceDialog +from idlelib.tabbedpages import TabbedPageSet +from idlelib import macosxSupport +class ConfigDialog(Toplevel): + + def __init__(self, parent, title='', _htest=False, _utest=False): + """ + _htest - bool, change box location when running htest + _utest - bool, don't wait_window when running unittest + """ + Toplevel.__init__(self, parent) + self.parent = parent + if _htest: + parent.instance_dict = {} + self.wm_withdraw() + + self.configure(borderwidth=5) + self.title(title or 'IDLE Preferences') + self.geometry( + "+%d+%d" % (parent.winfo_rootx() + 20, + parent.winfo_rooty() + (30 if not _htest else 150))) + #Theme Elements. Each theme element key is its display name. + #The first value of the tuple is the sample area tag name. + #The second value is the display name list sort index. 
+ self.themeElements={ + 'Normal Text':('normal', '00'), + 'Python Keywords':('keyword', '01'), + 'Python Definitions':('definition', '02'), + 'Python Builtins':('builtin', '03'), + 'Python Comments':('comment', '04'), + 'Python Strings':('string', '05'), + 'Selected Text':('hilite', '06'), + 'Found Text':('hit', '07'), + 'Cursor':('cursor', '08'), + 'Error Text':('error', '09'), + 'Shell Normal Text':('console', '10'), + 'Shell Stdout Text':('stdout', '11'), + 'Shell Stderr Text':('stderr', '12'), + } + self.ResetChangedItems() #load initial values in changed items dict + self.CreateWidgets() + self.resizable(height=FALSE, width=FALSE) + self.transient(parent) + self.grab_set() + self.protocol("WM_DELETE_WINDOW", self.Cancel) + self.tabPages.focus_set() + #key bindings for this dialog + #self.bind('', self.Cancel) #dismiss dialog, no save + #self.bind('', self.Apply) #apply changes, save + #self.bind('', self.Help) #context help + self.LoadConfigs() + self.AttachVarCallbacks() #avoid callbacks during LoadConfigs + + if not _utest: + self.wm_deiconify() + self.wait_window() + + def CreateWidgets(self): + self.tabPages = TabbedPageSet(self, + page_names=['Fonts/Tabs', 'Highlighting', 'Keys', 'General']) + self.tabPages.pack(side=TOP, expand=TRUE, fill=BOTH) + self.CreatePageFontTab() + self.CreatePageHighlight() + self.CreatePageKeys() + self.CreatePageGeneral() + self.create_action_buttons().pack(side=BOTTOM) + def create_action_buttons(self): + if macosxSupport.isAquaTk(): + # Changing the default padding on OSX results in unreadable + # text in the buttons + paddingArgs = {} + else: + paddingArgs = {'padx':6, 'pady':3} + outer = Frame(self, pady=2) + buttons = Frame(outer, pady=2) + self.buttonOk = Button( + buttons, text='Ok', command=self.Ok, + takefocus=FALSE, **paddingArgs) + self.buttonApply = Button( + buttons, text='Apply', command=self.Apply, + takefocus=FALSE, **paddingArgs) + self.buttonCancel = Button( + buttons, text='Cancel', command=self.Cancel, + takefocus=FALSE, **paddingArgs) + self.buttonOk.pack(side=LEFT, padx=5) + self.buttonApply.pack(side=LEFT, padx=5) + self.buttonCancel.pack(side=LEFT, padx=5) +# Comment out Help button creation and packing until implement self.Help +## self.buttonHelp = Button( +## buttons, text='Help', command=self.Help, +## takefocus=FALSE, **paddingArgs) +## self.buttonHelp.pack(side=RIGHT, padx=5) + + # add space above buttons + Frame(outer, height=2, borderwidth=0).pack(side=TOP) + buttons.pack(side=BOTTOM) + return outer + def CreatePageFontTab(self): + parent = self.parent + self.fontSize = StringVar(parent) + self.fontBold = BooleanVar(parent) + self.fontName = StringVar(parent) + self.spaceNum = IntVar(parent) + self.editFont = tkFont.Font(parent, ('courier', 10, 'normal')) + + ##widget creation + #body frame + frame = self.tabPages.pages['Fonts/Tabs'].frame + #body section frames + frameFont = LabelFrame( + frame, borderwidth=2, relief=GROOVE, text=' Base Editor Font ') + frameIndent = LabelFrame( + frame, borderwidth=2, relief=GROOVE, text=' Indentation Width ') + #frameFont + frameFontName = Frame(frameFont) + frameFontParam = Frame(frameFont) + labelFontNameTitle = Label( + frameFontName, justify=LEFT, text='Font Face :') + self.listFontName = Listbox( + frameFontName, height=5, takefocus=FALSE, exportselection=FALSE) + self.listFontName.bind( + '', self.OnListFontButtonRelease) + scrollFont = Scrollbar(frameFontName) + scrollFont.config(command=self.listFontName.yview) + self.listFontName.config(yscrollcommand=scrollFont.set) + 
labelFontSizeTitle = Label(frameFontParam, text='Size :') + self.optMenuFontSize = DynOptionMenu( + frameFontParam, self.fontSize, None, command=self.SetFontSample) + checkFontBold = Checkbutton( + frameFontParam, variable=self.fontBold, onvalue=1, + offvalue=0, text='Bold', command=self.SetFontSample) + frameFontSample = Frame(frameFont, relief=SOLID, borderwidth=1) + self.labelFontSample = Label( + frameFontSample, justify=LEFT, font=self.editFont, + text='AaBbCcDdEe\nFfGgHhIiJjK\n1234567890\n#:+=(){}[]') + #frameIndent + frameIndentSize = Frame(frameIndent) + labelSpaceNumTitle = Label( + frameIndentSize, justify=LEFT, + text='Python Standard: 4 Spaces!') + self.scaleSpaceNum = Scale( + frameIndentSize, variable=self.spaceNum, + orient='horizontal', tickinterval=2, from_=2, to=16) + + #widget packing + #body + frameFont.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH) + frameIndent.pack(side=LEFT, padx=5, pady=5, fill=Y) + #frameFont + frameFontName.pack(side=TOP, padx=5, pady=5, fill=X) + frameFontParam.pack(side=TOP, padx=5, pady=5, fill=X) + labelFontNameTitle.pack(side=TOP, anchor=W) + self.listFontName.pack(side=LEFT, expand=TRUE, fill=X) + scrollFont.pack(side=LEFT, fill=Y) + labelFontSizeTitle.pack(side=LEFT, anchor=W) + self.optMenuFontSize.pack(side=LEFT, anchor=W) + checkFontBold.pack(side=LEFT, anchor=W, padx=20) + frameFontSample.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH) + self.labelFontSample.pack(expand=TRUE, fill=BOTH) + #frameIndent + frameIndentSize.pack(side=TOP, fill=X) + labelSpaceNumTitle.pack(side=TOP, anchor=W, padx=5) + self.scaleSpaceNum.pack(side=TOP, padx=5, fill=X) + return frame + + def CreatePageHighlight(self): + parent = self.parent + self.builtinTheme = StringVar(parent) + self.customTheme = StringVar(parent) + self.fgHilite = BooleanVar(parent) + self.colour = StringVar(parent) + self.fontName = StringVar(parent) + self.themeIsBuiltin = BooleanVar(parent) + self.highlightTarget = StringVar(parent) + + ##widget creation + #body frame + frame = self.tabPages.pages['Highlighting'].frame + #body section frames + frameCustom = LabelFrame(frame, borderwidth=2, relief=GROOVE, + text=' Custom Highlighting ') + frameTheme = LabelFrame(frame, borderwidth=2, relief=GROOVE, + text=' Highlighting Theme ') + #frameCustom + self.textHighlightSample=Text( + frameCustom, relief=SOLID, borderwidth=1, + font=('courier', 12, ''), cursor='hand2', width=21, height=11, + takefocus=FALSE, highlightthickness=0, wrap=NONE) + text=self.textHighlightSample + text.bind('', lambda e: 'break') + text.bind('', lambda e: 'break') + textAndTags=( + ('#you can click here', 'comment'), ('\n', 'normal'), + ('#to choose items', 'comment'), ('\n', 'normal'), + ('def', 'keyword'), (' ', 'normal'), + ('func', 'definition'), ('(param):\n ', 'normal'), + ('"""string"""', 'string'), ('\n var0 = ', 'normal'), + ("'string'", 'string'), ('\n var1 = ', 'normal'), + ("'selected'", 'hilite'), ('\n var2 = ', 'normal'), + ("'found'", 'hit'), ('\n var3 = ', 'normal'), + ('list', 'builtin'), ('(', 'normal'), + ('None', 'keyword'), (')\n\n', 'normal'), + (' error ', 'error'), (' ', 'normal'), + ('cursor |', 'cursor'), ('\n ', 'normal'), + ('shell', 'console'), (' ', 'normal'), + ('stdout', 'stdout'), (' ', 'normal'), + ('stderr', 'stderr'), ('\n', 'normal')) + for txTa in textAndTags: + text.insert(END, txTa[0], txTa[1]) + for element in self.themeElements: + def tem(event, elem=element): + event.widget.winfo_toplevel().highlightTarget.set(elem) + text.tag_bind( + 
self.themeElements[element][0], '', tem) + text.config(state=DISABLED) + self.frameColourSet = Frame(frameCustom, relief=SOLID, borderwidth=1) + frameFgBg = Frame(frameCustom) + buttonSetColour = Button( + self.frameColourSet, text='Choose Colour for :', + command=self.GetColour, highlightthickness=0) + self.optMenuHighlightTarget = DynOptionMenu( + self.frameColourSet, self.highlightTarget, None, + highlightthickness=0) #, command=self.SetHighlightTargetBinding + self.radioFg = Radiobutton( + frameFgBg, variable=self.fgHilite, value=1, + text='Foreground', command=self.SetColourSampleBinding) + self.radioBg=Radiobutton( + frameFgBg, variable=self.fgHilite, value=0, + text='Background', command=self.SetColourSampleBinding) + self.fgHilite.set(1) + buttonSaveCustomTheme = Button( + frameCustom, text='Save as New Custom Theme', + command=self.SaveAsNewTheme) + #frameTheme + labelTypeTitle = Label(frameTheme, text='Select : ') + self.radioThemeBuiltin = Radiobutton( + frameTheme, variable=self.themeIsBuiltin, value=1, + command=self.SetThemeType, text='a Built-in Theme') + self.radioThemeCustom = Radiobutton( + frameTheme, variable=self.themeIsBuiltin, value=0, + command=self.SetThemeType, text='a Custom Theme') + self.optMenuThemeBuiltin = DynOptionMenu( + frameTheme, self.builtinTheme, None, command=None) + self.optMenuThemeCustom=DynOptionMenu( + frameTheme, self.customTheme, None, command=None) + self.buttonDeleteCustomTheme=Button( + frameTheme, text='Delete Custom Theme', + command=self.DeleteCustomTheme) + + ##widget packing + #body + frameCustom.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH) + frameTheme.pack(side=LEFT, padx=5, pady=5, fill=Y) + #frameCustom + self.frameColourSet.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=X) + frameFgBg.pack(side=TOP, padx=5, pady=0) + self.textHighlightSample.pack( + side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH) + buttonSetColour.pack(side=TOP, expand=TRUE, fill=X, padx=8, pady=4) + self.optMenuHighlightTarget.pack( + side=TOP, expand=TRUE, fill=X, padx=8, pady=3) + self.radioFg.pack(side=LEFT, anchor=E) + self.radioBg.pack(side=RIGHT, anchor=W) + buttonSaveCustomTheme.pack(side=BOTTOM, fill=X, padx=5, pady=5) + #frameTheme + labelTypeTitle.pack(side=TOP, anchor=W, padx=5, pady=5) + self.radioThemeBuiltin.pack(side=TOP, anchor=W, padx=5) + self.radioThemeCustom.pack(side=TOP, anchor=W, padx=5, pady=2) + self.optMenuThemeBuiltin.pack(side=TOP, fill=X, padx=5, pady=5) + self.optMenuThemeCustom.pack(side=TOP, fill=X, anchor=W, padx=5, pady=5) + self.buttonDeleteCustomTheme.pack(side=TOP, fill=X, padx=5, pady=5) + return frame + + def CreatePageKeys(self): + parent = self.parent + self.bindingTarget = StringVar(parent) + self.builtinKeys = StringVar(parent) + self.customKeys = StringVar(parent) + self.keysAreBuiltin = BooleanVar(parent) + self.keyBinding = StringVar(parent) + + ##widget creation + #body frame + frame = self.tabPages.pages['Keys'].frame + #body section frames + frameCustom = LabelFrame( + frame, borderwidth=2, relief=GROOVE, + text=' Custom Key Bindings ') + frameKeySets = LabelFrame( + frame, borderwidth=2, relief=GROOVE, text=' Key Set ') + #frameCustom + frameTarget = Frame(frameCustom) + labelTargetTitle = Label(frameTarget, text='Action - Key(s)') + scrollTargetY = Scrollbar(frameTarget) + scrollTargetX = Scrollbar(frameTarget, orient=HORIZONTAL) + self.listBindings = Listbox( + frameTarget, takefocus=FALSE, exportselection=FALSE) + self.listBindings.bind('', self.KeyBindingSelected) + 
scrollTargetY.config(command=self.listBindings.yview) + scrollTargetX.config(command=self.listBindings.xview) + self.listBindings.config(yscrollcommand=scrollTargetY.set) + self.listBindings.config(xscrollcommand=scrollTargetX.set) + self.buttonNewKeys = Button( + frameCustom, text='Get New Keys for Selection', + command=self.GetNewKeys, state=DISABLED) + #frameKeySets + frames = [Frame(frameKeySets, padx=2, pady=2, borderwidth=0) + for i in range(2)] + self.radioKeysBuiltin = Radiobutton( + frames[0], variable=self.keysAreBuiltin, value=1, + command=self.SetKeysType, text='Use a Built-in Key Set') + self.radioKeysCustom = Radiobutton( + frames[0], variable=self.keysAreBuiltin, value=0, + command=self.SetKeysType, text='Use a Custom Key Set') + self.optMenuKeysBuiltin = DynOptionMenu( + frames[0], self.builtinKeys, None, command=None) + self.optMenuKeysCustom = DynOptionMenu( + frames[0], self.customKeys, None, command=None) + self.buttonDeleteCustomKeys = Button( + frames[1], text='Delete Custom Key Set', + command=self.DeleteCustomKeys) + buttonSaveCustomKeys = Button( + frames[1], text='Save as New Custom Key Set', + command=self.SaveAsNewKeySet) + + ##widget packing + #body + frameCustom.pack(side=BOTTOM, padx=5, pady=5, expand=TRUE, fill=BOTH) + frameKeySets.pack(side=BOTTOM, padx=5, pady=5, fill=BOTH) + #frameCustom + self.buttonNewKeys.pack(side=BOTTOM, fill=X, padx=5, pady=5) + frameTarget.pack(side=LEFT, padx=5, pady=5, expand=TRUE, fill=BOTH) + #frame target + frameTarget.columnconfigure(0, weight=1) + frameTarget.rowconfigure(1, weight=1) + labelTargetTitle.grid(row=0, column=0, columnspan=2, sticky=W) + self.listBindings.grid(row=1, column=0, sticky=NSEW) + scrollTargetY.grid(row=1, column=1, sticky=NS) + scrollTargetX.grid(row=2, column=0, sticky=EW) + #frameKeySets + self.radioKeysBuiltin.grid(row=0, column=0, sticky=W+NS) + self.radioKeysCustom.grid(row=1, column=0, sticky=W+NS) + self.optMenuKeysBuiltin.grid(row=0, column=1, sticky=NSEW) + self.optMenuKeysCustom.grid(row=1, column=1, sticky=NSEW) + self.buttonDeleteCustomKeys.pack(side=LEFT, fill=X, expand=True, padx=2) + buttonSaveCustomKeys.pack(side=LEFT, fill=X, expand=True, padx=2) + frames[0].pack(side=TOP, fill=BOTH, expand=True) + frames[1].pack(side=TOP, fill=X, expand=True, pady=2) + return frame + + def CreatePageGeneral(self): + parent = self.parent + self.winWidth = StringVar(parent) + self.winHeight = StringVar(parent) + self.startupEdit = IntVar(parent) + self.autoSave = IntVar(parent) + self.encoding = StringVar(parent) + self.userHelpBrowser = BooleanVar(parent) + self.helpBrowser = StringVar(parent) + + #widget creation + #body + frame = self.tabPages.pages['General'].frame + #body section frames + frameRun = LabelFrame(frame, borderwidth=2, relief=GROOVE, + text=' Startup Preferences ') + frameSave = LabelFrame(frame, borderwidth=2, relief=GROOVE, + text=' Autosave Preferences ') + frameWinSize = Frame(frame, borderwidth=2, relief=GROOVE) + frameHelp = LabelFrame(frame, borderwidth=2, relief=GROOVE, + text=' Additional Help Sources ') + #frameRun + labelRunChoiceTitle = Label(frameRun, text='At Startup') + radioStartupEdit = Radiobutton( + frameRun, variable=self.startupEdit, value=1, + command=self.SetKeysType, text="Open Edit Window") + radioStartupShell = Radiobutton( + frameRun, variable=self.startupEdit, value=0, + command=self.SetKeysType, text='Open Shell Window') + #frameSave + labelRunSaveTitle = Label(frameSave, text='At Start of Run (F5) ') + radioSaveAsk = Radiobutton( + frameSave, 
variable=self.autoSave, value=0, + command=self.SetKeysType, text="Prompt to Save") + radioSaveAuto = Radiobutton( + frameSave, variable=self.autoSave, value=1, + command=self.SetKeysType, text='No Prompt') + #frameWinSize + labelWinSizeTitle = Label( + frameWinSize, text='Initial Window Size (in characters)') + labelWinWidthTitle = Label(frameWinSize, text='Width') + entryWinWidth = Entry( + frameWinSize, textvariable=self.winWidth, width=3) + labelWinHeightTitle = Label(frameWinSize, text='Height') + entryWinHeight = Entry( + frameWinSize, textvariable=self.winHeight, width=3) + #frameHelp + frameHelpList = Frame(frameHelp) + frameHelpListButtons = Frame(frameHelpList) + scrollHelpList = Scrollbar(frameHelpList) + self.listHelp = Listbox( + frameHelpList, height=5, takefocus=FALSE, + exportselection=FALSE) + scrollHelpList.config(command=self.listHelp.yview) + self.listHelp.config(yscrollcommand=scrollHelpList.set) + self.listHelp.bind('', self.HelpSourceSelected) + self.buttonHelpListEdit = Button( + frameHelpListButtons, text='Edit', state=DISABLED, + width=8, command=self.HelpListItemEdit) + self.buttonHelpListAdd = Button( + frameHelpListButtons, text='Add', + width=8, command=self.HelpListItemAdd) + self.buttonHelpListRemove = Button( + frameHelpListButtons, text='Remove', state=DISABLED, + width=8, command=self.HelpListItemRemove) + + #widget packing + #body + frameRun.pack(side=TOP, padx=5, pady=5, fill=X) + frameSave.pack(side=TOP, padx=5, pady=5, fill=X) + frameWinSize.pack(side=TOP, padx=5, pady=5, fill=X) + frameHelp.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH) + #frameRun + labelRunChoiceTitle.pack(side=LEFT, anchor=W, padx=5, pady=5) + radioStartupShell.pack(side=RIGHT, anchor=W, padx=5, pady=5) + radioStartupEdit.pack(side=RIGHT, anchor=W, padx=5, pady=5) + #frameSave + labelRunSaveTitle.pack(side=LEFT, anchor=W, padx=5, pady=5) + radioSaveAuto.pack(side=RIGHT, anchor=W, padx=5, pady=5) + radioSaveAsk.pack(side=RIGHT, anchor=W, padx=5, pady=5) + #frameWinSize + labelWinSizeTitle.pack(side=LEFT, anchor=W, padx=5, pady=5) + entryWinHeight.pack(side=RIGHT, anchor=E, padx=10, pady=5) + labelWinHeightTitle.pack(side=RIGHT, anchor=E, pady=5) + entryWinWidth.pack(side=RIGHT, anchor=E, padx=10, pady=5) + labelWinWidthTitle.pack(side=RIGHT, anchor=E, pady=5) + #frameHelp + frameHelpListButtons.pack(side=RIGHT, padx=5, pady=5, fill=Y) + frameHelpList.pack(side=TOP, padx=5, pady=5, expand=TRUE, fill=BOTH) + scrollHelpList.pack(side=RIGHT, anchor=W, fill=Y) + self.listHelp.pack(side=LEFT, anchor=E, expand=TRUE, fill=BOTH) + self.buttonHelpListEdit.pack(side=TOP, anchor=W, pady=5) + self.buttonHelpListAdd.pack(side=TOP, anchor=W) + self.buttonHelpListRemove.pack(side=TOP, anchor=W, pady=5) + return frame + + def AttachVarCallbacks(self): + self.fontSize.trace_variable('w', self.VarChanged_font) + self.fontName.trace_variable('w', self.VarChanged_font) + self.fontBold.trace_variable('w', self.VarChanged_font) + self.spaceNum.trace_variable('w', self.VarChanged_spaceNum) + self.colour.trace_variable('w', self.VarChanged_colour) + self.builtinTheme.trace_variable('w', self.VarChanged_builtinTheme) + self.customTheme.trace_variable('w', self.VarChanged_customTheme) + self.themeIsBuiltin.trace_variable('w', self.VarChanged_themeIsBuiltin) + self.highlightTarget.trace_variable('w', self.VarChanged_highlightTarget) + self.keyBinding.trace_variable('w', self.VarChanged_keyBinding) + self.builtinKeys.trace_variable('w', self.VarChanged_builtinKeys) + self.customKeys.trace_variable('w', 
self.VarChanged_customKeys) + self.keysAreBuiltin.trace_variable('w', self.VarChanged_keysAreBuiltin) + self.winWidth.trace_variable('w', self.VarChanged_winWidth) + self.winHeight.trace_variable('w', self.VarChanged_winHeight) + self.startupEdit.trace_variable('w', self.VarChanged_startupEdit) + self.autoSave.trace_variable('w', self.VarChanged_autoSave) + self.encoding.trace_variable('w', self.VarChanged_encoding) + + def VarChanged_font(self, *params): + '''When one font attribute changes, save them all, as they are + not independent from each other. In particular, when we are + overriding the default font, we need to write out everything. + ''' + value = self.fontName.get() + self.AddChangedItem('main', 'EditorWindow', 'font', value) + value = self.fontSize.get() + self.AddChangedItem('main', 'EditorWindow', 'font-size', value) + value = self.fontBold.get() + self.AddChangedItem('main', 'EditorWindow', 'font-bold', value) + + def VarChanged_spaceNum(self, *params): + value = self.spaceNum.get() + self.AddChangedItem('main', 'Indent', 'num-spaces', value) + + def VarChanged_colour(self, *params): + self.OnNewColourSet() + + def VarChanged_builtinTheme(self, *params): + value = self.builtinTheme.get() + self.AddChangedItem('main', 'Theme', 'name', value) + self.PaintThemeSample() + + def VarChanged_customTheme(self, *params): + value = self.customTheme.get() + if value != '- no custom themes -': + self.AddChangedItem('main', 'Theme', 'name', value) + self.PaintThemeSample() + + def VarChanged_themeIsBuiltin(self, *params): + value = self.themeIsBuiltin.get() + self.AddChangedItem('main', 'Theme', 'default', value) + if value: + self.VarChanged_builtinTheme() + else: + self.VarChanged_customTheme() + + def VarChanged_highlightTarget(self, *params): + self.SetHighlightTarget() + + def VarChanged_keyBinding(self, *params): + value = self.keyBinding.get() + keySet = self.customKeys.get() + event = self.listBindings.get(ANCHOR).split()[0] + if idleConf.IsCoreBinding(event): + #this is a core keybinding + self.AddChangedItem('keys', keySet, event, value) + else: #this is an extension key binding + extName = idleConf.GetExtnNameForEvent(event) + extKeybindSection = extName + '_cfgBindings' + self.AddChangedItem('extensions', extKeybindSection, event, value) + + def VarChanged_builtinKeys(self, *params): + value = self.builtinKeys.get() + self.AddChangedItem('main', 'Keys', 'name', value) + self.LoadKeysList(value) + + def VarChanged_customKeys(self, *params): + value = self.customKeys.get() + if value != '- no custom keys -': + self.AddChangedItem('main', 'Keys', 'name', value) + self.LoadKeysList(value) + + def VarChanged_keysAreBuiltin(self, *params): + value = self.keysAreBuiltin.get() + self.AddChangedItem('main', 'Keys', 'default', value) + if value: + self.VarChanged_builtinKeys() + else: + self.VarChanged_customKeys() + + def VarChanged_winWidth(self, *params): + value = self.winWidth.get() + self.AddChangedItem('main', 'EditorWindow', 'width', value) + + def VarChanged_winHeight(self, *params): + value = self.winHeight.get() + self.AddChangedItem('main', 'EditorWindow', 'height', value) + + def VarChanged_startupEdit(self, *params): + value = self.startupEdit.get() + self.AddChangedItem('main', 'General', 'editor-on-startup', value) + + def VarChanged_autoSave(self, *params): + value = self.autoSave.get() + self.AddChangedItem('main', 'General', 'autosave', value) + + def VarChanged_encoding(self, *params): + value = self.encoding.get() + self.AddChangedItem('main', 'EditorWindow', 
'encoding', value) + + def ResetChangedItems(self): + #When any config item is changed in this dialog, an entry + #should be made in the relevant section (config type) of this + #dictionary. The key should be the config file section name and the + #value a dictionary, whose key:value pairs are item=value pairs for + #that config file section. + self.changedItems = {'main':{}, 'highlight':{}, 'keys':{}, + 'extensions':{}} + + def AddChangedItem(self, typ, section, item, value): + value = str(value) #make sure we use a string + if section not in self.changedItems[typ]: + self.changedItems[typ][section] = {} + self.changedItems[typ][section][item] = value + + def GetDefaultItems(self): + dItems={'main':{}, 'highlight':{}, 'keys':{}, 'extensions':{}} + for configType in dItems: + sections = idleConf.GetSectionList('default', configType) + for section in sections: + dItems[configType][section] = {} + options = idleConf.defaultCfg[configType].GetOptionList(section) + for option in options: + dItems[configType][section][option] = ( + idleConf.defaultCfg[configType].Get(section, option)) + return dItems + + def SetThemeType(self): + if self.themeIsBuiltin.get(): + self.optMenuThemeBuiltin.config(state=NORMAL) + self.optMenuThemeCustom.config(state=DISABLED) + self.buttonDeleteCustomTheme.config(state=DISABLED) + else: + self.optMenuThemeBuiltin.config(state=DISABLED) + self.radioThemeCustom.config(state=NORMAL) + self.optMenuThemeCustom.config(state=NORMAL) + self.buttonDeleteCustomTheme.config(state=NORMAL) + + def SetKeysType(self): + if self.keysAreBuiltin.get(): + self.optMenuKeysBuiltin.config(state=NORMAL) + self.optMenuKeysCustom.config(state=DISABLED) + self.buttonDeleteCustomKeys.config(state=DISABLED) + else: + self.optMenuKeysBuiltin.config(state=DISABLED) + self.radioKeysCustom.config(state=NORMAL) + self.optMenuKeysCustom.config(state=NORMAL) + self.buttonDeleteCustomKeys.config(state=NORMAL) + + def GetNewKeys(self): + listIndex = self.listBindings.index(ANCHOR) + binding = self.listBindings.get(listIndex) + bindName = binding.split()[0] #first part, up to first space + if self.keysAreBuiltin.get(): + currentKeySetName = self.builtinKeys.get() + else: + currentKeySetName = self.customKeys.get() + currentBindings = idleConf.GetCurrentKeySet() + if currentKeySetName in self.changedItems['keys']: #unsaved changes + keySetChanges = self.changedItems['keys'][currentKeySetName] + for event in keySetChanges: + currentBindings[event] = keySetChanges[event].split() + currentKeySequences = list(currentBindings.values()) + newKeys = GetKeysDialog(self, 'Get New Keys', bindName, + currentKeySequences).result + if newKeys: #new keys were specified + if self.keysAreBuiltin.get(): #current key set is a built-in + message = ('Your changes will be saved as a new Custom Key Set.' 
+ ' Enter a name for your new Custom Key Set below.') + newKeySet = self.GetNewKeysName(message) + if not newKeySet: #user cancelled custom key set creation + self.listBindings.select_set(listIndex) + self.listBindings.select_anchor(listIndex) + return + else: #create new custom key set based on previously active key set + self.CreateNewKeySet(newKeySet) + self.listBindings.delete(listIndex) + self.listBindings.insert(listIndex, bindName+' - '+newKeys) + self.listBindings.select_set(listIndex) + self.listBindings.select_anchor(listIndex) + self.keyBinding.set(newKeys) + else: + self.listBindings.select_set(listIndex) + self.listBindings.select_anchor(listIndex) + + def GetNewKeysName(self, message): + usedNames = (idleConf.GetSectionList('user', 'keys') + + idleConf.GetSectionList('default', 'keys')) + newKeySet = GetCfgSectionNameDialog( + self, 'New Custom Key Set', message, usedNames).result + return newKeySet + + def SaveAsNewKeySet(self): + newKeysName = self.GetNewKeysName('New Key Set Name:') + if newKeysName: + self.CreateNewKeySet(newKeysName) + + def KeyBindingSelected(self, event): + self.buttonNewKeys.config(state=NORMAL) + + def CreateNewKeySet(self, newKeySetName): + #creates new custom key set based on the previously active key set, + #and makes the new key set active + if self.keysAreBuiltin.get(): + prevKeySetName = self.builtinKeys.get() + else: + prevKeySetName = self.customKeys.get() + prevKeys = idleConf.GetCoreKeys(prevKeySetName) + newKeys = {} + for event in prevKeys: #add key set to changed items + eventName = event[2:-2] #trim off the angle brackets + binding = ' '.join(prevKeys[event]) + newKeys[eventName] = binding + #handle any unsaved changes to prev key set + if prevKeySetName in self.changedItems['keys']: + keySetChanges = self.changedItems['keys'][prevKeySetName] + for event in keySetChanges: + newKeys[event] = keySetChanges[event] + #save the new theme + self.SaveNewKeySet(newKeySetName, newKeys) + #change gui over to the new key set + customKeyList = idleConf.GetSectionList('user', 'keys') + customKeyList.sort() + self.optMenuKeysCustom.SetMenu(customKeyList, newKeySetName) + self.keysAreBuiltin.set(0) + self.SetKeysType() + + def LoadKeysList(self, keySetName): + reselect = 0 + newKeySet = 0 + if self.listBindings.curselection(): + reselect = 1 + listIndex = self.listBindings.index(ANCHOR) + keySet = idleConf.GetKeySet(keySetName) + bindNames = list(keySet.keys()) + bindNames.sort() + self.listBindings.delete(0, END) + for bindName in bindNames: + key = ' '.join(keySet[bindName]) #make key(s) into a string + bindName = bindName[2:-2] #trim off the angle brackets + if keySetName in self.changedItems['keys']: + #handle any unsaved changes to this key set + if bindName in self.changedItems['keys'][keySetName]: + key = self.changedItems['keys'][keySetName][bindName] + self.listBindings.insert(END, bindName+' - '+key) + if reselect: + self.listBindings.see(listIndex) + self.listBindings.select_set(listIndex) + self.listBindings.select_anchor(listIndex) + + def DeleteCustomKeys(self): + keySetName=self.customKeys.get() + delmsg = 'Are you sure you wish to delete the key set %r ?' 
+ if not tkMessageBox.askyesno( + 'Delete Key Set', delmsg % keySetName, parent=self): + return + #remove key set from config + idleConf.userCfg['keys'].remove_section(keySetName) + if keySetName in self.changedItems['keys']: + del(self.changedItems['keys'][keySetName]) + #write changes + idleConf.userCfg['keys'].Save() + #reload user key set list + itemList = idleConf.GetSectionList('user', 'keys') + itemList.sort() + if not itemList: + self.radioKeysCustom.config(state=DISABLED) + self.optMenuKeysCustom.SetMenu(itemList, '- no custom keys -') + else: + self.optMenuKeysCustom.SetMenu(itemList, itemList[0]) + #revert to default key set + self.keysAreBuiltin.set(idleConf.defaultCfg['main'].Get('Keys', 'default')) + self.builtinKeys.set(idleConf.defaultCfg['main'].Get('Keys', 'name')) + #user can't back out of these changes, they must be applied now + self.Apply() + self.SetKeysType() + + def DeleteCustomTheme(self): + themeName = self.customTheme.get() + delmsg = 'Are you sure you wish to delete the theme %r ?' + if not tkMessageBox.askyesno( + 'Delete Theme', delmsg % themeName, parent=self): + return + #remove theme from config + idleConf.userCfg['highlight'].remove_section(themeName) + if themeName in self.changedItems['highlight']: + del(self.changedItems['highlight'][themeName]) + #write changes + idleConf.userCfg['highlight'].Save() + #reload user theme list + itemList = idleConf.GetSectionList('user', 'highlight') + itemList.sort() + if not itemList: + self.radioThemeCustom.config(state=DISABLED) + self.optMenuThemeCustom.SetMenu(itemList, '- no custom themes -') + else: + self.optMenuThemeCustom.SetMenu(itemList, itemList[0]) + #revert to default theme + self.themeIsBuiltin.set(idleConf.defaultCfg['main'].Get('Theme', 'default')) + self.builtinTheme.set(idleConf.defaultCfg['main'].Get('Theme', 'name')) + #user can't back out of these changes, they must be applied now + self.Apply() + self.SetThemeType() + + def GetColour(self): + target = self.highlightTarget.get() + prevColour = self.frameColourSet.cget('bg') + rgbTuplet, colourString = tkColorChooser.askcolor( + parent=self, title='Pick new colour for : '+target, + initialcolor=prevColour) + if colourString and (colourString != prevColour): + #user didn't cancel, and they chose a new colour + if self.themeIsBuiltin.get(): #current theme is a built-in + message = ('Your changes will be saved as a new Custom Theme. 
' + 'Enter a name for your new Custom Theme below.') + newTheme = self.GetNewThemeName(message) + if not newTheme: #user cancelled custom theme creation + return + else: #create new custom theme based on previously active theme + self.CreateNewTheme(newTheme) + self.colour.set(colourString) + else: #current theme is user defined + self.colour.set(colourString) + + def OnNewColourSet(self): + newColour=self.colour.get() + self.frameColourSet.config(bg=newColour) #set sample + plane ='foreground' if self.fgHilite.get() else 'background' + sampleElement = self.themeElements[self.highlightTarget.get()][0] + self.textHighlightSample.tag_config(sampleElement, **{plane:newColour}) + theme = self.customTheme.get() + themeElement = sampleElement + '-' + plane + self.AddChangedItem('highlight', theme, themeElement, newColour) + + def GetNewThemeName(self, message): + usedNames = (idleConf.GetSectionList('user', 'highlight') + + idleConf.GetSectionList('default', 'highlight')) + newTheme = GetCfgSectionNameDialog( + self, 'New Custom Theme', message, usedNames).result + return newTheme + + def SaveAsNewTheme(self): + newThemeName = self.GetNewThemeName('New Theme Name:') + if newThemeName: + self.CreateNewTheme(newThemeName) + + def CreateNewTheme(self, newThemeName): + #creates new custom theme based on the previously active theme, + #and makes the new theme active + if self.themeIsBuiltin.get(): + themeType = 'default' + themeName = self.builtinTheme.get() + else: + themeType = 'user' + themeName = self.customTheme.get() + newTheme = idleConf.GetThemeDict(themeType, themeName) + #apply any of the old theme's unsaved changes to the new theme + if themeName in self.changedItems['highlight']: + themeChanges = self.changedItems['highlight'][themeName] + for element in themeChanges: + newTheme[element] = themeChanges[element] + #save the new theme + self.SaveNewTheme(newThemeName, newTheme) + #change gui over to the new theme + customThemeList = idleConf.GetSectionList('user', 'highlight') + customThemeList.sort() + self.optMenuThemeCustom.SetMenu(customThemeList, newThemeName) + self.themeIsBuiltin.set(0) + self.SetThemeType() + + def OnListFontButtonRelease(self, event): + font = self.listFontName.get(ANCHOR) + self.fontName.set(font.lower()) + self.SetFontSample() + + def SetFontSample(self, event=None): + fontName = self.fontName.get() + fontWeight = tkFont.BOLD if self.fontBold.get() else tkFont.NORMAL + newFont = (fontName, self.fontSize.get(), fontWeight) + self.labelFontSample.config(font=newFont) + self.textHighlightSample.configure(font=newFont) + + def SetHighlightTarget(self): + if self.highlightTarget.get() == 'Cursor': #bg not possible + self.radioFg.config(state=DISABLED) + self.radioBg.config(state=DISABLED) + self.fgHilite.set(1) + else: #both fg and bg can be set + self.radioFg.config(state=NORMAL) + self.radioBg.config(state=NORMAL) + self.fgHilite.set(1) + self.SetColourSample() + + def SetColourSampleBinding(self, *args): + self.SetColourSample() + + def SetColourSample(self): + #set the colour smaple area + tag = self.themeElements[self.highlightTarget.get()][0] + plane = 'foreground' if self.fgHilite.get() else 'background' + colour = self.textHighlightSample.tag_cget(tag, plane) + self.frameColourSet.config(bg=colour) + + def PaintThemeSample(self): + if self.themeIsBuiltin.get(): #a default theme + theme = self.builtinTheme.get() + else: #a user theme + theme = self.customTheme.get() + for elementTitle in self.themeElements: + element = self.themeElements[elementTitle][0] + 
colours = idleConf.GetHighlight(theme, element) + if element == 'cursor': #cursor sample needs special painting + colours['background'] = idleConf.GetHighlight( + theme, 'normal', fgBg='bg') + #handle any unsaved changes to this theme + if theme in self.changedItems['highlight']: + themeDict = self.changedItems['highlight'][theme] + if element + '-foreground' in themeDict: + colours['foreground'] = themeDict[element + '-foreground'] + if element + '-background' in themeDict: + colours['background'] = themeDict[element + '-background'] + self.textHighlightSample.tag_config(element, **colours) + self.SetColourSample() + + def HelpSourceSelected(self, event): + self.SetHelpListButtonStates() + + def SetHelpListButtonStates(self): + if self.listHelp.size() < 1: #no entries in list + self.buttonHelpListEdit.config(state=DISABLED) + self.buttonHelpListRemove.config(state=DISABLED) + else: #there are some entries + if self.listHelp.curselection(): #there currently is a selection + self.buttonHelpListEdit.config(state=NORMAL) + self.buttonHelpListRemove.config(state=NORMAL) + else: #there currently is not a selection + self.buttonHelpListEdit.config(state=DISABLED) + self.buttonHelpListRemove.config(state=DISABLED) + + def HelpListItemAdd(self): + helpSource = GetHelpSourceDialog(self, 'New Help Source').result + if helpSource: + self.userHelpList.append((helpSource[0], helpSource[1])) + self.listHelp.insert(END, helpSource[0]) + self.UpdateUserHelpChangedItems() + self.SetHelpListButtonStates() + + def HelpListItemEdit(self): + itemIndex = self.listHelp.index(ANCHOR) + helpSource = self.userHelpList[itemIndex] + newHelpSource = GetHelpSourceDialog( + self, 'Edit Help Source', menuItem=helpSource[0], + filePath=helpSource[1]).result + if (not newHelpSource) or (newHelpSource == helpSource): + return #no changes + self.userHelpList[itemIndex] = newHelpSource + self.listHelp.delete(itemIndex) + self.listHelp.insert(itemIndex, newHelpSource[0]) + self.UpdateUserHelpChangedItems() + self.SetHelpListButtonStates() + + def HelpListItemRemove(self): + itemIndex = self.listHelp.index(ANCHOR) + del(self.userHelpList[itemIndex]) + self.listHelp.delete(itemIndex) + self.UpdateUserHelpChangedItems() + self.SetHelpListButtonStates() + + def UpdateUserHelpChangedItems(self): + "Clear and rebuild the HelpFiles section in self.changedItems" + self.changedItems['main']['HelpFiles'] = {} + for num in range(1, len(self.userHelpList) + 1): + self.AddChangedItem( + 'main', 'HelpFiles', str(num), + ';'.join(self.userHelpList[num-1][:2])) + + def LoadFontCfg(self): + ##base editor font selection list + fonts = list(tkFont.families(self)) + fonts.sort() + for font in fonts: + self.listFontName.insert(END, font) + configuredFont = idleConf.GetFont(self, 'main', 'EditorWindow') + fontName = configuredFont[0].lower() + fontSize = configuredFont[1] + fontBold = configuredFont[2]=='bold' + self.fontName.set(fontName) + lc_fonts = [s.lower() for s in fonts] + try: + currentFontIndex = lc_fonts.index(fontName) + self.listFontName.see(currentFontIndex) + self.listFontName.select_set(currentFontIndex) + self.listFontName.select_anchor(currentFontIndex) + except ValueError: + pass + ##font size dropdown + self.optMenuFontSize.SetMenu(('7', '8', '9', '10', '11', '12', '13', + '14', '16', '18', '20', '22'), fontSize ) + ##fontWeight + self.fontBold.set(fontBold) + ##font sample + self.SetFontSample() + + def LoadTabCfg(self): + ##indent sizes + spaceNum = idleConf.GetOption( + 'main', 'Indent', 'num-spaces', default=4, type='int') + 
self.spaceNum.set(spaceNum) + + def LoadThemeCfg(self): + ##current theme type radiobutton + self.themeIsBuiltin.set(idleConf.GetOption( + 'main', 'Theme', 'default', type='bool', default=1)) + ##currently set theme + currentOption = idleConf.CurrentTheme() + ##load available theme option menus + if self.themeIsBuiltin.get(): #default theme selected + itemList = idleConf.GetSectionList('default', 'highlight') + itemList.sort() + self.optMenuThemeBuiltin.SetMenu(itemList, currentOption) + itemList = idleConf.GetSectionList('user', 'highlight') + itemList.sort() + if not itemList: + self.radioThemeCustom.config(state=DISABLED) + self.customTheme.set('- no custom themes -') + else: + self.optMenuThemeCustom.SetMenu(itemList, itemList[0]) + else: #user theme selected + itemList = idleConf.GetSectionList('user', 'highlight') + itemList.sort() + self.optMenuThemeCustom.SetMenu(itemList, currentOption) + itemList = idleConf.GetSectionList('default', 'highlight') + itemList.sort() + self.optMenuThemeBuiltin.SetMenu(itemList, itemList[0]) + self.SetThemeType() + ##load theme element option menu + themeNames = list(self.themeElements.keys()) + themeNames.sort(key=lambda x: self.themeElements[x][1]) + self.optMenuHighlightTarget.SetMenu(themeNames, themeNames[0]) + self.PaintThemeSample() + self.SetHighlightTarget() + + def LoadKeyCfg(self): + ##current keys type radiobutton + self.keysAreBuiltin.set(idleConf.GetOption( + 'main', 'Keys', 'default', type='bool', default=1)) + ##currently set keys + currentOption = idleConf.CurrentKeys() + ##load available keyset option menus + if self.keysAreBuiltin.get(): #default theme selected + itemList = idleConf.GetSectionList('default', 'keys') + itemList.sort() + self.optMenuKeysBuiltin.SetMenu(itemList, currentOption) + itemList = idleConf.GetSectionList('user', 'keys') + itemList.sort() + if not itemList: + self.radioKeysCustom.config(state=DISABLED) + self.customKeys.set('- no custom keys -') + else: + self.optMenuKeysCustom.SetMenu(itemList, itemList[0]) + else: #user key set selected + itemList = idleConf.GetSectionList('user', 'keys') + itemList.sort() + self.optMenuKeysCustom.SetMenu(itemList, currentOption) + itemList = idleConf.GetSectionList('default', 'keys') + itemList.sort() + self.optMenuKeysBuiltin.SetMenu(itemList, itemList[0]) + self.SetKeysType() + ##load keyset element list + keySetName = idleConf.CurrentKeys() + self.LoadKeysList(keySetName) + + def LoadGeneralCfg(self): + #startup state + self.startupEdit.set(idleConf.GetOption( + 'main', 'General', 'editor-on-startup', default=1, type='bool')) + #autosave state + self.autoSave.set(idleConf.GetOption( + 'main', 'General', 'autosave', default=0, type='bool')) + #initial window size + self.winWidth.set(idleConf.GetOption( + 'main', 'EditorWindow', 'width', type='int')) + self.winHeight.set(idleConf.GetOption( + 'main', 'EditorWindow', 'height', type='int')) + # default source encoding + self.encoding.set(idleConf.GetOption( + 'main', 'EditorWindow', 'encoding', default='none')) + # additional help sources + self.userHelpList = idleConf.GetAllExtraHelpSourcesList() + for helpItem in self.userHelpList: + self.listHelp.insert(END, helpItem[0]) + self.SetHelpListButtonStates() + + def LoadConfigs(self): + """ + load configuration from default and user config files and populate + the widgets on the config dialog pages. 
+ """ + ### fonts / tabs page + self.LoadFontCfg() + self.LoadTabCfg() + ### highlighting page + self.LoadThemeCfg() + ### keys page + self.LoadKeyCfg() + ### general page + self.LoadGeneralCfg() + + def SaveNewKeySet(self, keySetName, keySet): + """ + save a newly created core key set. + keySetName - string, the name of the new key set + keySet - dictionary containing the new key set + """ + if not idleConf.userCfg['keys'].has_section(keySetName): + idleConf.userCfg['keys'].add_section(keySetName) + for event in keySet: + value = keySet[event] + idleConf.userCfg['keys'].SetOption(keySetName, event, value) + + def SaveNewTheme(self, themeName, theme): + """ + save a newly created theme. + themeName - string, the name of the new theme + theme - dictionary containing the new theme + """ + if not idleConf.userCfg['highlight'].has_section(themeName): + idleConf.userCfg['highlight'].add_section(themeName) + for element in theme: + value = theme[element] + idleConf.userCfg['highlight'].SetOption(themeName, element, value) + + def SetUserValue(self, configType, section, item, value): + if idleConf.defaultCfg[configType].has_option(section, item): + if idleConf.defaultCfg[configType].Get(section, item) == value: + #the setting equals a default setting, remove it from user cfg + return idleConf.userCfg[configType].RemoveOption(section, item) + #if we got here set the option + return idleConf.userCfg[configType].SetOption(section, item, value) + + def SaveAllChangedConfigs(self): + "Save configuration changes to the user config file." + idleConf.userCfg['main'].Save() + for configType in self.changedItems: + cfgTypeHasChanges = False + for section in self.changedItems[configType]: + if section == 'HelpFiles': + #this section gets completely replaced + idleConf.userCfg['main'].remove_section('HelpFiles') + cfgTypeHasChanges = True + for item in self.changedItems[configType][section]: + value = self.changedItems[configType][section][item] + if self.SetUserValue(configType, section, item, value): + cfgTypeHasChanges = True + if cfgTypeHasChanges: + idleConf.userCfg[configType].Save() + for configType in ['keys', 'highlight']: + # save these even if unchanged! + idleConf.userCfg[configType].Save() + self.ResetChangedItems() #clear the changed items dict + + def DeactivateCurrentConfig(self): + #Before a config is saved, some cleanup of current + #config must be done - remove the previous keybindings + winInstances = self.parent.instance_dict.keys() + for instance in winInstances: + instance.RemoveKeybindings() + + def ActivateConfigChanges(self): + "Dynamically apply configuration changes" + winInstances = self.parent.instance_dict.keys() + for instance in winInstances: + instance.ResetColorizer() + instance.ResetFont() + instance.set_notabs_indentwidth() + instance.ApplyKeybindings() + instance.reset_help_menu_entries() + + def Cancel(self): + self.destroy() + + def Ok(self): + self.Apply() + self.destroy() + + def Apply(self): + self.DeactivateCurrentConfig() + self.SaveAllChangedConfigs() + self.ActivateConfigChanges() + + def Help(self): + pass + +class VerticalScrolledFrame(Frame): + """A pure Tkinter vertically scrollable frame. 
+ + * Use the 'interior' attribute to place widgets inside the scrollable frame + * Construct and pack/place/grid normally + * This frame only allows vertical scrolling + """ + def __init__(self, parent, *args, **kw): + Frame.__init__(self, parent, *args, **kw) + + # create a canvas object and a vertical scrollbar for scrolling it + vscrollbar = Scrollbar(self, orient=VERTICAL) + vscrollbar.pack(fill=Y, side=RIGHT, expand=FALSE) + canvas = Canvas(self, bd=0, highlightthickness=0, + yscrollcommand=vscrollbar.set) + canvas.pack(side=LEFT, fill=BOTH, expand=TRUE) + vscrollbar.config(command=canvas.yview) + + # reset the view + canvas.xview_moveto(0) + canvas.yview_moveto(0) + + # create a frame inside the canvas which will be scrolled with it + self.interior = interior = Frame(canvas) + interior_id = canvas.create_window(0, 0, window=interior, anchor=NW) + + # track changes to the canvas and frame width and sync them, + # also updating the scrollbar + def _configure_interior(event): + # update the scrollbars to match the size of the inner frame + size = (interior.winfo_reqwidth(), interior.winfo_reqheight()) + canvas.config(scrollregion="0 0 %s %s" % size) + if interior.winfo_reqwidth() != canvas.winfo_width(): + # update the canvas's width to fit the inner frame + canvas.config(width=interior.winfo_reqwidth()) + interior.bind('', _configure_interior) + + def _configure_canvas(event): + if interior.winfo_reqwidth() != canvas.winfo_width(): + # update the inner frame's width to fill the canvas + canvas.itemconfigure(interior_id, width=canvas.winfo_width()) + canvas.bind('', _configure_canvas) + + return + +def is_int(s): + "Return 's is blank or represents an int'" + if not s: + return True + try: + int(s) + return True + except ValueError: + return False + +# TODO: +# * Revert to default(s)? Per option or per extension? +# * List options in their original order (possible??) +class ConfigExtensionsDialog(Toplevel): + """A dialog for configuring IDLE extensions. + + This dialog is generic - it works for any and all IDLE extensions. + + IDLE extensions save their configuration options using idleConf. + ConfigExtensionsDialog reads the current configuration using idleConf, + supplies a GUI interface to change the configuration values, and saves the + changes using idleConf. + + Not all changes take effect immediately - some may require restarting IDLE. + This depends on each extension's implementation. + + All values are treated as text, and it is up to the user to supply + reasonable values. The only exception to this are the 'enable*' options, + which are boolean, and can be toggled with an True/False button. 
+ """ + def __init__(self, parent, title=None, _htest=False): + Toplevel.__init__(self, parent) + self.wm_withdraw() + + self.configure(borderwidth=5) + self.geometry( + "+%d+%d" % (parent.winfo_rootx() + 20, + parent.winfo_rooty() + (30 if not _htest else 150))) + self.wm_title(title or 'IDLE Extensions Configuration') + + self.defaultCfg = idleConf.defaultCfg['extensions'] + self.userCfg = idleConf.userCfg['extensions'] + self.is_int = self.register(is_int) + self.load_extensions() + self.create_widgets() + + self.resizable(height=FALSE, width=FALSE) # don't allow resizing yet + self.transient(parent) + self.protocol("WM_DELETE_WINDOW", self.Cancel) + self.tabbed_page_set.focus_set() + # wait for window to be generated + self.update() + # set current width as the minimum width + self.wm_minsize(self.winfo_width(), 1) + # now allow resizing + self.resizable(height=TRUE, width=TRUE) + + self.wm_deiconify() + if not _htest: + self.grab_set() + self.wait_window() + + def load_extensions(self): + "Fill self.extensions with data from the default and user configs." + self.extensions = {} + for ext_name in idleConf.GetExtensions(active_only=False): + self.extensions[ext_name] = [] + + for ext_name in self.extensions: + opt_list = sorted(self.defaultCfg.GetOptionList(ext_name)) + + # bring 'enable' options to the beginning of the list + enables = [opt_name for opt_name in opt_list + if opt_name.startswith('enable')] + for opt_name in enables: + opt_list.remove(opt_name) + opt_list = enables + opt_list + + for opt_name in opt_list: + def_str = self.defaultCfg.Get( + ext_name, opt_name, raw=True) + try: + def_obj = {'True':True, 'False':False}[def_str] + opt_type = 'bool' + except KeyError: + try: + def_obj = int(def_str) + opt_type = 'int' + except ValueError: + def_obj = def_str + opt_type = None + try: + value = self.userCfg.Get( + ext_name, opt_name, type=opt_type, raw=True, + default=def_obj) + except ValueError: # Need this until .Get fixed + value = def_obj # bad values overwritten by entry + var = StringVar(self) + var.set(str(value)) + + self.extensions[ext_name].append({'name': opt_name, + 'type': opt_type, + 'default': def_str, + 'value': value, + 'var': var, + }) + + def create_widgets(self): + """Create the dialog's widgets.""" + self.rowconfigure(0, weight=1) + self.rowconfigure(1, weight=0) + self.columnconfigure(0, weight=1) + + # create the tabbed pages + self.tabbed_page_set = TabbedPageSet( + self, page_names=self.extensions.keys(), + n_rows=None, max_tabs_per_row=5, + page_class=TabbedPageSet.PageRemove) + self.tabbed_page_set.grid(row=0, column=0, sticky=NSEW) + for ext_name in self.extensions: + self.create_tab_page(ext_name) + + self.create_action_buttons().grid(row=1) + + create_action_buttons = ConfigDialog.create_action_buttons + + def create_tab_page(self, ext_name): + """Create the page for an extension.""" + + page = LabelFrame(self.tabbed_page_set.pages[ext_name].frame, + border=2, padx=2, relief=GROOVE, + text=' %s ' % ext_name) + page.pack(fill=BOTH, expand=True, padx=12, pady=2) + + # create the scrollable frame which will contain the entries + scrolled_frame = VerticalScrolledFrame(page, pady=2, height=250) + scrolled_frame.pack(side=BOTTOM, fill=BOTH, expand=TRUE) + entry_area = scrolled_frame.interior + entry_area.columnconfigure(0, weight=0) + entry_area.columnconfigure(1, weight=1) + + # create an entry for each configuration option + for row, opt in enumerate(self.extensions[ext_name]): + # create a row with a label and entry/checkbutton + label = 
Label(entry_area, text=opt['name']) + label.grid(row=row, column=0, sticky=NW) + var = opt['var'] + if opt['type'] == 'bool': + Checkbutton(entry_area, textvariable=var, variable=var, + onvalue='True', offvalue='False', + indicatoron=FALSE, selectcolor='', width=8 + ).grid(row=row, column=1, sticky=W, padx=7) + elif opt['type'] == 'int': + Entry(entry_area, textvariable=var, validate='key', + validatecommand=(self.is_int, '%P') + ).grid(row=row, column=1, sticky=NSEW, padx=7) + + else: + Entry(entry_area, textvariable=var + ).grid(row=row, column=1, sticky=NSEW, padx=7) + return + + + Ok = ConfigDialog.Ok + + def Apply(self): + self.save_all_changed_configs() + pass + + Cancel = ConfigDialog.Cancel + + def Help(self): + pass + + def set_user_value(self, section, opt): + name = opt['name'] + default = opt['default'] + value = opt['var'].get().strip() or default + opt['var'].set(value) + # if self.defaultCfg.has_section(section): + # Currently, always true; if not, indent to return + if (value == default): + return self.userCfg.RemoveOption(section, name) + # set the option + return self.userCfg.SetOption(section, name, value) + + def save_all_changed_configs(self): + """Save configuration changes to the user config file.""" + has_changes = False + for ext_name in self.extensions: + options = self.extensions[ext_name] + for opt in options: + if self.set_user_value(ext_name, opt): + has_changes = True + if has_changes: + self.userCfg.Save() + + +if __name__ == '__main__': + import unittest + unittest.main('idlelib.idle_test.test_configdialog', + verbosity=2, exit=False) + from idlelib.idle_test.htest import run + run(ConfigDialog, ConfigExtensionsDialog) diff --git a/Darwin/lib/python3.4/idlelib/configHandler.py b/Darwin/lib/python3.5/idlelib/configHandler.py similarity index 54% rename from Darwin/lib/python3.4/idlelib/configHandler.py rename to Darwin/lib/python3.5/idlelib/configHandler.py index 8608f7c..83abad7 100644 --- a/Darwin/lib/python3.4/idlelib/configHandler.py +++ b/Darwin/lib/python3.5/idlelib/configHandler.py @@ -15,12 +15,15 @@ idle. This is to allow IDLE to continue to function in spite of errors in the retrieval of config information. When a default is returned instead of a requested config value, a message is printed to stderr to aid in configuration problem notification and resolution. - """ +# TODOs added Oct 2014, tjr + import os import sys -from configparser import ConfigParser, NoOptionError, NoSectionError +from configparser import ConfigParser +from tkinter import TkVersion +from tkinter.font import Font, nametofont class InvalidConfigType(Exception): pass class InvalidConfigSet(Exception): pass @@ -35,7 +38,7 @@ class IdleConfParser(ConfigParser): """ cfgFile - string, fully specified configuration file name """ - self.file=cfgFile + self.file = cfgFile ConfigParser.__init__(self, defaults=cfgDefaults, strict=False) def Get(self, section, option, type=None, default=None, raw=False): @@ -43,28 +46,27 @@ class IdleConfParser(ConfigParser): Get an option value for given section/option or return default. If type is specified, return as type. """ + # TODO Use default as fallback, at least if not None + # Should also print Warning(file, section, option). 
+ # Currently may raise ValueError if not self.has_option(section, option): return default - if type=='bool': + if type == 'bool': return self.getboolean(section, option) - elif type=='int': + elif type == 'int': return self.getint(section, option) else: return self.get(section, option, raw=raw) - def GetOptionList(self,section): - """ - Get an option list for given section - """ + def GetOptionList(self, section): + "Return a list of options for given section, else []." if self.has_section(section): return self.options(section) else: #return a default value return [] def Load(self): - """ - Load the configuration file from disk - """ + "Load the configuration file from disk." self.read(self.file) class IdleUserConfParser(IdleConfParser): @@ -72,61 +74,50 @@ class IdleUserConfParser(IdleConfParser): IdleConfigParser specialised for user configuration handling. """ - def AddSection(self,section): - """ - if section doesn't exist, add it - """ + def AddSection(self, section): + "If section doesn't exist, add it." if not self.has_section(section): self.add_section(section) def RemoveEmptySections(self): - """ - remove any sections that have no options - """ + "Remove any sections that have no options." for section in self.sections(): if not self.GetOptionList(section): self.remove_section(section) def IsEmpty(self): - """ - Remove empty sections and then return 1 if parser has no sections - left, else return 0. - """ + "Return True if no sections after removing empty sections." self.RemoveEmptySections() - if self.sections(): - return 0 - else: - return 1 + return not self.sections() - def RemoveOption(self,section,option): - """ - If section/option exists, remove it. - Returns 1 if option was removed, 0 otherwise. + def RemoveOption(self, section, option): + """Return True if option is removed from section, else False. + + False if either section does not exist or did not have option. """ if self.has_section(section): - return self.remove_option(section,option) + return self.remove_option(section, option) + return False - def SetOption(self,section,option,value): + def SetOption(self, section, option, value): + """Return True if option is added or changed to value, else False. + + Add section if required. False means option already had value. """ - Sets option to value, adding section if required. - Returns 1 if option was added or changed, otherwise 0. - """ - if self.has_option(section,option): - if self.get(section,option)==value: - return 0 + if self.has_option(section, option): + if self.get(section, option) == value: + return False else: - self.set(section,option,value) - return 1 + self.set(section, option, value) + return True else: if not self.has_section(section): self.add_section(section) - self.set(section,option,value) - return 1 + self.set(section, option, value) + return True def RemoveFile(self): - """ - Removes the user config file from disk if it exists. - """ + "Remove user config file self.file from disk if it exists." 
if os.path.exists(self.file): os.remove(self.file) @@ -150,62 +141,59 @@ class IdleUserConfParser(IdleConfParser): self.RemoveFile() class IdleConf: - """ - holds config parsers for all idle config files: - default config files - (idle install dir)/config-main.def - (idle install dir)/config-extensions.def - (idle install dir)/config-highlight.def - (idle install dir)/config-keys.def - user config files - (user home dir)/.idlerc/config-main.cfg - (user home dir)/.idlerc/config-extensions.cfg - (user home dir)/.idlerc/config-highlight.cfg - (user home dir)/.idlerc/config-keys.cfg + """Hold config parsers for all idle config files in singleton instance. + + Default config files, self.defaultCfg -- + for config_type in self.config_types: + (idle install dir)/config-{config-type}.def + + User config files, self.userCfg -- + for config_type in self.config_types: + (user home dir)/.idlerc/config-{config-type}.cfg """ def __init__(self): - self.defaultCfg={} - self.userCfg={} - self.cfg={} + self.config_types = ('main', 'extensions', 'highlight', 'keys') + self.defaultCfg = {} + self.userCfg = {} + self.cfg = {} # TODO use to select userCfg vs defaultCfg self.CreateConfigHandlers() self.LoadCfgFiles() - #self.LoadCfg() + def CreateConfigHandlers(self): - """ - set up a dictionary of config parsers for default and user - configurations respectively - """ + "Populate default and user config parser dictionaries." #build idle install path if __name__ != '__main__': # we were imported idleDir=os.path.dirname(__file__) else: # we were exec'ed (for testing only) idleDir=os.path.abspath(sys.path[0]) userDir=self.GetUserCfgDir() - configTypes=('main','extensions','highlight','keys') - defCfgFiles={} - usrCfgFiles={} - for cfgType in configTypes: #build config file names - defCfgFiles[cfgType]=os.path.join(idleDir,'config-'+cfgType+'.def') - usrCfgFiles[cfgType]=os.path.join(userDir,'config-'+cfgType+'.cfg') - for cfgType in configTypes: #create config parsers - self.defaultCfg[cfgType]=IdleConfParser(defCfgFiles[cfgType]) - self.userCfg[cfgType]=IdleUserConfParser(usrCfgFiles[cfgType]) + + defCfgFiles = {} + usrCfgFiles = {} + # TODO eliminate these temporaries by combining loops + for cfgType in self.config_types: #build config file names + defCfgFiles[cfgType] = os.path.join( + idleDir, 'config-' + cfgType + '.def') + usrCfgFiles[cfgType] = os.path.join( + userDir, 'config-' + cfgType + '.cfg') + for cfgType in self.config_types: #create config parsers + self.defaultCfg[cfgType] = IdleConfParser(defCfgFiles[cfgType]) + self.userCfg[cfgType] = IdleUserConfParser(usrCfgFiles[cfgType]) def GetUserCfgDir(self): - """ - Creates (if required) and returns a filesystem directory for storing - user config files. + """Return a filesystem directory for storing user config files. + Creates it if required. 
""" cfgDir = '.idlerc' userDir = os.path.expanduser('~') if userDir != '~': # expanduser() found user home dir if not os.path.exists(userDir): - warn = ('\n Warning: os.path.expanduser("~") points to\n '+ - userDir+',\n but the path does not exist.\n') + warn = ('\n Warning: os.path.expanduser("~") points to\n ' + + userDir + ',\n but the path does not exist.') try: - sys.stderr.write(warn) + print(warn, file=sys.stderr) except OSError: pass userDir = '~' @@ -217,45 +205,44 @@ class IdleConf: try: os.mkdir(userDir) except OSError: - warn = ('\n Warning: unable to create user config directory\n'+ - userDir+'\n Check path and permissions.\n Exiting!\n\n') - sys.stderr.write(warn) + warn = ('\n Warning: unable to create user config directory\n' + + userDir + '\n Check path and permissions.\n Exiting!\n') + print(warn, file=sys.stderr) raise SystemExit + # TODO continue without userDIr instead of exit return userDir def GetOption(self, configType, section, option, default=None, type=None, warn_on_default=True, raw=False): - """ - Get an option value for given config type and given general - configuration section/option or return a default. If type is specified, - return as type. Firstly the user configuration is checked, with a - fallback to the default configuration, and a final 'catch all' - fallback to a useable passed-in default if the option isn't present in - either the user or the default configuration. - configType must be one of ('main','extensions','highlight','keys') - If a default is returned, and warn_on_default is True, a warning is - printed to stderr. + """Return a value for configType section option, or default. + If type is not None, return a value of that type. Also pass raw + to the config parser. First try to return a valid value + (including type) from a user configuration. If that fails, try + the default configuration. If that fails, return default, with a + default of None. + + Warn if either user or default configurations have an invalid value. + Warn if default is returned and warn_on_default is True. """ try: - if self.userCfg[configType].has_option(section,option): + if self.userCfg[configType].has_option(section, option): return self.userCfg[configType].Get(section, option, type=type, raw=raw) except ValueError: warning = ('\n Warning: configHandler.py - IdleConf.GetOption -\n' ' invalid %r value for configuration option %r\n' - ' from section %r: %r\n' % + ' from section %r: %r' % (type, option, section, - self.userCfg[configType].Get(section, option, - raw=raw))) + self.userCfg[configType].Get(section, option, raw=raw))) try: - sys.stderr.write(warning) + print(warning, file=sys.stderr) except OSError: pass try: if self.defaultCfg[configType].has_option(section,option): - return self.defaultCfg[configType].Get(section, option, - type=type, raw=raw) + return self.defaultCfg[configType].Get( + section, option, type=type, raw=raw) except ValueError: pass #returning default, print warning @@ -263,31 +250,28 @@ class IdleConf: warning = ('\n Warning: configHandler.py - IdleConf.GetOption -\n' ' problem retrieving configuration option %r\n' ' from section %r.\n' - ' returning default value: %r\n' % + ' returning default value: %r' % (option, section, default)) try: - sys.stderr.write(warning) + print(warning, file=sys.stderr) except OSError: pass return default def SetOption(self, configType, section, option, value): - """In user's config file, set section's option to value. 
- - """ + """Set section option to value in user config file.""" self.userCfg[configType].SetOption(section, option, value) def GetSectionList(self, configSet, configType): - """ - Get a list of sections from either the user or default config for - the given config type. + """Return sections for configSet configType configuration. + configSet must be either 'user' or 'default' - configType must be one of ('main','extensions','highlight','keys') + configType must be in self.config_types. """ - if not (configType in ('main','extensions','highlight','keys')): + if not (configType in self.config_types): raise InvalidConfigType('Invalid configType specified') if configSet == 'user': - cfgParser=self.userCfg[configType] + cfgParser = self.userCfg[configType] elif configSet == 'default': cfgParser=self.defaultCfg[configType] else: @@ -295,25 +279,27 @@ class IdleConf: return cfgParser.sections() def GetHighlight(self, theme, element, fgBg=None): - """ - return individual highlighting theme elements. - fgBg - string ('fg'or'bg') or None, if None return a dictionary - containing fg and bg colours (appropriate for passing to Tkinter in, - e.g., a tag_config call), otherwise fg or bg colour only as specified. + """Return individual theme element highlight color(s). + + fgBg - string ('fg' or 'bg') or None. + If None, return a dictionary containing fg and bg colors with + keys 'foreground' and 'background'. Otherwise, only return + fg or bg color, as specified. Colors are intended to be + appropriate for passing to Tkinter in, e.g., a tag_config call). """ if self.defaultCfg['highlight'].has_section(theme): - themeDict=self.GetThemeDict('default',theme) + themeDict = self.GetThemeDict('default', theme) else: - themeDict=self.GetThemeDict('user',theme) - fore=themeDict[element+'-foreground'] - if element=='cursor': #there is no config value for cursor bg - back=themeDict['normal-background'] + themeDict = self.GetThemeDict('user', theme) + fore = themeDict[element + '-foreground'] + if element == 'cursor': # There is no config value for cursor bg + back = themeDict['normal-background'] else: - back=themeDict[element+'-background'] - highlight={"foreground": fore,"background": back} - if not fgBg: #return dict of both colours + back = themeDict[element + '-background'] + highlight = {"foreground": fore, "background": back} + if not fgBg: # Return dict of both colors return highlight - else: #return specified colour only + else: # Return specified color only if fgBg == 'fg': return highlight["foreground"] if fgBg == 'bg': @@ -321,26 +307,26 @@ class IdleConf: else: raise InvalidFgBg('Invalid fgBg specified') - def GetThemeDict(self,type,themeName): - """ + def GetThemeDict(self, type, themeName): + """Return {option:value} dict for elements in themeName. + type - string, 'default' or 'user' theme type themeName - string, theme name - Returns a dictionary which holds {option:value} for each element - in the specified theme. Values are loaded over a set of ultimate last - fallback defaults to guarantee that all theme elements are present in - a newly created theme. + Values are loaded over ultimate fallback defaults to guarantee + that all theme elements are present in a newly created theme. 
""" if type == 'user': - cfgParser=self.userCfg['highlight'] + cfgParser = self.userCfg['highlight'] elif type == 'default': - cfgParser=self.defaultCfg['highlight'] + cfgParser = self.defaultCfg['highlight'] else: raise InvalidTheme('Invalid theme type specified') - #foreground and background values are provded for each theme element - #(apart from cursor) even though all these values are not yet used - #by idle, to allow for their use in the future. Default values are - #generally black and white. - theme={ 'normal-foreground':'#000000', + # Provide foreground and background colors for each theme + # element (other than cursor) even though some values are not + # yet used by idle, to allow for their use in the future. + # Default values are generally black and white. + # TODO copy theme from a class attribute. + theme ={'normal-foreground':'#000000', 'normal-background':'#ffffff', 'keyword-foreground':'#000000', 'keyword-background':'#ffffff', @@ -370,52 +356,50 @@ class IdleConf: 'console-foreground':'#000000', 'console-background':'#ffffff' } for element in theme: - if not cfgParser.has_option(themeName,element): - #we are going to return a default, print warning - warning=('\n Warning: configHandler.py - IdleConf.GetThemeDict' + if not cfgParser.has_option(themeName, element): + # Print warning that will return a default color + warning = ('\n Warning: configHandler.IdleConf.GetThemeDict' ' -\n problem retrieving theme element %r' '\n from theme %r.\n' - ' returning default value: %r\n' % + ' returning default color: %r' % (element, themeName, theme[element])) try: - sys.stderr.write(warning) + print(warning, file=sys.stderr) except OSError: pass - colour=cfgParser.Get(themeName,element,default=theme[element]) - theme[element]=colour + theme[element] = cfgParser.Get( + themeName, element, default=theme[element]) return theme def CurrentTheme(self): - """ - Returns the name of the currently active theme - """ - return self.GetOption('main','Theme','name',default='') + "Return the name of the currently active theme." + return self.GetOption('main', 'Theme', 'name', default='') def CurrentKeys(self): - """ - Returns the name of the currently active key set - """ - return self.GetOption('main','Keys','name',default='') + "Return the name of the currently active key set." + return self.GetOption('main', 'Keys', 'name', default='') def GetExtensions(self, active_only=True, editor_only=False, shell_only=False): + """Return extensions in default and user config-extensions files. + + If active_only True, only return active (enabled) extensions + and optionally only editor or shell extensions. + If active_only False, return all extensions. """ - Gets a list of all idle extensions declared in the config files. 
- active_only - boolean, if true only return active (enabled) extensions - """ - extns=self.RemoveKeyBindNames( - self.GetSectionList('default','extensions')) - userExtns=self.RemoveKeyBindNames( - self.GetSectionList('user','extensions')) + extns = self.RemoveKeyBindNames( + self.GetSectionList('default', 'extensions')) + userExtns = self.RemoveKeyBindNames( + self.GetSectionList('user', 'extensions')) for extn in userExtns: if extn not in extns: #user has added own extension extns.append(extn) if active_only: - activeExtns=[] + activeExtns = [] for extn in extns: if self.GetOption('extensions', extn, 'enable', default=True, type='bool'): #the extension is enabled - if editor_only or shell_only: + if editor_only or shell_only: # TODO if both, contradictory if editor_only: option = "enable_editor" else: @@ -430,107 +414,108 @@ class IdleConf: else: return extns - def RemoveKeyBindNames(self,extnNameList): - #get rid of keybinding section names - names=extnNameList - kbNameIndicies=[] + def RemoveKeyBindNames(self, extnNameList): + "Return extnNameList with keybinding section names removed." + # TODO Easier to return filtered copy with list comp + names = extnNameList + kbNameIndicies = [] for name in names: if name.endswith(('_bindings', '_cfgBindings')): kbNameIndicies.append(names.index(name)) - kbNameIndicies.sort() - kbNameIndicies.reverse() + kbNameIndicies.sort(reverse=True) for index in kbNameIndicies: #delete each keybinding section name del(names[index]) return names - def GetExtnNameForEvent(self,virtualEvent): + def GetExtnNameForEvent(self, virtualEvent): + """Return the name of the extension binding virtualEvent, or None. + + virtualEvent - string, name of the virtual event to test for, + without the enclosing '<< >>' """ - Returns the name of the extension that virtualEvent is bound in, or - None if not bound in any extension. - virtualEvent - string, name of the virtual event to test for, without - the enclosing '<< >>' - """ - extName=None - vEvent='<<'+virtualEvent+'>>' + extName = None + vEvent = '<<' + virtualEvent + '>>' for extn in self.GetExtensions(active_only=0): for event in self.GetExtensionKeys(extn): if event == vEvent: - extName=extn + extName = extn # TODO return here? return extName - def GetExtensionKeys(self,extensionName): + def GetExtensionKeys(self, extensionName): + """Return dict: {configurable extensionName event : active keybinding}. + + Events come from default config extension_cfgBindings section. + Keybindings come from GetCurrentKeySet() active key dict, + where previously used bindings are disabled. """ - returns a dictionary of the configurable keybindings for a particular - extension,as they exist in the dictionary returned by GetCurrentKeySet; - that is, where previously used bindings are disabled. - """ - keysName=extensionName+'_cfgBindings' - activeKeys=self.GetCurrentKeySet() - extKeys={} + keysName = extensionName + '_cfgBindings' + activeKeys = self.GetCurrentKeySet() + extKeys = {} if self.defaultCfg['extensions'].has_section(keysName): - eventNames=self.defaultCfg['extensions'].GetOptionList(keysName) + eventNames = self.defaultCfg['extensions'].GetOptionList(keysName) for eventName in eventNames: - event='<<'+eventName+'>>' - binding=activeKeys[event] - extKeys[event]=binding + event = '<<' + eventName + '>>' + binding = activeKeys[event] + extKeys[event] = binding return extKeys def __GetRawExtensionKeys(self,extensionName): + """Return dict {configurable extensionName event : keybinding list}. 
+ + Events come from default config extension_cfgBindings section. + Keybindings list come from the splitting of GetOption, which + tries user config before default config. """ - returns a dictionary of the configurable keybindings for a particular - extension, as defined in the configuration files, or an empty dictionary - if no bindings are found - """ - keysName=extensionName+'_cfgBindings' - extKeys={} + keysName = extensionName+'_cfgBindings' + extKeys = {} if self.defaultCfg['extensions'].has_section(keysName): - eventNames=self.defaultCfg['extensions'].GetOptionList(keysName) + eventNames = self.defaultCfg['extensions'].GetOptionList(keysName) for eventName in eventNames: - binding=self.GetOption('extensions',keysName, - eventName,default='').split() - event='<<'+eventName+'>>' - extKeys[event]=binding + binding = self.GetOption( + 'extensions', keysName, eventName, default='').split() + event = '<<' + eventName + '>>' + extKeys[event] = binding return extKeys - def GetExtensionBindings(self,extensionName): + def GetExtensionBindings(self, extensionName): + """Return dict {extensionName event : active or defined keybinding}. + + Augment self.GetExtensionKeys(extensionName) with mapping of non- + configurable events (from default config) to GetOption splits, + as in self.__GetRawExtensionKeys. """ - Returns a dictionary of all the event bindings for a particular - extension. The configurable keybindings are returned as they exist in - the dictionary returned by GetCurrentKeySet; that is, where re-used - keybindings are disabled. - """ - bindsName=extensionName+'_bindings' - extBinds=self.GetExtensionKeys(extensionName) + bindsName = extensionName + '_bindings' + extBinds = self.GetExtensionKeys(extensionName) #add the non-configurable bindings if self.defaultCfg['extensions'].has_section(bindsName): - eventNames=self.defaultCfg['extensions'].GetOptionList(bindsName) + eventNames = self.defaultCfg['extensions'].GetOptionList(bindsName) for eventName in eventNames: - binding=self.GetOption('extensions',bindsName, - eventName,default='').split() - event='<<'+eventName+'>>' - extBinds[event]=binding + binding = self.GetOption( + 'extensions', bindsName, eventName, default='').split() + event = '<<' + eventName + '>>' + extBinds[event] = binding return extBinds def GetKeyBinding(self, keySetName, eventStr): + """Return the keybinding list for keySetName eventStr. + + keySetName - name of key binding set (config-keys section). + eventStr - virtual event, including brackets, as in '<>'. """ - returns the keybinding for a specific event. - keySetName - string, name of key binding set - eventStr - string, the virtual event we want the binding for, - represented as a string, eg. '<>' - """ - eventName=eventStr[2:-2] #trim off the angle brackets - binding=self.GetOption('keys',keySetName,eventName,default='').split() + eventName = eventStr[2:-2] #trim off the angle brackets + binding = self.GetOption('keys', keySetName, eventName, default='').split() return binding def GetCurrentKeySet(self): + "Return CurrentKeys with 'darwin' modifications." result = self.GetKeySet(self.CurrentKeys()) if sys.platform == "darwin": # OS X Tk variants do not support the "Alt" keyboard modifier. # So replace all keybingings that use "Alt" with ones that # use the "Option" keyboard modifier. - # TO DO: the "Option" modifier does not work properly for + # TODO (Ned?): the "Option" modifier does not work properly for # Cocoa Tk and XQuartz Tk so we should not use it # in default OS X KeySets. 
for k, v in result.items(): @@ -540,40 +525,43 @@ class IdleConf: return result - def GetKeySet(self,keySetName): + def GetKeySet(self, keySetName): + """Return event-key dict for keySetName core plus active extensions. + + If a binding defined in an extension is already in use, the + extension binding is disabled by being set to '' """ - Returns a dictionary of: all requested core keybindings, plus the - keybindings for all currently active extensions. If a binding defined - in an extension is already in use, that binding is disabled. - """ - keySet=self.GetCoreKeys(keySetName) - activeExtns=self.GetExtensions(active_only=1) + keySet = self.GetCoreKeys(keySetName) + activeExtns = self.GetExtensions(active_only=1) for extn in activeExtns: - extKeys=self.__GetRawExtensionKeys(extn) + extKeys = self.__GetRawExtensionKeys(extn) if extKeys: #the extension defines keybindings for event in extKeys: if extKeys[event] in keySet.values(): #the binding is already in use - extKeys[event]='' #disable this binding - keySet[event]=extKeys[event] #add binding + extKeys[event] = '' #disable this binding + keySet[event] = extKeys[event] #add binding return keySet - def IsCoreBinding(self,virtualEvent): - """ - returns true if the virtual event is bound in the core idle keybindings. - virtualEvent - string, name of the virtual event to test for, without - the enclosing '<< >>' + def IsCoreBinding(self, virtualEvent): + """Return True if the virtual event is one of the core idle key events. + + virtualEvent - string, name of the virtual event to test for, + without the enclosing '<< >>' """ return ('<<'+virtualEvent+'>>') in self.GetCoreKeys() +# TODO make keyBindins a file or class attribute used for test above +# and copied in function below + def GetCoreKeys(self, keySetName=None): - """ - returns the requested set of core keybindings, with fallbacks if - required. - Keybindings loaded from the config file(s) are loaded _over_ these - defaults, so if there is a problem getting any core binding there will - be an 'ultimate last resort fallback' to the CUA-ish bindings - defined here. + """Return dict of core virtual-key keybindings for keySetName. + + The default keySetName None corresponds to the keyBindings base + dict. If keySetName is not None, bindings from the config + file(s) are loaded _over_ these defaults, so if there is a + problem getting any core binding there will be an 'ultimate last + resort fallback' to the CUA-ish bindings defined here. """ keyBindings={ '<>': ['', ''], @@ -628,23 +616,23 @@ class IdleConf: } if keySetName: for event in keyBindings: - binding=self.GetKeyBinding(keySetName,event) + binding = self.GetKeyBinding(keySetName, event) if binding: - keyBindings[event]=binding + keyBindings[event] = binding else: #we are going to return a default, print warning warning=('\n Warning: configHandler.py - IdleConf.GetCoreKeys' ' -\n problem retrieving key binding for event %r' '\n from key set %r.\n' - ' returning default value: %r\n' % + ' returning default value: %r' % (event, keySetName, keyBindings[event])) try: - sys.stderr.write(warning) + print(warning, file=sys.stderr) except OSError: pass return keyBindings - def GetExtraHelpSourceList(self,configSet): - """Fetch list of extra help sources from a given configSet. + def GetExtraHelpSourceList(self, configSet): + """Return list of extra help sources from a given configSet. Valid configSets are 'user' or 'default'. 
Return a list of tuples of the form (menu_item , path_to_help_file , option), or return the empty @@ -653,19 +641,19 @@ class IdleConf: therefore the returned list must be sorted by 'option'. """ - helpSources=[] - if configSet=='user': - cfgParser=self.userCfg['main'] - elif configSet=='default': - cfgParser=self.defaultCfg['main'] + helpSources = [] + if configSet == 'user': + cfgParser = self.userCfg['main'] + elif configSet == 'default': + cfgParser = self.defaultCfg['main'] else: raise InvalidConfigSet('Invalid configSet specified') options=cfgParser.GetOptionList('HelpFiles') for option in options: - value=cfgParser.Get('HelpFiles',option,default=';') - if value.find(';')==-1: #malformed config entry with no ';' - menuItem='' #make these empty - helpPath='' #so value won't be added to list + value=cfgParser.Get('HelpFiles', option, default=';') + if value.find(';') == -1: #malformed config entry with no ';' + menuItem = '' #make these empty + helpPath = '' #so value won't be added to list else: #config entry contains ';' as expected value=value.split(';') menuItem=value[0].strip() @@ -676,47 +664,73 @@ class IdleConf: return helpSources def GetAllExtraHelpSourcesList(self): + """Return a list of the details of all additional help sources. + + Tuples in the list are those of GetExtraHelpSourceList. """ - Returns a list of tuples containing the details of all additional help - sources configured, or an empty list if there are none. Tuples are of - the format returned by GetExtraHelpSourceList. - """ - allHelpSources=( self.GetExtraHelpSourceList('default')+ + allHelpSources = (self.GetExtraHelpSourceList('default') + self.GetExtraHelpSourceList('user') ) return allHelpSources + def GetFont(self, root, configType, section): + """Retrieve a font from configuration (font, font-size, font-bold) + Intercept the special value 'TkFixedFont' and substitute + the actual font, factoring in some tweaks if needed for + appearance sakes. + + The 'root' parameter can normally be any valid Tkinter widget. + + Return a tuple (family, size, weight) suitable for passing + to tkinter.Font + """ + family = self.GetOption(configType, section, 'font', default='courier') + size = self.GetOption(configType, section, 'font-size', type='int', + default='10') + bold = self.GetOption(configType, section, 'font-bold', default=0, + type='bool') + if (family == 'TkFixedFont'): + if TkVersion < 8.5: + family = 'Courier' + else: + f = Font(name='TkFixedFont', exists=True, root=root) + actualFont = Font.actual(f) + family = actualFont['family'] + size = actualFont['size'] + if size < 0: + size = 10 # if font in pixels, ignore actual size + bold = actualFont['weight']=='bold' + return (family, size, 'bold' if bold else 'normal') + def LoadCfgFiles(self): - """ - load all configuration files. - """ + "Load all configuration files." for key in self.defaultCfg: self.defaultCfg[key].Load() self.userCfg[key].Load() #same keys def SaveUserCfgFiles(self): - """ - write all loaded user configuration files back to disk - """ + "Write all loaded user configuration files to disk." 
for key in self.userCfg: self.userCfg[key].Save() -idleConf=IdleConf() +idleConf = IdleConf() + +# TODO Revise test output, write expanded unittest ### module test if __name__ == '__main__': def dumpCfg(cfg): - print('\n',cfg,'\n') + print('\n', cfg, '\n') for key in cfg: - sections=cfg[key].sections() + sections = cfg[key].sections() print(key) print(sections) for section in sections: - options=cfg[key].options(section) + options = cfg[key].options(section) print(section) print(options) for option in options: - print(option, '=', cfg[key].Get(section,option)) + print(option, '=', cfg[key].Get(section, option)) dumpCfg(idleConf.defaultCfg) dumpCfg(idleConf.userCfg) - print(idleConf.userCfg['main'].Get('Theme','name')) + print(idleConf.userCfg['main'].Get('Theme', 'name')) #print idleConf.userCfg['highlight'].GetDefHighlight('Foo','normal') diff --git a/Darwin/lib/python3.4/idlelib/configHelpSourceEdit.py b/Darwin/lib/python3.5/idlelib/configHelpSourceEdit.py similarity index 90% rename from Darwin/lib/python3.4/idlelib/configHelpSourceEdit.py rename to Darwin/lib/python3.5/idlelib/configHelpSourceEdit.py index 2ccb400..242b08d 100644 --- a/Darwin/lib/python3.4/idlelib/configHelpSourceEdit.py +++ b/Darwin/lib/python3.5/idlelib/configHelpSourceEdit.py @@ -8,13 +8,14 @@ import tkinter.messagebox as tkMessageBox import tkinter.filedialog as tkFileDialog class GetHelpSourceDialog(Toplevel): - def __init__(self, parent, title, menuItem='', filePath=''): + def __init__(self, parent, title, menuItem='', filePath='', _htest=False): """Get menu entry and url/ local file location for Additional Help User selects a name for the Help resource and provides a web url or a local file as its source. The user can enter a url or browse for the file. + _htest - bool, change box location when running htest """ Toplevel.__init__(self, parent) self.configure(borderwidth=5) @@ -31,12 +32,14 @@ class GetHelpSourceDialog(Toplevel): self.withdraw() #hide while setting geometry #needs to be done here so that the winfo_reqwidth is valid self.update_idletasks() - #centre dialog over parent: - self.geometry("+%d+%d" % - ((parent.winfo_rootx() + ((parent.winfo_width()/2) - -(self.winfo_reqwidth()/2)), - parent.winfo_rooty() + ((parent.winfo_height()/2) - -(self.winfo_reqheight()/2))))) + #centre dialog over parent. below parent if running htest. 
+ self.geometry( + "+%d+%d" % ( + parent.winfo_rootx() + + (parent.winfo_width()/2 - self.winfo_reqwidth()/2), + parent.winfo_rooty() + + ((parent.winfo_height()/2 - self.winfo_reqheight()/2) + if not _htest else 150))) self.deiconify() #geometry set, unhide self.bind('', self.Ok) self.wait_window() @@ -159,11 +162,5 @@ class GetHelpSourceDialog(Toplevel): self.destroy() if __name__ == '__main__': - #test the dialog - root = Tk() - def run(): - keySeq = '' - dlg = GetHelpSourceDialog(root, 'Get Help Source') - print(dlg.result) - Button(root,text='Dialog', command=run).pack() - root.mainloop() + from idlelib.idle_test.htest import run + run(GetHelpSourceDialog) diff --git a/Darwin/lib/python3.4/idlelib/configSectionNameDialog.py b/Darwin/lib/python3.5/idlelib/configSectionNameDialog.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/configSectionNameDialog.py rename to Darwin/lib/python3.5/idlelib/configSectionNameDialog.py diff --git a/Darwin/lib/python3.4/idlelib/dynOptionMenuWidget.py b/Darwin/lib/python3.5/idlelib/dynOptionMenuWidget.py similarity index 59% rename from Darwin/lib/python3.4/idlelib/dynOptionMenuWidget.py rename to Darwin/lib/python3.5/idlelib/dynOptionMenuWidget.py index 922de96..515b4ba 100644 --- a/Darwin/lib/python3.4/idlelib/dynOptionMenuWidget.py +++ b/Darwin/lib/python3.5/idlelib/dynOptionMenuWidget.py @@ -2,16 +2,15 @@ OptionMenu widget modified to allow dynamic menu reconfiguration and setting of highlightthickness """ -from tkinter import OptionMenu -from tkinter import _setit import copy +from tkinter import OptionMenu, _setit, StringVar, Button class DynOptionMenu(OptionMenu): """ unlike OptionMenu, our kwargs can include highlightthickness """ def __init__(self, master, variable, value, *values, **kwargs): - #get a copy of kwargs before OptionMenu.__init__ munges them + # TODO copy value instead of whole dict kwargsCopy=copy.copy(kwargs) if 'highlightthickness' in list(kwargs.keys()): del(kwargs['highlightthickness']) @@ -33,3 +32,26 @@ class DynOptionMenu(OptionMenu): command=_setit(self.variable,item,self.command)) if value: self.variable.set(value) + +def _dyn_option_menu(parent): # htest # + from tkinter import Toplevel + + top = Toplevel() + top.title("Tets dynamic option menu") + top.geometry("200x100+%d+%d" % (parent.winfo_rootx() + 200, + parent.winfo_rooty() + 150)) + top.focus_set() + + var = StringVar(top) + var.set("Old option set") #Set the default value + dyn = DynOptionMenu(top,var, "old1","old2","old3","old4") + dyn.pack() + + def update(): + dyn.SetMenu(["new1","new2","new3","new4"], value="new option set") + button = Button(top, text="Change option set", command=update) + button.pack() + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_dyn_option_menu) diff --git a/Darwin/lib/python3.4/idlelib/extend.txt b/Darwin/lib/python3.5/idlelib/extend.txt similarity index 100% rename from Darwin/lib/python3.4/idlelib/extend.txt rename to Darwin/lib/python3.5/idlelib/extend.txt diff --git a/Darwin/lib/python3.4/idlelib/help.txt b/Darwin/lib/python3.5/idlelib/help.txt similarity index 98% rename from Darwin/lib/python3.4/idlelib/help.txt rename to Darwin/lib/python3.5/idlelib/help.txt index 6378a2e..3f7bb23 100644 --- a/Darwin/lib/python3.4/idlelib/help.txt +++ b/Darwin/lib/python3.5/idlelib/help.txt @@ -15,9 +15,7 @@ Menus: IDLE has two window types the Shell window and the Editor window. It is possible to have multiple editor windows simultaneously. 
IDLE's menus dynamically change based on which window is currently selected. Each menu -documented below indicates which window type it is associated with. Click on -the dotted line at the top of a menu to "tear it off": a separate window -containing the menu is created (for Unix and Windows only). +documented below indicates which window type it is associated with. File Menu (Shell and Editor): @@ -129,7 +127,9 @@ Options Menu (Shell and Editor): Configure IDLE -- Open a configuration dialog. Fonts, indentation, keybindings, and color themes may be altered. Startup Preferences may be set, and additional Help - sources can be specified. + sources can be specified. On OS X, open the + configuration dialog by selecting Preferences + in the application menu. --- Code Context (toggle) -- Open a pane at the top of the edit window @@ -138,7 +138,7 @@ Options Menu (Shell and Editor): window. This is not present in the Shell window only the Editor window. -Windows Menu (Shell and Editor): +Window Menu (Shell and Editor): Zoom Height -- Toggles the window between normal size (40x80 initial setting) and maximum height. The initial size is in the Configure diff --git a/Darwin/lib/python3.4/idlelib/idle.bat b/Darwin/lib/python3.5/idlelib/idle.bat similarity index 97% rename from Darwin/lib/python3.4/idlelib/idle.bat rename to Darwin/lib/python3.5/idlelib/idle.bat index e77b96e..3d619a3 100755 --- a/Darwin/lib/python3.4/idlelib/idle.bat +++ b/Darwin/lib/python3.5/idlelib/idle.bat @@ -1,4 +1,4 @@ -@echo off -rem Start IDLE using the appropriate Python interpreter -set CURRDIR=%~dp0 -start "IDLE" "%CURRDIR%..\..\pythonw.exe" "%CURRDIR%idle.pyw" %1 %2 %3 %4 %5 %6 %7 %8 %9 +@echo off +rem Start IDLE using the appropriate Python interpreter +set CURRDIR=%~dp0 +start "IDLE" "%CURRDIR%..\..\pythonw.exe" "%CURRDIR%idle.pyw" %1 %2 %3 %4 %5 %6 %7 %8 %9 diff --git a/Darwin/lib/python3.4/idlelib/idle.py b/Darwin/lib/python3.5/idlelib/idle.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle.py rename to Darwin/lib/python3.5/idlelib/idle.py diff --git a/Darwin/lib/python3.5/idlelib/idle.pyw b/Darwin/lib/python3.5/idlelib/idle.pyw new file mode 100644 index 0000000..142cb32 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle.pyw @@ -0,0 +1,17 @@ +try: + import idlelib.PyShell +except ImportError: + # IDLE is not installed, but maybe PyShell is on sys.path: + from . import PyShell + import os + idledir = os.path.dirname(os.path.abspath(PyShell.__file__)) + if idledir != os.getcwd(): + # We're not in the IDLE directory, help the subprocess find run.py + pypath = os.environ.get('PYTHONPATH', '') + if pypath: + os.environ['PYTHONPATH'] = pypath + ':' + idledir + else: + os.environ['PYTHONPATH'] = idledir + PyShell.main() +else: + idlelib.PyShell.main() diff --git a/Darwin/lib/python3.5/idlelib/idle_test/README.txt b/Darwin/lib/python3.5/idlelib/idle_test/README.txt new file mode 100644 index 0000000..2339926 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/README.txt @@ -0,0 +1,143 @@ +README FOR IDLE TESTS IN IDLELIB.IDLE_TEST + +0. Quick Start + +Automated unit tests were added in 2.7 for Python 2.x and 3.3 for Python 3.x. +To run the tests from a command line: + +python -m test.test_idle + +Human-mediated tests were added later in 2.7 and in 3.4. + +python -m idlelib.idle_test.htest + + +1. Test Files + +The idle directory, idlelib, has over 60 xyz.py files. The idle_test +subdirectory should contain a test_xyz.py for each, where 'xyz' is lowercased +even if xyz.py is not. 
Here is a possible template, with the blanks after
+'.' and 'as', and before and after '_' to be filled in.
+
+import unittest
+from test.support import requires
+import idlelib. as
+
+class _Test(unittest.TestCase):
+
+    def test_(self):
+
+if __name__ == '__main__':
+    unittest.main(verbosity=2)
+
+Add the following at the end of xyz.py, with the appropriate name added after
+'test_'. Some files already have something like this for htest. If so, insert
+the import and unittest.main lines before the htest lines.
+
+if __name__ == "__main__":
+    import unittest
+    unittest.main('idlelib.idle_test.test_', verbosity=2, exit=False)
+
+
+
+2. GUI Tests
+
+When run as part of the Python test suite, Idle gui tests need to run
+test.support.requires('gui') (test.test_support in 2.7). A test is a gui test
+if it creates a Tk root or master object either directly or indirectly by
+instantiating a tkinter or idle class. For the benefit of test processes that
+either have no graphical environment available or are not allowed to use it, gui
+tests must be 'guarded' by "requires('gui')" in a setUp function or method.
+This will typically be setUpClass.
+
+To avoid interfering with other gui tests, all gui objects must be destroyed and
+deleted by the end of the test. Widgets, such as a Tk root, created in a setUpX
+function, should be destroyed in the corresponding tearDownX. Module and class
+widget attributes should also be deleted.
+
+    @classmethod
+    def setUpClass(cls):
+        requires('gui')
+        cls.root = tk.Tk()
+
+    @classmethod
+    def tearDownClass(cls):
+        cls.root.destroy()
+        del cls.root
+
+
+Requires('gui') causes the test(s) it guards to be skipped if any of
+a few conditions are met:
+
+ - The tests are being run by regrtest.py, and it was started without enabling
+   the "gui" resource with the "-u" command line option.
+
+ - The tests are being run on Windows by a service that is not allowed to
+   interact with the graphical environment.
+
+ - The tests are being run on Mac OSX in a process that cannot make a window
+   manager connection.
+
+ - tkinter.Tk cannot be successfully instantiated for some reason.
+
+ - test.support.use_resources has been set by something other than
+   regrtest.py and does not contain "gui".
+
+Tests of non-gui operations should avoid creating tk widgets. Incidental uses of
+tk variables and messageboxes can be replaced by the mock classes in
+idle_test/mock_tk.py. The mock text handles some uses of the tk Text widget.
+
+
+3. Running Unit Tests
+
+Assume that xyz.py and test_xyz.py both end with a unittest.main() call.
+Running either from an Idle editor runs all tests in the test_xyz file with the
+version of Python running Idle. Test output appears in the Shell window. The
+'verbosity=2' option lists all test methods in the file, which is appropriate
+when developing tests. The 'exit=False' option is needed in xyz.py files when an
+htest follows.
+
+The following command lines also run all test methods, including
+gui tests, in test_xyz.py. (Both '-m idlelib' and '-m idlelib.idle' start
+Idle and so cannot run tests.)
+
+python -m idlelib.xyz
+python -m idlelib.idle_test.test_xyz
+
+The following runs all idle_test/test_*.py tests interactively.
+
+>>> import unittest
+>>> unittest.main('idlelib.idle_test', verbosity=2)
+
+The following run all Idle tests at a command line. Option '-v' is the same as
+'verbosity=2'. (For 2.7, replace 'test' in the second line with
+'test.regrtest'.)
+
+python -m unittest -v idlelib.idle_test
+python -m test -v -ugui test_idle
+python -m test.test_idle
+
+The idle tests are 'discovered' by idlelib.idle_test.__init__.load_tests,
+which is also imported into test.test_idle. Normally, neither file should be
+changed when working on individual test modules. The third command runs
+unittest indirectly through regrtest. The same happens when the entire test
+suite is run with 'python -m test'. So that command must work for buildbots
+to stay green. Idle tests must not disturb the environment in a way that
+makes other tests fail (issue 18081).
+
+To run an individual Testcase or test method, extend the dotted name given to
+unittest on the command line.
+
+python -m unittest -v idlelib.idle_test.test_xyz.Test_case.test_meth
+
+
+4. Human-mediated Tests
+
+Human-mediated tests are widget tests that cannot be automated but need human
+verification. They are contained in idlelib/idle_test/htest.py, which has
+instructions. (Some modules need an auxiliary function, identified with # htest
+# on the header line.) The set is about complete, though some tests need
+improvement. To run all htests, run the htest file from an editor or from the
+command line with:
+
+python -m idlelib.idle_test.htest
diff --git a/Darwin/lib/python3.4/idlelib/idle_test/__init__.py b/Darwin/lib/python3.5/idlelib/idle_test/__init__.py
similarity index 100%
rename from Darwin/lib/python3.4/idlelib/idle_test/__init__.py
rename to Darwin/lib/python3.5/idlelib/idle_test/__init__.py
diff --git a/Darwin/lib/python3.5/idlelib/idle_test/htest.py b/Darwin/lib/python3.5/idlelib/idle_test/htest.py
new file mode 100644
index 0000000..aa7f2e8
--- /dev/null
+++ b/Darwin/lib/python3.5/idlelib/idle_test/htest.py
@@ -0,0 +1,407 @@
+'''Run human tests of Idle's window, dialog, and popup widgets.
+
+run(*tests)
+Create a master Tk window. Within that, run each callable in tests
+after finding the matching test spec in this file. If tests is empty,
+run an htest for each spec dict in this file after finding the matching
+callable in the module named in the spec. Close the window to skip or
+end the test.
+
+In a tested module, let X be a global name bound to a callable (class
+or function) whose .__name__ attribute is also X (the usual situation).
+The first parameter of X must be 'parent'. When called, the parent
+argument will be the root window. X must create a child Toplevel
+window (or subclass thereof). The Toplevel may be a test widget or
+dialog, in which case the callable is the corresponding class. Or the
+Toplevel may contain the widget to be tested or set up a context in
+which a test widget is invoked. In this latter case, the callable is a
+wrapper function that sets up the Toplevel and other objects. Wrapper
+function names, such as '_editor_window', should start with '_'.
+
+
+End the module with
+
+if __name__ == '__main__':
+
+    from idlelib.idle_test.htest import run
+    run(X)
+
+To have wrapper functions and test invocation code ignored by coveragepy
+reports, put '# htest #' on the def statement header line.
+
+def _wrapper(parent): # htest #
+
+Also make sure that the 'if __name__' line matches the above. Then
+make sure that .coveragerc includes the following.
+
+[report]
+exclude_lines =
+    .*# htest #
+    if __name__ == .__main__.:
+
+(The "." instead of "'" is intentional and necessary.)
+
+
+To run any X, this file must contain a matching instance of the
+following template, with X.__name__ prepended to '_spec'.
+When all tests are run, the prefix is used to get X.
+ +_spec = { + 'file': '', + 'kwds': {'title': ''}, + 'msg': "" + } + +file (no .py): run() imports file.py. +kwds: augmented with {'parent':root} and passed to X as **kwds. +title: an example kwd; some widgets need this, delete if not. +msg: master window hints about testing the widget. + + +Modules and classes not being tested at the moment: +PyShell.PyShellEditorWindow +Debugger.Debugger +AutoCompleteWindow.AutoCompleteWindow +OutputWindow.OutputWindow (indirectly being tested with grep test) +''' + +from importlib import import_module +from idlelib.macosxSupport import _initializeTkVariantTests +import tkinter as tk + +AboutDialog_spec = { + 'file': 'aboutDialog', + 'kwds': {'title': 'aboutDialog test', + '_htest': True, + }, + 'msg': "Test every button. Ensure Python, TK and IDLE versions " + "are correctly displayed.\n [Close] to exit.", + } + +_calltip_window_spec = { + 'file': 'CallTipWindow', + 'kwds': {}, + 'msg': "Typing '(' should display a calltip.\n" + "Typing ') should hide the calltip.\n" + } + +_class_browser_spec = { + 'file': 'ClassBrowser', + 'kwds': {}, + 'msg': "Inspect names of module, class(with superclass if " + "applicable), methods and functions.\nToggle nested items.\n" + "Double clicking on items prints a traceback for an exception " + "that is ignored." + } +ConfigExtensionsDialog_spec = { + 'file': 'configDialog', + 'kwds': {'title': 'Test Extension Configuration', + '_htest': True,}, + 'msg': "IDLE extensions dialog.\n" + "\n[Ok] to close the dialog.[Apply] to apply the settings and " + "and [Cancel] to revert all changes.\nRe-run the test to ensure " + "changes made have persisted." + } + +_color_delegator_spec = { + 'file': 'ColorDelegator', + 'kwds': {}, + 'msg': "The text is sample Python code.\n" + "Ensure components like comments, keywords, builtins,\n" + "string, definitions, and break are correctly colored.\n" + "The default color scheme is in idlelib/config-highlight.def" + } + +ConfigDialog_spec = { + 'file': 'configDialog', + 'kwds': {'title': 'ConfigDialogTest', + '_htest': True,}, + 'msg': "IDLE preferences dialog.\n" + "In the 'Fonts/Tabs' tab, changing font face, should update the " + "font face of the text in the area below it.\nIn the " + "'Highlighting' tab, try different color schemes. Clicking " + "items in the sample program should update the choices above it." + "\nIn the 'Keys' and 'General' tab, test settings of interest." + "\n[Ok] to close the dialog.[Apply] to apply the settings and " + "and [Cancel] to revert all changes.\nRe-run the test to ensure " + "changes made have persisted." + } + +# TODO Improve message +_dyn_option_menu_spec = { + 'file': 'dynOptionMenuWidget', + 'kwds': {}, + 'msg': "Select one of the many options in the 'old option set'.\n" + "Click the button to change the option set.\n" + "Select one of the many options in the 'new option set'." + } + +# TODO edit wrapper +_editor_window_spec = { + 'file': 'EditorWindow', + 'kwds': {}, + 'msg': "Test editor functions of interest.\n" + "Best to close editor first." 
+ } + +GetCfgSectionNameDialog_spec = { + 'file': 'configSectionNameDialog', + 'kwds': {'title':'Get Name', + 'message':'Enter something', + 'used_names': {'abc'}, + '_htest': True}, + 'msg': "After the text entered with [Ok] is stripped, , " + "'abc', or more that 30 chars are errors.\n" + "Close 'Get Name' with a valid entry (printed to Shell), " + "[Cancel], or [X]", + } + +GetHelpSourceDialog_spec = { + 'file': 'configHelpSourceEdit', + 'kwds': {'title': 'Get helpsource', + '_htest': True}, + 'msg': "Enter menu item name and help file path\n " + " and more than 30 chars are invalid menu item names.\n" + ", file does not exist are invalid path items.\n" + "Test for incomplete web address for help file path.\n" + "A valid entry will be printed to shell with [0k].\n" + "[Cancel] will print None to shell", + } + +# Update once issue21519 is resolved. +GetKeysDialog_spec = { + 'file': 'keybindingDialog', + 'kwds': {'title': 'Test keybindings', + 'action': 'find-again', + 'currentKeySequences': [''] , + '_htest': True, + }, + 'msg': "Test for different key modifier sequences.\n" + " is invalid.\n" + "No modifier key is invalid.\n" + "Shift key with [a-z],[0-9], function key, move key, tab, space" + "is invalid.\nNo validity checking if advanced key binding " + "entry is used." + } + +_grep_dialog_spec = { + 'file': 'GrepDialog', + 'kwds': {}, + 'msg': "Click the 'Show GrepDialog' button.\n" + "Test the various 'Find-in-files' functions.\n" + "The results should be displayed in a new '*Output*' window.\n" + "'Right-click'->'Goto file/line' anywhere in the search results " + "should open that file \nin a new EditorWindow." + } + +_help_dialog_spec = { + 'file': 'EditorWindow', + 'kwds': {}, + 'msg': "If the help text displays, this works.\n" + "Text is selectable. Window is scrollable." + } + +_io_binding_spec = { + 'file': 'IOBinding', + 'kwds': {}, + 'msg': "Test the following bindings\n" + " to display open window from file dialog.\n" + " to save the file\n" + } + +_multi_call_spec = { + 'file': 'MultiCall', + 'kwds': {}, + 'msg': "The following actions should trigger a print to console or IDLE" + " Shell.\nEntering and leaving the text area, key entry, " + ",\n, , " + ", \n, and " + "focusing out of the window\nare sequences to be tested." + } + +_multistatus_bar_spec = { + 'file': 'MultiStatusBar', + 'kwds': {}, + 'msg': "Ensure presence of multi-status bar below text area.\n" + "Click 'Update Status' to change the multi-status text" + } + +_object_browser_spec = { + 'file': 'ObjectBrowser', + 'kwds': {}, + 'msg': "Double click on items upto the lowest level.\n" + "Attributes of the objects and related information " + "will be displayed side-by-side at each level." + } + +_path_browser_spec = { + 'file': 'PathBrowser', + 'kwds': {}, + 'msg': "Test for correct display of all paths in sys.path.\n" + "Toggle nested items upto the lowest level.\n" + "Double clicking on an item prints a traceback\n" + "for an exception that is ignored." + } + +_percolator_spec = { + 'file': 'Percolator', + 'kwds': {}, + 'msg': "There are two tracers which can be toggled using a checkbox.\n" + "Toggling a tracer 'on' by checking it should print tracer" + "output to the console or to the IDLE shell.\n" + "If both the tracers are 'on', the output from the tracer which " + "was switched 'on' later, should be printed first\n" + "Test for actions like text entry, and removal." 
+ } + +_replace_dialog_spec = { + 'file': 'ReplaceDialog', + 'kwds': {}, + 'msg': "Click the 'Replace' button.\n" + "Test various replace options in the 'Replace dialog'.\n" + "Click [Close] or [X] to close the 'Replace Dialog'." + } + +_search_dialog_spec = { + 'file': 'SearchDialog', + 'kwds': {}, + 'msg': "Click the 'Search' button.\n" + "Test various search options in the 'Search dialog'.\n" + "Click [Close] or [X] to close the 'Search Dialog'." + } + +_scrolled_list_spec = { + 'file': 'ScrolledList', + 'kwds': {}, + 'msg': "You should see a scrollable list of items\n" + "Selecting (clicking) or double clicking an item " + "prints the name to the console or Idle shell.\n" + "Right clicking an item will display a popup." + } + +_stack_viewer_spec = { + 'file': 'StackViewer', + 'kwds': {}, + 'msg': "A stacktrace for a NameError exception.\n" + "Expand 'idlelib ...' and ''.\n" + "Check that exc_value, exc_tb, and exc_type are correct.\n" + } + +_tabbed_pages_spec = { + 'file': 'tabbedpages', + 'kwds': {}, + 'msg': "Toggle between the two tabs 'foo' and 'bar'\n" + "Add a tab by entering a suitable name for it.\n" + "Remove an existing tab by entering its name.\n" + "Remove all existing tabs.\n" + " is an invalid add page and remove page name.\n" + } + +TextViewer_spec = { + 'file': 'textView', + 'kwds': {'title': 'Test textView', + 'text':'The quick brown fox jumps over the lazy dog.\n'*35, + '_htest': True}, + 'msg': "Test for read-only property of text.\n" + "Text is selectable. Window is scrollable.", + } + +_tooltip_spec = { + 'file': 'ToolTip', + 'kwds': {}, + 'msg': "Place mouse cursor over both the buttons\n" + "A tooltip should appear with some text." + } + +_tree_widget_spec = { + 'file': 'TreeWidget', + 'kwds': {}, + 'msg': "The canvas is scrollable.\n" + "Click on folders upto to the lowest level." + } + +_undo_delegator_spec = { + 'file': 'UndoDelegator', + 'kwds': {}, + 'msg': "Click [Undo] to undo any action.\n" + "Click [Redo] to redo any action.\n" + "Click [Dump] to dump the current state " + "by printing to the console or the IDLE shell.\n" + } + +_widget_redirector_spec = { + 'file': 'WidgetRedirector', + 'kwds': {}, + 'msg': "Every text insert should be printed to the console." + "or the IDLE shell." + } + +def run(*tests): + root = tk.Tk() + root.title('IDLE htest') + root.resizable(0, 0) + _initializeTkVariantTests(root) + + # a scrollable Label like constant width text widget. + frameLabel = tk.Frame(root, padx=10) + frameLabel.pack() + text = tk.Text(frameLabel, wrap='word') + text.configure(bg=root.cget('bg'), relief='flat', height=4, width=70) + scrollbar = tk.Scrollbar(frameLabel, command=text.yview) + text.config(yscrollcommand=scrollbar.set) + scrollbar.pack(side='right', fill='y', expand=False) + text.pack(side='left', fill='both', expand=True) + + test_list = [] # List of tuples of the form (spec, callable widget) + if tests: + for test in tests: + test_spec = globals()[test.__name__ + '_spec'] + test_spec['name'] = test.__name__ + test_list.append((test_spec, test)) + else: + for k, d in globals().items(): + if k.endswith('_spec'): + test_name = k[:-5] + test_spec = d + test_spec['name'] = test_name + mod = import_module('idlelib.' 
+ test_spec['file']) + test = getattr(mod, test_name) + test_list.append((test_spec, test)) + + test_name = tk.StringVar('') + callable_object = None + test_kwds = None + + def next(): + + nonlocal test_name, callable_object, test_kwds + if len(test_list) == 1: + next_button.pack_forget() + test_spec, callable_object = test_list.pop() + test_kwds = test_spec['kwds'] + test_kwds['parent'] = root + test_name.set('Test ' + test_spec['name']) + + text.configure(state='normal') # enable text editing + text.delete('1.0','end') + text.insert("1.0",test_spec['msg']) + text.configure(state='disabled') # preserve read-only property + + def run_test(): + widget = callable_object(**test_kwds) + try: + print(widget.result) + except AttributeError: + pass + + button = tk.Button(root, textvariable=test_name, command=run_test) + button.pack() + next_button = tk.Button(root, text="Next", command=next) + next_button.pack() + + next() + + root.mainloop() + +if __name__ == '__main__': + run() diff --git a/Darwin/lib/python3.5/idlelib/idle_test/mock_idle.py b/Darwin/lib/python3.5/idlelib/idle_test/mock_idle.py new file mode 100644 index 0000000..1672a34 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/mock_idle.py @@ -0,0 +1,55 @@ +'''Mock classes that imitate idlelib modules or classes. + +Attributes and methods will be added as needed for tests. +''' + +from idlelib.idle_test.mock_tk import Text + +class Func: + '''Mock function captures args and returns result set by test. + + Attributes: + self.called - records call even if no args, kwds passed. + self.result - set by init, returned by call. + self.args - captures positional arguments. + self.kwds - captures keyword arguments. + + Most common use will probably be to mock methods. + Mock_tk.Var and Mbox_func are special variants of this. + ''' + def __init__(self, result=None): + self.called = False + self.result = result + self.args = None + self.kwds = None + def __call__(self, *args, **kwds): + self.called = True + self.args = args + self.kwds = kwds + if isinstance(self.result, BaseException): + raise self.result + else: + return self.result + + +class Editor: + '''Minimally imitate EditorWindow.EditorWindow class. + ''' + def __init__(self, flist=None, filename=None, key=None, root=None): + self.text = Text() + self.undo = UndoDelegator() + + def get_selection_indices(self): + first = self.text.index('1.0') + last = self.text.index('end') + return first, last + + +class UndoDelegator: + '''Minimally imitate UndoDelegator,UndoDelegator class. + ''' + # A real undo block is only needed for user interaction. + def undo_block_start(*args): + pass + def undo_block_stop(*args): + pass diff --git a/Darwin/lib/python3.4/idlelib/idle_test/mock_tk.py b/Darwin/lib/python3.5/idlelib/idle_test/mock_tk.py similarity index 89% rename from Darwin/lib/python3.4/idlelib/idle_test/mock_tk.py rename to Darwin/lib/python3.5/idlelib/idle_test/mock_tk.py index 762bbc9..86fe848 100644 --- a/Darwin/lib/python3.4/idlelib/idle_test/mock_tk.py +++ b/Darwin/lib/python3.5/idlelib/idle_test/mock_tk.py @@ -1,9 +1,27 @@ """Classes that replace tkinter gui objects used by an object being tested. -A gui object is anything with a master or parent paramenter, which is typically -required in spite of what the doc strings say. +A gui object is anything with a master or parent parameter, which is +typically required in spite of what the doc strings say. """ +class Event: + '''Minimal mock with attributes for testing event handlers. 
+ + This is not a gui object, but is used as an argument for callbacks + that access attributes of the event passed. If a callback ignores + the event, other than the fact that is happened, pass 'event'. + + Keyboard, mouse, window, and other sources generate Event instances. + Event instances have the following attributes: serial (number of + event), time (of event), type (of event as number), widget (in which + event occurred), and x,y (position of mouse). There are other + attributes for specific events, such as keycode for key events. + tkinter.Event.__doc__ has more but is still not complete. + ''' + def __init__(self, **kwds): + "Create event with attributes needed for test" + self.__dict__.update(kwds) + class Var: "Use for String/Int/BooleanVar: incomplete" def __init__(self, master=None, value=None, name=None): @@ -20,9 +38,10 @@ class Mbox_func: Instead of displaying a message box, the mock's call method saves the arguments as instance attributes, which test functions can then examime. + The test can set the result returned to ask function """ - def __init__(self): - self.result = None # The return for all show funcs + def __init__(self, result=None): + self.result = result # Return None for all show funcs def __call__(self, title, message, *args, **kwds): # Save all args for possible examination by tester self.title = title @@ -97,7 +116,7 @@ class Text: """Return a (line, char) tuple of int indexes into self.data. This implements .index without converting the result back to a string. - The result is contrained by the number of lines and linelengths of + The result is constrained by the number of lines and linelengths of self.data. For many indexes, the result is initially (1, 0). The input index may have any of several possible forms: diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_autocomplete.py b/Darwin/lib/python3.5/idlelib/idle_test/test_autocomplete.py new file mode 100644 index 0000000..3a2192e --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_autocomplete.py @@ -0,0 +1,143 @@ +import unittest +from test.support import requires +from tkinter import Tk, Text + +import idlelib.AutoComplete as ac +import idlelib.AutoCompleteWindow as acw +import idlelib.macosxSupport as mac +from idlelib.idle_test.mock_idle import Func +from idlelib.idle_test.mock_tk import Event + +class AutoCompleteWindow: + def complete(): + return + +class DummyEditwin: + def __init__(self, root, text): + self.root = root + self.text = text + self.indentwidth = 8 + self.tabwidth = 8 + self.context_use_ps1 = True + + +class AutoCompleteTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + mac.setupApp(cls.root, None) + cls.text = Text(cls.root) + cls.editor = DummyEditwin(cls.root, cls.text) + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + del cls.text + del cls.editor + del cls.root + + def setUp(self): + self.editor.text.delete('1.0', 'end') + self.autocomplete = ac.AutoComplete(self.editor) + + def test_init(self): + self.assertEqual(self.autocomplete.editwin, self.editor) + + def test_make_autocomplete_window(self): + testwin = self.autocomplete._make_autocomplete_window() + self.assertIsInstance(testwin, acw.AutoCompleteWindow) + + def test_remove_autocomplete_window(self): + self.autocomplete.autocompletewindow = ( + self.autocomplete._make_autocomplete_window()) + self.autocomplete._remove_autocomplete_window() + self.assertIsNone(self.autocomplete.autocompletewindow) + + def 
test_force_open_completions_event(self): + # Test that force_open_completions_event calls _open_completions + o_cs = Func() + self.autocomplete.open_completions = o_cs + self.autocomplete.force_open_completions_event('event') + self.assertEqual(o_cs.args, (True, False, True)) + + def test_try_open_completions_event(self): + Equal = self.assertEqual + autocomplete = self.autocomplete + trycompletions = self.autocomplete.try_open_completions_event + o_c_l = Func() + autocomplete._open_completions_later = o_c_l + + # _open_completions_later should not be called with no text in editor + trycompletions('event') + Equal(o_c_l.args, None) + + # _open_completions_later should be called with COMPLETE_ATTRIBUTES (1) + self.text.insert('1.0', 're.') + trycompletions('event') + Equal(o_c_l.args, (False, False, False, 1)) + + # _open_completions_later should be called with COMPLETE_FILES (2) + self.text.delete('1.0', 'end') + self.text.insert('1.0', '"./Lib/') + trycompletions('event') + Equal(o_c_l.args, (False, False, False, 2)) + + def test_autocomplete_event(self): + Equal = self.assertEqual + autocomplete = self.autocomplete + + # Test that the autocomplete event is ignored if user is pressing a + # modifier key in addition to the tab key + ev = Event(mc_state=True) + self.assertIsNone(autocomplete.autocomplete_event(ev)) + del ev.mc_state + + # If autocomplete window is open, complete() method is called + self.text.insert('1.0', 're.') + # This must call autocomplete._make_autocomplete_window() + Equal(self.autocomplete.autocomplete_event(ev), 'break') + + # If autocomplete window is not active or does not exist, + # open_completions is called. Return depends on its return. + autocomplete._remove_autocomplete_window() + o_cs = Func() # .result = None + autocomplete.open_completions = o_cs + Equal(self.autocomplete.autocomplete_event(ev), None) + Equal(o_cs.args, (False, True, True)) + o_cs.result = True + Equal(self.autocomplete.autocomplete_event(ev), 'break') + Equal(o_cs.args, (False, True, True)) + + def test_open_completions_later(self): + # Test that autocomplete._delayed_completion_id is set + pass + + def test_delayed_open_completions(self): + # Test that autocomplete._delayed_completion_id set to None and that + # open_completions only called if insertion index is the same as + # _delayed_completion_index + pass + + def test_open_completions(self): + # Test completions of files and attributes as well as non-completion + # of errors + pass + + def test_fetch_completions(self): + # Test that fetch_completions returns 2 lists: + # For attribute completion, a large list containing all variables, and + # a small list containing non-private variables. + # For file completion, a large list containing all files in the path, + # and a small list containing files that do not start with '.' 
+ pass + + def test_get_entity(self): + # Test that a name is in the namespace of sys.modules and + # __main__.__dict__ + pass + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_autoexpand.py b/Darwin/lib/python3.5/idlelib/idle_test/test_autoexpand.py new file mode 100644 index 0000000..7ca941e --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_autoexpand.py @@ -0,0 +1,141 @@ +"""Unit tests for idlelib.AutoExpand""" +import unittest +from test.support import requires +from tkinter import Text, Tk +#from idlelib.idle_test.mock_tk import Text +from idlelib.AutoExpand import AutoExpand + + +class Dummy_Editwin: + # AutoExpand.__init__ only needs .text + def __init__(self, text): + self.text = text + +class AutoExpandTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + if 'tkinter' in str(Text): + requires('gui') + cls.tk = Tk() + cls.text = Text(cls.tk) + else: + cls.text = Text() + cls.auto_expand = AutoExpand(Dummy_Editwin(cls.text)) + + @classmethod + def tearDownClass(cls): + if hasattr(cls, 'tk'): + cls.tk.destroy() + del cls.tk + del cls.text, cls.auto_expand + + def tearDown(self): + self.text.delete('1.0', 'end') + + def test_get_prevword(self): + text = self.text + previous = self.auto_expand.getprevword + equal = self.assertEqual + + equal(previous(), '') + + text.insert('insert', 't') + equal(previous(), 't') + + text.insert('insert', 'his') + equal(previous(), 'this') + + text.insert('insert', ' ') + equal(previous(), '') + + text.insert('insert', 'is') + equal(previous(), 'is') + + text.insert('insert', '\nsample\nstring') + equal(previous(), 'string') + + text.delete('3.0', 'insert') + equal(previous(), '') + + text.delete('1.0', 'end') + equal(previous(), '') + + def test_before_only(self): + previous = self.auto_expand.getprevword + expand = self.auto_expand.expand_word_event + equal = self.assertEqual + + self.text.insert('insert', 'ab ac bx ad ab a') + equal(self.auto_expand.getwords(), ['ab', 'ad', 'ac', 'a']) + expand('event') + equal(previous(), 'ab') + expand('event') + equal(previous(), 'ad') + expand('event') + equal(previous(), 'ac') + expand('event') + equal(previous(), 'a') + + def test_after_only(self): + # Also add punctuation 'noise' that should be ignored. 
+ text = self.text + previous = self.auto_expand.getprevword + expand = self.auto_expand.expand_word_event + equal = self.assertEqual + + text.insert('insert', 'a, [ab] ac: () bx"" cd ac= ad ya') + text.mark_set('insert', '1.1') + equal(self.auto_expand.getwords(), ['ab', 'ac', 'ad', 'a']) + expand('event') + equal(previous(), 'ab') + expand('event') + equal(previous(), 'ac') + expand('event') + equal(previous(), 'ad') + expand('event') + equal(previous(), 'a') + + def test_both_before_after(self): + text = self.text + previous = self.auto_expand.getprevword + expand = self.auto_expand.expand_word_event + equal = self.assertEqual + + text.insert('insert', 'ab xy yz\n') + text.insert('insert', 'a ac by ac') + + text.mark_set('insert', '2.1') + equal(self.auto_expand.getwords(), ['ab', 'ac', 'a']) + expand('event') + equal(previous(), 'ab') + expand('event') + equal(previous(), 'ac') + expand('event') + equal(previous(), 'a') + + def test_other_expand_cases(self): + text = self.text + expand = self.auto_expand.expand_word_event + equal = self.assertEqual + + # no expansion candidate found + equal(self.auto_expand.getwords(), []) + equal(expand('event'), 'break') + + text.insert('insert', 'bx cy dz a') + equal(self.auto_expand.getwords(), []) + + # reset state by successfully expanding once + # move cursor to another position and expand again + text.insert('insert', 'ac xy a ac ad a') + text.mark_set('insert', '1.7') + expand('event') + initial_state = self.auto_expand.state + text.mark_set('insert', '1.end') + expand('event') + new_state = self.auto_expand.state + self.assertNotEqual(initial_state, new_state) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_calltips.py b/Darwin/lib/python3.5/idlelib/idle_test/test_calltips.py similarity index 90% rename from Darwin/lib/python3.4/idlelib/idle_test/test_calltips.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_calltips.py index 4ee15ae..b2a733c 100644 --- a/Darwin/lib/python3.4/idlelib/idle_test/test_calltips.py +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_calltips.py @@ -52,7 +52,8 @@ class Get_signatureTest(unittest.TestCase): def gtest(obj, out): self.assertEqual(signature(obj), out) - gtest(List, List.__doc__) + if List.__doc__ is not None: + gtest(List, List.__doc__) gtest(list.__new__, 'Create and return a new object. 
See help(type) for accurate signature.') gtest(list.__init__, @@ -66,7 +67,8 @@ class Get_signatureTest(unittest.TestCase): gtest(SB(), default_tip) def test_signature_wrap(self): - self.assertEqual(signature(textwrap.TextWrapper), '''\ + if textwrap.TextWrapper.__doc__ is not None: + self.assertEqual(signature(textwrap.TextWrapper), '''\ (width=70, initial_indent='', subsequent_indent='', expand_tabs=True, replace_whitespace=True, fix_sentence_endings=False, break_long_words=True, drop_whitespace=True, break_on_hyphens=True, tabsize=8, *, max_lines=None, @@ -108,20 +110,23 @@ bytes() -> empty bytes object''') def t5(a, b=None, *args, **kw): 'doc' t5.tip = "(a, b=None, *args, **kw)" + doc = '\ndoc' if t1.__doc__ is not None else '' for func in (t1, t2, t3, t4, t5, TC): - self.assertEqual(signature(func), func.tip + '\ndoc') + self.assertEqual(signature(func), func.tip + doc) def test_methods(self): + doc = '\ndoc' if TC.__doc__ is not None else '' for meth in (TC.t1, TC.t2, TC.t3, TC.t4, TC.t5, TC.t6, TC.__call__): - self.assertEqual(signature(meth), meth.tip + "\ndoc") - self.assertEqual(signature(TC.cm), "(a)\ndoc") - self.assertEqual(signature(TC.sm), "(b)\ndoc") + self.assertEqual(signature(meth), meth.tip + doc) + self.assertEqual(signature(TC.cm), "(a)" + doc) + self.assertEqual(signature(TC.sm), "(b)" + doc) def test_bound_methods(self): # test that first parameter is correctly removed from argspec + doc = '\ndoc' if TC.__doc__ is not None else '' for meth, mtip in ((tc.t1, "()"), (tc.t4, "(*args)"), (tc.t6, "(self)"), (tc.__call__, '(ci)'), (tc, '(ci)'), (TC.cm, "(a)"),): - self.assertEqual(signature(meth), mtip + "\ndoc") + self.assertEqual(signature(meth), mtip + doc) def test_starred_parameter(self): # test that starred first parameter is *not* removed from argspec diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_config_name.py b/Darwin/lib/python3.5/idlelib/idle_test/test_config_name.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_config_name.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_config_name.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_configdialog.py b/Darwin/lib/python3.5/idlelib/idle_test/test_configdialog.py new file mode 100644 index 0000000..6883123 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_configdialog.py @@ -0,0 +1,32 @@ +'''Unittests for idlelib/configHandler.py + +Coverage: 46% just by creating dialog. The other half is change code. 
+ +''' +import unittest +from test.support import requires +from tkinter import Tk +from idlelib.configDialog import ConfigDialog +from idlelib.macosxSupport import _initializeTkVariantTests + + +class ConfigDialogTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + _initializeTkVariantTests(cls.root) + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + del cls.root + + def test_dialog(self): + d=ConfigDialog(self.root, 'Test', _utest=True) + d.destroy() + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_delegator.py b/Darwin/lib/python3.5/idlelib/idle_test/test_delegator.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_delegator.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_delegator.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_editor.py b/Darwin/lib/python3.5/idlelib/idle_test/test_editor.py new file mode 100644 index 0000000..a31d26d --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_editor.py @@ -0,0 +1,16 @@ +import unittest +from tkinter import Tk, Text +from idlelib.EditorWindow import EditorWindow +from test.support import requires + +class Editor_func_test(unittest.TestCase): + def test_filename_to_unicode(self): + func = EditorWindow._filename_to_unicode + class dummy(): filesystemencoding = 'utf-8' + pairs = (('abc', 'abc'), ('a\U00011111c', 'a\ufffdc'), + (b'abc', 'abc'), (b'a\xf0\x91\x84\x91c', 'a\ufffdc')) + for inp, out in pairs: + self.assertEqual(func(dummy, inp), out) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_formatparagraph.py b/Darwin/lib/python3.5/idlelib/idle_test/test_formatparagraph.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/idle_test/test_formatparagraph.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_formatparagraph.py index 690c936..f6039e6 100644 --- a/Darwin/lib/python3.4/idlelib/idle_test/test_formatparagraph.py +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_formatparagraph.py @@ -2,7 +2,7 @@ import unittest from idlelib import FormatParagraph as fp from idlelib.EditorWindow import EditorWindow -from tkinter import Tk, Text, TclError +from tkinter import Tk, Text from test.support import requires diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_grep.py b/Darwin/lib/python3.5/idlelib/idle_test/test_grep.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_grep.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_grep.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_hyperparser.py b/Darwin/lib/python3.5/idlelib/idle_test/test_hyperparser.py new file mode 100644 index 0000000..edfc783 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_hyperparser.py @@ -0,0 +1,273 @@ +"""Unittest for idlelib.HyperParser""" +import unittest +from test.support import requires +from tkinter import Tk, Text +from idlelib.EditorWindow import EditorWindow +from idlelib.HyperParser import HyperParser + +class DummyEditwin: + def __init__(self, text): + self.text = text + self.indentwidth = 8 + self.tabwidth = 8 + self.context_use_ps1 = True + self.num_context_lines = 50, 500, 1000 + + _build_char_in_string_func = EditorWindow._build_char_in_string_func + is_char_in_string = EditorWindow.is_char_in_string + + +class HyperParserTest(unittest.TestCase): + code = ( + '"""This is a module docstring"""\n' + '# this line is a 
comment\n' + 'x = "this is a string"\n' + "y = 'this is also a string'\n" + 'l = [i for i in range(10)]\n' + 'm = [py*py for # comment\n' + ' py in l]\n' + 'x.__len__\n' + "z = ((r'asdf')+('a')))\n" + '[x for x in\n' + 'for = False\n' + 'cliché = "this is a string with unicode, what a cliché"' + ) + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + cls.text = Text(cls.root) + cls.editwin = DummyEditwin(cls.text) + + @classmethod + def tearDownClass(cls): + del cls.text, cls.editwin + cls.root.destroy() + del cls.root + + def setUp(self): + self.text.insert('insert', self.code) + + def tearDown(self): + self.text.delete('1.0', 'end') + self.editwin.context_use_ps1 = True + + def get_parser(self, index): + """ + Return a parser object with index at 'index' + """ + return HyperParser(self.editwin, index) + + def test_init(self): + """ + test corner cases in the init method + """ + with self.assertRaises(ValueError) as ve: + self.text.tag_add('console', '1.0', '1.end') + p = self.get_parser('1.5') + self.assertIn('precedes', str(ve.exception)) + + # test without ps1 + self.editwin.context_use_ps1 = False + + # number of lines lesser than 50 + p = self.get_parser('end') + self.assertEqual(p.rawtext, self.text.get('1.0', 'end')) + + # number of lines greater than 50 + self.text.insert('end', self.text.get('1.0', 'end')*4) + p = self.get_parser('54.5') + + def test_is_in_string(self): + get = self.get_parser + + p = get('1.0') + self.assertFalse(p.is_in_string()) + p = get('1.4') + self.assertTrue(p.is_in_string()) + p = get('2.3') + self.assertFalse(p.is_in_string()) + p = get('3.3') + self.assertFalse(p.is_in_string()) + p = get('3.7') + self.assertTrue(p.is_in_string()) + p = get('4.6') + self.assertTrue(p.is_in_string()) + p = get('12.54') + self.assertTrue(p.is_in_string()) + + def test_is_in_code(self): + get = self.get_parser + + p = get('1.0') + self.assertTrue(p.is_in_code()) + p = get('1.1') + self.assertFalse(p.is_in_code()) + p = get('2.5') + self.assertFalse(p.is_in_code()) + p = get('3.4') + self.assertTrue(p.is_in_code()) + p = get('3.6') + self.assertFalse(p.is_in_code()) + p = get('4.14') + self.assertFalse(p.is_in_code()) + + def test_get_surrounding_bracket(self): + get = self.get_parser + + def without_mustclose(parser): + # a utility function to get surrounding bracket + # with mustclose=False + return parser.get_surrounding_brackets(mustclose=False) + + def with_mustclose(parser): + # a utility function to get surrounding bracket + # with mustclose=True + return parser.get_surrounding_brackets(mustclose=True) + + p = get('3.2') + self.assertIsNone(with_mustclose(p)) + self.assertIsNone(without_mustclose(p)) + + p = get('5.6') + self.assertTupleEqual(without_mustclose(p), ('5.4', '5.25')) + self.assertTupleEqual(without_mustclose(p), with_mustclose(p)) + + p = get('5.23') + self.assertTupleEqual(without_mustclose(p), ('5.21', '5.24')) + self.assertTupleEqual(without_mustclose(p), with_mustclose(p)) + + p = get('6.15') + self.assertTupleEqual(without_mustclose(p), ('6.4', '6.end')) + self.assertIsNone(with_mustclose(p)) + + p = get('9.end') + self.assertIsNone(with_mustclose(p)) + self.assertIsNone(without_mustclose(p)) + + def test_get_expression(self): + get = self.get_parser + + p = get('4.2') + self.assertEqual(p.get_expression(), 'y ') + + p = get('4.7') + with self.assertRaises(ValueError) as ve: + p.get_expression() + self.assertIn('is inside a code', str(ve.exception)) + + p = get('5.25') + self.assertEqual(p.get_expression(), 'range(10)') + + 
p = get('6.7') + self.assertEqual(p.get_expression(), 'py') + + p = get('6.8') + self.assertEqual(p.get_expression(), '') + + p = get('7.9') + self.assertEqual(p.get_expression(), 'py') + + p = get('8.end') + self.assertEqual(p.get_expression(), 'x.__len__') + + p = get('9.13') + self.assertEqual(p.get_expression(), "r'asdf'") + + p = get('9.17') + with self.assertRaises(ValueError) as ve: + p.get_expression() + self.assertIn('is inside a code', str(ve.exception)) + + p = get('10.0') + self.assertEqual(p.get_expression(), '') + + p = get('10.6') + self.assertEqual(p.get_expression(), '') + + p = get('10.11') + self.assertEqual(p.get_expression(), '') + + p = get('11.3') + self.assertEqual(p.get_expression(), '') + + p = get('11.11') + self.assertEqual(p.get_expression(), 'False') + + p = get('12.6') + self.assertEqual(p.get_expression(), 'cliché') + + def test_eat_identifier(self): + def is_valid_id(candidate): + result = HyperParser._eat_identifier(candidate, 0, len(candidate)) + if result == len(candidate): + return True + elif result == 0: + return False + else: + err_msg = "Unexpected result: {} (expected 0 or {}".format( + result, len(candidate) + ) + raise Exception(err_msg) + + # invalid first character which is valid elsewhere in an identifier + self.assertFalse(is_valid_id('2notid')) + + # ASCII-only valid identifiers + self.assertTrue(is_valid_id('valid_id')) + self.assertTrue(is_valid_id('_valid_id')) + self.assertTrue(is_valid_id('valid_id_')) + self.assertTrue(is_valid_id('_2valid_id')) + + # keywords which should be "eaten" + self.assertTrue(is_valid_id('True')) + self.assertTrue(is_valid_id('False')) + self.assertTrue(is_valid_id('None')) + + # keywords which should not be "eaten" + self.assertFalse(is_valid_id('for')) + self.assertFalse(is_valid_id('import')) + self.assertFalse(is_valid_id('return')) + + # valid unicode identifiers + self.assertTrue(is_valid_id('cliche')) + self.assertTrue(is_valid_id('cliché')) + self.assertTrue(is_valid_id('a٢')) + + # invalid unicode identifiers + self.assertFalse(is_valid_id('2a')) + self.assertFalse(is_valid_id('٢a')) + self.assertFalse(is_valid_id('a²')) + + # valid identifier after "punctuation" + self.assertEqual(HyperParser._eat_identifier('+ var', 0, 5), len('var')) + self.assertEqual(HyperParser._eat_identifier('+var', 0, 4), len('var')) + self.assertEqual(HyperParser._eat_identifier('.var', 0, 4), len('var')) + + # invalid identifiers + self.assertFalse(is_valid_id('+')) + self.assertFalse(is_valid_id(' ')) + self.assertFalse(is_valid_id(':')) + self.assertFalse(is_valid_id('?')) + self.assertFalse(is_valid_id('^')) + self.assertFalse(is_valid_id('\\')) + self.assertFalse(is_valid_id('"')) + self.assertFalse(is_valid_id('"a string"')) + + def test_eat_identifier_various_lengths(self): + eat_id = HyperParser._eat_identifier + + for length in range(1, 21): + self.assertEqual(eat_id('a' * length, 0, length), length) + self.assertEqual(eat_id('é' * length, 0, length), length) + self.assertEqual(eat_id('a' + '2' * (length - 1), 0, length), length) + self.assertEqual(eat_id('é' + '2' * (length - 1), 0, length), length) + self.assertEqual(eat_id('é' + 'a' * (length - 1), 0, length), length) + self.assertEqual(eat_id('é' * (length - 1) + 'a', 0, length), length) + self.assertEqual(eat_id('+' * length, 0, length), 0) + self.assertEqual(eat_id('2' + 'a' * (length - 1), 0, length), 0) + self.assertEqual(eat_id('2' + 'é' * (length - 1), 0, length), 0) + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git 
a/Darwin/lib/python3.4/idlelib/idle_test/test_idlehistory.py b/Darwin/lib/python3.5/idlelib/idle_test/test_idlehistory.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_idlehistory.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_idlehistory.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_io.py b/Darwin/lib/python3.5/idlelib/idle_test/test_io.py new file mode 100644 index 0000000..e0e3b98 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_io.py @@ -0,0 +1,233 @@ +import unittest +import io +from idlelib.PyShell import PseudoInputFile, PseudoOutputFile + + +class S(str): + def __str__(self): + return '%s:str' % type(self).__name__ + def __unicode__(self): + return '%s:unicode' % type(self).__name__ + def __len__(self): + return 3 + def __iter__(self): + return iter('abc') + def __getitem__(self, *args): + return '%s:item' % type(self).__name__ + def __getslice__(self, *args): + return '%s:slice' % type(self).__name__ + +class MockShell: + def __init__(self): + self.reset() + + def write(self, *args): + self.written.append(args) + + def readline(self): + return self.lines.pop() + + def close(self): + pass + + def reset(self): + self.written = [] + + def push(self, lines): + self.lines = list(lines)[::-1] + + +class PseudeOutputFilesTest(unittest.TestCase): + def test_misc(self): + shell = MockShell() + f = PseudoOutputFile(shell, 'stdout', 'utf-8') + self.assertIsInstance(f, io.TextIOBase) + self.assertEqual(f.encoding, 'utf-8') + self.assertIsNone(f.errors) + self.assertIsNone(f.newlines) + self.assertEqual(f.name, '') + self.assertFalse(f.closed) + self.assertTrue(f.isatty()) + self.assertFalse(f.readable()) + self.assertTrue(f.writable()) + self.assertFalse(f.seekable()) + + def test_unsupported(self): + shell = MockShell() + f = PseudoOutputFile(shell, 'stdout', 'utf-8') + self.assertRaises(OSError, f.fileno) + self.assertRaises(OSError, f.tell) + self.assertRaises(OSError, f.seek, 0) + self.assertRaises(OSError, f.read, 0) + self.assertRaises(OSError, f.readline, 0) + + def test_write(self): + shell = MockShell() + f = PseudoOutputFile(shell, 'stdout', 'utf-8') + f.write('test') + self.assertEqual(shell.written, [('test', 'stdout')]) + shell.reset() + f.write('t\xe8st') + self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) + shell.reset() + + f.write(S('t\xe8st')) + self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) + self.assertEqual(type(shell.written[0][0]), str) + shell.reset() + + self.assertRaises(TypeError, f.write) + self.assertEqual(shell.written, []) + self.assertRaises(TypeError, f.write, b'test') + self.assertRaises(TypeError, f.write, 123) + self.assertEqual(shell.written, []) + self.assertRaises(TypeError, f.write, 'test', 'spam') + self.assertEqual(shell.written, []) + + def test_writelines(self): + shell = MockShell() + f = PseudoOutputFile(shell, 'stdout', 'utf-8') + f.writelines([]) + self.assertEqual(shell.written, []) + shell.reset() + f.writelines(['one\n', 'two']) + self.assertEqual(shell.written, + [('one\n', 'stdout'), ('two', 'stdout')]) + shell.reset() + f.writelines(['on\xe8\n', 'tw\xf2']) + self.assertEqual(shell.written, + [('on\xe8\n', 'stdout'), ('tw\xf2', 'stdout')]) + shell.reset() + + f.writelines([S('t\xe8st')]) + self.assertEqual(shell.written, [('t\xe8st', 'stdout')]) + self.assertEqual(type(shell.written[0][0]), str) + shell.reset() + + self.assertRaises(TypeError, f.writelines) + self.assertEqual(shell.written, []) + self.assertRaises(TypeError, f.writelines, 123) + 
self.assertEqual(shell.written, []) + self.assertRaises(TypeError, f.writelines, [b'test']) + self.assertRaises(TypeError, f.writelines, [123]) + self.assertEqual(shell.written, []) + self.assertRaises(TypeError, f.writelines, [], []) + self.assertEqual(shell.written, []) + + def test_close(self): + shell = MockShell() + f = PseudoOutputFile(shell, 'stdout', 'utf-8') + self.assertFalse(f.closed) + f.write('test') + f.close() + self.assertTrue(f.closed) + self.assertRaises(ValueError, f.write, 'x') + self.assertEqual(shell.written, [('test', 'stdout')]) + f.close() + self.assertRaises(TypeError, f.close, 1) + + +class PseudeInputFilesTest(unittest.TestCase): + def test_misc(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + self.assertIsInstance(f, io.TextIOBase) + self.assertEqual(f.encoding, 'utf-8') + self.assertIsNone(f.errors) + self.assertIsNone(f.newlines) + self.assertEqual(f.name, '') + self.assertFalse(f.closed) + self.assertTrue(f.isatty()) + self.assertTrue(f.readable()) + self.assertFalse(f.writable()) + self.assertFalse(f.seekable()) + + def test_unsupported(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + self.assertRaises(OSError, f.fileno) + self.assertRaises(OSError, f.tell) + self.assertRaises(OSError, f.seek, 0) + self.assertRaises(OSError, f.write, 'x') + self.assertRaises(OSError, f.writelines, ['x']) + + def test_read(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.read(), 'one\ntwo\n') + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.read(-1), 'one\ntwo\n') + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.read(None), 'one\ntwo\n') + shell.push(['one\n', 'two\n', 'three\n', '']) + self.assertEqual(f.read(2), 'on') + self.assertEqual(f.read(3), 'e\nt') + self.assertEqual(f.read(10), 'wo\nthree\n') + + shell.push(['one\n', 'two\n']) + self.assertEqual(f.read(0), '') + self.assertRaises(TypeError, f.read, 1.5) + self.assertRaises(TypeError, f.read, '1') + self.assertRaises(TypeError, f.read, 1, 1) + + def test_readline(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + shell.push(['one\n', 'two\n', 'three\n', 'four\n']) + self.assertEqual(f.readline(), 'one\n') + self.assertEqual(f.readline(-1), 'two\n') + self.assertEqual(f.readline(None), 'three\n') + shell.push(['one\ntwo\n']) + self.assertEqual(f.readline(), 'one\n') + self.assertEqual(f.readline(), 'two\n') + shell.push(['one', 'two', 'three']) + self.assertEqual(f.readline(), 'one') + self.assertEqual(f.readline(), 'two') + shell.push(['one\n', 'two\n', 'three\n']) + self.assertEqual(f.readline(2), 'on') + self.assertEqual(f.readline(1), 'e') + self.assertEqual(f.readline(1), '\n') + self.assertEqual(f.readline(10), 'two\n') + + shell.push(['one\n', 'two\n']) + self.assertEqual(f.readline(0), '') + self.assertRaises(TypeError, f.readlines, 1.5) + self.assertRaises(TypeError, f.readlines, '1') + self.assertRaises(TypeError, f.readlines, 1, 1) + + def test_readlines(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.readlines(), ['one\n', 'two\n']) + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.readlines(-1), ['one\n', 'two\n']) + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.readlines(None), ['one\n', 'two\n']) + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.readlines(0), ['one\n', 'two\n']) + shell.push(['one\n', 
'two\n', '']) + self.assertEqual(f.readlines(3), ['one\n']) + shell.push(['one\n', 'two\n', '']) + self.assertEqual(f.readlines(4), ['one\n', 'two\n']) + + shell.push(['one\n', 'two\n', '']) + self.assertRaises(TypeError, f.readlines, 1.5) + self.assertRaises(TypeError, f.readlines, '1') + self.assertRaises(TypeError, f.readlines, 1, 1) + + def test_close(self): + shell = MockShell() + f = PseudoInputFile(shell, 'stdin', 'utf-8') + shell.push(['one\n', 'two\n', '']) + self.assertFalse(f.closed) + self.assertEqual(f.readline(), 'one\n') + f.close() + self.assertFalse(f.closed) + self.assertEqual(f.readline(), 'two\n') + self.assertRaises(TypeError, f.close, 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_parenmatch.py b/Darwin/lib/python3.5/idlelib/idle_test/test_parenmatch.py new file mode 100644 index 0000000..9aba4be --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_parenmatch.py @@ -0,0 +1,109 @@ +"""Test idlelib.ParenMatch.""" +# This must currently be a gui test because ParenMatch methods use +# several text methods not defined on idlelib.idle_test.mock_tk.Text. + +import unittest +from unittest.mock import Mock +from test.support import requires +from tkinter import Tk, Text +from idlelib.ParenMatch import ParenMatch + +class DummyEditwin: + def __init__(self, text): + self.text = text + self.indentwidth = 8 + self.tabwidth = 8 + self.context_use_ps1 = True + + +class ParenMatchTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + cls.text = Text(cls.root) + cls.editwin = DummyEditwin(cls.text) + cls.editwin.text_frame = Mock() + + @classmethod + def tearDownClass(cls): + del cls.text, cls.editwin + cls.root.destroy() + del cls.root + + def tearDown(self): + self.text.delete('1.0', 'end') + + def test_paren_expression(self): + """ + Test ParenMatch with 'expression' style. + """ + text = self.text + pm = ParenMatch(self.editwin) + pm.set_style('expression') + + text.insert('insert', 'def foobar(a, b') + pm.flash_paren_event('event') + self.assertIn('<>', text.event_info()) + self.assertTupleEqual(text.tag_prevrange('paren', 'end'), + ('1.10', '1.15')) + text.insert('insert', ')') + pm.restore_event() + self.assertNotIn('<>', text.event_info()) + self.assertEqual(text.tag_prevrange('paren', 'end'), ()) + + # paren_closed_event can only be tested as below + pm.paren_closed_event('event') + self.assertTupleEqual(text.tag_prevrange('paren', 'end'), + ('1.10', '1.16')) + + def test_paren_default(self): + """ + Test ParenMatch with 'default' style. + """ + text = self.text + pm = ParenMatch(self.editwin) + pm.set_style('default') + + text.insert('insert', 'def foobar(a, b') + pm.flash_paren_event('event') + self.assertIn('<>', text.event_info()) + self.assertTupleEqual(text.tag_prevrange('paren', 'end'), + ('1.10', '1.11')) + text.insert('insert', ')') + pm.restore_event() + self.assertNotIn('<>', text.event_info()) + self.assertEqual(text.tag_prevrange('paren', 'end'), ()) + + def test_paren_corner(self): + """ + Test corner cases in flash_paren_event and paren_closed_event. + + These cases force conditional expression and alternate paths. 
+ """ + text = self.text + pm = ParenMatch(self.editwin) + + text.insert('insert', '# this is a commen)') + self.assertIsNone(pm.paren_closed_event('event')) + + text.insert('insert', '\ndef') + self.assertIsNone(pm.flash_paren_event('event')) + self.assertIsNone(pm.paren_closed_event('event')) + + text.insert('insert', ' a, *arg)') + self.assertIsNone(pm.paren_closed_event('event')) + + def test_handle_restore_timer(self): + pm = ParenMatch(self.editwin) + pm.restore_event = Mock() + pm.handle_restore_timer(0) + self.assertTrue(pm.restore_event.called) + pm.restore_event.reset_mock() + pm.handle_restore_timer(1) + self.assertFalse(pm.restore_event.called) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_pathbrowser.py b/Darwin/lib/python3.5/idlelib/idle_test/test_pathbrowser.py new file mode 100644 index 0000000..afb886f --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_pathbrowser.py @@ -0,0 +1,27 @@ +import unittest +import os +import sys +import idlelib +from idlelib import PathBrowser + +class PathBrowserTest(unittest.TestCase): + + def test_DirBrowserTreeItem(self): + # Issue16226 - make sure that getting a sublist works + d = PathBrowser.DirBrowserTreeItem('') + d.GetSubList() + self.assertEqual('', d.GetText()) + + dir = os.path.split(os.path.abspath(idlelib.__file__))[0] + self.assertEqual(d.ispackagedir(dir), True) + self.assertEqual(d.ispackagedir(dir + '/Icons'), False) + + def test_PathBrowserTreeItem(self): + p = PathBrowser.PathBrowserTreeItem() + self.assertEqual(p.GetText(), 'sys.path') + sub = p.GetSubList() + self.assertEqual(len(sub), len(sys.path)) + self.assertEqual(type(sub[0]), PathBrowser.DirBrowserTreeItem) + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=False) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_rstrip.py b/Darwin/lib/python3.5/idlelib/idle_test/test_rstrip.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_rstrip.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_rstrip.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_searchdialogbase.py b/Darwin/lib/python3.5/idlelib/idle_test/test_searchdialogbase.py new file mode 100644 index 0000000..8036b91 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_searchdialogbase.py @@ -0,0 +1,165 @@ +'''Unittests for idlelib/SearchDialogBase.py + +Coverage: 99%. The only thing not covered is inconsequential -- +testing skipping of suite when self.needwrapbutton is false. + +''' +import unittest +from test.support import requires +from tkinter import Tk, Toplevel, Frame ##, BooleanVar, StringVar +from idlelib import SearchEngine as se +from idlelib import SearchDialogBase as sdb +from idlelib.idle_test.mock_idle import Func +## from idlelib.idle_test.mock_tk import Var + +# The ## imports above & following could help make some tests gui-free. +# However, they currently make radiobutton tests fail. +##def setUpModule(): +## # Replace tk objects used to initialize se.SearchEngine. 
+## se.BooleanVar = Var +## se.StringVar = Var +## +##def tearDownModule(): +## se.BooleanVar = BooleanVar +## se.StringVar = StringVar + +class SearchDialogBaseTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.root = Tk() + + @classmethod + def tearDownClass(cls): + cls.root.destroy() + del cls.root + + def setUp(self): + self.engine = se.SearchEngine(self.root) # None also seems to work + self.dialog = sdb.SearchDialogBase(root=self.root, engine=self.engine) + + def tearDown(self): + self.dialog.close() + + def test_open_and_close(self): + # open calls create_widgets, which needs default_command + self.dialog.default_command = None + + # Since text parameter of .open is not used in base class, + # pass dummy 'text' instead of tk.Text(). + self.dialog.open('text') + self.assertEqual(self.dialog.top.state(), 'normal') + self.dialog.close() + self.assertEqual(self.dialog.top.state(), 'withdrawn') + + self.dialog.open('text', searchphrase="hello") + self.assertEqual(self.dialog.ent.get(), 'hello') + self.dialog.close() + + def test_create_widgets(self): + self.dialog.create_entries = Func() + self.dialog.create_option_buttons = Func() + self.dialog.create_other_buttons = Func() + self.dialog.create_command_buttons = Func() + + self.dialog.default_command = None + self.dialog.create_widgets() + + self.assertTrue(self.dialog.create_entries.called) + self.assertTrue(self.dialog.create_option_buttons.called) + self.assertTrue(self.dialog.create_other_buttons.called) + self.assertTrue(self.dialog.create_command_buttons.called) + + def test_make_entry(self): + equal = self.assertEqual + self.dialog.row = 0 + self.dialog.top = Toplevel(self.root) + entry, label = self.dialog.make_entry("Test:", 'hello') + equal(label['text'], 'Test:') + + self.assertIn(entry.get(), 'hello') + egi = entry.grid_info() + equal(int(egi['row']), 0) + equal(int(egi['column']), 1) + equal(int(egi['rowspan']), 1) + equal(int(egi['columnspan']), 1) + equal(self.dialog.row, 1) + + def test_create_entries(self): + self.dialog.row = 0 + self.engine.setpat('hello') + self.dialog.create_entries() + self.assertIn(self.dialog.ent.get(), 'hello') + + def test_make_frame(self): + self.dialog.row = 0 + self.dialog.top = Toplevel(self.root) + frame, label = self.dialog.make_frame() + self.assertEqual(label, '') + self.assertIsInstance(frame, Frame) + + frame, label = self.dialog.make_frame('testlabel') + self.assertEqual(label['text'], 'testlabel') + self.assertIsInstance(frame, Frame) + + def btn_test_setup(self, meth): + self.dialog.top = Toplevel(self.root) + self.dialog.row = 0 + return meth() + + def test_create_option_buttons(self): + e = self.engine + for state in (0, 1): + for var in (e.revar, e.casevar, e.wordvar, e.wrapvar): + var.set(state) + frame, options = self.btn_test_setup( + self.dialog.create_option_buttons) + for spec, button in zip (options, frame.pack_slaves()): + var, label = spec + self.assertEqual(button['text'], label) + self.assertEqual(var.get(), state) + if state == 1: + button.deselect() + else: + button.select() + self.assertEqual(var.get(), 1 - state) + + def test_create_other_buttons(self): + for state in (False, True): + var = self.engine.backvar + var.set(state) + frame, others = self.btn_test_setup( + self.dialog.create_other_buttons) + buttons = frame.pack_slaves() + for spec, button in zip(others, buttons): + val, label = spec + self.assertEqual(button['text'], label) + if val == state: + # hit other button, then this one + # indexes depend on button order + 
self.assertEqual(var.get(), state) + buttons[val].select() + self.assertEqual(var.get(), 1 - state) + buttons[1-val].select() + self.assertEqual(var.get(), state) + + def test_make_button(self): + self.dialog.top = Toplevel(self.root) + self.dialog.buttonframe = Frame(self.dialog.top) + btn = self.dialog.make_button('Test', self.dialog.close) + self.assertEqual(btn['text'], 'Test') + + def test_create_command_buttons(self): + self.dialog.create_command_buttons() + # Look for close button command in buttonframe + closebuttoncommand = '' + for child in self.dialog.buttonframe.winfo_children(): + if child['text'] == 'close': + closebuttoncommand = child['command'] + self.assertIn('close', closebuttoncommand) + + + +if __name__ == '__main__': + unittest.main(verbosity=2, exit=2) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_searchengine.py b/Darwin/lib/python3.5/idlelib/idle_test/test_searchengine.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/idle_test/test_searchengine.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_searchengine.py index 129a5a3..edbd558 100644 --- a/Darwin/lib/python3.4/idlelib/idle_test/test_searchengine.py +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_searchengine.py @@ -7,7 +7,7 @@ import re import unittest -from test.support import requires +# from test.support import requires from tkinter import BooleanVar, StringVar, TclError # ,Tk, Text import tkinter.messagebox as tkMessageBox from idlelib import SearchEngine as se @@ -178,7 +178,7 @@ class SearchEngineTest(unittest.TestCase): engine.revar.set(1) Equal(engine.getprog(), None) self.assertEqual(Mbox.showerror.message, - 'Error: nothing to repeat\nPattern: +') + 'Error: nothing to repeat at position 0\nPattern: +') def test_report_error(self): showerror = Mbox.showerror diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_text.py b/Darwin/lib/python3.5/idlelib/idle_test/test_text.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/idle_test/test_text.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_text.py index 5ac2fd7..7e823df 100644 --- a/Darwin/lib/python3.4/idlelib/idle_test/test_text.py +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_text.py @@ -3,7 +3,6 @@ import unittest from test.support import requires from _tkinter import TclError -import tkinter as tk class TextTest(object): diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_textview.py b/Darwin/lib/python3.5/idlelib/idle_test/test_textview.py new file mode 100644 index 0000000..68e5b82 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_textview.py @@ -0,0 +1,97 @@ +'''Test the functions and main class method of textView.py. + +Since all methods and functions create (or destroy) a TextViewer, which +is a widget containing multiple widgets, all tests must be gui tests. +Using mock Text would not change this. Other mocks are used to retrieve +information about calls. + +The coverage is essentially 100%. 
+''' +from test.support import requires +requires('gui') + +import unittest +import os +from tkinter import Tk +from idlelib import textView as tv +from idlelib.idle_test.mock_idle import Func +from idlelib.idle_test.mock_tk import Mbox + +def setUpModule(): + global root + root = Tk() + +def tearDownModule(): + global root + root.destroy() # pyflakes falsely sees root as undefined + del root + + +class TV(tv.TextViewer): # used by TextViewTest + transient = Func() + grab_set = Func() + wait_window = Func() + +class TextViewTest(unittest.TestCase): + + def setUp(self): + TV.transient.__init__() + TV.grab_set.__init__() + TV.wait_window.__init__() + + def test_init_modal(self): + view = TV(root, 'Title', 'test text') + self.assertTrue(TV.transient.called) + self.assertTrue(TV.grab_set.called) + self.assertTrue(TV.wait_window.called) + view.Ok() + + def test_init_nonmodal(self): + view = TV(root, 'Title', 'test text', modal=False) + self.assertFalse(TV.transient.called) + self.assertFalse(TV.grab_set.called) + self.assertFalse(TV.wait_window.called) + view.Ok() + + def test_ok(self): + view = TV(root, 'Title', 'test text', modal=False) + view.destroy = Func() + view.Ok() + self.assertTrue(view.destroy.called) + del view.destroy # unmask real function + view.destroy + + +class textviewTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + cls.orig_mbox = tv.tkMessageBox + tv.tkMessageBox = Mbox + + @classmethod + def tearDownClass(cls): + tv.tkMessageBox = cls.orig_mbox + del cls.orig_mbox + + def test_view_text(self): + # If modal True, tkinter will error with 'can't invoke "event" command' + view = tv.view_text(root, 'Title', 'test text', modal=False) + self.assertIsInstance(view, tv.TextViewer) + + def test_view_file(self): + test_dir = os.path.dirname(__file__) + testfile = os.path.join(test_dir, 'test_textview.py') + view = tv.view_file(root, 'Title', testfile, modal=False) + self.assertIsInstance(view, tv.TextViewer) + self.assertIn('Test', view.textView.get('1.0', '1.end')) + view.Ok() + + # Mock messagebox will be used and view_file will not return anything + testfile = os.path.join(test_dir, '../notthere.py') + view = tv.view_file(root, 'Title', testfile, modal=False) + self.assertIsNone(view) + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.4/idlelib/idle_test/test_warning.py b/Darwin/lib/python3.5/idlelib/idle_test/test_warning.py similarity index 100% rename from Darwin/lib/python3.4/idlelib/idle_test/test_warning.py rename to Darwin/lib/python3.5/idlelib/idle_test/test_warning.py diff --git a/Darwin/lib/python3.5/idlelib/idle_test/test_widgetredir.py b/Darwin/lib/python3.5/idlelib/idle_test/test_widgetredir.py new file mode 100644 index 0000000..6440561 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idle_test/test_widgetredir.py @@ -0,0 +1,122 @@ +"""Unittest for idlelib.WidgetRedirector + +100% coverage +""" +from test.support import requires +import unittest +from idlelib.idle_test.mock_idle import Func +from tkinter import Tk, Text, TclError +from idlelib.WidgetRedirector import WidgetRedirector + + +class InitCloseTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.tk = Tk() + cls.text = Text(cls.tk) + + @classmethod + def tearDownClass(cls): + cls.text.destroy() + cls.tk.destroy() + del cls.text, cls.tk + + def test_init(self): + redir = WidgetRedirector(self.text) + self.assertEqual(redir.widget, self.text) + self.assertEqual(redir.tk, self.text.tk) + self.assertRaises(TclError, 
WidgetRedirector, self.text) + redir.close() # restore self.tk, self.text + + def test_close(self): + redir = WidgetRedirector(self.text) + redir.register('insert', Func) + redir.close() + self.assertEqual(redir._operations, {}) + self.assertFalse(hasattr(self.text, 'widget')) + + +class WidgetRedirectorTest(unittest.TestCase): + + @classmethod + def setUpClass(cls): + requires('gui') + cls.tk = Tk() + cls.text = Text(cls.tk) + + @classmethod + def tearDownClass(cls): + cls.text.destroy() + cls.tk.destroy() + del cls.text, cls.tk + + def setUp(self): + self.redir = WidgetRedirector(self.text) + self.func = Func() + self.orig_insert = self.redir.register('insert', self.func) + self.text.insert('insert', 'asdf') # leaves self.text empty + + def tearDown(self): + self.text.delete('1.0', 'end') + self.redir.close() + + def test_repr(self): # partly for 100% coverage + self.assertIn('Redirector', repr(self.redir)) + self.assertIn('Original', repr(self.orig_insert)) + + def test_register(self): + self.assertEqual(self.text.get('1.0', 'end'), '\n') + self.assertEqual(self.func.args, ('insert', 'asdf')) + self.assertIn('insert', self.redir._operations) + self.assertIn('insert', self.text.__dict__) + self.assertEqual(self.text.insert, self.func) + + def test_original_command(self): + self.assertEqual(self.orig_insert.operation, 'insert') + self.assertEqual(self.orig_insert.tk_call, self.text.tk.call) + self.orig_insert('insert', 'asdf') + self.assertEqual(self.text.get('1.0', 'end'), 'asdf\n') + + def test_unregister(self): + self.assertIsNone(self.redir.unregister('invalid operation name')) + self.assertEqual(self.redir.unregister('insert'), self.func) + self.assertNotIn('insert', self.redir._operations) + self.assertNotIn('insert', self.text.__dict__) + + def test_unregister_no_attribute(self): + del self.text.insert + self.assertEqual(self.redir.unregister('insert'), self.func) + + def test_dispatch_intercept(self): + self.func.__init__(True) + self.assertTrue(self.redir.dispatch('insert', False)) + self.assertFalse(self.func.args[0]) + + def test_dispatch_bypass(self): + self.orig_insert('insert', 'asdf') + # tk.call returns '' where Python would return None + self.assertEqual(self.redir.dispatch('delete', '1.0', 'end'), '') + self.assertEqual(self.text.get('1.0', 'end'), '\n') + + def test_dispatch_error(self): + self.func.__init__(TclError()) + self.assertEqual(self.redir.dispatch('insert', False), '') + self.assertEqual(self.redir.dispatch('invalid'), '') + + def test_command_dispatch(self): + # Test that .__init__ causes redirection of tk calls + # through redir.dispatch + self.tk.call(self.text._w, 'insert', 'hello') + self.assertEqual(self.func.args, ('hello',)) + self.assertEqual(self.text.get('1.0', 'end'), '\n') + # Ensure that called through redir .dispatch and not through + # self.text.insert by having mock raise TclError. + self.func.__init__(TclError()) + self.assertEqual(self.tk.call(self.text._w, 'insert', 'boo'), '') + + + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/Darwin/lib/python3.5/idlelib/idlever.py b/Darwin/lib/python3.5/idlelib/idlever.py new file mode 100644 index 0000000..563d933 --- /dev/null +++ b/Darwin/lib/python3.5/idlelib/idlever.py @@ -0,0 +1,4 @@ +"""Unused by Idle: there is no separate Idle version anymore. 
+Kept only for possible existing extension use.""" +from sys import version +IDLE_VERSION = version[:version.index(' ')] diff --git a/Darwin/lib/python3.4/idlelib/keybindingDialog.py b/Darwin/lib/python3.5/idlelib/keybindingDialog.py similarity index 95% rename from Darwin/lib/python3.4/idlelib/keybindingDialog.py rename to Darwin/lib/python3.5/idlelib/keybindingDialog.py index db88cb4..e6438bf 100644 --- a/Darwin/lib/python3.4/idlelib/keybindingDialog.py +++ b/Darwin/lib/python3.5/idlelib/keybindingDialog.py @@ -7,12 +7,13 @@ import string import sys class GetKeysDialog(Toplevel): - def __init__(self,parent,title,action,currentKeySequences): + def __init__(self,parent,title,action,currentKeySequences,_htest=False): """ action - string, the name of the virtual event these keys will be mapped to currentKeys - list, a list of all key sequence lists currently mapped to virtual events, for overlap checking + _htest - bool, change box location when running htest """ Toplevel.__init__(self, parent) self.configure(borderwidth=5) @@ -38,11 +39,14 @@ class GetKeysDialog(Toplevel): self.LoadFinalKeyList() self.withdraw() #hide while setting geometry self.update_idletasks() - self.geometry("+%d+%d" % - ((parent.winfo_rootx()+((parent.winfo_width()/2) - -(self.winfo_reqwidth()/2)), - parent.winfo_rooty()+((parent.winfo_height()/2) - -(self.winfo_reqheight()/2)) )) ) #centre dialog over parent + self.geometry( + "+%d+%d" % ( + parent.winfo_rootx() + + (parent.winfo_width()/2 - self.winfo_reqwidth()/2), + parent.winfo_rooty() + + ((parent.winfo_height()/2 - self.winfo_reqheight()/2) + if not _htest else 150) + ) ) #centre dialog over parent (or below htest box) self.deiconify() #geometry set, unhide self.wait_window() @@ -258,11 +262,5 @@ class GetKeysDialog(Toplevel): return keysOK if __name__ == '__main__': - #test the dialog - root=Tk() - def run(): - keySeq='' - dlg=GetKeysDialog(root,'Get Keys','find-again',[]) - print(dlg.result) - Button(root,text='Dialog',command=run).pack() - root.mainloop() + from idlelib.idle_test.htest import run + run(GetKeysDialog) diff --git a/Darwin/lib/python3.4/idlelib/macosxSupport.py b/Darwin/lib/python3.5/idlelib/macosxSupport.py similarity index 96% rename from Darwin/lib/python3.4/idlelib/macosxSupport.py rename to Darwin/lib/python3.5/idlelib/macosxSupport.py index b6488f8..77330cf 100644 --- a/Darwin/lib/python3.4/idlelib/macosxSupport.py +++ b/Darwin/lib/python3.5/idlelib/macosxSupport.py @@ -123,11 +123,9 @@ def overrideRootMenu(root, flist): # # Due to a (mis-)feature of TkAqua the user will also see an empty Help # menu. 
- from tkinter import Menu, Text, Text - from idlelib.EditorWindow import prepstr, get_accelerator + from tkinter import Menu from idlelib import Bindings from idlelib import WindowList - from idlelib.MultiCall import MultiCallCreator closeItem = Bindings.menudefs[0][1][-2] @@ -140,16 +138,14 @@ def overrideRootMenu(root, flist): # Remove the 'About' entry from the help menu, it is in the application # menu del Bindings.menudefs[-1][1][0:2] - - # Remove the 'Configure' entry from the options menu, it is in the + # Remove the 'Configure Idle' entry from the options menu, it is in the # application menu as 'Preferences' - del Bindings.menudefs[-2][1][0:2] - + del Bindings.menudefs[-2][1][0] menubar = Menu(root) root.configure(menu=menubar) menudict = {} - menudict['windows'] = menu = Menu(menubar, name='windows') + menudict['windows'] = menu = Menu(menubar, name='windows', tearoff=0) menubar.add_cascade(label='Window', menu=menu, underline=0) def postwindowsmenu(menu=menu): @@ -195,7 +191,8 @@ def overrideRootMenu(root, flist): if isCarbonTk(): # for Carbon AquaTk, replace the default Tk apple menu - menudict['application'] = menu = Menu(menubar, name='apple') + menudict['application'] = menu = Menu(menubar, name='apple', + tearoff=0) menubar.add_cascade(label='IDLE', menu=menu) Bindings.menudefs.insert(0, ('application', [ diff --git a/Darwin/lib/python3.4/idlelib/rpc.py b/Darwin/lib/python3.5/idlelib/rpc.py similarity index 97% rename from Darwin/lib/python3.4/idlelib/rpc.py rename to Darwin/lib/python3.5/idlelib/rpc.py index 9c51b8f..aa33041 100644 --- a/Darwin/lib/python3.4/idlelib/rpc.py +++ b/Darwin/lib/python3.5/idlelib/rpc.py @@ -29,6 +29,7 @@ accomplished in Idle. import sys import os +import io import socket import select import socketserver @@ -53,16 +54,15 @@ def pickle_code(co): ms = marshal.dumps(co) return unpickle_code, (ms,) -# XXX KBK 24Aug02 function pickling capability not used in Idle -# def unpickle_function(ms): -# return ms +def dumps(obj, protocol=None): + f = io.BytesIO() + p = CodePickler(f, protocol) + p.dump(obj) + return f.getvalue() -# def pickle_function(fn): -# assert isinstance(fn, type.FunctionType) -# return repr(fn) - -copyreg.pickle(types.CodeType, pickle_code, unpickle_code) -# copyreg.pickle(types.FunctionType, pickle_function, unpickle_function) +class CodePickler(pickle.Pickler): + dispatch_table = {types.CodeType: pickle_code} + dispatch_table.update(copyreg.dispatch_table) BUFSIZE = 8*1024 LOCALHOST = '127.0.0.1' @@ -329,7 +329,7 @@ class SocketIO(object): def putmessage(self, message): self.debug("putmessage:%d:" % message[0]) try: - s = pickle.dumps(message) + s = dumps(message) except pickle.PicklingError: print("Cannot pickle:", repr(message), file=sys.__stderr__) raise @@ -340,10 +340,7 @@ class SocketIO(object): n = self.sock.send(s[:BUFSIZE]) except (AttributeError, TypeError): raise OSError("socket no longer exists") - except OSError: - raise - else: - s = s[n:] + s = s[n:] buff = b'' bufneed = 4 diff --git a/Darwin/lib/python3.4/idlelib/run.py b/Darwin/lib/python3.5/idlelib/run.py similarity index 99% rename from Darwin/lib/python3.4/idlelib/run.py rename to Darwin/lib/python3.5/idlelib/run.py index 13cec62..228875c 100644 --- a/Darwin/lib/python3.4/idlelib/run.py +++ b/Darwin/lib/python3.5/idlelib/run.py @@ -1,8 +1,6 @@ import sys -import io import linecache import time -import socket import traceback import _thread as thread import threading diff --git a/Darwin/lib/python3.4/idlelib/tabbedpages.py 
b/Darwin/lib/python3.5/idlelib/tabbedpages.py similarity index 98% rename from Darwin/lib/python3.4/idlelib/tabbedpages.py rename to Darwin/lib/python3.5/idlelib/tabbedpages.py index 2557732..965f9f8 100644 --- a/Darwin/lib/python3.4/idlelib/tabbedpages.py +++ b/Darwin/lib/python3.5/idlelib/tabbedpages.py @@ -467,9 +467,12 @@ class TabbedPageSet(Frame): self._tab_set.set_selected_tab(page_name) -if __name__ == '__main__': +def _tabbed_pages(parent): # test dialog root=Tk() + width, height, x, y = list(map(int, re.split('[x+]', parent.geometry()))) + root.geometry("+%d+%d"%(x, y + 175)) + root.title("Test tabbed pages") tabPage=TabbedPageSet(root, page_names=['Foobar','Baz'], n_rows=0, expand_tabs=False, ) @@ -488,3 +491,8 @@ if __name__ == '__main__': labelPgName.pack(padx=5) entryPgName.pack(padx=5) root.mainloop() + + +if __name__ == '__main__': + from idlelib.idle_test.htest import run + run(_tabbed_pages) diff --git a/Darwin/lib/python3.4/idlelib/textView.py b/Darwin/lib/python3.5/idlelib/textView.py similarity index 71% rename from Darwin/lib/python3.4/idlelib/textView.py rename to Darwin/lib/python3.5/idlelib/textView.py index dd50544..4257eea 100644 --- a/Darwin/lib/python3.4/idlelib/textView.py +++ b/Darwin/lib/python3.5/idlelib/textView.py @@ -9,15 +9,21 @@ class TextViewer(Toplevel): """A simple text viewer dialog for IDLE """ - def __init__(self, parent, title, text, modal=True): + def __init__(self, parent, title, text, modal=True, _htest=False): """Show the given text in a scrollable window with a 'close' button + If modal option set to False, user can interact with other windows, + otherwise they will be unable to interact with other windows until + the textview window is closed. + + _htest - bool; change box location when running htest. """ Toplevel.__init__(self, parent) self.configure(borderwidth=5) + # place dialog below parent if running htest self.geometry("=%dx%d+%d+%d" % (625, 500, - parent.winfo_rootx() + 10, - parent.winfo_rooty() + 10)) + parent.winfo_rootx() + 10, + parent.winfo_rooty() + (10 if not _htest else 100))) #elguavas - config placeholders til config stuff completed self.bg = '#ffffff' self.fg = '#000000' @@ -66,32 +72,15 @@ def view_file(parent, title, filename, encoding=None, modal=True): try: with open(filename, 'r', encoding=encoding) as file: contents = file.read() - except OSError: - import tkinter.messagebox as tkMessageBox + except IOError: tkMessageBox.showerror(title='File Load Error', message='Unable to load file %r .' 
% filename, parent=parent) else: return view_text(parent, title, contents, modal) - if __name__ == '__main__': - #test the dialog - root=Tk() - root.title('textView test') - filename = './textView.py' - with open(filename, 'r') as f: - text = f.read() - btn1 = Button(root, text='view_text', - command=lambda:view_text(root, 'view_text', text)) - btn1.pack(side=LEFT) - btn2 = Button(root, text='view_file', - command=lambda:view_file(root, 'view_file', filename)) - btn2.pack(side=LEFT) - btn3 = Button(root, text='nonmodal view_text', - command=lambda:view_text(root, 'nonmodal view_text', text, - modal=False)) - btn3.pack(side=LEFT) - close = Button(root, text='Close', command=root.destroy) - close.pack(side=RIGHT) - root.mainloop() + import unittest + unittest.main('idlelib.idle_test.test_textview', verbosity=2, exit=False) + from idlelib.idle_test.htest import run + run(TextViewer) diff --git a/Darwin/lib/python3.4/imaplib.py b/Darwin/lib/python3.5/imaplib.py similarity index 94% rename from Darwin/lib/python3.4/imaplib.py rename to Darwin/lib/python3.5/imaplib.py index ad104fe..4e8a4bb 100644 --- a/Darwin/lib/python3.4/imaplib.py +++ b/Darwin/lib/python3.5/imaplib.py @@ -45,11 +45,12 @@ AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first # Maximal line length when calling readline(). This is to prevent # reading arbitrary length lines. RFC 3501 and 2060 (IMAP 4rev1) -# don't specify a line length. RFC 2683 however suggests limiting client -# command lines to 1000 octets and server command lines to 8000 octets. -# We have selected 10000 for some extra margin and since that is supposedly -# also what UW and Panda IMAP does. -_MAXLINE = 10000 +# don't specify a line length. RFC 2683 suggests limiting client +# command lines to 1000 octets and that servers should be prepared +# to accept command lines up to 8000 octets, so we used to use 10K here. +# In the modern world (eg: gmail) the response to, for example, a +# search command can be quite large, so we now use 1M. +_MAXLINE = 1000000 # Commands @@ -65,6 +66,7 @@ Commands = { 'CREATE': ('AUTH', 'SELECTED'), 'DELETE': ('AUTH', 'SELECTED'), 'DELETEACL': ('AUTH', 'SELECTED'), + 'ENABLE': ('AUTH', ), 'EXAMINE': ('AUTH', 'SELECTED'), 'EXPUNGE': ('SELECTED',), 'FETCH': ('SELECTED',), @@ -106,12 +108,17 @@ InternalDate = re.compile(br'.*INTERNALDATE "' br' (?P[0-9][0-9]):(?P[0-9][0-9]):(?P[0-9][0-9])' br' (?P[-+])(?P[0-9][0-9])(?P[0-9][0-9])' br'"') +# Literal is no longer used; kept for backward compatibility. Literal = re.compile(br'.*{(?P\d+)}$', re.ASCII) MapCRLF = re.compile(br'\r\n|\r|\n') Response_code = re.compile(br'\[(?P[A-Z-]+)( (?P[^\]]*))?\]') Untagged_response = re.compile(br'\* (?P[A-Z-]+)( (?P.*))?') +# Untagged_status is no longer used; kept for backward compatibility Untagged_status = re.compile( br'\* (?P\d+) (?P[A-Z-]+)( (?P.*))?', re.ASCII) +# We compile these in _mode_xxx. +_Literal = br'.*{(?P\d+)}$' +_Untagged_status = br'\* (?P\d+) (?P[A-Z-]+)( (?P.*))?' @@ -165,7 +172,7 @@ class IMAP4: class abort(error): pass # Service errors - close and retry class readonly(abort): pass # Mailbox status changed to READ-ONLY - def __init__(self, host = '', port = IMAP4_PORT): + def __init__(self, host='', port=IMAP4_PORT): self.debug = Debug self.state = 'LOGOUT' self.literal = None # A literal argument to a command @@ -175,6 +182,7 @@ class IMAP4: self.is_readonly = False # READ-ONLY desired state self.tagnum = 0 self._tls_established = False + self._mode_ascii() # Open socket to server. 
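# Rough arithmetic sketch of why the 10K readline cap in the imaplib hunk
# above was raised to 1M.  The message count is an illustrative assumption,
# not a measurement: an untagged SEARCH response lists every matching message
# number, so a large mailbox easily overflows the old limit.
matching = 50000                        # assumed matches in a big mailbox
per_number = 7                          # "123456 " -- digits plus separator
estimated = len('* SEARCH ') + matching * per_number
print(estimated)                        # ~350000 octets: > 10000, < 1000000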
@@ -189,6 +197,19 @@ class IMAP4: pass raise + def _mode_ascii(self): + self.utf8_enabled = False + self._encoding = 'ascii' + self.Literal = re.compile(_Literal, re.ASCII) + self.Untagged_status = re.compile(_Untagged_status, re.ASCII) + + + def _mode_utf8(self): + self.utf8_enabled = True + self._encoding = 'utf-8' + self.Literal = re.compile(_Literal) + self.Untagged_status = re.compile(_Untagged_status) + def _connect(self): # Create unique tag for this session, @@ -238,6 +259,14 @@ class IMAP4: return getattr(self, attr.lower()) raise AttributeError("Unknown IMAP4 command: '%s'" % attr) + def __enter__(self): + return self + + def __exit__(self, *args): + try: + self.logout() + except OSError: + pass # Overridable methods @@ -351,7 +380,10 @@ class IMAP4: date_time = Time2Internaldate(date_time) else: date_time = None - self.literal = MapCRLF.sub(CRLF, message) + literal = MapCRLF.sub(CRLF, message) + if self.utf8_enabled: + literal = b'UTF8 (' + literal + b')' + self.literal = literal return self._simple_command(name, mailbox, flags, date_time) @@ -446,6 +478,18 @@ class IMAP4: """ return self._simple_command('DELETEACL', mailbox, who) + def enable(self, capability): + """Send an RFC5161 enable string to the server. + + (typ, [data]) = .enable(capability) + """ + if 'ENABLE' not in self.capabilities: + raise IMAP4.error("Server does not support ENABLE") + typ, data = self._simple_command('ENABLE', capability) + if typ == 'OK' and 'UTF8=ACCEPT' in capability.upper(): + self._mode_utf8() + return typ, data + def expunge(self): """Permanently remove deleted items from selected mailbox. @@ -552,7 +596,7 @@ class IMAP4: def _CRAM_MD5_AUTH(self, challenge): """ Authobject to use with CRAM-MD5 authentication. """ import hmac - pwd = (self.password.encode('ASCII') if isinstance(self.password, str) + pwd = (self.password.encode('utf-8') if isinstance(self.password, str) else self.password) return self.user + " " + hmac.HMAC(pwd, challenge, 'md5').hexdigest() @@ -652,9 +696,12 @@ class IMAP4: (typ, [data]) = .search(charset, criterion, ...) 'data' is space separated list of matching message numbers. + If UTF8 is enabled, charset MUST be None. 
""" name = 'SEARCH' if charset: + if self.utf8_enabled: + raise IMAP4.error("Non-None charset not valid in UTF8 mode") typ, dat = self._simple_command(name, 'CHARSET', charset, *criteria) else: typ, dat = self._simple_command(name, *criteria) @@ -745,9 +792,8 @@ class IMAP4: ssl_context = ssl._create_stdlib_context() typ, dat = self._simple_command(name) if typ == 'OK': - server_hostname = self.host if ssl.HAS_SNI else None self.sock = ssl_context.wrap_socket(self.sock, - server_hostname=server_hostname) + server_hostname=self.host) self.file = self.sock.makefile('rb') self._tls_established = True self._get_capabilities() @@ -869,7 +915,7 @@ class IMAP4: def _check_bye(self): bye = self.untagged_responses.get('BYE') if bye: - raise self.abort(bye[-1].decode('ascii', 'replace')) + raise self.abort(bye[-1].decode(self._encoding, 'replace')) def _command(self, name, *args): @@ -890,12 +936,12 @@ class IMAP4: raise self.readonly('mailbox status changed to READ-ONLY') tag = self._new_tag() - name = bytes(name, 'ASCII') + name = bytes(name, self._encoding) data = tag + b' ' + name for arg in args: if arg is None: continue if isinstance(arg, str): - arg = bytes(arg, "ASCII") + arg = bytes(arg, self._encoding) data = data + b' ' + arg literal = self.literal @@ -905,7 +951,7 @@ class IMAP4: literator = literal else: literator = None - data = data + bytes(' {%s}' % len(literal), 'ASCII') + data = data + bytes(' {%s}' % len(literal), self._encoding) if __debug__: if self.debug >= 4: @@ -970,7 +1016,7 @@ class IMAP4: typ, dat = self.capability() if dat == [None]: raise self.error('no CAPABILITY response from server') - dat = str(dat[-1], "ASCII") + dat = str(dat[-1], self._encoding) dat = dat.upper() self.capabilities = tuple(dat.split()) @@ -989,10 +1035,10 @@ class IMAP4: if self._match(self.tagre, resp): tag = self.mo.group('tag') if not tag in self.tagged_commands: - raise self.abort('unexpected tagged response: %s' % resp) + raise self.abort('unexpected tagged response: %r' % resp) typ = self.mo.group('type') - typ = str(typ, 'ASCII') + typ = str(typ, self._encoding) dat = self.mo.group('data') self.tagged_commands[tag] = (typ, [dat]) else: @@ -1001,7 +1047,7 @@ class IMAP4: # '*' (untagged) responses? if not self._match(Untagged_response, resp): - if self._match(Untagged_status, resp): + if self._match(self.Untagged_status, resp): dat2 = self.mo.group('data2') if self.mo is None: @@ -1011,17 +1057,17 @@ class IMAP4: self.continuation_response = self.mo.group('data') return None # NB: indicates continuation - raise self.abort("unexpected response: '%s'" % resp) + raise self.abort("unexpected response: %r" % resp) typ = self.mo.group('type') - typ = str(typ, 'ascii') + typ = str(typ, self._encoding) dat = self.mo.group('data') if dat is None: dat = b'' # Null untagged response if dat2: dat = dat + b' ' + dat2 # Is there a literal to come? - while self._match(Literal, dat): + while self._match(self.Literal, dat): # Read literal direct from connection. 
@@ -1045,7 +1091,7 @@ class IMAP4: if typ in ('OK', 'NO', 'BAD') and self._match(Response_code, dat): typ = self.mo.group('type') - typ = str(typ, "ASCII") + typ = str(typ, self._encoding) self._append_untagged(typ, self.mo.group('data')) if __debug__: @@ -1115,7 +1161,7 @@ class IMAP4: def _new_tag(self): - tag = self.tagpre + bytes(str(self.tagnum), 'ASCII') + tag = self.tagpre + bytes(str(self.tagnum), self._encoding) self.tagnum = self.tagnum + 1 self.tagged_commands[tag] = None return tag @@ -1205,7 +1251,8 @@ if HAVE_SSL: """ - def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, certfile=None, ssl_context=None): + def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None, + certfile=None, ssl_context=None): if ssl_context is not None and keyfile is not None: raise ValueError("ssl_context and keyfile arguments are mutually " "exclusive") @@ -1223,9 +1270,8 @@ if HAVE_SSL: def _create_socket(self): sock = IMAP4._create_socket(self) - server_hostname = self.host if ssl.HAS_SNI else None return self.ssl_context.wrap_socket(sock, - server_hostname=server_hostname) + server_hostname=self.host) def open(self, host='', port=IMAP4_SSL_PORT): """Setup connection to remote server on "host:port". @@ -1244,7 +1290,7 @@ class IMAP4_stream(IMAP4): Instantiate with: IMAP4_stream(command) - where "command" is a string that can be passed to subprocess.Popen() + "command" - a string that can be passed to subprocess.Popen() for more documentation see the docstring of the parent class IMAP4. """ @@ -1307,7 +1353,7 @@ class _Authenticator: def process(self, data): ret = self.mech(self.decode(data)) if ret is None: - return '*' # Abort conversation + return b'*' # Abort conversation return self.encode(ret) def encode(self, inp): @@ -1321,7 +1367,7 @@ class _Authenticator: # oup = b'' if isinstance(inp, str): - inp = inp.encode('ASCII') + inp = inp.encode('utf-8') while inp: if len(inp) > 48: t = inp[:48] diff --git a/Darwin/lib/python3.4/imghdr.py b/Darwin/lib/python3.5/imghdr.py similarity index 93% rename from Darwin/lib/python3.4/imghdr.py rename to Darwin/lib/python3.5/imghdr.py index add2ea8..b267925 100644 --- a/Darwin/lib/python3.4/imghdr.py +++ b/Darwin/lib/python3.5/imghdr.py @@ -110,6 +110,18 @@ def test_bmp(h, f): tests.append(test_bmp) +def test_webp(h, f): + if h.startswith(b'RIFF') and h[8:12] == b'WEBP': + return 'webp' + +tests.append(test_webp) + +def test_exr(h, f): + if h.startswith(b'\x76\x2f\x31\x01'): + return 'exr' + +tests.append(test_exr) + #--------------------# # Small test program # #--------------------# diff --git a/Darwin/lib/python3.4/imp.py b/Darwin/lib/python3.5/imp.py similarity index 82% rename from Darwin/lib/python3.4/imp.py rename to Darwin/lib/python3.5/imp.py index c8449c6..f6fff44 100644 --- a/Darwin/lib/python3.4/imp.py +++ b/Darwin/lib/python3.5/imp.py @@ -8,15 +8,16 @@ functionality over this module. # (Probably) need to stay in _imp from _imp import (lock_held, acquire_lock, release_lock, get_frozen_object, is_frozen_package, - init_builtin, init_frozen, is_builtin, is_frozen, + init_frozen, is_builtin, is_frozen, _fix_co_filename) try: - from _imp import load_dynamic + from _imp import create_dynamic except ImportError: # Platform doesn't support dynamic loading. 
- load_dynamic = None + create_dynamic = None -from importlib._bootstrap import SourcelessFileLoader, _ERR_MSG, _SpecMethods +from importlib._bootstrap import _ERR_MSG, _exec, _load, _builtin_from_name +from importlib._bootstrap_external import SourcelessFileLoader from importlib import machinery from importlib import util @@ -29,7 +30,7 @@ import warnings warnings.warn("the imp module is deprecated in favour of importlib; " "see the module's documentation for alternative uses", - PendingDeprecationWarning) + PendingDeprecationWarning, stacklevel=2) # DEPRECATED SEARCH_ERROR = 0 @@ -58,24 +59,23 @@ def new_module(name): def get_magic(): """**DEPRECATED** - Return the magic number for .pyc or .pyo files. + Return the magic number for .pyc files. """ return util.MAGIC_NUMBER def get_tag(): - """Return the magic tag for .pyc or .pyo files.""" + """Return the magic tag for .pyc files.""" return sys.implementation.cache_tag def cache_from_source(path, debug_override=None): """**DEPRECATED** - Given the path to a .py file, return the path to its .pyc/.pyo file. + Given the path to a .py file, return the path to its .pyc file. The .py file does not need to exist; this simply returns the path to the - .pyc/.pyo file calculated as if the .py file were imported. The extension - will be .pyc unless sys.flags.optimize is non-zero, then it will be .pyo. + .pyc file calculated as if the .py file were imported. If debug_override is not None, then it must be a boolean and is used in place of sys.flags.optimize. @@ -83,16 +83,18 @@ def cache_from_source(path, debug_override=None): If sys.implementation.cache_tag is None then NotImplementedError is raised. """ - return util.cache_from_source(path, debug_override) + with warnings.catch_warnings(): + warnings.simplefilter('ignore') + return util.cache_from_source(path, debug_override) def source_from_cache(path): """**DEPRECATED** - Given the path to a .pyc./.pyo file, return the path to its .py file. + Given the path to a .pyc. file, return the path to its .py file. - The .pyc/.pyo file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc/.pyo file. If path does + The .pyc file does not need to exist; this simply returns the path to + the .py file calculated to correspond to the .pyc file. If path does not conform to PEP 3147 format, ValueError will be raised. If sys.implementation.cache_tag is None then NotImplementedError is raised. @@ -130,7 +132,7 @@ class NullImporter: class _HackedGetData: - """Compatibiilty support for 'file' arguments of various load_*() + """Compatibility support for 'file' arguments of various load_*() functions.""" def __init__(self, fullname, path, file=None): @@ -164,11 +166,10 @@ class _LoadSourceCompatibility(_HackedGetData, machinery.SourceFileLoader): def load_source(name, pathname, file=None): loader = _LoadSourceCompatibility(name, pathname, file) spec = util.spec_from_file_location(name, pathname, loader=loader) - methods = _SpecMethods(spec) if name in sys.modules: - module = methods.exec(sys.modules[name]) + module = _exec(spec, sys.modules[name]) else: - module = methods.load() + module = _load(spec) # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. 
module.__loader__ = machinery.SourceFileLoader(name, pathname) @@ -185,11 +186,10 @@ def load_compiled(name, pathname, file=None): """**DEPRECATED**""" loader = _LoadCompiledCompatibility(name, pathname, file) spec = util.spec_from_file_location(name, pathname, loader=loader) - methods = _SpecMethods(spec) if name in sys.modules: - module = methods.exec(sys.modules[name]) + module = _exec(spec, sys.modules[name]) else: - module = methods.load() + module = _load(spec) # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. module.__loader__ = SourcelessFileLoader(name, pathname) @@ -210,11 +210,10 @@ def load_package(name, path): raise ValueError('{!r} is not a package'.format(path)) spec = util.spec_from_file_location(name, path, submodule_search_locations=[]) - methods = _SpecMethods(spec) if name in sys.modules: - return methods.exec(sys.modules[name]) + return _exec(spec, sys.modules[name]) else: - return methods.load() + return _load(spec) def load_module(name, file, filename, details): @@ -313,3 +312,34 @@ def reload(module): """ return importlib.reload(module) + + +def init_builtin(name): + """**DEPRECATED** + + Load and return a built-in module by name, or None is such module doesn't + exist + """ + try: + return _builtin_from_name(name) + except ImportError: + return None + + +if create_dynamic: + def load_dynamic(name, path, file=None): + """**DEPRECATED** + + Load an extension module. + """ + import importlib.machinery + loader = importlib.machinery.ExtensionFileLoader(name, path) + + # Issue #24748: Skip the sys.modules check in _load_module_shim; + # always load new extension + spec = importlib.machinery.ModuleSpec( + name=name, loader=loader, origin=path) + return _load(spec) + +else: + load_dynamic = None diff --git a/Darwin/lib/python3.4/importlib/__init__.py b/Darwin/lib/python3.5/importlib/__init__.py similarity index 84% rename from Darwin/lib/python3.4/importlib/__init__.py rename to Darwin/lib/python3.5/importlib/__init__.py index 1bc9947..b6a9f82 100644 --- a/Darwin/lib/python3.4/importlib/__init__.py +++ b/Darwin/lib/python3.5/importlib/__init__.py @@ -30,9 +30,26 @@ else: pass sys.modules['importlib._bootstrap'] = _bootstrap +try: + import _frozen_importlib_external as _bootstrap_external +except ImportError: + from . import _bootstrap_external + _bootstrap_external._setup(_bootstrap) + _bootstrap._bootstrap_external = _bootstrap_external +else: + _bootstrap_external.__name__ = 'importlib._bootstrap_external' + _bootstrap_external.__package__ = 'importlib' + try: + _bootstrap_external.__file__ = __file__.replace('__init__.py', '_bootstrap_external.py') + except NameError: + # __file__ is not guaranteed to be defined, e.g. if this code gets + # frozen by a tool like cx_Freeze. + pass + sys.modules['importlib._bootstrap_external'] = _bootstrap_external + # To simplify imports in test code -_w_long = _bootstrap._w_long -_r_long = _bootstrap._r_long +_w_long = _bootstrap_external._w_long +_r_long = _bootstrap_external._r_long # Fully bootstrapped at this point, import whatever you like, circular # dependencies and startup overhead minimisation permitting :) @@ -73,7 +90,7 @@ def find_loader(name, path=None): except KeyError: pass except AttributeError: - raise ValueError('{}.__loader__ is not set'.format(name)) + raise ValueError('{}.__loader__ is not set'.format(name)) from None spec = _bootstrap._find_spec(name, path) # We won't worry about malformed specs (missing attributes). 
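# The hunks above (and the reload() hunk that follows) swap the removed
# _SpecMethods wrapper for the flat _load()/_exec() helpers. A minimal sketch of
# the equivalent spec-based load using only the public importlib.util API that
# ships with 3.5; the function name load_from_spec and its arguments are
# illustrative, not part of the stdlib or of this diff:
import importlib.util
import sys

def load_from_spec(name, path):
    # Build a spec for a source file, create the module object from the spec,
    # register it in sys.modules, then execute the module body in its namespace.
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    sys.modules[spec.name] = module
    spec.loader.exec_module(module)
    return module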
@@ -138,15 +155,15 @@ def reload(module): parent = sys.modules[parent_name] except KeyError: msg = "parent {!r} not in sys.modules" - raise ImportError(msg.format(parent_name), name=parent_name) + raise ImportError(msg.format(parent_name), + name=parent_name) from None else: pkgpath = parent.__path__ else: pkgpath = None target = module spec = module.__spec__ = _bootstrap._find_spec(name, pkgpath, target) - methods = _bootstrap._SpecMethods(spec) - methods.exec(module) + _bootstrap._exec(spec, module) # The module may have replaced itself in sys.modules! return sys.modules[name] finally: diff --git a/Darwin/lib/python3.5/importlib/_bootstrap.py b/Darwin/lib/python3.5/importlib/_bootstrap.py new file mode 100644 index 0000000..6f62bb3 --- /dev/null +++ b/Darwin/lib/python3.5/importlib/_bootstrap.py @@ -0,0 +1,1142 @@ +"""Core implementation of import. + +This module is NOT meant to be directly imported! It has been designed such +that it can be bootstrapped into Python as the implementation of import. As +such it requires the injection of specific modules and attributes in order to +work. One should use importlib as the public-facing version of this module. + +""" +# +# IMPORTANT: Whenever making changes to this module, be sure to run +# a top-level make in order to get the frozen version of the module +# updated. Not doing so will result in the Makefile to fail for +# all others who don't have a ./python around to freeze the module +# in the early stages of compilation. +# + +# See importlib._setup() for what is injected into the global namespace. + +# When editing this code be aware that code executed at import time CANNOT +# reference any injected objects! This includes not only global code but also +# anything specified at the class level. + +# Bootstrap-related code ###################################################### + +_bootstrap_external = None + +def _wrap(new, old): + """Simple substitute for functools.update_wrapper.""" + for replace in ['__module__', '__name__', '__qualname__', '__doc__']: + if hasattr(old, replace): + setattr(new, replace, getattr(old, replace)) + new.__dict__.update(old.__dict__) + + +def _new_module(name): + return type(sys)(name) + + +class _ManageReload: + + """Manages the possible clean-up of sys.modules for load_module().""" + + def __init__(self, name): + self._name = name + + def __enter__(self): + self._is_reload = self._name in sys.modules + + def __exit__(self, *args): + if any(arg is not None for arg in args) and not self._is_reload: + try: + del sys.modules[self._name] + except KeyError: + pass + +# Module-level locking ######################################################## + +# A dict mapping module names to weakrefs of _ModuleLock instances +_module_locks = {} +# A dict mapping thread ids to _ModuleLock instances +_blocking_on = {} + + +class _DeadlockError(RuntimeError): + pass + + +class _ModuleLock: + """A recursive lock implementation which is able to detect deadlocks + (e.g. thread 1 trying to take locks A then B, and thread 2 trying to + take locks B then A). + """ + + def __init__(self, name): + self.lock = _thread.allocate_lock() + self.wakeup = _thread.allocate_lock() + self.name = name + self.owner = None + self.count = 0 + self.waiters = 0 + + def has_deadlock(self): + # Deadlock avoidance for concurrent circular imports. 
+ me = _thread.get_ident() + tid = self.owner + while True: + lock = _blocking_on.get(tid) + if lock is None: + return False + tid = lock.owner + if tid == me: + return True + + def acquire(self): + """ + Acquire the module lock. If a potential deadlock is detected, + a _DeadlockError is raised. + Otherwise, the lock is always acquired and True is returned. + """ + tid = _thread.get_ident() + _blocking_on[tid] = self + try: + while True: + with self.lock: + if self.count == 0 or self.owner == tid: + self.owner = tid + self.count += 1 + return True + if self.has_deadlock(): + raise _DeadlockError('deadlock detected by %r' % self) + if self.wakeup.acquire(False): + self.waiters += 1 + # Wait for a release() call + self.wakeup.acquire() + self.wakeup.release() + finally: + del _blocking_on[tid] + + def release(self): + tid = _thread.get_ident() + with self.lock: + if self.owner != tid: + raise RuntimeError('cannot release un-acquired lock') + assert self.count > 0 + self.count -= 1 + if self.count == 0: + self.owner = None + if self.waiters: + self.waiters -= 1 + self.wakeup.release() + + def __repr__(self): + return '_ModuleLock({!r}) at {}'.format(self.name, id(self)) + + +class _DummyModuleLock: + """A simple _ModuleLock equivalent for Python builds without + multi-threading support.""" + + def __init__(self, name): + self.name = name + self.count = 0 + + def acquire(self): + self.count += 1 + return True + + def release(self): + if self.count == 0: + raise RuntimeError('cannot release un-acquired lock') + self.count -= 1 + + def __repr__(self): + return '_DummyModuleLock({!r}) at {}'.format(self.name, id(self)) + + +class _ModuleLockManager: + + def __init__(self, name): + self._name = name + self._lock = None + + def __enter__(self): + try: + self._lock = _get_module_lock(self._name) + finally: + _imp.release_lock() + self._lock.acquire() + + def __exit__(self, *args, **kwargs): + self._lock.release() + + +# The following two functions are for consumption by Python/import.c. + +def _get_module_lock(name): + """Get or create the module lock for a given module name. + + Should only be called with the import lock taken.""" + lock = None + try: + lock = _module_locks[name]() + except KeyError: + pass + if lock is None: + if _thread is None: + lock = _DummyModuleLock(name) + else: + lock = _ModuleLock(name) + def cb(_): + del _module_locks[name] + _module_locks[name] = _weakref.ref(lock, cb) + return lock + +def _lock_unlock_module(name): + """Release the global import lock, and acquires then release the + module lock for a given module name. + This is used to ensure a module is completely initialized, in the + event it is being imported by another thread. + + Should only be called with the import lock taken.""" + lock = _get_module_lock(name) + _imp.release_lock() + try: + lock.acquire() + except _DeadlockError: + # Concurrent circular import, we'll accept a partially initialized + # module object. + pass + else: + lock.release() + +# Frame stripping magic ############################################### +def _call_with_frames_removed(f, *args, **kwds): + """remove_importlib_frames in import.c will always remove sequences + of importlib frames that end with a call to this function + + Use it instead of a normal call in places where including the importlib + frames introduces unwanted noise into the traceback (e.g. 
when executing + module code) + """ + return f(*args, **kwds) + + +def _verbose_message(message, *args, verbosity=1): + """Print the message to stderr if -v/PYTHONVERBOSE is turned on.""" + if sys.flags.verbose >= verbosity: + if not message.startswith(('#', 'import ')): + message = '# ' + message + print(message.format(*args), file=sys.stderr) + + +def _requires_builtin(fxn): + """Decorator to verify the named module is built-in.""" + def _requires_builtin_wrapper(self, fullname): + if fullname not in sys.builtin_module_names: + raise ImportError('{!r} is not a built-in module'.format(fullname), + name=fullname) + return fxn(self, fullname) + _wrap(_requires_builtin_wrapper, fxn) + return _requires_builtin_wrapper + + +def _requires_frozen(fxn): + """Decorator to verify the named module is frozen.""" + def _requires_frozen_wrapper(self, fullname): + if not _imp.is_frozen(fullname): + raise ImportError('{!r} is not a frozen module'.format(fullname), + name=fullname) + return fxn(self, fullname) + _wrap(_requires_frozen_wrapper, fxn) + return _requires_frozen_wrapper + + +# Typically used by loader classes as a method replacement. +def _load_module_shim(self, fullname): + """Load the specified module into sys.modules and return it. + + This method is deprecated. Use loader.exec_module instead. + + """ + spec = spec_from_loader(fullname, self) + if fullname in sys.modules: + module = sys.modules[fullname] + _exec(spec, module) + return sys.modules[fullname] + else: + return _load(spec) + +# Module specifications ####################################################### + +def _module_repr(module): + # The implementation of ModuleType__repr__(). + loader = getattr(module, '__loader__', None) + if hasattr(loader, 'module_repr'): + # As soon as BuiltinImporter, FrozenImporter, and NamespaceLoader + # drop their implementations for module_repr. we can add a + # deprecation warning here. + try: + return loader.module_repr(module) + except Exception: + pass + try: + spec = module.__spec__ + except AttributeError: + pass + else: + if spec is not None: + return _module_repr_from_spec(spec) + + # We could use module.__class__.__name__ instead of 'module' in the + # various repr permutations. + try: + name = module.__name__ + except AttributeError: + name = '?' + try: + filename = module.__file__ + except AttributeError: + if loader is None: + return ''.format(name) + else: + return ''.format(name, loader) + else: + return ''.format(name, filename) + + +class _installed_safely: + + def __init__(self, module): + self._module = module + self._spec = module.__spec__ + + def __enter__(self): + # This must be done before putting the module in sys.modules + # (otherwise an optimization shortcut in import.c becomes + # wrong) + self._spec._initializing = True + sys.modules[self._spec.name] = self._module + + def __exit__(self, *args): + try: + spec = self._spec + if any(arg is not None for arg in args): + try: + del sys.modules[spec.name] + except KeyError: + pass + else: + _verbose_message('import {!r} # {!r}', spec.name, spec.loader) + finally: + self._spec._initializing = False + + +class ModuleSpec: + """The specification for a module, used for loading. + + A module's spec is the source for information about the module. For + data associated with the module, including source, use the spec's + loader. + + `name` is the absolute name of the module. `loader` is the loader + to use when loading the module. `parent` is the name of the + package the module is in. The parent is derived from the name. 
+ + `is_package` determines if the module is considered a package or + not. On modules this is reflected by the `__path__` attribute. + + `origin` is the specific location used by the loader from which to + load the module, if that information is available. When filename is + set, origin will match. + + `has_location` indicates that a spec's "origin" reflects a location. + When this is True, `__file__` attribute of the module is set. + + `cached` is the location of the cached bytecode file, if any. It + corresponds to the `__cached__` attribute. + + `submodule_search_locations` is the sequence of path entries to + search when importing submodules. If set, is_package should be + True--and False otherwise. + + Packages are simply modules that (may) have submodules. If a spec + has a non-None value in `submodule_search_locations`, the import + system will consider modules loaded from the spec as packages. + + Only finders (see importlib.abc.MetaPathFinder and + importlib.abc.PathEntryFinder) should modify ModuleSpec instances. + + """ + + def __init__(self, name, loader, *, origin=None, loader_state=None, + is_package=None): + self.name = name + self.loader = loader + self.origin = origin + self.loader_state = loader_state + self.submodule_search_locations = [] if is_package else None + + # file-location attributes + self._set_fileattr = False + self._cached = None + + def __repr__(self): + args = ['name={!r}'.format(self.name), + 'loader={!r}'.format(self.loader)] + if self.origin is not None: + args.append('origin={!r}'.format(self.origin)) + if self.submodule_search_locations is not None: + args.append('submodule_search_locations={}' + .format(self.submodule_search_locations)) + return '{}({})'.format(self.__class__.__name__, ', '.join(args)) + + def __eq__(self, other): + smsl = self.submodule_search_locations + try: + return (self.name == other.name and + self.loader == other.loader and + self.origin == other.origin and + smsl == other.submodule_search_locations and + self.cached == other.cached and + self.has_location == other.has_location) + except AttributeError: + return False + + @property + def cached(self): + if self._cached is None: + if self.origin is not None and self._set_fileattr: + if _bootstrap_external is None: + raise NotImplementedError + self._cached = _bootstrap_external._get_cached(self.origin) + return self._cached + + @cached.setter + def cached(self, cached): + self._cached = cached + + @property + def parent(self): + """The name of the module's parent.""" + if self.submodule_search_locations is None: + return self.name.rpartition('.')[0] + else: + return self.name + + @property + def has_location(self): + return self._set_fileattr + + @has_location.setter + def has_location(self, value): + self._set_fileattr = bool(value) + + +def spec_from_loader(name, loader, *, origin=None, is_package=None): + """Return a module spec based on various loader methods.""" + if hasattr(loader, 'get_filename'): + if _bootstrap_external is None: + raise NotImplementedError + spec_from_file_location = _bootstrap_external.spec_from_file_location + + if is_package is None: + return spec_from_file_location(name, loader=loader) + search = [] if is_package else None + return spec_from_file_location(name, loader=loader, + submodule_search_locations=search) + + if is_package is None: + if hasattr(loader, 'is_package'): + try: + is_package = loader.is_package(name) + except ImportError: + is_package = None # aka, undefined + else: + # the default + is_package = False + + return 
ModuleSpec(name, loader, origin=origin, is_package=is_package) + + +_POPULATE = object() + + +def _spec_from_module(module, loader=None, origin=None): + # This function is meant for use in _setup(). + try: + spec = module.__spec__ + except AttributeError: + pass + else: + if spec is not None: + return spec + + name = module.__name__ + if loader is None: + try: + loader = module.__loader__ + except AttributeError: + # loader will stay None. + pass + try: + location = module.__file__ + except AttributeError: + location = None + if origin is None: + if location is None: + try: + origin = loader._ORIGIN + except AttributeError: + origin = None + else: + origin = location + try: + cached = module.__cached__ + except AttributeError: + cached = None + try: + submodule_search_locations = list(module.__path__) + except AttributeError: + submodule_search_locations = None + + spec = ModuleSpec(name, loader, origin=origin) + spec._set_fileattr = False if location is None else True + spec.cached = cached + spec.submodule_search_locations = submodule_search_locations + return spec + + +def _init_module_attrs(spec, module, *, override=False): + # The passed-in module may be not support attribute assignment, + # in which case we simply don't set the attributes. + # __name__ + if (override or getattr(module, '__name__', None) is None): + try: + module.__name__ = spec.name + except AttributeError: + pass + # __loader__ + if override or getattr(module, '__loader__', None) is None: + loader = spec.loader + if loader is None: + # A backward compatibility hack. + if spec.submodule_search_locations is not None: + if _bootstrap_external is None: + raise NotImplementedError + _NamespaceLoader = _bootstrap_external._NamespaceLoader + + loader = _NamespaceLoader.__new__(_NamespaceLoader) + loader._path = spec.submodule_search_locations + try: + module.__loader__ = loader + except AttributeError: + pass + # __package__ + if override or getattr(module, '__package__', None) is None: + try: + module.__package__ = spec.parent + except AttributeError: + pass + # __spec__ + try: + module.__spec__ = spec + except AttributeError: + pass + # __path__ + if override or getattr(module, '__path__', None) is None: + if spec.submodule_search_locations is not None: + try: + module.__path__ = spec.submodule_search_locations + except AttributeError: + pass + # __file__/__cached__ + if spec.has_location: + if override or getattr(module, '__file__', None) is None: + try: + module.__file__ = spec.origin + except AttributeError: + pass + + if override or getattr(module, '__cached__', None) is None: + if spec.cached is not None: + try: + module.__cached__ = spec.cached + except AttributeError: + pass + return module + + +def module_from_spec(spec): + """Create a module based on the provided spec.""" + # Typically loaders will not implement create_module(). + module = None + if hasattr(spec.loader, 'create_module'): + # If create_module() returns `None` then it means default + # module creation should be used. + module = spec.loader.create_module(spec) + elif hasattr(spec.loader, 'exec_module'): + _warnings.warn('starting in Python 3.6, loaders defining exec_module() ' + 'must also define create_module()', + DeprecationWarning, stacklevel=2) + if module is None: + module = _new_module(spec.name) + _init_module_attrs(spec, module) + return module + + +def _module_repr_from_spec(spec): + """Return the repr to use for the module.""" + # We mostly replicate _module_repr() using the spec attributes. + name = '?' 
if spec.name is None else spec.name + if spec.origin is None: + if spec.loader is None: + return ''.format(name) + else: + return ''.format(name, spec.loader) + else: + if spec.has_location: + return ''.format(name, spec.origin) + else: + return ''.format(spec.name, spec.origin) + + +# Used by importlib.reload() and _load_module_shim(). +def _exec(spec, module): + """Execute the spec in an existing module's namespace.""" + name = spec.name + _imp.acquire_lock() + with _ModuleLockManager(name): + if sys.modules.get(name) is not module: + msg = 'module {!r} not in sys.modules'.format(name) + raise ImportError(msg, name=name) + if spec.loader is None: + if spec.submodule_search_locations is None: + raise ImportError('missing loader', name=spec.name) + # namespace package + _init_module_attrs(spec, module, override=True) + return module + _init_module_attrs(spec, module, override=True) + if not hasattr(spec.loader, 'exec_module'): + # (issue19713) Once BuiltinImporter and ExtensionFileLoader + # have exec_module() implemented, we can add a deprecation + # warning here. + spec.loader.load_module(name) + else: + spec.loader.exec_module(module) + return sys.modules[name] + + +def _load_backward_compatible(spec): + # (issue19713) Once BuiltinImporter and ExtensionFileLoader + # have exec_module() implemented, we can add a deprecation + # warning here. + spec.loader.load_module(spec.name) + # The module must be in sys.modules at this point! + module = sys.modules[spec.name] + if getattr(module, '__loader__', None) is None: + try: + module.__loader__ = spec.loader + except AttributeError: + pass + if getattr(module, '__package__', None) is None: + try: + # Since module.__path__ may not line up with + # spec.submodule_search_paths, we can't necessarily rely + # on spec.parent here. + module.__package__ = module.__name__ + if not hasattr(module, '__path__'): + module.__package__ = spec.name.rpartition('.')[0] + except AttributeError: + pass + if getattr(module, '__spec__', None) is None: + try: + module.__spec__ = spec + except AttributeError: + pass + return module + +def _load_unlocked(spec): + # A helper for direct use by the import system. + if spec.loader is not None: + # not a namespace package + if not hasattr(spec.loader, 'exec_module'): + return _load_backward_compatible(spec) + + module = module_from_spec(spec) + with _installed_safely(module): + if spec.loader is None: + if spec.submodule_search_locations is None: + raise ImportError('missing loader', name=spec.name) + # A namespace package so do nothing. + else: + spec.loader.exec_module(module) + + # We don't ensure that the import-related module attributes get + # set in the sys.modules replacement case. Such modules are on + # their own. + return sys.modules[spec.name] + +# A method used during testing of _load_unlocked() and by +# _load_module_shim(). +def _load(spec): + """Return a new module object, loaded by the spec's loader. + + The module is not added to its parent. + + If a module is already in sys.modules, that existing module gets + clobbered. + + """ + _imp.acquire_lock() + with _ModuleLockManager(spec.name): + return _load_unlocked(spec) + + +# Loaders ##################################################################### + +class BuiltinImporter: + + """Meta path import for built-in modules. + + All methods are either class or static methods to avoid the need to + instantiate the class. + + """ + + @staticmethod + def module_repr(module): + """Return repr for the module. + + The method is deprecated. 
The import machinery does the job itself. + + """ + return ''.format(module.__name__) + + @classmethod + def find_spec(cls, fullname, path=None, target=None): + if path is not None: + return None + if _imp.is_builtin(fullname): + return spec_from_loader(fullname, cls, origin='built-in') + else: + return None + + @classmethod + def find_module(cls, fullname, path=None): + """Find the built-in module. + + If 'path' is ever specified then the search is considered a failure. + + This method is deprecated. Use find_spec() instead. + + """ + spec = cls.find_spec(fullname, path) + return spec.loader if spec is not None else None + + @classmethod + def create_module(self, spec): + """Create a built-in module""" + if spec.name not in sys.builtin_module_names: + raise ImportError('{!r} is not a built-in module'.format(spec.name), + name=spec.name) + return _call_with_frames_removed(_imp.create_builtin, spec) + + @classmethod + def exec_module(self, module): + """Exec a built-in module""" + _call_with_frames_removed(_imp.exec_builtin, module) + + @classmethod + @_requires_builtin + def get_code(cls, fullname): + """Return None as built-in modules do not have code objects.""" + return None + + @classmethod + @_requires_builtin + def get_source(cls, fullname): + """Return None as built-in modules do not have source code.""" + return None + + @classmethod + @_requires_builtin + def is_package(cls, fullname): + """Return False as built-in modules are never packages.""" + return False + + load_module = classmethod(_load_module_shim) + + +class FrozenImporter: + + """Meta path import for frozen modules. + + All methods are either class or static methods to avoid the need to + instantiate the class. + + """ + + @staticmethod + def module_repr(m): + """Return repr for the module. + + The method is deprecated. The import machinery does the job itself. + + """ + return ''.format(m.__name__) + + @classmethod + def find_spec(cls, fullname, path=None, target=None): + if _imp.is_frozen(fullname): + return spec_from_loader(fullname, cls, origin='frozen') + else: + return None + + @classmethod + def find_module(cls, fullname, path=None): + """Find a frozen module. + + This method is deprecated. Use find_spec() instead. + + """ + return cls if _imp.is_frozen(fullname) else None + + @classmethod + def create_module(cls, spec): + """Use default semantics for module creation.""" + + @staticmethod + def exec_module(module): + name = module.__spec__.name + if not _imp.is_frozen(name): + raise ImportError('{!r} is not a frozen module'.format(name), + name=name) + code = _call_with_frames_removed(_imp.get_frozen_object, name) + exec(code, module.__dict__) + + @classmethod + def load_module(cls, fullname): + """Load a frozen module. + + This method is deprecated. Use exec_module() instead. 
+ + """ + return _load_module_shim(cls, fullname) + + @classmethod + @_requires_frozen + def get_code(cls, fullname): + """Return the code object for the frozen module.""" + return _imp.get_frozen_object(fullname) + + @classmethod + @_requires_frozen + def get_source(cls, fullname): + """Return None as frozen modules do not have source code.""" + return None + + @classmethod + @_requires_frozen + def is_package(cls, fullname): + """Return True if the frozen module is a package.""" + return _imp.is_frozen_package(fullname) + + +# Import itself ############################################################### + +class _ImportLockContext: + + """Context manager for the import lock.""" + + def __enter__(self): + """Acquire the import lock.""" + _imp.acquire_lock() + + def __exit__(self, exc_type, exc_value, exc_traceback): + """Release the import lock regardless of any raised exceptions.""" + _imp.release_lock() + + +def _resolve_name(name, package, level): + """Resolve a relative module name to an absolute one.""" + bits = package.rsplit('.', level - 1) + if len(bits) < level: + raise ValueError('attempted relative import beyond top-level package') + base = bits[0] + return '{}.{}'.format(base, name) if name else base + + +def _find_spec_legacy(finder, name, path): + # This would be a good place for a DeprecationWarning if + # we ended up going that route. + loader = finder.find_module(name, path) + if loader is None: + return None + return spec_from_loader(name, loader) + + +def _find_spec(name, path, target=None): + """Find a module's loader.""" + if sys.meta_path is not None and not sys.meta_path: + _warnings.warn('sys.meta_path is empty', ImportWarning) + # We check sys.modules here for the reload case. While a passed-in + # target will usually indicate a reload there is no guarantee, whereas + # sys.modules provides one. + is_reload = name in sys.modules + for finder in sys.meta_path: + with _ImportLockContext(): + try: + find_spec = finder.find_spec + except AttributeError: + spec = _find_spec_legacy(finder, name, path) + if spec is None: + continue + else: + spec = find_spec(name, path, target) + if spec is not None: + # The parent import may have already imported this module. + if not is_reload and name in sys.modules: + module = sys.modules[name] + try: + __spec__ = module.__spec__ + except AttributeError: + # We use the found spec since that is the one that + # we would have used if the parent module hadn't + # beaten us to the punch. + return spec + else: + if __spec__ is None: + return spec + else: + return __spec__ + else: + return spec + else: + return None + + +def _sanity_check(name, package, level): + """Verify arguments are "sane".""" + if not isinstance(name, str): + raise TypeError('module name must be str, not {}'.format(type(name))) + if level < 0: + raise ValueError('level must be >= 0') + if package: + if not isinstance(package, str): + raise TypeError('__package__ not set to a string') + elif package not in sys.modules: + msg = ('Parent module {!r} not loaded, cannot perform relative ' + 'import') + raise SystemError(msg.format(package)) + if not name and level == 0: + raise ValueError('Empty module name') + + +_ERR_MSG_PREFIX = 'No module named ' +_ERR_MSG = _ERR_MSG_PREFIX + '{!r}' + +def _find_and_load_unlocked(name, import_): + path = None + parent = name.rpartition('.')[0] + if parent: + if parent not in sys.modules: + _call_with_frames_removed(import_, parent) + # Crazy side-effects! 
+ if name in sys.modules: + return sys.modules[name] + parent_module = sys.modules[parent] + try: + path = parent_module.__path__ + except AttributeError: + msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent) + raise ImportError(msg, name=name) from None + spec = _find_spec(name, path) + if spec is None: + raise ImportError(_ERR_MSG.format(name), name=name) + else: + module = _load_unlocked(spec) + if parent: + # Set the module as an attribute on its parent. + parent_module = sys.modules[parent] + setattr(parent_module, name.rpartition('.')[2], module) + return module + + +def _find_and_load(name, import_): + """Find and load the module, and release the import lock.""" + with _ModuleLockManager(name): + return _find_and_load_unlocked(name, import_) + + +def _gcd_import(name, package=None, level=0): + """Import and return the module based on its name, the package the call is + being made from, and the level adjustment. + + This function represents the greatest common denominator of functionality + between import_module and __import__. This includes setting __package__ if + the loader did not. + + """ + _sanity_check(name, package, level) + if level > 0: + name = _resolve_name(name, package, level) + _imp.acquire_lock() + if name not in sys.modules: + return _find_and_load(name, _gcd_import) + module = sys.modules[name] + if module is None: + _imp.release_lock() + message = ('import of {} halted; ' + 'None in sys.modules'.format(name)) + raise ImportError(message, name=name) + _lock_unlock_module(name) + return module + +def _handle_fromlist(module, fromlist, import_): + """Figure out what __import__ should return. + + The import_ parameter is a callable which takes the name of module to + import. It is required to decouple the function from assuming importlib's + import implementation is desired. + + """ + # The hell that is fromlist ... + # If a package was imported, try to import stuff from fromlist. + if hasattr(module, '__path__'): + if '*' in fromlist: + fromlist = list(fromlist) + fromlist.remove('*') + if hasattr(module, '__all__'): + fromlist.extend(module.__all__) + for x in fromlist: + if not hasattr(module, x): + from_name = '{}.{}'.format(module.__name__, x) + try: + _call_with_frames_removed(import_, from_name) + except ImportError as exc: + # Backwards-compatibility dictates we ignore failed + # imports triggered by fromlist for modules that don't + # exist. + if str(exc).startswith(_ERR_MSG_PREFIX): + if exc.name == from_name: + continue + raise + return module + + +def _calc___package__(globals): + """Calculate what __package__ should be. + + __package__ is not guaranteed to be defined or could be set to None + to represent that its proper value is unknown. + + """ + package = globals.get('__package__') + if package is None: + package = globals['__name__'] + if '__path__' not in globals: + package = package.rpartition('.')[0] + return package + + +def __import__(name, globals=None, locals=None, fromlist=(), level=0): + """Import a module. + + The 'globals' argument is used to infer where the import is occuring from + to handle relative imports. The 'locals' argument is ignored. The + 'fromlist' argument specifies what should exist as attributes on the module + being imported (e.g. ``from module import ``). The 'level' + argument represents the package location to import from in a relative + import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). 
+ + """ + if level == 0: + module = _gcd_import(name) + else: + globals_ = globals if globals is not None else {} + package = _calc___package__(globals_) + module = _gcd_import(name, package, level) + if not fromlist: + # Return up to the first dot in 'name'. This is complicated by the fact + # that 'name' may be relative. + if level == 0: + return _gcd_import(name.partition('.')[0]) + elif not name: + return module + else: + # Figure out where to slice the module's name up to the first dot + # in 'name'. + cut_off = len(name) - len(name.partition('.')[0]) + # Slice end needs to be positive to alleviate need to special-case + # when ``'.' not in name``. + return sys.modules[module.__name__[:len(module.__name__)-cut_off]] + else: + return _handle_fromlist(module, fromlist, _gcd_import) + + +def _builtin_from_name(name): + spec = BuiltinImporter.find_spec(name) + if spec is None: + raise ImportError('no built-in module named ' + name) + return _load_unlocked(spec) + + +def _setup(sys_module, _imp_module): + """Setup importlib by importing needed built-in modules and injecting them + into the global namespace. + + As sys is needed for sys.modules access and _imp is needed to load built-in + modules, those two modules must be explicitly passed in. + + """ + global _imp, sys + _imp = _imp_module + sys = sys_module + + # Set up the spec for existing builtin/frozen modules. + module_type = type(sys) + for name, module in sys.modules.items(): + if isinstance(module, module_type): + if name in sys.builtin_module_names: + loader = BuiltinImporter + elif _imp.is_frozen(name): + loader = FrozenImporter + else: + continue + spec = _spec_from_module(module, loader) + _init_module_attrs(spec, module) + + # Directly load built-in modules needed during bootstrap. + self_module = sys.modules[__name__] + for builtin_name in ('_warnings',): + if builtin_name not in sys.modules: + builtin_module = _builtin_from_name(builtin_name) + else: + builtin_module = sys.modules[builtin_name] + setattr(self_module, builtin_name, builtin_module) + + # Directly load the _thread module (needed during bootstrap). + try: + thread_module = _builtin_from_name('_thread') + except ImportError: + # Python was built without threads + thread_module = None + setattr(self_module, '_thread', thread_module) + + # Directly load the _weakref module (needed during bootstrap). + weakref_module = _builtin_from_name('_weakref') + setattr(self_module, '_weakref', weakref_module) + + +def _install(sys_module, _imp_module): + """Install importlib as the implementation of import.""" + _setup(sys_module, _imp_module) + + sys.meta_path.append(BuiltinImporter) + sys.meta_path.append(FrozenImporter) + + global _bootstrap_external + import _frozen_importlib_external + _bootstrap_external = _frozen_importlib_external + _frozen_importlib_external._install(sys.modules[__name__]) diff --git a/Darwin/lib/python3.4/importlib/_bootstrap.py b/Darwin/lib/python3.5/importlib/_bootstrap_external.py similarity index 53% rename from Darwin/lib/python3.4/importlib/_bootstrap.py rename to Darwin/lib/python3.5/importlib/_bootstrap_external.py index b8836c1..3508ce9 100644 --- a/Darwin/lib/python3.4/importlib/_bootstrap.py +++ b/Darwin/lib/python3.5/importlib/_bootstrap_external.py @@ -1,4 +1,4 @@ -"""Core implementation of import. +"""Core implementation of path-based import. This module is NOT meant to be directly imported! It has been designed such that it can be bootstrapped into Python as the implementation of import. As @@ -9,7 +9,7 @@ work. 
One should use importlib as the public-facing version of this module. # # IMPORTANT: Whenever making changes to this module, be sure to run # a top-level make in order to get the frozen version of the module -# update. Not doing so will result in the Makefile to fail for +# updated. Not doing so will result in the Makefile to fail for # all others who don't have a ./python around to freeze the module # in the early stages of compilation. # @@ -118,207 +118,7 @@ def _write_atomic(path, data, mode=0o666): raise -def _wrap(new, old): - """Simple substitute for functools.update_wrapper.""" - for replace in ['__module__', '__name__', '__qualname__', '__doc__']: - if hasattr(old, replace): - setattr(new, replace, getattr(old, replace)) - new.__dict__.update(old.__dict__) - - -def _new_module(name): - return type(sys)(name) - - -_code_type = type(_wrap.__code__) - - - -class _ManageReload: - - """Manages the possible clean-up of sys.modules for load_module().""" - - def __init__(self, name): - self._name = name - - def __enter__(self): - self._is_reload = self._name in sys.modules - - def __exit__(self, *args): - if any(arg is not None for arg in args) and not self._is_reload: - try: - del sys.modules[self._name] - except KeyError: - pass - -# Module-level locking ######################################################## - -# A dict mapping module names to weakrefs of _ModuleLock instances -_module_locks = {} -# A dict mapping thread ids to _ModuleLock instances -_blocking_on = {} - - -class _DeadlockError(RuntimeError): - pass - - -class _ModuleLock: - """A recursive lock implementation which is able to detect deadlocks - (e.g. thread 1 trying to take locks A then B, and thread 2 trying to - take locks B then A). - """ - - def __init__(self, name): - self.lock = _thread.allocate_lock() - self.wakeup = _thread.allocate_lock() - self.name = name - self.owner = None - self.count = 0 - self.waiters = 0 - - def has_deadlock(self): - # Deadlock avoidance for concurrent circular imports. - me = _thread.get_ident() - tid = self.owner - while True: - lock = _blocking_on.get(tid) - if lock is None: - return False - tid = lock.owner - if tid == me: - return True - - def acquire(self): - """ - Acquire the module lock. If a potential deadlock is detected, - a _DeadlockError is raised. - Otherwise, the lock is always acquired and True is returned. 
- """ - tid = _thread.get_ident() - _blocking_on[tid] = self - try: - while True: - with self.lock: - if self.count == 0 or self.owner == tid: - self.owner = tid - self.count += 1 - return True - if self.has_deadlock(): - raise _DeadlockError('deadlock detected by %r' % self) - if self.wakeup.acquire(False): - self.waiters += 1 - # Wait for a release() call - self.wakeup.acquire() - self.wakeup.release() - finally: - del _blocking_on[tid] - - def release(self): - tid = _thread.get_ident() - with self.lock: - if self.owner != tid: - raise RuntimeError('cannot release un-acquired lock') - assert self.count > 0 - self.count -= 1 - if self.count == 0: - self.owner = None - if self.waiters: - self.waiters -= 1 - self.wakeup.release() - - def __repr__(self): - return '_ModuleLock({!r}) at {}'.format(self.name, id(self)) - - -class _DummyModuleLock: - """A simple _ModuleLock equivalent for Python builds without - multi-threading support.""" - - def __init__(self, name): - self.name = name - self.count = 0 - - def acquire(self): - self.count += 1 - return True - - def release(self): - if self.count == 0: - raise RuntimeError('cannot release un-acquired lock') - self.count -= 1 - - def __repr__(self): - return '_DummyModuleLock({!r}) at {}'.format(self.name, id(self)) - - -class _ModuleLockManager: - - def __init__(self, name): - self._name = name - self._lock = None - - def __enter__(self): - try: - self._lock = _get_module_lock(self._name) - finally: - _imp.release_lock() - self._lock.acquire() - - def __exit__(self, *args, **kwargs): - self._lock.release() - - -# The following two functions are for consumption by Python/import.c. - -def _get_module_lock(name): - """Get or create the module lock for a given module name. - - Should only be called with the import lock taken.""" - lock = None - try: - lock = _module_locks[name]() - except KeyError: - pass - if lock is None: - if _thread is None: - lock = _DummyModuleLock(name) - else: - lock = _ModuleLock(name) - def cb(_): - del _module_locks[name] - _module_locks[name] = _weakref.ref(lock, cb) - return lock - -def _lock_unlock_module(name): - """Release the global import lock, and acquires then release the - module lock for a given module name. - This is used to ensure a module is completely initialized, in the - event it is being imported by another thread. - - Should only be called with the import lock taken.""" - lock = _get_module_lock(name) - _imp.release_lock() - try: - lock.acquire() - except _DeadlockError: - # Concurrent circular import, we'll accept a partially initialized - # module object. - pass - else: - lock.release() - -# Frame stripping magic ############################################### - -def _call_with_frames_removed(f, *args, **kwds): - """remove_importlib_frames in import.c will always remove sequences - of importlib frames that end with a call to this function - - Use it instead of a normal call in places where including the importlib - frames introduces unwanted noise into the traceback (e.g. 
when executing - module code) - """ - return f(*args, **kwds) +_code_type = type(_write_atomic.__code__) # Finder/loader utility code ############################################### @@ -419,54 +219,77 @@ def _call_with_frames_removed(f, *args, **kwds): # Python 3.4a4 3290 (changes to __qualname__ computation) # Python 3.4a4 3300 (more changes to __qualname__ computation) # Python 3.4rc2 3310 (alter __qualname__ computation) +# Python 3.5a0 3320 (matrix multiplication operator) +# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations) +# Python 3.5b2 3340 (fix dictionary display evaluation order #11205) +# Python 3.5b2 3350 (add GET_YIELD_FROM_ITER opcode #24400) # # MAGIC must change whenever the bytecode emitted by the compiler may no # longer be understood by older implementations of the eval loop (usually # due to the addition of new opcodes). -MAGIC_NUMBER = (3310).to_bytes(2, 'little') + b'\r\n' +MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n' _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c _PYCACHE = '__pycache__' +_OPT = 'opt-' SOURCE_SUFFIXES = ['.py'] # _setup() adds .pyw as needed. -DEBUG_BYTECODE_SUFFIXES = ['.pyc'] -OPTIMIZED_BYTECODE_SUFFIXES = ['.pyo'] +BYTECODE_SUFFIXES = ['.pyc'] +# Deprecated. +DEBUG_BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES = BYTECODE_SUFFIXES -def cache_from_source(path, debug_override=None): - """Given the path to a .py file, return the path to its .pyc/.pyo file. +def cache_from_source(path, debug_override=None, *, optimization=None): + """Given the path to a .py file, return the path to its .pyc file. The .py file does not need to exist; this simply returns the path to the - .pyc/.pyo file calculated as if the .py file were imported. The extension - will be .pyc unless sys.flags.optimize is non-zero, then it will be .pyo. + .pyc file calculated as if the .py file were imported. - If debug_override is not None, then it must be a boolean and is used in - place of sys.flags.optimize. + The 'optimization' parameter controls the presumed optimization level of + the bytecode file. If 'optimization' is not None, the string representation + of the argument is taken and verified to be alphanumeric (else ValueError + is raised). + + The debug_override parameter is deprecated. If debug_override is not None, + a True value is the same as setting 'optimization' to the empty string + while a False value is equivalent to setting 'optimization' to '1'. If sys.implementation.cache_tag is None then NotImplementedError is raised. 
""" - debug = not sys.flags.optimize if debug_override is None else debug_override - if debug: - suffixes = DEBUG_BYTECODE_SUFFIXES - else: - suffixes = OPTIMIZED_BYTECODE_SUFFIXES + if debug_override is not None: + _warnings.warn('the debug_override parameter is deprecated; use ' + "'optimization' instead", DeprecationWarning) + if optimization is not None: + message = 'debug_override or optimization must be set to None' + raise TypeError(message) + optimization = '' if debug_override else 1 head, tail = _path_split(path) - base_filename, sep, _ = tail.partition('.') + base, sep, rest = tail.rpartition('.') tag = sys.implementation.cache_tag if tag is None: raise NotImplementedError('sys.implementation.cache_tag is None') - filename = ''.join([base_filename, sep, tag, suffixes[0]]) - return _path_join(head, _PYCACHE, filename) + almost_filename = ''.join([(base if base else rest), sep, tag]) + if optimization is None: + if sys.flags.optimize == 0: + optimization = '' + else: + optimization = sys.flags.optimize + optimization = str(optimization) + if optimization != '': + if not optimization.isalnum(): + raise ValueError('{!r} is not alphanumeric'.format(optimization)) + almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization) + return _path_join(head, _PYCACHE, almost_filename + BYTECODE_SUFFIXES[0]) def source_from_cache(path): - """Given the path to a .pyc./.pyo file, return the path to its .py file. + """Given the path to a .pyc. file, return the path to its .py file. - The .pyc/.pyo file does not need to exist; this simply returns the path to - the .py file calculated to correspond to the .pyc/.pyo file. If path does - not conform to PEP 3147 format, ValueError will be raised. If + The .pyc file does not need to exist; this simply returns the path to + the .py file calculated to correspond to the .pyc file. If path does + not conform to PEP 3147/488 format, ValueError will be raised. If sys.implementation.cache_tag is None then NotImplementedError is raised. 
""" @@ -477,9 +300,19 @@ def source_from_cache(path): if pycache != _PYCACHE: raise ValueError('{} not bottom-level directory in ' '{!r}'.format(_PYCACHE, path)) - if pycache_filename.count('.') != 2: - raise ValueError('expected only 2 dots in ' + dot_count = pycache_filename.count('.') + if dot_count not in {2, 3}: + raise ValueError('expected only 2 or 3 dots in ' '{!r}'.format(pycache_filename)) + elif dot_count == 3: + optimization = pycache_filename.rsplit('.', 2)[-2] + if not optimization.startswith(_OPT): + raise ValueError("optimization portion of filename does not start " + "with {!r}".format(_OPT)) + opt_level = optimization[len(_OPT):] + if not opt_level.isalnum(): + raise ValueError("optimization level {!r} is not an alphanumeric " + "value".format(optimization)) base_filename = pycache_filename.partition('.')[0] return _path_join(head, base_filename + SOURCE_SUFFIXES[0]) @@ -503,6 +336,18 @@ def _get_sourcefile(bytecode_path): return source_path if _path_isfile(source_path) else bytecode_path +def _get_cached(filename): + if filename.endswith(tuple(SOURCE_SUFFIXES)): + try: + return cache_from_source(filename) + except NotImplementedError: + pass + elif filename.endswith(tuple(BYTECODE_SUFFIXES)): + return filename + else: + return None + + def _calc_mode(path): """Calculate the mode permissions for a bytecode file.""" try: @@ -535,34 +380,22 @@ def _check_name(method): if name is None: name = self.name elif self.name != name: - raise ImportError('loader cannot handle %s' % name, name=name) + raise ImportError('loader for %s cannot handle %s' % + (self.name, name), name=name) return method(self, name, *args, **kwargs) + try: + _wrap = _bootstrap._wrap + except NameError: + # XXX yuck + def _wrap(new, old): + for replace in ['__module__', '__name__', '__qualname__', '__doc__']: + if hasattr(old, replace): + setattr(new, replace, getattr(old, replace)) + new.__dict__.update(old.__dict__) _wrap(_check_name_wrapper, method) return _check_name_wrapper -def _requires_builtin(fxn): - """Decorator to verify the named module is built-in.""" - def _requires_builtin_wrapper(self, fullname): - if fullname not in sys.builtin_module_names: - raise ImportError('{!r} is not a built-in module'.format(fullname), - name=fullname) - return fxn(self, fullname) - _wrap(_requires_builtin_wrapper, fxn) - return _requires_builtin_wrapper - - -def _requires_frozen(fxn): - """Decorator to verify the named module is frozen.""" - def _requires_frozen_wrapper(self, fullname): - if not _imp.is_frozen(fullname): - raise ImportError('{!r} is not a frozen module'.format(fullname), - name=fullname) - return fxn(self, fullname) - _wrap(_requires_frozen_wrapper, fxn) - return _requires_frozen_wrapper - - def _find_module_shim(self, fullname): """Try to find a loader for the specified module by delegating to self.find_loader(). @@ -580,22 +413,6 @@ def _find_module_shim(self, fullname): return loader -def _load_module_shim(self, fullname): - """Load the specified module into sys.modules and return it. - - This method is deprecated. Use loader.exec_module instead. - - """ - spec = spec_from_loader(fullname, self) - methods = _SpecMethods(spec) - if fullname in sys.modules: - module = sys.modules[fullname] - methods.exec(module) - return sys.modules[fullname] - else: - return methods.load() - - def _validate_bytecode_header(data, source_stats=None, name=None, path=None): """Validate the header of the passed-in bytecode against source_stats (if given) and returning the bytecode that can be compiled by compile(). 
@@ -687,197 +504,6 @@ def decode_source(source_bytes): # Module specifications ####################################################### -def _module_repr(module): - # The implementation of ModuleType__repr__(). - loader = getattr(module, '__loader__', None) - if hasattr(loader, 'module_repr'): - # As soon as BuiltinImporter, FrozenImporter, and NamespaceLoader - # drop their implementations for module_repr. we can add a - # deprecation warning here. - try: - return loader.module_repr(module) - except Exception: - pass - try: - spec = module.__spec__ - except AttributeError: - pass - else: - if spec is not None: - return _SpecMethods(spec).module_repr() - - # We could use module.__class__.__name__ instead of 'module' in the - # various repr permutations. - try: - name = module.__name__ - except AttributeError: - name = '?' - try: - filename = module.__file__ - except AttributeError: - if loader is None: - return ''.format(name) - else: - return ''.format(name, loader) - else: - return ''.format(name, filename) - - -class _installed_safely: - - def __init__(self, module): - self._module = module - self._spec = module.__spec__ - - def __enter__(self): - # This must be done before putting the module in sys.modules - # (otherwise an optimization shortcut in import.c becomes - # wrong) - self._spec._initializing = True - sys.modules[self._spec.name] = self._module - - def __exit__(self, *args): - try: - spec = self._spec - if any(arg is not None for arg in args): - try: - del sys.modules[spec.name] - except KeyError: - pass - else: - _verbose_message('import {!r} # {!r}', spec.name, spec.loader) - finally: - self._spec._initializing = False - - -class ModuleSpec: - """The specification for a module, used for loading. - - A module's spec is the source for information about the module. For - data associated with the module, including source, use the spec's - loader. - - `name` is the absolute name of the module. `loader` is the loader - to use when loading the module. `parent` is the name of the - package the module is in. The parent is derived from the name. - - `is_package` determines if the module is considered a package or - not. On modules this is reflected by the `__path__` attribute. - - `origin` is the specific location used by the loader from which to - load the module, if that information is available. When filename is - set, origin will match. - - `has_location` indicates that a spec's "origin" reflects a location. - When this is True, `__file__` attribute of the module is set. - - `cached` is the location of the cached bytecode file, if any. It - corresponds to the `__cached__` attribute. - - `submodule_search_locations` is the sequence of path entries to - search when importing submodules. If set, is_package should be - True--and False otherwise. - - Packages are simply modules that (may) have submodules. If a spec - has a non-None value in `submodule_search_locations`, the import - system will consider modules loaded from the spec as packages. - - Only finders (see importlib.abc.MetaPathFinder and - importlib.abc.PathEntryFinder) should modify ModuleSpec instances. 
- - """ - - def __init__(self, name, loader, *, origin=None, loader_state=None, - is_package=None): - self.name = name - self.loader = loader - self.origin = origin - self.loader_state = loader_state - self.submodule_search_locations = [] if is_package else None - - # file-location attributes - self._set_fileattr = False - self._cached = None - - def __repr__(self): - args = ['name={!r}'.format(self.name), - 'loader={!r}'.format(self.loader)] - if self.origin is not None: - args.append('origin={!r}'.format(self.origin)) - if self.submodule_search_locations is not None: - args.append('submodule_search_locations={}' - .format(self.submodule_search_locations)) - return '{}({})'.format(self.__class__.__name__, ', '.join(args)) - - def __eq__(self, other): - smsl = self.submodule_search_locations - try: - return (self.name == other.name and - self.loader == other.loader and - self.origin == other.origin and - smsl == other.submodule_search_locations and - self.cached == other.cached and - self.has_location == other.has_location) - except AttributeError: - return False - - @property - def cached(self): - if self._cached is None: - if self.origin is not None and self._set_fileattr: - filename = self.origin - if filename.endswith(tuple(SOURCE_SUFFIXES)): - try: - self._cached = cache_from_source(filename) - except NotImplementedError: - pass - elif filename.endswith(tuple(BYTECODE_SUFFIXES)): - self._cached = filename - return self._cached - - @cached.setter - def cached(self, cached): - self._cached = cached - - @property - def parent(self): - """The name of the module's parent.""" - if self.submodule_search_locations is None: - return self.name.rpartition('.')[0] - else: - return self.name - - @property - def has_location(self): - return self._set_fileattr - - @has_location.setter - def has_location(self, value): - self._set_fileattr = bool(value) - - -def spec_from_loader(name, loader, *, origin=None, is_package=None): - """Return a module spec based on various loader methods.""" - if hasattr(loader, 'get_filename'): - if is_package is None: - return spec_from_file_location(name, loader=loader) - search = [] if is_package else None - return spec_from_file_location(name, loader=loader, - submodule_search_locations=search) - - if is_package is None: - if hasattr(loader, 'is_package'): - try: - is_package = loader.is_package(name) - except ImportError: - is_package = None # aka, undefined - else: - # the default - is_package = False - - return ModuleSpec(name, loader, origin=origin, is_package=is_package) - - _POPULATE = object() @@ -911,7 +537,7 @@ def spec_from_file_location(name, location=None, *, loader=None, # indirect location (e.g. a zip file or URL) will look like a # non-existent file relative to the filesystem. - spec = ModuleSpec(name, loader, origin=location) + spec = _bootstrap.ModuleSpec(name, loader, origin=location) spec._set_fileattr = True # Pick a loader if one wasn't provided. @@ -945,447 +571,8 @@ def spec_from_file_location(name, location=None, *, loader=None, return spec -def _spec_from_module(module, loader=None, origin=None): - # This function is meant for use in _setup(). - try: - spec = module.__spec__ - except AttributeError: - pass - else: - if spec is not None: - return spec - - name = module.__name__ - if loader is None: - try: - loader = module.__loader__ - except AttributeError: - # loader will stay None. 
- pass - try: - location = module.__file__ - except AttributeError: - location = None - if origin is None: - if location is None: - try: - origin = loader._ORIGIN - except AttributeError: - origin = None - else: - origin = location - try: - cached = module.__cached__ - except AttributeError: - cached = None - try: - submodule_search_locations = list(module.__path__) - except AttributeError: - submodule_search_locations = None - - spec = ModuleSpec(name, loader, origin=origin) - spec._set_fileattr = False if location is None else True - spec.cached = cached - spec.submodule_search_locations = submodule_search_locations - return spec - - -class _SpecMethods: - - """Convenience wrapper around spec objects to provide spec-specific - methods.""" - - # The various spec_from_* functions could be made factory methods here. - - def __init__(self, spec): - self.spec = spec - - def module_repr(self): - """Return the repr to use for the module.""" - # We mostly replicate _module_repr() using the spec attributes. - spec = self.spec - name = '?' if spec.name is None else spec.name - if spec.origin is None: - if spec.loader is None: - return ''.format(name) - else: - return ''.format(name, spec.loader) - else: - if spec.has_location: - return ''.format(name, spec.origin) - else: - return ''.format(spec.name, spec.origin) - - def init_module_attrs(self, module, *, _override=False, _force_name=True): - """Set the module's attributes. - - All missing import-related module attributes will be set. Here - is how the spec attributes map onto the module: - - spec.name -> module.__name__ - spec.loader -> module.__loader__ - spec.parent -> module.__package__ - spec -> module.__spec__ - - Optional: - spec.origin -> module.__file__ (if spec.set_fileattr is true) - spec.cached -> module.__cached__ (if __file__ also set) - spec.submodule_search_locations -> module.__path__ (if set) - - """ - spec = self.spec - - # The passed in module may be not support attribute assignment, - # in which case we simply don't set the attributes. - - # __name__ - if (_override or _force_name or - getattr(module, '__name__', None) is None): - try: - module.__name__ = spec.name - except AttributeError: - pass - - # __loader__ - if _override or getattr(module, '__loader__', None) is None: - loader = spec.loader - if loader is None: - # A backward compatibility hack. - if spec.submodule_search_locations is not None: - loader = _NamespaceLoader.__new__(_NamespaceLoader) - loader._path = spec.submodule_search_locations - try: - module.__loader__ = loader - except AttributeError: - pass - - # __package__ - if _override or getattr(module, '__package__', None) is None: - try: - module.__package__ = spec.parent - except AttributeError: - pass - - # __spec__ - try: - module.__spec__ = spec - except AttributeError: - pass - - # __path__ - if _override or getattr(module, '__path__', None) is None: - if spec.submodule_search_locations is not None: - try: - module.__path__ = spec.submodule_search_locations - except AttributeError: - pass - - if spec.has_location: - # __file__ - if _override or getattr(module, '__file__', None) is None: - try: - module.__file__ = spec.origin - except AttributeError: - pass - - # __cached__ - if _override or getattr(module, '__cached__', None) is None: - if spec.cached is not None: - try: - module.__cached__ = spec.cached - except AttributeError: - pass - - def create(self): - """Return a new module to be loaded. - - The import-related module attributes are also set with the - appropriate values from the spec. 
- - """ - spec = self.spec - # Typically loaders will not implement create_module(). - if hasattr(spec.loader, 'create_module'): - # If create_module() returns `None` it means the default - # module creation should be used. - module = spec.loader.create_module(spec) - else: - module = None - if module is None: - # This must be done before open() is ever called as the 'io' - # module implicitly imports 'locale' and would otherwise - # trigger an infinite loop. - module = _new_module(spec.name) - self.init_module_attrs(module) - return module - - def _exec(self, module): - """Do everything necessary to execute the module. - - The namespace of `module` is used as the target of execution. - This method uses the loader's `exec_module()` method. - - """ - self.spec.loader.exec_module(module) - - # Used by importlib.reload() and _load_module_shim(). - def exec(self, module): - """Execute the spec in an existing module's namespace.""" - name = self.spec.name - _imp.acquire_lock() - with _ModuleLockManager(name): - if sys.modules.get(name) is not module: - msg = 'module {!r} not in sys.modules'.format(name) - raise ImportError(msg, name=name) - if self.spec.loader is None: - if self.spec.submodule_search_locations is None: - raise ImportError('missing loader', name=self.spec.name) - # namespace package - self.init_module_attrs(module, _override=True) - return module - self.init_module_attrs(module, _override=True) - if not hasattr(self.spec.loader, 'exec_module'): - # (issue19713) Once BuiltinImporter and ExtensionFileLoader - # have exec_module() implemented, we can add a deprecation - # warning here. - self.spec.loader.load_module(name) - else: - self._exec(module) - return sys.modules[name] - - def _load_backward_compatible(self): - # (issue19713) Once BuiltinImporter and ExtensionFileLoader - # have exec_module() implemented, we can add a deprecation - # warning here. - spec = self.spec - spec.loader.load_module(spec.name) - # The module must be in sys.modules at this point! - module = sys.modules[spec.name] - if getattr(module, '__loader__', None) is None: - try: - module.__loader__ = spec.loader - except AttributeError: - pass - if getattr(module, '__package__', None) is None: - try: - # Since module.__path__ may not line up with - # spec.submodule_search_paths, we can't necessarily rely - # on spec.parent here. - module.__package__ = module.__name__ - if not hasattr(module, '__path__'): - module.__package__ = spec.name.rpartition('.')[0] - except AttributeError: - pass - if getattr(module, '__spec__', None) is None: - try: - module.__spec__ = spec - except AttributeError: - pass - return module - - def _load_unlocked(self): - # A helper for direct use by the import system. - if self.spec.loader is not None: - # not a namespace package - if not hasattr(self.spec.loader, 'exec_module'): - return self._load_backward_compatible() - - module = self.create() - with _installed_safely(module): - if self.spec.loader is None: - if self.spec.submodule_search_locations is None: - raise ImportError('missing loader', name=self.spec.name) - # A namespace package so do nothing. - else: - self._exec(module) - - # We don't ensure that the import-related module attributes get - # set in the sys.modules replacement case. Such modules are on - # their own. - return sys.modules[self.spec.name] - - # A method used during testing of _load_unlocked() and by - # _load_module_shim(). - def load(self): - """Return a new module object, loaded by the spec's loader. - - The module is not added to its parent. 
- - If a module is already in sys.modules, that existing module gets - clobbered. - - """ - _imp.acquire_lock() - with _ModuleLockManager(self.spec.name): - return self._load_unlocked() - - -def _fix_up_module(ns, name, pathname, cpathname=None): - # This function is used by PyImport_ExecCodeModuleObject(). - loader = ns.get('__loader__') - spec = ns.get('__spec__') - if not loader: - if spec: - loader = spec.loader - elif pathname == cpathname: - loader = SourcelessFileLoader(name, pathname) - else: - loader = SourceFileLoader(name, pathname) - if not spec: - spec = spec_from_file_location(name, pathname, loader=loader) - try: - ns['__spec__'] = spec - ns['__loader__'] = loader - ns['__file__'] = pathname - ns['__cached__'] = cpathname - except Exception: - # Not important enough to report. - pass - - # Loaders ##################################################################### -class BuiltinImporter: - - """Meta path import for built-in modules. - - All methods are either class or static methods to avoid the need to - instantiate the class. - - """ - - @staticmethod - def module_repr(module): - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ - return ''.format(module.__name__) - - @classmethod - def find_spec(cls, fullname, path=None, target=None): - if path is not None: - return None - if _imp.is_builtin(fullname): - return spec_from_loader(fullname, cls, origin='built-in') - else: - return None - - @classmethod - def find_module(cls, fullname, path=None): - """Find the built-in module. - - If 'path' is ever specified then the search is considered a failure. - - This method is deprecated. Use find_spec() instead. - - """ - spec = cls.find_spec(fullname, path) - return spec.loader if spec is not None else None - - @classmethod - @_requires_builtin - def load_module(cls, fullname): - """Load a built-in module.""" - # Once an exec_module() implementation is added we can also - # add a deprecation warning here. - with _ManageReload(fullname): - module = _call_with_frames_removed(_imp.init_builtin, fullname) - module.__loader__ = cls - module.__package__ = '' - return module - - @classmethod - @_requires_builtin - def get_code(cls, fullname): - """Return None as built-in modules do not have code objects.""" - return None - - @classmethod - @_requires_builtin - def get_source(cls, fullname): - """Return None as built-in modules do not have source code.""" - return None - - @classmethod - @_requires_builtin - def is_package(cls, fullname): - """Return False as built-in modules are never packages.""" - return False - - -class FrozenImporter: - - """Meta path import for frozen modules. - - All methods are either class or static methods to avoid the need to - instantiate the class. - - """ - - @staticmethod - def module_repr(m): - """Return repr for the module. - - The method is deprecated. The import machinery does the job itself. - - """ - return ''.format(m.__name__) - - @classmethod - def find_spec(cls, fullname, path=None, target=None): - if _imp.is_frozen(fullname): - return spec_from_loader(fullname, cls, origin='frozen') - else: - return None - - @classmethod - def find_module(cls, fullname, path=None): - """Find a frozen module. - - This method is deprecated. Use find_spec() instead. 
- - """ - return cls if _imp.is_frozen(fullname) else None - - @staticmethod - def exec_module(module): - name = module.__spec__.name - if not _imp.is_frozen(name): - raise ImportError('{!r} is not a frozen module'.format(name), - name=name) - code = _call_with_frames_removed(_imp.get_frozen_object, name) - exec(code, module.__dict__) - - @classmethod - def load_module(cls, fullname): - """Load a frozen module. - - This method is deprecated. Use exec_module() instead. - - """ - return _load_module_shim(cls, fullname) - - @classmethod - @_requires_frozen - def get_code(cls, fullname): - """Return the code object for the frozen module.""" - return _imp.get_frozen_object(fullname) - - @classmethod - @_requires_frozen - def get_source(cls, fullname): - """Return None as frozen modules do not have source code.""" - return None - - @classmethod - @_requires_frozen - def is_package(cls, fullname): - """Return True if the frozen module is a package.""" - return _imp.is_frozen_package(fullname) - - class WindowsRegistryFinder: """Meta path finder for modules declared in the Windows registry.""" @@ -1431,8 +618,9 @@ class WindowsRegistryFinder: return None for loader, suffixes in _get_supported_file_loaders(): if filepath.endswith(tuple(suffixes)): - spec = spec_from_loader(fullname, loader(fullname, filepath), - origin=filepath) + spec = _bootstrap.spec_from_loader(fullname, + loader(fullname, filepath), + origin=filepath) return spec @classmethod @@ -1462,15 +650,19 @@ class _LoaderBasics: tail_name = fullname.rpartition('.')[2] return filename_base == '__init__' and tail_name != '__init__' + def create_module(self, spec): + """Use default semantics for module creation.""" + def exec_module(self, module): """Execute the module.""" code = self.get_code(module.__name__) if code is None: raise ImportError('cannot load module {!r} when get_code() ' 'returns None'.format(module.__name__)) - _call_with_frames_removed(exec, code, module.__dict__) + _bootstrap._call_with_frames_removed(exec, code, module.__dict__) - load_module = _load_module_shim + def load_module(self, fullname): + return _bootstrap._load_module_shim(self, fullname) class SourceLoader(_LoaderBasics): @@ -1528,7 +720,7 @@ class SourceLoader(_LoaderBasics): The 'data' argument can be any object type that compile() supports. """ - return _call_with_frames_removed(compile, data, path, 'exec', + return _bootstrap._call_with_frames_removed(compile, data, path, 'exec', dont_inherit=True, optimize=_optimize) def get_code(self, fullname): @@ -1686,7 +878,7 @@ class SourcelessFileLoader(FileLoader, _LoaderBasics): EXTENSION_SUFFIXES = [] -class ExtensionFileLoader: +class ExtensionFileLoader(FileLoader, _LoaderBasics): """Loader for extension modules. @@ -1705,24 +897,20 @@ class ExtensionFileLoader: def __hash__(self): return hash(self.name) ^ hash(self.path) - @_check_name - def load_module(self, fullname): - """Load an extension module.""" - # Once an exec_module() implementation is added we can also - # add a deprecation warning here. 
- with _ManageReload(fullname): - module = _call_with_frames_removed(_imp.load_dynamic, - fullname, self.path) - _verbose_message('extension module loaded from {!r}', self.path) - is_package = self.is_package(fullname) - if is_package and not hasattr(module, '__path__'): - module.__path__ = [_path_split(self.path)[0]] - module.__loader__ = self - module.__package__ = module.__name__ - if not is_package: - module.__package__ = module.__package__.rpartition('.')[0] + def create_module(self, spec): + """Create an unitialized extension module""" + module = _bootstrap._call_with_frames_removed( + _imp.create_dynamic, spec) + _verbose_message('extension module {!r} loaded from {!r}', + spec.name, self.path) return module + def exec_module(self, module): + """Initialize an extension module""" + _bootstrap._call_with_frames_removed(_imp.exec_dynamic, module) + _verbose_message('extension module {!r} executed from {!r}', + self.name, self.path) + def is_package(self, fullname): """Return True if the extension module is a package.""" file_name = _path_split(self.path)[1] @@ -1799,7 +987,7 @@ class _NamespacePath: self._path.append(item) -# We use this exclusively in init_module_attrs() for backward-compatibility. +# We use this exclusively in module_from_spec() for backward-compatibility. class _NamespaceLoader: def __init__(self, name, path, path_finder): self._path = _NamespacePath(name, path, path_finder) @@ -1822,6 +1010,9 @@ class _NamespaceLoader: def get_code(self, fullname): return compile('', '', 'exec', dont_inherit=True) + def create_module(self, spec): + """Use default semantics for module creation.""" + def exec_module(self, module): pass @@ -1833,7 +1024,7 @@ class _NamespaceLoader: """ # The import system never calls this method. _verbose_message('namespace module loaded with path {!r}', self._path) - return _load_module_shim(self, fullname) + return _bootstrap._load_module_shim(self, fullname) # Finders ##################################################################### @@ -1857,7 +1048,7 @@ class PathFinder: If 'hooks' is false then use sys.path_hooks. """ - if not sys.path_hooks: + if sys.path_hooks is not None and not sys.path_hooks: _warnings.warn('sys.path_hooks is empty', ImportWarning) for hook in sys.path_hooks: try: @@ -1876,7 +1067,12 @@ class PathFinder: """ if path == '': - path = _os.getcwd() + try: + path = _os.getcwd() + except FileNotFoundError: + # Don't cache the failure as the cwd can easily change to + # a valid directory later on. + return None try: finder = sys.path_importer_cache[path] except KeyError: @@ -1894,8 +1090,8 @@ class PathFinder: loader = finder.find_module(fullname) portions = [] if loader is not None: - return spec_from_loader(fullname, loader) - spec = ModuleSpec(fullname, None) + return _bootstrap.spec_from_loader(fullname, loader) + spec = _bootstrap.ModuleSpec(fullname, None) spec.submodule_search_locations = portions return spec @@ -1927,7 +1123,7 @@ class PathFinder: # on path. 
namespace_path.extend(portions) else: - spec = ModuleSpec(fullname, None) + spec = _bootstrap.ModuleSpec(fullname, None) spec.submodule_search_locations = namespace_path return spec @@ -2053,7 +1249,7 @@ class FileFinder: return self._get_spec(loader_class, fullname, full_path, None, target) if is_namespace: _verbose_message('possible namespace for {}'.format(base_path)) - spec = ModuleSpec(fullname, None) + spec = _bootstrap.ModuleSpec(fullname, None) spec.submodule_search_locations = [base_path] return spec return None @@ -2111,201 +1307,29 @@ class FileFinder: return 'FileFinder({!r})'.format(self.path) -# Import itself ############################################################### +# Import setup ############################################################### -class _ImportLockContext: - - """Context manager for the import lock.""" - - def __enter__(self): - """Acquire the import lock.""" - _imp.acquire_lock() - - def __exit__(self, exc_type, exc_value, exc_traceback): - """Release the import lock regardless of any raised exceptions.""" - _imp.release_lock() - - -def _resolve_name(name, package, level): - """Resolve a relative module name to an absolute one.""" - bits = package.rsplit('.', level - 1) - if len(bits) < level: - raise ValueError('attempted relative import beyond top-level package') - base = bits[0] - return '{}.{}'.format(base, name) if name else base - - -def _find_spec_legacy(finder, name, path): - # This would be a good place for a DeprecationWarning if - # we ended up going that route. - loader = finder.find_module(name, path) - if loader is None: - return None - return spec_from_loader(name, loader) - - -def _find_spec(name, path, target=None): - """Find a module's loader.""" - if not sys.meta_path: - _warnings.warn('sys.meta_path is empty', ImportWarning) - # We check sys.modules here for the reload case. While a passed-in - # target will usually indicate a reload there is no guarantee, whereas - # sys.modules provides one. - is_reload = name in sys.modules - for finder in sys.meta_path: - with _ImportLockContext(): - try: - find_spec = finder.find_spec - except AttributeError: - spec = _find_spec_legacy(finder, name, path) - if spec is None: - continue - else: - spec = find_spec(name, path, target) - if spec is not None: - # The parent import may have already imported this module. - if not is_reload and name in sys.modules: - module = sys.modules[name] - try: - __spec__ = module.__spec__ - except AttributeError: - # We use the found spec since that is the one that - # we would have used if the parent module hadn't - # beaten us to the punch. 
- return spec - else: - if __spec__ is None: - return spec - else: - return __spec__ - else: - return spec - else: - return None - - -def _sanity_check(name, package, level): - """Verify arguments are "sane".""" - if not isinstance(name, str): - raise TypeError('module name must be str, not {}'.format(type(name))) - if level < 0: - raise ValueError('level must be >= 0') - if package: - if not isinstance(package, str): - raise TypeError('__package__ not set to a string') - elif package not in sys.modules: - msg = ('Parent module {!r} not loaded, cannot perform relative ' - 'import') - raise SystemError(msg.format(package)) - if not name and level == 0: - raise ValueError('Empty module name') - - -_ERR_MSG_PREFIX = 'No module named ' -_ERR_MSG = _ERR_MSG_PREFIX + '{!r}' - -def _find_and_load_unlocked(name, import_): - path = None - parent = name.rpartition('.')[0] - if parent: - if parent not in sys.modules: - _call_with_frames_removed(import_, parent) - # Crazy side-effects! - if name in sys.modules: - return sys.modules[name] - parent_module = sys.modules[parent] - try: - path = parent_module.__path__ - except AttributeError: - msg = (_ERR_MSG + '; {!r} is not a package').format(name, parent) - raise ImportError(msg, name=name) - spec = _find_spec(name, path) - if spec is None: - raise ImportError(_ERR_MSG.format(name), name=name) - else: - module = _SpecMethods(spec)._load_unlocked() - if parent: - # Set the module as an attribute on its parent. - parent_module = sys.modules[parent] - setattr(parent_module, name.rpartition('.')[2], module) - return module - - -def _find_and_load(name, import_): - """Find and load the module, and release the import lock.""" - with _ModuleLockManager(name): - return _find_and_load_unlocked(name, import_) - - -def _gcd_import(name, package=None, level=0): - """Import and return the module based on its name, the package the call is - being made from, and the level adjustment. - - This function represents the greatest common denominator of functionality - between import_module and __import__. This includes setting __package__ if - the loader did not. - - """ - _sanity_check(name, package, level) - if level > 0: - name = _resolve_name(name, package, level) - _imp.acquire_lock() - if name not in sys.modules: - return _find_and_load(name, _gcd_import) - module = sys.modules[name] - if module is None: - _imp.release_lock() - message = ('import of {} halted; ' - 'None in sys.modules'.format(name)) - raise ImportError(message, name=name) - _lock_unlock_module(name) - return module - -def _handle_fromlist(module, fromlist, import_): - """Figure out what __import__ should return. - - The import_ parameter is a callable which takes the name of module to - import. It is required to decouple the function from assuming importlib's - import implementation is desired. - - """ - # The hell that is fromlist ... - # If a package was imported, try to import stuff from fromlist. - if hasattr(module, '__path__'): - if '*' in fromlist: - fromlist = list(fromlist) - fromlist.remove('*') - if hasattr(module, '__all__'): - fromlist.extend(module.__all__) - for x in fromlist: - if not hasattr(module, x): - from_name = '{}.{}'.format(module.__name__, x) - try: - _call_with_frames_removed(import_, from_name) - except ImportError as exc: - # Backwards-compatibility dictates we ignore failed - # imports triggered by fromlist for modules that don't - # exist. 
- if str(exc).startswith(_ERR_MSG_PREFIX): - if exc.name == from_name: - continue - raise - return module - - -def _calc___package__(globals): - """Calculate what __package__ should be. - - __package__ is not guaranteed to be defined or could be set to None - to represent that its proper value is unknown. - - """ - package = globals.get('__package__') - if package is None: - package = globals['__name__'] - if '__path__' not in globals: - package = package.rpartition('.')[0] - return package +def _fix_up_module(ns, name, pathname, cpathname=None): + # This function is used by PyImport_ExecCodeModuleObject(). + loader = ns.get('__loader__') + spec = ns.get('__spec__') + if not loader: + if spec: + loader = spec.loader + elif pathname == cpathname: + loader = SourcelessFileLoader(name, pathname) + else: + loader = SourceFileLoader(name, pathname) + if not spec: + spec = spec_from_file_location(name, pathname, loader=loader) + try: + ns['__spec__'] = spec + ns['__loader__'] = loader + ns['__file__'] = pathname + ns['__cached__'] = cpathname + except Exception: + # Not important enough to report. + pass def _get_supported_file_loaders(): @@ -2319,85 +1343,23 @@ def _get_supported_file_loaders(): return [extensions, source, bytecode] -def __import__(name, globals=None, locals=None, fromlist=(), level=0): - """Import a module. +def _setup(_bootstrap_module): + """Setup the path-based importers for importlib by importing needed + built-in modules and injecting them into the global namespace. - The 'globals' argument is used to infer where the import is occuring from - to handle relative imports. The 'locals' argument is ignored. The - 'fromlist' argument specifies what should exist as attributes on the module - being imported (e.g. ``from module import ``). The 'level' - argument represents the package location to import from in a relative - import (e.g. ``from ..pkg import mod`` would have a 'level' of 2). + Other components are extracted from the core bootstrap module. """ - if level == 0: - module = _gcd_import(name) - else: - globals_ = globals if globals is not None else {} - package = _calc___package__(globals_) - module = _gcd_import(name, package, level) - if not fromlist: - # Return up to the first dot in 'name'. This is complicated by the fact - # that 'name' may be relative. - if level == 0: - return _gcd_import(name.partition('.')[0]) - elif not name: - return module - else: - # Figure out where to slice the module's name up to the first dot - # in 'name'. - cut_off = len(name) - len(name.partition('.')[0]) - # Slice end needs to be positive to alleviate need to special-case - # when ``'.' not in name``. - return sys.modules[module.__name__[:len(module.__name__)-cut_off]] - else: - return _handle_fromlist(module, fromlist, _gcd_import) - - -def _builtin_from_name(name): - spec = BuiltinImporter.find_spec(name) - if spec is None: - raise ImportError('no built-in module named ' + name) - methods = _SpecMethods(spec) - return methods._load_unlocked() - - -def _setup(sys_module, _imp_module): - """Setup importlib by importing needed built-in modules and injecting them - into the global namespace. - - As sys is needed for sys.modules access and _imp is needed to load built-in - modules, those two modules must be explicitly passed in. 
- - """ - global _imp, sys, BYTECODE_SUFFIXES - _imp = _imp_module - sys = sys_module - - if sys.flags.optimize: - BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES - else: - BYTECODE_SUFFIXES = DEBUG_BYTECODE_SUFFIXES - - # Set up the spec for existing builtin/frozen modules. - module_type = type(sys) - for name, module in sys.modules.items(): - if isinstance(module, module_type): - if name in sys.builtin_module_names: - loader = BuiltinImporter - elif _imp.is_frozen(name): - loader = FrozenImporter - else: - continue - spec = _spec_from_module(module, loader) - methods = _SpecMethods(spec) - methods.init_module_attrs(module) + global sys, _imp, _bootstrap + _bootstrap = _bootstrap_module + sys = _bootstrap.sys + _imp = _bootstrap._imp # Directly load built-in modules needed during bootstrap. self_module = sys.modules[__name__] for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'): if builtin_name not in sys.modules: - builtin_module = _builtin_from_name(builtin_name) + builtin_module = _bootstrap._builtin_from_name(builtin_name) else: builtin_module = sys.modules[builtin_name] setattr(self_module, builtin_name, builtin_module) @@ -2413,7 +1375,7 @@ def _setup(sys_module, _imp_module): break else: try: - os_module = _builtin_from_name(builtin_os) + os_module = _bootstrap._builtin_from_name(builtin_os) break except ImportError: continue @@ -2425,19 +1387,19 @@ def _setup(sys_module, _imp_module): # Directly load the _thread module (needed during bootstrap). try: - thread_module = _builtin_from_name('_thread') + thread_module = _bootstrap._builtin_from_name('_thread') except ImportError: # Python was built without threads thread_module = None setattr(self_module, '_thread', thread_module) # Directly load the _weakref module (needed during bootstrap). - weakref_module = _builtin_from_name('_weakref') + weakref_module = _bootstrap._builtin_from_name('_weakref') setattr(self_module, '_weakref', weakref_module) # Directly load the winreg module (needed during bootstrap). if builtin_os == 'nt': - winreg_module = _builtin_from_name('winreg') + winreg_module = _bootstrap._builtin_from_name('winreg') setattr(self_module, '_winreg', winreg_module) # Constants @@ -2449,13 +1411,16 @@ def _setup(sys_module, _imp_module): WindowsRegistryFinder.DEBUG_BUILD = True -def _install(sys_module, _imp_module): - """Install importlib as the implementation of import.""" - _setup(sys_module, _imp_module) +def _install(_bootstrap_module): + """Install the path-based import components.""" + _setup(_bootstrap_module) supported_loaders = _get_supported_file_loaders() sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)]) - sys.meta_path.append(BuiltinImporter) - sys.meta_path.append(FrozenImporter) if _os.__name__ == 'nt': sys.meta_path.append(WindowsRegistryFinder) sys.meta_path.append(PathFinder) + + # XXX We expose a couple of classes in _bootstrap for the sake of + # a setuptools bug (https://bitbucket.org/pypa/setuptools/issue/378). + _bootstrap_module.FileFinder = FileFinder + _bootstrap_module.SourceFileLoader = SourceFileLoader diff --git a/Darwin/lib/python3.4/importlib/abc.py b/Darwin/lib/python3.5/importlib/abc.py similarity index 91% rename from Darwin/lib/python3.4/importlib/abc.py rename to Darwin/lib/python3.5/importlib/abc.py index 558abd3..11af22d 100644 --- a/Darwin/lib/python3.4/importlib/abc.py +++ b/Darwin/lib/python3.5/importlib/abc.py @@ -1,12 +1,18 @@ """Abstract base classes related to import.""" from . import _bootstrap +from . import _bootstrap_external from . 
import machinery try: import _frozen_importlib +# import _frozen_importlib_external except ImportError as exc: if exc.name != '_frozen_importlib': raise _frozen_importlib = None +try: + import _frozen_importlib_external +except ImportError as exc: + _frozen_importlib_external = _bootstrap_external import abc @@ -14,7 +20,10 @@ def _register(abstract_cls, *classes): for cls in classes: abstract_cls.register(cls) if _frozen_importlib is not None: - frozen_cls = getattr(_frozen_importlib, cls.__name__) + try: + frozen_cls = getattr(_frozen_importlib, cls.__name__) + except AttributeError: + frozen_cls = getattr(_frozen_importlib_external, cls.__name__) abstract_cls.register(frozen_cls) @@ -102,7 +111,7 @@ class PathEntryFinder(Finder): else: return None, [] - find_module = _bootstrap._find_module_shim + find_module = _bootstrap_external._find_module_shim def invalidate_caches(self): """An optional method for clearing the finder's cache, if any. @@ -122,11 +131,8 @@ class Loader(metaclass=abc.ABCMeta): This method should raise ImportError if anything prevents it from creating a new module. It may return None to indicate that the spec should create the new module. - - create_module() is optional. - """ - # By default, defer to _SpecMethods.create() for the new module. + # By default, defer to default semantics for the new module. return None # We don't define exec_module() here since that would break @@ -217,15 +223,16 @@ class InspectLoader(Loader): """ raise ImportError - def source_to_code(self, data, path=''): + @staticmethod + def source_to_code(data, path=''): """Compile 'data' into a code object. The 'data' argument can be anything that compile() can handle. The'path' argument should be where the data was retrieved (when applicable).""" return compile(data, path, 'exec', dont_inherit=True) - exec_module = _bootstrap._LoaderBasics.exec_module - load_module = _bootstrap._LoaderBasics.load_module + exec_module = _bootstrap_external._LoaderBasics.exec_module + load_module = _bootstrap_external._LoaderBasics.load_module _register(InspectLoader, machinery.BuiltinImporter, machinery.FrozenImporter) @@ -267,7 +274,7 @@ class ExecutionLoader(InspectLoader): _register(ExecutionLoader, machinery.ExtensionFileLoader) -class FileLoader(_bootstrap.FileLoader, ResourceLoader, ExecutionLoader): +class FileLoader(_bootstrap_external.FileLoader, ResourceLoader, ExecutionLoader): """Abstract base class partially implementing the ResourceLoader and ExecutionLoader ABCs.""" @@ -276,7 +283,7 @@ _register(FileLoader, machinery.SourceFileLoader, machinery.SourcelessFileLoader) -class SourceLoader(_bootstrap.SourceLoader, ResourceLoader, ExecutionLoader): +class SourceLoader(_bootstrap_external.SourceLoader, ResourceLoader, ExecutionLoader): """Abstract base class for loading source code (and optionally any corresponding bytecode). 
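The abc.py changes above complete the loader-protocol split: create_module() may return None to ask for default module creation, InspectLoader now inherits exec_module() from _bootstrap_external._LoaderBasics while load_module() merely delegates to _load_module_shim(), and source_to_code() became a staticmethod. A minimal sketch (not part of this patch) of driving that protocol by hand; the InMemoryLoader class, the source string, and the module name "demo_mod" are illustrative assumptions only:

import importlib.abc
import importlib.util

class InMemoryLoader(importlib.abc.InspectLoader):
    """Hypothetical loader that serves source code held in a plain string."""

    def __init__(self, source):
        self._source = source

    def get_source(self, fullname):
        # InspectLoader.get_code() compiles whatever this returns via source_to_code().
        return self._source

    def is_package(self, fullname):
        return False

loader = InMemoryLoader("ANSWER = 42\n")
spec = importlib.util.spec_from_loader("demo_mod", loader)
module = importlib.util.module_from_spec(spec)   # create_module() returns None -> default module creation
loader.exec_module(module)                       # exec_module() inherited from _LoaderBasics
print(module.ANSWER)                             # 42

Because exec_module() is the preferred entry point, load_module() survives in these classes only as a thin shim around the bootstrap machinery.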
diff --git a/Darwin/lib/python3.5/importlib/machinery.py b/Darwin/lib/python3.5/importlib/machinery.py new file mode 100644 index 0000000..1b2b5c9 --- /dev/null +++ b/Darwin/lib/python3.5/importlib/machinery.py @@ -0,0 +1,21 @@ +"""The machinery of importlib: finders, loaders, hooks, etc.""" + +import _imp + +from ._bootstrap import ModuleSpec +from ._bootstrap import BuiltinImporter +from ._bootstrap import FrozenImporter +from ._bootstrap_external import (SOURCE_SUFFIXES, DEBUG_BYTECODE_SUFFIXES, + OPTIMIZED_BYTECODE_SUFFIXES, BYTECODE_SUFFIXES, + EXTENSION_SUFFIXES) +from ._bootstrap_external import WindowsRegistryFinder +from ._bootstrap_external import PathFinder +from ._bootstrap_external import FileFinder +from ._bootstrap_external import SourceFileLoader +from ._bootstrap_external import SourcelessFileLoader +from ._bootstrap_external import ExtensionFileLoader + + +def all_suffixes(): + """Returns a list of all recognized module suffixes for this process""" + return SOURCE_SUFFIXES + BYTECODE_SUFFIXES + EXTENSION_SUFFIXES diff --git a/Darwin/lib/python3.4/importlib/util.py b/Darwin/lib/python3.5/importlib/util.py similarity index 63% rename from Darwin/lib/python3.4/importlib/util.py rename to Darwin/lib/python3.5/importlib/util.py index 6d73b1d..1dbff26 100644 --- a/Darwin/lib/python3.4/importlib/util.py +++ b/Darwin/lib/python3.5/importlib/util.py @@ -1,17 +1,19 @@ """Utility code for constructing importers, etc.""" - -from ._bootstrap import MAGIC_NUMBER -from ._bootstrap import cache_from_source -from ._bootstrap import decode_source -from ._bootstrap import source_from_cache -from ._bootstrap import spec_from_loader -from ._bootstrap import spec_from_file_location +from . import abc +from ._bootstrap import module_from_spec from ._bootstrap import _resolve_name +from ._bootstrap import spec_from_loader from ._bootstrap import _find_spec +from ._bootstrap_external import MAGIC_NUMBER +from ._bootstrap_external import cache_from_source +from ._bootstrap_external import decode_source +from ._bootstrap_external import source_from_cache +from ._bootstrap_external import spec_from_file_location from contextlib import contextmanager import functools import sys +import types import warnings @@ -54,7 +56,7 @@ def _find_spec_from_path(name, path=None): try: spec = module.__spec__ except AttributeError: - raise ValueError('{}.__spec__ is not set'.format(name)) + raise ValueError('{}.__spec__ is not set'.format(name)) from None else: if spec is None: raise ValueError('{}.__spec__ is None'.format(name)) @@ -94,7 +96,7 @@ def find_spec(name, package=None): try: spec = module.__spec__ except AttributeError: - raise ValueError('{}.__spec__ is not set'.format(name)) + raise ValueError('{}.__spec__ is not set'.format(name)) from None else: if spec is None: raise ValueError('{}.__spec__ is None'.format(name)) @@ -200,3 +202,94 @@ def module_for_loader(fxn): return fxn(self, module, *args, **kwargs) return module_for_loader_wrapper + + +class _Module(types.ModuleType): + + """A subclass of the module type to allow __class__ manipulation.""" + + +class _LazyModule(types.ModuleType): + + """A subclass of the module type which triggers loading upon attribute access.""" + + def __getattribute__(self, attr): + """Trigger the load of the module and return the attribute.""" + # All module metadata must be garnered from __spec__ in order to avoid + # using mutated values. + # Stop triggering this method. 
+ self.__class__ = _Module + # Get the original name to make sure no object substitution occurred + # in sys.modules. + original_name = self.__spec__.name + # Figure out exactly what attributes were mutated between the creation + # of the module and now. + attrs_then = self.__spec__.loader_state + attrs_now = self.__dict__ + attrs_updated = {} + for key, value in attrs_now.items(): + # Code that set the attribute may have kept a reference to the + # assigned object, making identity more important than equality. + if key not in attrs_then: + attrs_updated[key] = value + elif id(attrs_now[key]) != id(attrs_then[key]): + attrs_updated[key] = value + self.__spec__.loader.exec_module(self) + # If exec_module() was used directly there is no guarantee the module + # object was put into sys.modules. + if original_name in sys.modules: + if id(self) != id(sys.modules[original_name]): + msg = ('module object for {!r} substituted in sys.modules ' + 'during a lazy load') + raise ValueError(msg.format(original_name)) + # Update after loading since that's what would happen in an eager + # loading situation. + self.__dict__.update(attrs_updated) + return getattr(self, attr) + + def __delattr__(self, attr): + """Trigger the load and then perform the deletion.""" + # To trigger the load and raise an exception if the attribute + # doesn't exist. + self.__getattribute__(attr) + delattr(self, attr) + + +class LazyLoader(abc.Loader): + + """A loader that creates a module which defers loading until attribute access.""" + + @staticmethod + def __check_eager_loader(loader): + if not hasattr(loader, 'exec_module'): + raise TypeError('loader must define exec_module()') + elif hasattr(loader.__class__, 'create_module'): + if abc.Loader.create_module != loader.__class__.create_module: + # Only care if create_module() is overridden in a subclass of + # importlib.abc.Loader. + raise TypeError('loader cannot define create_module()') + + @classmethod + def factory(cls, loader): + """Construct a callable which returns the eager loader made lazy.""" + cls.__check_eager_loader(loader) + return lambda *args, **kwargs: cls(loader(*args, **kwargs)) + + def __init__(self, loader): + self.__check_eager_loader(loader) + self.loader = loader + + def create_module(self, spec): + """Create a module which can have its __class__ manipulated.""" + return _Module(spec.name) + + def exec_module(self, module): + """Make the module load lazily.""" + module.__spec__.loader = self.loader + module.__loader__ = self.loader + # Don't need to worry about deep-copying as trying to set an attribute + # on an object would have triggered the load, + # e.g. ``module.__spec__.loader = None`` would trigger a load from + # trying to access module.__spec__. 
+ module.__spec__.loader_state = module.__dict__.copy() + module.__class__ = _LazyModule diff --git a/Darwin/lib/python3.4/inspect.py b/Darwin/lib/python3.5/inspect.py similarity index 82% rename from Darwin/lib/python3.4/inspect.py rename to Darwin/lib/python3.5/inspect.py index 4c3e33d..bf4f87d 100644 --- a/Darwin/lib/python3.4/inspect.py +++ b/Darwin/lib/python3.5/inspect.py @@ -17,7 +17,7 @@ Here are some of the useful functions provided by this module: getclasstree() - arrange classes so as to represent their hierarchy getargspec(), getargvalues(), getcallargs() - get info about function arguments - getfullargspec() - same, with support for Python-3000 features + getfullargspec() - same, with support for Python 3 features formatargspec(), formatargvalues() - format an argument spec getouterframes(), getinnerframes() - get info about frames currentframe() - get the current stack frame @@ -32,6 +32,9 @@ __author__ = ('Ka-Ping Yee ', 'Yury Selivanov ') import ast +import dis +import collections.abc +import enum import importlib.machinery import itertools import linecache @@ -48,18 +51,10 @@ from operator import attrgetter from collections import namedtuple, OrderedDict # Create constants for the compiler flags in Include/code.h -# We try to get them from dis to avoid duplication, but fall -# back to hardcoding so the dependency is optional -try: - from dis import COMPILER_FLAG_NAMES as _flag_names -except ImportError: - CO_OPTIMIZED, CO_NEWLOCALS = 0x1, 0x2 - CO_VARARGS, CO_VARKEYWORDS = 0x4, 0x8 - CO_NESTED, CO_GENERATOR, CO_NOFREE = 0x10, 0x20, 0x40 -else: - mod_dict = globals() - for k, v in _flag_names.items(): - mod_dict["CO_" + v] = k +# We try to get them from dis to avoid duplication +mod_dict = globals() +for k, v in dis.COMPILER_FLAG_NAMES.items(): + mod_dict["CO_" + v] = k # See Include/object.h TPFLAGS_IS_ABSTRACT = 1 << 20 @@ -182,6 +177,15 @@ def isgeneratorfunction(object): return bool((isfunction(object) or ismethod(object)) and object.__code__.co_flags & CO_GENERATOR) +def iscoroutinefunction(object): + """Return true if the object is a coroutine function. + + Coroutine functions are defined with "async def" syntax, + or generators decorated with "types.coroutine". + """ + return bool((isfunction(object) or ismethod(object)) and + object.__code__.co_flags & CO_COROUTINE) + def isgenerator(object): """Return true if the object is a generator. @@ -199,6 +203,17 @@ def isgenerator(object): throw used to raise an exception inside the generator""" return isinstance(object, types.GeneratorType) +def iscoroutine(object): + """Return true if the object is a coroutine.""" + return isinstance(object, types.CoroutineType) + +def isawaitable(object): + """Return true is object can be passed to an ``await`` expression.""" + return (isinstance(object, types.CoroutineType) or + isinstance(object, types.GeneratorType) and + object.gi_code.co_flags & CO_ITERABLE_COROUTINE or + isinstance(object, collections.abc.Awaitable)) + def istraceback(object): """Return true if the object is a traceback. 
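The inspect.py hunk that ends here adds the coroutine predicates iscoroutinefunction(), iscoroutine() and isawaitable(). A short sketch (not part of this patch) of how they behave; the function names ping and legacy_ping are illustrative assumptions only:

import inspect
import types

async def ping():
    return "pong"

@types.coroutine
def legacy_ping():
    yield

print(inspect.iscoroutinefunction(ping))   # True: defined with "async def"
coro = ping()
print(inspect.iscoroutine(coro))           # True: an instance of types.CoroutineType
print(inspect.isawaitable(coro))           # True: a valid target of an "await" expression
print(inspect.isawaitable(legacy_ping()))  # True: generator flagged CO_ITERABLE_COROUTINE
coro.close()                               # avoid the "coroutine was never awaited" warning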
@@ -380,7 +395,7 @@ def classify_class_attrs(cls): # first look in the classes for srch_cls in class_bases: srch_obj = getattr(srch_cls, name, None) - if srch_obj == get_obj: + if srch_obj is get_obj: last_cls = srch_cls # then check the metaclasses for srch_cls in metamro: @@ -388,7 +403,7 @@ def classify_class_attrs(cls): srch_obj = srch_cls.__getattr__(cls, name) except AttributeError: continue - if srch_obj == get_obj: + if srch_obj is get_obj: last_cls = srch_cls if last_cls is not None: homecls = last_cls @@ -402,7 +417,7 @@ def classify_class_attrs(cls): # unable to locate the attribute anywhere, most likely due to # buggy custom __dir__; discard and move on continue - obj = get_obj or dict_obj + obj = get_obj if get_obj is not None else dict_obj # Classify the object or its descriptor. if isinstance(dict_obj, staticmethod): kind = "static method" @@ -467,6 +482,74 @@ def indentsize(line): expline = line.expandtabs() return len(expline) - len(expline.lstrip()) +def _findclass(func): + cls = sys.modules.get(func.__module__) + if cls is None: + return None + for name in func.__qualname__.split('.')[:-1]: + cls = getattr(cls, name) + if not isclass(cls): + return None + return cls + +def _finddoc(obj): + if isclass(obj): + for base in obj.__mro__: + if base is not object: + try: + doc = base.__doc__ + except AttributeError: + continue + if doc is not None: + return doc + return None + + if ismethod(obj): + name = obj.__func__.__name__ + self = obj.__self__ + if (isclass(self) and + getattr(getattr(self, name, None), '__func__') is obj.__func__): + # classmethod + cls = self + else: + cls = self.__class__ + elif isfunction(obj): + name = obj.__name__ + cls = _findclass(obj) + if cls is None or getattr(cls, name) is not obj: + return None + elif isbuiltin(obj): + name = obj.__name__ + self = obj.__self__ + if (isclass(self) and + self.__qualname__ + '.' + name == obj.__qualname__): + # classmethod + cls = self + else: + cls = self.__class__ + elif ismethoddescriptor(obj) or isdatadescriptor(obj): + name = obj.__name__ + cls = obj.__objclass__ + if getattr(cls, name) is not obj: + return None + elif isinstance(obj, property): + func = f.fget + name = func.__name__ + cls = _findclass(func) + if cls is None or getattr(cls, name) is not obj: + return None + else: + return None + + for base in cls.__mro__: + try: + doc = getattr(base, name).__doc__ + except AttributeError: + continue + if doc is not None: + return doc + return None + def getdoc(object): """Get the documentation string for an object. @@ -477,6 +560,11 @@ def getdoc(object): doc = object.__doc__ except AttributeError: return None + if doc is None: + try: + doc = _finddoc(object) + except (AttributeError, TypeError): + return None if not isinstance(doc, str): return None return cleandoc(doc) @@ -652,11 +740,17 @@ def findsource(object): in the file and the line number indexes a line in that list. An OSError is raised if the source code cannot be retrieved.""" - file = getfile(object) - sourcefile = getsourcefile(object) - if not sourcefile and file[:1] + file[-1:] != '<>': - raise OSError('source code not available') - file = sourcefile if sourcefile else file + file = getsourcefile(object) + if file: + # Invalidate cache if needed. + linecache.checkcache(file) + else: + file = getfile(object) + # Allow filenames in form of "" to pass through. + # `doctest` monkeypatches `linecache` module to enable + # inspection, so let `linecache.getlines` to be called. 
+ if not (file.startswith('<') and file.endswith('>')): + raise OSError('source code not available') module = getmodule(object, file) if module: @@ -704,7 +798,7 @@ def findsource(object): if not hasattr(object, 'co_firstlineno'): raise OSError('could not find function definition') lnum = object.co_firstlineno - 1 - pat = re.compile(r'^(\s*def\s)|(.*(? 0: if pat.match(lines[lnum]): break lnum = lnum - 1 @@ -765,21 +859,37 @@ class BlockFinder: self.islambda = False self.started = False self.passline = False + self.indecorator = False + self.decoratorhasargs = False self.last = 1 def tokeneater(self, type, token, srowcol, erowcol, line): - if not self.started: + if not self.started and not self.indecorator: + # skip any decorators + if token == "@": + self.indecorator = True # look for the first "def", "class" or "lambda" - if token in ("def", "class", "lambda"): + elif token in ("def", "class", "lambda"): if token == "lambda": self.islambda = True self.started = True self.passline = True # skip to the end of the line + elif token == "(": + if self.indecorator: + self.decoratorhasargs = True + elif token == ")": + if self.indecorator: + self.indecorator = False + self.decoratorhasargs = False elif type == tokenize.NEWLINE: self.passline = False # stop skipping when a NEWLINE is seen self.last = srowcol[0] if self.islambda: # lambdas always end at the first NEWLINE raise EndOfBlock + # hitting a NEWLINE when in a decorator without args + # ends the decorator + if self.indecorator and not self.decoratorhasargs: + self.indecorator = False elif self.passline: pass elif type == tokenize.INDENT: @@ -816,10 +926,13 @@ def getsourcelines(object): corresponding to the object and the line number indicates where in the original source file the first line of code was found. An OSError is raised if the source code cannot be retrieved.""" + object = unwrap(object) lines, lnum = findsource(object) - if ismodule(object): return lines, 0 - else: return getblock(lines[lnum:]), lnum + 1 + if ismodule(object): + return lines, 0 + else: + return getblock(lines[lnum:]), lnum + 1 def getsource(object): """Return the text of the source code for an object. @@ -913,17 +1026,18 @@ ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults') def getargspec(func): """Get the names and default values of a function's arguments. - A tuple of four things is returned: (args, varargs, varkw, defaults). - 'args' is a list of the argument names. - 'args' will include keyword-only argument names. - 'varargs' and 'varkw' are the names of the * and ** arguments or None. + A tuple of four things is returned: (args, varargs, keywords, defaults). + 'args' is a list of the argument names, including keyword-only argument names. + 'varargs' and 'keywords' are the names of the * and ** arguments or None. 'defaults' is an n-tuple of the default values of the last n arguments. - Use the getfullargspec() API for Python-3000 code, as annotations + Use the getfullargspec() API for Python 3 code, as annotations and keyword arguments are supported. getargspec() will raise ValueError if the func has either annotations or keyword arguments. """ - + warnings.warn("inspect.getargspec() is deprecated, " + "use inspect.signature() instead", DeprecationWarning, + stacklevel=2) args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = \ getfullargspec(func) if kwonlyargs or ann: @@ -947,6 +1061,8 @@ def getfullargspec(func): 'annotations' is a dictionary mapping argument names to annotations. 
The first four items in the tuple correspond to getargspec(). + + This function is deprecated, use inspect.signature() instead. """ try: @@ -966,9 +1082,10 @@ def getfullargspec(func): # getfullargspec() historically ignored __wrapped__ attributes, # so we ensure that remains the case in 3.3+ - sig = _signature_internal(func, - follow_wrapper_chains=False, - skip_bound_arg=False) + sig = _signature_from_callable(func, + follow_wrapper_chains=False, + skip_bound_arg=False, + sigcls=Signature) except Exception as ex: # Most of the times 'signature' will raise ValueError. # But, it can also raise AttributeError, and, maybe something @@ -1037,8 +1154,8 @@ def getargvalues(frame): def formatannotation(annotation, base_module=None): if isinstance(annotation, type): if annotation.__module__ in ('builtins', base_module): - return annotation.__name__ - return annotation.__module__+'.'+annotation.__name__ + return annotation.__qualname__ + return annotation.__module__+'.'+annotation.__qualname__ return repr(annotation) def formatannotationrelativeto(object): @@ -1311,6 +1428,8 @@ def getlineno(frame): # FrameType.f_lineno is now a descriptor that grovels co_lnotab return frame.f_lineno +FrameInfo = namedtuple('FrameInfo', ('frame',) + Traceback._fields) + def getouterframes(frame, context=1): """Get a list of records for a frame and all higher (calling) frames. @@ -1318,7 +1437,8 @@ def getouterframes(frame, context=1): name, a list of lines of context, and index within the context.""" framelist = [] while frame: - framelist.append((frame,) + getframeinfo(frame, context)) + frameinfo = (frame,) + getframeinfo(frame, context) + framelist.append(FrameInfo(*frameinfo)) frame = frame.f_back return framelist @@ -1329,7 +1449,8 @@ def getinnerframes(tb, context=1): name, a list of lines of context, and index within the context.""" framelist = [] while tb: - framelist.append((tb.tb_frame,) + getframeinfo(tb, context)) + frameinfo = (tb.tb_frame,) + getframeinfo(tb, context) + framelist.append(FrameInfo(*frameinfo)) tb = tb.tb_next return framelist @@ -1479,6 +1600,45 @@ def getgeneratorlocals(generator): else: return {} + +# ------------------------------------------------ coroutine introspection + +CORO_CREATED = 'CORO_CREATED' +CORO_RUNNING = 'CORO_RUNNING' +CORO_SUSPENDED = 'CORO_SUSPENDED' +CORO_CLOSED = 'CORO_CLOSED' + +def getcoroutinestate(coroutine): + """Get current state of a coroutine object. + + Possible states are: + CORO_CREATED: Waiting to start execution. + CORO_RUNNING: Currently being executed by the interpreter. + CORO_SUSPENDED: Currently suspended at an await expression. + CORO_CLOSED: Execution has completed. + """ + if coroutine.cr_running: + return CORO_RUNNING + if coroutine.cr_frame is None: + return CORO_CLOSED + if coroutine.cr_frame.f_lasti == -1: + return CORO_CREATED + return CORO_SUSPENDED + + +def getcoroutinelocals(coroutine): + """ + Get the mapping of coroutine local variables to their current values. + + A dict is returned, with the keys the local variable names and values the + bound values.""" + frame = getattr(coroutine, "cr_frame", None) + if frame is not None: + return frame.f_locals + else: + return {} + + ############################################################################### ### Function Signature Object (PEP 362) ############################################################################### @@ -1495,6 +1655,10 @@ _NonUserDefinedCallables = (_WrapperDescriptor, def _signature_get_user_defined_method(cls, method_name): + """Private helper. 
Checks if ``cls`` has an attribute + named ``method_name`` and returns it only if it is a + pure python function. + """ try: meth = getattr(cls, method_name) except AttributeError: @@ -1507,9 +1671,10 @@ def _signature_get_user_defined_method(cls, method_name): def _signature_get_partial(wrapped_sig, partial, extra_args=()): - # Internal helper to calculate how 'wrapped_sig' signature will - # look like after applying a 'functools.partial' object (or alike) - # on it. + """Private helper to calculate how 'wrapped_sig' signature will + look like after applying a 'functools.partial' object (or alike) + on it. + """ old_params = wrapped_sig.parameters new_params = OrderedDict(old_params.items()) @@ -1582,8 +1747,9 @@ def _signature_get_partial(wrapped_sig, partial, extra_args=()): def _signature_bound_method(sig): - # Internal helper to transform signatures for unbound - # functions to bound methods + """Private helper to transform signatures for unbound + functions to bound methods. + """ params = tuple(sig.parameters.values()) @@ -1607,8 +1773,9 @@ def _signature_bound_method(sig): def _signature_is_builtin(obj): - # Internal helper to test if `obj` is a callable that might - # support Argument Clinic's __text_signature__ protocol. + """Private helper to test if `obj` is a callable that might + support Argument Clinic's __text_signature__ protocol. + """ return (isbuiltin(obj) or ismethoddescriptor(obj) or isinstance(obj, _NonUserDefinedCallables) or @@ -1618,10 +1785,11 @@ def _signature_is_builtin(obj): def _signature_is_functionlike(obj): - # Internal helper to test if `obj` is a duck type of FunctionType. - # A good example of such objects are functions compiled with - # Cython, which have all attributes that a pure Python function - # would have, but have their code statically compiled. + """Private helper to test if `obj` is a duck type of FunctionType. + A good example of such objects are functions compiled with + Cython, which have all attributes that a pure Python function + would have, but have their code statically compiled. + """ if not callable(obj) or isclass(obj): # All function-like objects are obviously callables, @@ -1642,11 +1810,12 @@ def _signature_is_functionlike(obj): def _signature_get_bound_param(spec): - # Internal helper to get first parameter name from a - # __text_signature__ of a builtin method, which should - # be in the following format: '($param1, ...)'. - # Assumptions are that the first argument won't have - # a default value or an annotation. + """ Private helper to get first parameter name from a + __text_signature__ of a builtin method, which should + be in the following format: '($param1, ...)'. + Assumptions are that the first argument won't have + a default value or an annotation. + """ assert spec.startswith('($') @@ -1665,7 +1834,9 @@ def _signature_get_bound_param(spec): def _signature_strip_non_python_syntax(signature): """ - Takes a signature in Argument Clinic's extended signature format. + Private helper function. Takes a signature in Argument Clinic's + extended signature format. 
+ Returns a tuple of three things: * that signature re-rendered in standard Python syntax, * the index of the "self" parameter (generally 0), or None if @@ -1734,8 +1905,10 @@ def _signature_strip_non_python_syntax(signature): def _signature_fromstr(cls, obj, s, skip_bound_arg=True): - # Internal helper to parse content of '__text_signature__' - # and return a Signature based on it + """Private helper to parse content of '__text_signature__' + and return a Signature based on it. + """ + Parameter = cls._parameter_cls clean_signature, self_parameter, last_positional_only = \ @@ -1873,8 +2046,10 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True): def _signature_from_builtin(cls, func, skip_bound_arg=True): - # Internal helper function to get signature for - # builtin callables + """Private helper function to get signature for + builtin callables. + """ + if not _signature_is_builtin(func): raise TypeError("{!r} is not a Python builtin " "function".format(func)) @@ -1886,7 +2061,95 @@ def _signature_from_builtin(cls, func, skip_bound_arg=True): return _signature_fromstr(cls, func, s, skip_bound_arg) -def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): +def _signature_from_function(cls, func): + """Private helper: constructs Signature for the given python function.""" + + is_duck_function = False + if not isfunction(func): + if _signature_is_functionlike(func): + is_duck_function = True + else: + # If it's not a pure Python function, and not a duck type + # of pure function: + raise TypeError('{!r} is not a Python function'.format(func)) + + Parameter = cls._parameter_cls + + # Parameter information. + func_code = func.__code__ + pos_count = func_code.co_argcount + arg_names = func_code.co_varnames + positional = tuple(arg_names[:pos_count]) + keyword_only_count = func_code.co_kwonlyargcount + keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] + annotations = func.__annotations__ + defaults = func.__defaults__ + kwdefaults = func.__kwdefaults__ + + if defaults: + pos_default_count = len(defaults) + else: + pos_default_count = 0 + + parameters = [] + + # Non-keyword-only parameters w/o defaults. + non_default_count = pos_count - pos_default_count + for name in positional[:non_default_count]: + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD)) + + # ... w/ defaults. + for offset, name in enumerate(positional[non_default_count:]): + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_POSITIONAL_OR_KEYWORD, + default=defaults[offset])) + + # *args + if func_code.co_flags & CO_VARARGS: + name = arg_names[pos_count + keyword_only_count] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_POSITIONAL)) + + # Keyword-only parameters. 
+ for name in keyword_only: + default = _empty + if kwdefaults is not None: + default = kwdefaults.get(name, _empty) + + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_KEYWORD_ONLY, + default=default)) + # **kwargs + if func_code.co_flags & CO_VARKEYWORDS: + index = pos_count + keyword_only_count + if func_code.co_flags & CO_VARARGS: + index += 1 + + name = arg_names[index] + annotation = annotations.get(name, _empty) + parameters.append(Parameter(name, annotation=annotation, + kind=_VAR_KEYWORD)) + + # Is 'func' is a pure Python function - don't validate the + # parameters list (for correct order and defaults), it should be OK. + return cls(parameters, + return_annotation=annotations.get('return', _empty), + __validate_parameters__=is_duck_function) + + +def _signature_from_callable(obj, *, + follow_wrapper_chains=True, + skip_bound_arg=True, + sigcls): + + """Private helper function to get signature for arbitrary + callable objects. + """ if not callable(obj): raise TypeError('{!r} is not a callable object'.format(obj)) @@ -1894,9 +2157,12 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): if isinstance(obj, types.MethodType): # In this case we skip the first parameter of the underlying # function (usually `self` or `cls`). - sig = _signature_internal(obj.__func__, - follow_wrapper_chains, - skip_bound_arg) + sig = _signature_from_callable( + obj.__func__, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) + if skip_bound_arg: return _signature_bound_method(sig) else: @@ -1905,6 +2171,15 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): # Was this function wrapped by a decorator? if follow_wrapper_chains: obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__"))) + if isinstance(obj, types.MethodType): + # If the unwrapped object is a *method*, we might want to + # skip its first parameter (self). + # See test_signature_wrapped_bound_method for details. 
+ return _signature_from_callable( + obj, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) try: sig = obj.__signature__ @@ -1912,6 +2187,10 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): pass else: if sig is not None: + if not isinstance(sig, Signature): + raise TypeError( + 'unexpected object {!r} in __signature__ ' + 'attribute'.format(sig)) return sig try: @@ -1927,9 +2206,12 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): # (usually `self`, or `cls`) will not be passed # automatically (as for boundmethods) - wrapped_sig = _signature_internal(partialmethod.func, - follow_wrapper_chains, - skip_bound_arg) + wrapped_sig = _signature_from_callable( + partialmethod.func, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) + sig = _signature_get_partial(wrapped_sig, partialmethod, (None,)) first_wrapped_param = tuple(wrapped_sig.parameters.values())[0] @@ -1940,16 +2222,18 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): if isfunction(obj) or _signature_is_functionlike(obj): # If it's a pure Python function, or an object that is duck type # of a Python function (Cython functions, for instance), then: - return Signature.from_function(obj) + return _signature_from_function(sigcls, obj) if _signature_is_builtin(obj): - return _signature_from_builtin(Signature, obj, + return _signature_from_builtin(sigcls, obj, skip_bound_arg=skip_bound_arg) if isinstance(obj, functools.partial): - wrapped_sig = _signature_internal(obj.func, - follow_wrapper_chains, - skip_bound_arg) + wrapped_sig = _signature_from_callable( + obj.func, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) return _signature_get_partial(wrapped_sig, obj) sig = None @@ -1960,23 +2244,29 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): # in its metaclass call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: - sig = _signature_internal(call, - follow_wrapper_chains, - skip_bound_arg) + sig = _signature_from_callable( + call, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) else: # Now we check if the 'obj' class has a '__new__' method new = _signature_get_user_defined_method(obj, '__new__') if new is not None: - sig = _signature_internal(new, - follow_wrapper_chains, - skip_bound_arg) + sig = _signature_from_callable( + new, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) else: # Finally, we should have at least __init__ implemented init = _signature_get_user_defined_method(obj, '__init__') if init is not None: - sig = _signature_internal(init, - follow_wrapper_chains, - skip_bound_arg) + sig = _signature_from_callable( + init, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) if sig is None: # At this point we know, that `obj` is a class, with no user- @@ -1998,7 +2288,7 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): if text_sig: # If 'obj' class has a __text_signature__ attribute: # return a signature based on it - return _signature_fromstr(Signature, obj, text_sig) + return _signature_fromstr(sigcls, obj, text_sig) # No '__text_signature__' was found for the 'obj' class. 
# Last option is to check if its '__init__' is @@ -2006,9 +2296,13 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): if type not in obj.__mro__: # We have a class (not metaclass), but no user-defined # __init__ or __new__ for it - if obj.__init__ is object.__init__: + if (obj.__init__ is object.__init__ and + obj.__new__ is object.__new__): # Return a signature of 'object' builtin. return signature(object) + else: + raise ValueError( + 'no signature found for builtin type {!r}'.format(obj)) elif not isinstance(obj, _NonUserDefinedCallables): # An object with __call__ @@ -2018,9 +2312,11 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): call = _signature_get_user_defined_method(type(obj), '__call__') if call is not None: try: - sig = _signature_internal(call, - follow_wrapper_chains, - skip_bound_arg) + sig = _signature_from_callable( + call, + follow_wrapper_chains=follow_wrapper_chains, + skip_bound_arg=skip_bound_arg, + sigcls=sigcls) except ValueError as ex: msg = 'no signature found for {!r}'.format(obj) raise ValueError(msg) from ex @@ -2040,41 +2336,35 @@ def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True): raise ValueError('callable {!r} is not supported by signature'.format(obj)) -def signature(obj): - '''Get a signature object for the passed callable.''' - return _signature_internal(obj) - class _void: - '''A private marker - used in Parameter & Signature''' + """A private marker - used in Parameter & Signature.""" class _empty: - pass + """Marker object for Signature.empty and Parameter.empty.""" -class _ParameterKind(int): - def __new__(self, *args, name): - obj = int.__new__(self, *args) - obj._name = name - return obj +class _ParameterKind(enum.IntEnum): + POSITIONAL_ONLY = 0 + POSITIONAL_OR_KEYWORD = 1 + VAR_POSITIONAL = 2 + KEYWORD_ONLY = 3 + VAR_KEYWORD = 4 def __str__(self): - return self._name - - def __repr__(self): - return '<_ParameterKind: {!r}>'.format(self._name) + return self._name_ -_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY') -_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD') -_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL') -_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY') -_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD') +_POSITIONAL_ONLY = _ParameterKind.POSITIONAL_ONLY +_POSITIONAL_OR_KEYWORD = _ParameterKind.POSITIONAL_OR_KEYWORD +_VAR_POSITIONAL = _ParameterKind.VAR_POSITIONAL +_KEYWORD_ONLY = _ParameterKind.KEYWORD_ONLY +_VAR_KEYWORD = _ParameterKind.VAR_KEYWORD class Parameter: - '''Represents a parameter in a function signature. + """Represents a parameter in a function signature. Has the following public attributes: @@ -2093,7 +2383,7 @@ class Parameter: Possible values: `Parameter.POSITIONAL_ONLY`, `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`, `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`. 
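For context, _ParameterKind keeps the same integer values (0-4) after the move to enum.IntEnum, so the members remain usable in comparisons and sorting while gaining readable names. A minimal sketch of the resulting behaviour (expected output shown in comments):

    from inspect import Parameter

    print(str(Parameter.KEYWORD_ONLY))     # KEYWORD_ONLY  (__str__ returns _name_)
    print(int(Parameter.KEYWORD_ONLY))     # 3
    print(Parameter.KEYWORD_ONLY == 3)     # True -- IntEnum preserves the integer semantics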
- ''' + """ __slots__ = ('_name', '_kind', '_default', '_annotation') @@ -2130,6 +2420,16 @@ class Parameter: self._name = name + def __reduce__(self): + return (type(self), + (self._name, self._kind), + {'_default': self._default, + '_annotation': self._annotation}) + + def __setstate__(self, state): + self._default = state['_default'] + self._annotation = state['_annotation'] + @property def name(self): return self._name @@ -2148,7 +2448,7 @@ class Parameter: def replace(self, *, name=_void, kind=_void, annotation=_void, default=_void): - '''Creates a customized copy of the Parameter.''' + """Creates a customized copy of the Parameter.""" if name is _void: name = self._name @@ -2184,22 +2484,24 @@ class Parameter: return formatted def __repr__(self): - return '<{} at {:#x} {!r}>'.format(self.__class__.__name__, - id(self), self.name) + return '<{} "{}">'.format(self.__class__.__name__, self) + + def __hash__(self): + return hash((self.name, self.kind, self.annotation, self.default)) def __eq__(self, other): - return (issubclass(other.__class__, Parameter) and - self._name == other._name and + if self is other: + return True + if not isinstance(other, Parameter): + return NotImplemented + return (self._name == other._name and self._kind == other._kind and self._default == other._default and self._annotation == other._annotation) - def __ne__(self, other): - return not self.__eq__(other) - class BoundArguments: - '''Result of `Signature.bind` call. Holds the mapping of arguments + """Result of `Signature.bind` call. Holds the mapping of arguments to the function's parameters. Has the following public attributes: @@ -2213,7 +2515,9 @@ class BoundArguments: Tuple of positional arguments values. * kwargs : dict Dict of keyword arguments values. - ''' + """ + + __slots__ = ('arguments', '_signature', '__weakref__') def __init__(self, signature, arguments): self.arguments = arguments @@ -2276,17 +2580,60 @@ class BoundArguments: return kwargs + def apply_defaults(self): + """Set default values for missing arguments. + + For variable-positional arguments (*args) the default is an + empty tuple. + + For variable-keyword arguments (**kwargs) the default is an + empty dict. + """ + arguments = self.arguments + if not arguments: + return + new_arguments = [] + for name, param in self._signature.parameters.items(): + try: + new_arguments.append((name, arguments[name])) + except KeyError: + if param.default is not _empty: + val = param.default + elif param.kind is _VAR_POSITIONAL: + val = () + elif param.kind is _VAR_KEYWORD: + val = {} + else: + # This BoundArguments was likely produced by + # Signature.bind_partial(). 
+ continue + new_arguments.append((name, val)) + self.arguments = OrderedDict(new_arguments) + def __eq__(self, other): - return (issubclass(other.__class__, BoundArguments) and - self.signature == other.signature and + if self is other: + return True + if not isinstance(other, BoundArguments): + return NotImplemented + return (self.signature == other.signature and self.arguments == other.arguments) - def __ne__(self, other): - return not self.__eq__(other) + def __setstate__(self, state): + self._signature = state['_signature'] + self.arguments = state['arguments'] + + def __getstate__(self): + return {'_signature': self._signature, 'arguments': self.arguments} + + def __repr__(self): + args = [] + for arg, value in self.arguments.items(): + args.append('{}={!r}'.format(arg, value)) + return '<{} ({})>'.format(self.__class__.__name__, ', '.join(args)) class Signature: - '''A Signature object represents the overall signature of a function. + """A Signature object represents the overall signature of a function. It stores a Parameter object for each parameter accepted by the function, as well as information specific to the function itself. @@ -2306,7 +2653,7 @@ class Signature: * bind_partial(*args, **kwargs) -> BoundArguments Creates a partial mapping from positional and keyword arguments to parameters (simulating 'functools.partial' behavior.) - ''' + """ __slots__ = ('_return_annotation', '_parameters') @@ -2317,9 +2664,9 @@ class Signature: def __init__(self, parameters=None, *, return_annotation=_empty, __validate_parameters__=True): - '''Constructs Signature from the given list of Parameter + """Constructs Signature from the given list of Parameter objects and 'return_annotation'. All arguments are optional. - ''' + """ if parameters is None: params = OrderedDict() @@ -2368,89 +2715,28 @@ class Signature: @classmethod def from_function(cls, func): - '''Constructs Signature for the given python function''' + """Constructs Signature for the given python function.""" - is_duck_function = False - if not isfunction(func): - if _signature_is_functionlike(func): - is_duck_function = True - else: - # If it's not a pure Python function, and not a duck type - # of pure function: - raise TypeError('{!r} is not a Python function'.format(func)) - - Parameter = cls._parameter_cls - - # Parameter information. - func_code = func.__code__ - pos_count = func_code.co_argcount - arg_names = func_code.co_varnames - positional = tuple(arg_names[:pos_count]) - keyword_only_count = func_code.co_kwonlyargcount - keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)] - annotations = func.__annotations__ - defaults = func.__defaults__ - kwdefaults = func.__kwdefaults__ - - if defaults: - pos_default_count = len(defaults) - else: - pos_default_count = 0 - - parameters = [] - - # Non-keyword-only parameters w/o defaults. - non_default_count = pos_count - pos_default_count - for name in positional[:non_default_count]: - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, - kind=_POSITIONAL_OR_KEYWORD)) - - # ... w/ defaults. 
- for offset, name in enumerate(positional[non_default_count:]): - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, - kind=_POSITIONAL_OR_KEYWORD, - default=defaults[offset])) - - # *args - if func_code.co_flags & CO_VARARGS: - name = arg_names[pos_count + keyword_only_count] - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, - kind=_VAR_POSITIONAL)) - - # Keyword-only parameters. - for name in keyword_only: - default = _empty - if kwdefaults is not None: - default = kwdefaults.get(name, _empty) - - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, - kind=_KEYWORD_ONLY, - default=default)) - # **kwargs - if func_code.co_flags & CO_VARKEYWORDS: - index = pos_count + keyword_only_count - if func_code.co_flags & CO_VARARGS: - index += 1 - - name = arg_names[index] - annotation = annotations.get(name, _empty) - parameters.append(Parameter(name, annotation=annotation, - kind=_VAR_KEYWORD)) - - # Is 'func' is a pure Python function - don't validate the - # parameters list (for correct order and defaults), it should be OK. - return cls(parameters, - return_annotation=annotations.get('return', _empty), - __validate_parameters__=is_duck_function) + warnings.warn("inspect.Signature.from_function() is deprecated, " + "use Signature.from_callable()", + DeprecationWarning, stacklevel=2) + return _signature_from_function(cls, func) @classmethod def from_builtin(cls, func): + """Constructs Signature for the given builtin function.""" + + warnings.warn("inspect.Signature.from_builtin() is deprecated, " + "use Signature.from_callable()", + DeprecationWarning, stacklevel=2) return _signature_from_builtin(cls, func) + @classmethod + def from_callable(cls, obj, *, follow_wrapped=True): + """Constructs Signature for the given callable object.""" + return _signature_from_callable(obj, sigcls=cls, + follow_wrapper_chains=follow_wrapped) + @property def parameters(self): return self._parameters @@ -2460,10 +2746,10 @@ class Signature: return self._return_annotation def replace(self, *, parameters=_void, return_annotation=_void): - '''Creates a customized copy of the Signature. + """Creates a customized copy of the Signature. Pass 'parameters' and/or 'return_annotation' arguments to override them in the new copy. 
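For orientation, Signature.from_callable() and the follow_wrapped flag it forwards give callers explicit control over __wrapped__ unwrapping, while the deprecated from_function()/from_builtin() now just delegate to the private helpers above. A small usage sketch; the decorator and function names here are purely illustrative:

    import functools
    import inspect

    def logged(func):
        @functools.wraps(func)                 # sets wrapper.__wrapped__ = func
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper

    @logged
    def greet(name, punctuation='!'):
        return 'Hello, ' + name + punctuation

    print(inspect.signature(greet))                         # (name, punctuation='!')
    print(inspect.signature(greet, follow_wrapped=False))   # (*args, **kwargs)
    print(inspect.Signature.from_callable(greet))           # (name, punctuation='!')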
- ''' + """ if parameters is _void: parameters = self.parameters.values() @@ -2474,41 +2760,29 @@ class Signature: return type(self)(parameters, return_annotation=return_annotation) + def _hash_basis(self): + params = tuple(param for param in self.parameters.values() + if param.kind != _KEYWORD_ONLY) + + kwo_params = {param.name: param for param in self.parameters.values() + if param.kind == _KEYWORD_ONLY} + + return params, kwo_params, self.return_annotation + + def __hash__(self): + params, kwo_params, return_annotation = self._hash_basis() + kwo_params = frozenset(kwo_params.values()) + return hash((params, kwo_params, return_annotation)) + def __eq__(self, other): - if (not issubclass(type(other), Signature) or - self.return_annotation != other.return_annotation or - len(self.parameters) != len(other.parameters)): - return False - - other_positions = {param: idx - for idx, param in enumerate(other.parameters.keys())} - - for idx, (param_name, param) in enumerate(self.parameters.items()): - if param.kind == _KEYWORD_ONLY: - try: - other_param = other.parameters[param_name] - except KeyError: - return False - else: - if param != other_param: - return False - else: - try: - other_idx = other_positions[param_name] - except KeyError: - return False - else: - if (idx != other_idx or - param != other.parameters[param_name]): - return False - - return True - - def __ne__(self, other): - return not self.__eq__(other) + if self is other: + return True + if not isinstance(other, Signature): + return NotImplemented + return self._hash_basis() == other._hash_basis() def _bind(self, args, kwargs, *, partial=False): - '''Private method. Don't use directly.''' + """Private method. Don't use directly.""" arguments = OrderedDict() @@ -2556,7 +2830,7 @@ class Signature: parameters_ex = (param,) break else: - msg = '{arg!r} parameter lacking default value' + msg = 'missing a required argument: {arg!r}' msg = msg.format(arg=param.name) raise TypeError(msg) from None else: @@ -2569,7 +2843,8 @@ class Signature: if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY): # Looks like we have no parameter for this positional # argument - raise TypeError('too many positional arguments') + raise TypeError( + 'too many positional arguments') from None if param.kind == _VAR_POSITIONAL: # We have an '*args'-like argument, let's fill it with @@ -2581,8 +2856,9 @@ class Signature: break if param.name in kwargs: - raise TypeError('multiple values for argument ' - '{arg!r}'.format(arg=param.name)) + raise TypeError( + 'multiple values for argument {arg!r}'.format( + arg=param.name)) from None arguments[param.name] = arg_val @@ -2611,7 +2887,7 @@ class Signature: # arguments. if (not partial and param.kind != _VAR_POSITIONAL and param.default is _empty): - raise TypeError('{arg!r} parameter lacking default value'. \ + raise TypeError('missing a required argument: {arg!r}'. \ format(arg=param_name)) from None else: @@ -2630,24 +2906,37 @@ class Signature: # Process our '**kwargs'-like parameter arguments[kwargs_param.name] = kwargs else: - raise TypeError('too many keyword arguments') + raise TypeError( + 'got an unexpected keyword argument {arg!r}'.format( + arg=next(iter(kwargs)))) return self._bound_arguments_cls(self, arguments) def bind(*args, **kwargs): - '''Get a BoundArguments object, that maps the passed `args` + """Get a BoundArguments object, that maps the passed `args` and `kwargs` to the function's signature. Raises `TypeError` if the passed arguments can not be bound. 
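The reworded TypeError messages above, together with the new BoundArguments.apply_defaults(), surface roughly as follows; the function is illustrative only:

    import inspect

    def connect(host, port=8080):
        pass

    sig = inspect.signature(connect)

    ba = sig.bind('example.org')
    ba.apply_defaults()
    print(ba.arguments)     # OrderedDict([('host', 'example.org'), ('port', 8080)])

    try:
        sig.bind()
    except TypeError as exc:
        print(exc)          # missing a required argument: 'host'

    try:
        sig.bind('example.org', retries=3)
    except TypeError as exc:
        print(exc)          # got an unexpected keyword argument 'retries'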
- ''' + """ return args[0]._bind(args[1:], kwargs) def bind_partial(*args, **kwargs): - '''Get a BoundArguments object, that partially maps the + """Get a BoundArguments object, that partially maps the passed `args` and `kwargs` to the function's signature. Raises `TypeError` if the passed arguments can not be bound. - ''' + """ return args[0]._bind(args[1:], kwargs, partial=True) + def __reduce__(self): + return (type(self), + (tuple(self._parameters.values()),), + {'_return_annotation': self._return_annotation}) + + def __setstate__(self, state): + self._return_annotation = state['_return_annotation'] + + def __repr__(self): + return '<{} {}>'.format(self.__class__.__name__, self) + def __str__(self): result = [] render_pos_only_separator = False @@ -2693,6 +2982,12 @@ class Signature: return rendered + +def signature(obj, *, follow_wrapped=True): + """Get a signature object for the passed callable.""" + return Signature.from_callable(obj, follow_wrapped=follow_wrapped) + + def _main(): """ Logic for inspecting an object given at command line """ import argparse diff --git a/Darwin/lib/python3.4/io.py b/Darwin/lib/python3.5/io.py similarity index 100% rename from Darwin/lib/python3.4/io.py rename to Darwin/lib/python3.5/io.py diff --git a/Darwin/lib/python3.4/ipaddress.py b/Darwin/lib/python3.5/ipaddress.py similarity index 81% rename from Darwin/lib/python3.4/ipaddress.py rename to Darwin/lib/python3.5/ipaddress.py index 54df39a..7469a9d 100644 --- a/Darwin/lib/python3.4/ipaddress.py +++ b/Darwin/lib/python3.5/ipaddress.py @@ -135,7 +135,7 @@ def v4_int_to_packed(address): """ try: return address.to_bytes(4, 'big') - except: + except OverflowError: raise ValueError("Address negative or too large for IPv4") @@ -151,7 +151,7 @@ def v6_int_to_packed(address): """ try: return address.to_bytes(16, 'big') - except: + except OverflowError: raise ValueError("Address negative or too large for IPv6") @@ -164,22 +164,23 @@ def _split_optional_netmask(address): def _find_address_range(addresses): - """Find a sequence of IPv#Address. + """Find a sequence of sorted deduplicated IPv#Address. Args: addresses: a list of IPv#Address objects. - Returns: + Yields: A tuple containing the first and last IP addresses in the sequence. """ - first = last = addresses[0] - for ip in addresses[1:]: - if ip._ip == last._ip + 1: - last = ip - else: - break - return (first, last) + it = iter(addresses) + first = last = next(it) + for ip in it: + if ip._ip != last._ip + 1: + yield first, last + first = ip + last = ip + yield first, last def _count_righthand_zero_bits(number, bits): @@ -195,11 +196,7 @@ def _count_righthand_zero_bits(number, bits): """ if number == 0: return bits - for i in range(bits): - if (number >> i) & 1: - return i - # All bits of interest were zero, even if there are more in the number - return bits + return min(bits, (~number & (number-1)).bit_length()) def summarize_address_range(first, last): @@ -250,15 +247,14 @@ def summarize_address_range(first, last): while first_int <= last_int: nbits = min(_count_righthand_zero_bits(first_int, ip_bits), (last_int - first_int + 1).bit_length() - 1) - net = ip('%s/%d' % (first, ip_bits - nbits)) + net = ip((first_int, ip_bits - nbits)) yield net first_int += 1 << nbits if first_int - 1 == ip._ALL_ONES: break - first = first.__class__(first_int) -def _collapse_addresses_recursive(addresses): +def _collapse_addresses_internal(addresses): """Loops through the addresses, collapsing concurrent netblocks. 
Example: @@ -268,7 +264,7 @@ def _collapse_addresses_recursive(addresses): ip3 = IPv4Network('192.0.2.128/26') ip4 = IPv4Network('192.0.2.192/26') - _collapse_addresses_recursive([ip1, ip2, ip3, ip4]) -> + _collapse_addresses_internal([ip1, ip2, ip3, ip4]) -> [IPv4Network('192.0.2.0/24')] This shouldn't be called directly; it is called via @@ -282,28 +278,29 @@ def _collapse_addresses_recursive(addresses): passed. """ - while True: - last_addr = None - ret_array = [] - optimized = False - - for cur_addr in addresses: - if not ret_array: - last_addr = cur_addr - ret_array.append(cur_addr) - elif (cur_addr.network_address >= last_addr.network_address and - cur_addr.broadcast_address <= last_addr.broadcast_address): - optimized = True - elif cur_addr == list(last_addr.supernet().subnets())[1]: - ret_array[-1] = last_addr = last_addr.supernet() - optimized = True - else: - last_addr = cur_addr - ret_array.append(cur_addr) - - addresses = ret_array - if not optimized: - return addresses + # First merge + to_merge = list(addresses) + subnets = {} + while to_merge: + net = to_merge.pop() + supernet = net.supernet() + existing = subnets.get(supernet) + if existing is None: + subnets[supernet] = net + elif existing != net: + # Merge consecutive subnets + del subnets[supernet] + to_merge.append(supernet) + # Then iterate over resulting networks, skipping subsumed subnets + last = None + for net in sorted(subnets.values()): + if last is not None: + # Since they are sorted, last.network_address <= net.network_address + # is a given. + if last.broadcast_address >= net.broadcast_address: + continue + yield net + last = net def collapse_addresses(addresses): @@ -324,7 +321,6 @@ def collapse_addresses(addresses): TypeError: If passed a list of mixed version objects. 
""" - i = 0 addrs = [] ips = [] nets = [] @@ -352,15 +348,13 @@ def collapse_addresses(addresses): # sort and dedup ips = sorted(set(ips)) - nets = sorted(set(nets)) - while i < len(ips): - (first, last) = _find_address_range(ips[i:]) - i = ips.index(last) + 1 - addrs.extend(summarize_address_range(first, last)) + # find consecutive address ranges in the sorted sequence and summarize them + if ips: + for first, last in _find_address_range(ips): + addrs.extend(summarize_address_range(first, last)) - return iter(_collapse_addresses_recursive(sorted( - addrs + nets, key=_BaseNetwork._get_networks_key))) + return _collapse_addresses_internal(addrs + nets) def get_mixed_type_key(obj): @@ -388,43 +382,12 @@ def get_mixed_type_key(obj): return NotImplemented -class _TotalOrderingMixin: - # Helper that derives the other comparison operations from - # __lt__ and __eq__ - # We avoid functools.total_ordering because it doesn't handle - # NotImplemented correctly yet (http://bugs.python.org/issue10042) - def __eq__(self, other): - raise NotImplementedError - def __ne__(self, other): - equal = self.__eq__(other) - if equal is NotImplemented: - return NotImplemented - return not equal - def __lt__(self, other): - raise NotImplementedError - def __le__(self, other): - less = self.__lt__(other) - if less is NotImplemented or not less: - return self.__eq__(other) - return less - def __gt__(self, other): - less = self.__lt__(other) - if less is NotImplemented: - return NotImplemented - equal = self.__eq__(other) - if equal is NotImplemented: - return NotImplemented - return not (less or equal) - def __ge__(self, other): - less = self.__lt__(other) - if less is NotImplemented: - return NotImplemented - return not less - -class _IPAddressBase(_TotalOrderingMixin): +class _IPAddressBase: """The mother class.""" + __slots__ = () + @property def exploded(self): """Return the longhand version of the IP address as a string.""" @@ -435,6 +398,17 @@ class _IPAddressBase(_TotalOrderingMixin): """Return the shorthand version of the IP address as a string.""" return str(self) + @property + def reverse_pointer(self): + """The name of the reverse DNS pointer for the IP address, e.g.: + >>> ipaddress.ip_address("127.0.0.1").reverse_pointer + '1.0.0.127.in-addr.arpa' + >>> ipaddress.ip_address("2001:db8::1").reverse_pointer + '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa' + + """ + return self._reverse_pointer() + @property def version(self): msg = '%200s has no version specified' % (type(self),) @@ -456,7 +430,8 @@ class _IPAddressBase(_TotalOrderingMixin): raise AddressValueError(msg % (address, address_len, expected_len, self._version)) - def _ip_int_from_prefix(self, prefixlen): + @classmethod + def _ip_int_from_prefix(cls, prefixlen): """Turn the prefix length into a bitwise netmask Args: @@ -466,13 +441,14 @@ class _IPAddressBase(_TotalOrderingMixin): An integer. """ - return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen) + return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen) - def _prefix_from_ip_int(self, ip_int): + @classmethod + def _prefix_from_ip_int(cls, ip_int): """Return prefix length from the bitwise netmask. Args: - ip_int: An integer, the netmask in axpanded bitwise format + ip_int: An integer, the netmask in expanded bitwise format Returns: An integer, the prefix length. 
@@ -481,22 +457,24 @@ class _IPAddressBase(_TotalOrderingMixin): ValueError: If the input intermingles zeroes & ones """ trailing_zeroes = _count_righthand_zero_bits(ip_int, - self._max_prefixlen) - prefixlen = self._max_prefixlen - trailing_zeroes + cls._max_prefixlen) + prefixlen = cls._max_prefixlen - trailing_zeroes leading_ones = ip_int >> trailing_zeroes all_ones = (1 << prefixlen) - 1 if leading_ones != all_ones: - byteslen = self._max_prefixlen // 8 + byteslen = cls._max_prefixlen // 8 details = ip_int.to_bytes(byteslen, 'big') msg = 'Netmask pattern %r mixes zeroes & ones' raise ValueError(msg % details) return prefixlen - def _report_invalid_netmask(self, netmask_str): + @classmethod + def _report_invalid_netmask(cls, netmask_str): msg = '%r is not a valid netmask' % netmask_str raise NetmaskValueError(msg) from None - def _prefix_from_prefix_string(self, prefixlen_str): + @classmethod + def _prefix_from_prefix_string(cls, prefixlen_str): """Return prefix length from a numeric string Args: @@ -511,16 +489,17 @@ class _IPAddressBase(_TotalOrderingMixin): # int allows a leading +/- as well as surrounding whitespace, # so we ensure that isn't the case if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str): - self._report_invalid_netmask(prefixlen_str) + cls._report_invalid_netmask(prefixlen_str) try: prefixlen = int(prefixlen_str) except ValueError: - self._report_invalid_netmask(prefixlen_str) - if not (0 <= prefixlen <= self._max_prefixlen): - self._report_invalid_netmask(prefixlen_str) + cls._report_invalid_netmask(prefixlen_str) + if not (0 <= prefixlen <= cls._max_prefixlen): + cls._report_invalid_netmask(prefixlen_str) return prefixlen - def _prefix_from_ip_string(self, ip_str): + @classmethod + def _prefix_from_ip_string(cls, ip_str): """Turn a netmask/hostmask string into a prefix length Args: @@ -534,26 +513,30 @@ class _IPAddressBase(_TotalOrderingMixin): """ # Parse the netmask/hostmask like an IP address. try: - ip_int = self._ip_int_from_string(ip_str) + ip_int = cls._ip_int_from_string(ip_str) except AddressValueError: - self._report_invalid_netmask(ip_str) + cls._report_invalid_netmask(ip_str) # Try matching a netmask (this would be /1*0*/ as a bitwise regexp). # Note that the two ambiguous cases (all-ones and all-zeroes) are # treated as netmasks. try: - return self._prefix_from_ip_int(ip_int) + return cls._prefix_from_ip_int(ip_int) except ValueError: pass # Invert the bits, and try matching a /0+1+/ hostmask instead. - ip_int ^= self._ALL_ONES + ip_int ^= cls._ALL_ONES try: - return self._prefix_from_ip_int(ip_int) + return cls._prefix_from_ip_int(ip_int) except ValueError: - self._report_invalid_netmask(ip_str) + cls._report_invalid_netmask(ip_str) + + def __reduce__(self): + return self.__class__, (str(self),) +@functools.total_ordering class _BaseAddress(_IPAddressBase): """A generic IP object. @@ -562,10 +545,7 @@ class _BaseAddress(_IPAddressBase): used by single IP addresses. 
""" - def __init__(self, address): - if (not isinstance(address, bytes) - and '/' in str(address)): - raise AddressValueError("Unexpected '/' in %r" % address) + __slots__ = () def __int__(self): return self._ip @@ -578,12 +558,11 @@ class _BaseAddress(_IPAddressBase): return NotImplemented def __lt__(self, other): + if not isinstance(other, _BaseAddress): + return NotImplemented if self._version != other._version: raise TypeError('%s and %s are not of the same version' % ( self, other)) - if not isinstance(other, _BaseAddress): - raise TypeError('%s and %s are not of the same type' % ( - self, other)) if self._ip != other._ip: return self._ip < other._ip return False @@ -612,7 +591,11 @@ class _BaseAddress(_IPAddressBase): def _get_address_key(self): return (self._version, self) + def __reduce__(self): + return self.__class__, (self._ip,) + +@functools.total_ordering class _BaseNetwork(_IPAddressBase): """A generic IP network object. @@ -662,12 +645,11 @@ class _BaseNetwork(_IPAddressBase): return self._address_class(broadcast + n) def __lt__(self, other): + if not isinstance(other, _BaseNetwork): + return NotImplemented if self._version != other._version: raise TypeError('%s and %s are not of the same version' % ( self, other)) - if not isinstance(other, _BaseNetwork): - raise TypeError('%s and %s are not of the same type' % ( - self, other)) if self.network_address != other.network_address: return self.network_address < other.network_address if self.netmask != other.netmask: @@ -798,7 +780,7 @@ class _BaseNetwork(_IPAddressBase): other.broadcast_address <= self.broadcast_address): raise ValueError('%s not contained in %s' % (other, self)) if other == self: - raise StopIteration + return # Make sure we're comparing the network of other. other = other.__class__('%s/%s' % (other.network_address, @@ -933,20 +915,11 @@ class _BaseNetwork(_IPAddressBase): 'prefix length diff %d is invalid for netblock %s' % ( new_prefixlen, self)) - first = self.__class__('%s/%s' % - (self.network_address, - self._prefixlen + prefixlen_diff)) - - yield first - current = first - while True: - broadcast = current.broadcast_address - if broadcast == self.broadcast_address: - return - new_addr = self._address_class(int(broadcast) + 1) - current = self.__class__('%s/%s' % (new_addr, - new_prefixlen)) - + start = int(self.network_address) + end = int(self.broadcast_address) + step = (int(self.hostmask) + 1) >> prefixlen_diff + for new_addr in range(start, end, step): + current = self.__class__((new_addr, new_prefixlen)) yield current def supernet(self, prefixlen_diff=1, new_prefix=None): @@ -980,15 +953,15 @@ class _BaseNetwork(_IPAddressBase): raise ValueError('cannot set prefixlen_diff and new_prefix') prefixlen_diff = self._prefixlen - new_prefix - if self.prefixlen - prefixlen_diff < 0: + new_prefixlen = self.prefixlen - prefixlen_diff + if new_prefixlen < 0: raise ValueError( 'current prefixlen is %d, cannot have a prefixlen_diff of %d' % (self.prefixlen, prefixlen_diff)) - # TODO (pmoody): optimize this. - t = self.__class__('%s/%d' % (self.network_address, - self.prefixlen - prefixlen_diff), - strict=False) - return t.__class__('%s/%d' % (t.network_address, t.prefixlen)) + return self.__class__(( + int(self.network_address) & (int(self.netmask) << prefixlen_diff), + new_prefixlen + )) @property def is_multicast(self): @@ -1082,21 +1055,49 @@ class _BaseV4: """ + __slots__ = () + _version = 4 # Equivalent to 255.255.255.255 or 32 bits of 1's. 
_ALL_ONES = (2**IPV4LENGTH) - 1 _DECIMAL_DIGITS = frozenset('0123456789') # the valid octets for host and netmasks. only useful for IPv4. - _valid_mask_octets = frozenset((255, 254, 252, 248, 240, 224, 192, 128, 0)) + _valid_mask_octets = frozenset({255, 254, 252, 248, 240, 224, 192, 128, 0}) - def __init__(self, address): - self._version = 4 - self._max_prefixlen = IPV4LENGTH + _max_prefixlen = IPV4LENGTH + # There are only a handful of valid v4 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} def _explode_shorthand_ip_string(self): return str(self) - def _ip_int_from_string(self, ip_str): + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. + + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, int): + prefixlen = arg + else: + try: + # Check for a netmask in prefix length form + prefixlen = cls._prefix_from_prefix_string(arg) + except NetmaskValueError: + # Check for a netmask or hostmask in dotted-quad form. + # This may raise NetmaskValueError. + prefixlen = cls._prefix_from_ip_string(arg) + netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): """Turn the given IP string into an integer for comparison. Args: @@ -1117,11 +1118,12 @@ class _BaseV4: raise AddressValueError("Expected 4 octets in %r" % ip_str) try: - return int.from_bytes(map(self._parse_octet, octets), 'big') + return int.from_bytes(map(cls._parse_octet, octets), 'big') except ValueError as exc: raise AddressValueError("%s in %r" % (exc, ip_str)) from None - def _parse_octet(self, octet_str): + @classmethod + def _parse_octet(cls, octet_str): """Convert a decimal octet into an integer. Args: @@ -1137,7 +1139,7 @@ class _BaseV4: if not octet_str: raise ValueError("Empty octet not permitted") # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._DECIMAL_DIGITS.issuperset(octet_str): + if not cls._DECIMAL_DIGITS.issuperset(octet_str): msg = "Only decimal digits permitted in %r" raise ValueError(msg % octet_str) # We do the length check second, since the invalid character error @@ -1157,7 +1159,8 @@ class _BaseV4: raise ValueError("Octet %d (> 255) not permitted" % octet_int) return octet_int - def _string_from_ip_int(self, ip_int): + @classmethod + def _string_from_ip_int(cls, ip_int): """Turns a 32-bit integer into dotted decimal notation. Args: @@ -1221,6 +1224,15 @@ class _BaseV4: return True return False + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv4 address. + + This implements the method described in RFC1035 3.5. + + """ + reverse_octets = str(self).split('.')[::-1] + return '.'.join(reverse_octets) + '.in-addr.arpa' + @property def max_prefixlen(self): return self._max_prefixlen @@ -1234,6 +1246,8 @@ class IPv4Address(_BaseV4, _BaseAddress): """Represent and manipulate single IPv4 Addresses.""" + __slots__ = ('_ip', '__weakref__') + def __init__(self, address): """ @@ -1250,9 +1264,6 @@ class IPv4Address(_BaseV4, _BaseAddress): AddressValueError: If ipaddress isn't a valid IPv4 address. """ - _BaseAddress.__init__(self, address) - _BaseV4.__init__(self, address) - # Efficient constructor from integer. 
if isinstance(address, int): self._check_int_address(address) @@ -1268,6 +1279,8 @@ class IPv4Address(_BaseV4, _BaseAddress): # Assume input argument to be string or any object representation # which converts into a formatted IP string. addr_str = str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) self._ip = self._ip_int_from_string(addr_str) @property @@ -1284,8 +1297,7 @@ class IPv4Address(_BaseV4, _BaseAddress): reserved IPv4 Network range. """ - reserved_network = IPv4Network('240.0.0.0/4') - return self in reserved_network + return self in self._constants._reserved_network @property @functools.lru_cache() @@ -1297,21 +1309,7 @@ class IPv4Address(_BaseV4, _BaseAddress): iana-ipv4-special-registry. """ - return (self in IPv4Network('0.0.0.0/8') or - self in IPv4Network('10.0.0.0/8') or - self in IPv4Network('127.0.0.0/8') or - self in IPv4Network('169.254.0.0/16') or - self in IPv4Network('172.16.0.0/12') or - self in IPv4Network('192.0.0.0/29') or - self in IPv4Network('192.0.0.170/31') or - self in IPv4Network('192.0.2.0/24') or - self in IPv4Network('192.168.0.0/16') or - self in IPv4Network('198.18.0.0/15') or - self in IPv4Network('198.51.100.0/24') or - self in IPv4Network('203.0.113.0/24') or - self in IPv4Network('240.0.0.0/4') or - self in IPv4Network('255.255.255.255/32')) - + return any(self in net for net in self._constants._private_networks) @property def is_multicast(self): @@ -1322,8 +1320,7 @@ class IPv4Address(_BaseV4, _BaseAddress): See RFC 3171 for details. """ - multicast_network = IPv4Network('224.0.0.0/4') - return self in multicast_network + return self in self._constants._multicast_network @property def is_unspecified(self): @@ -1334,8 +1331,7 @@ class IPv4Address(_BaseV4, _BaseAddress): RFC 5735 3. """ - unspecified_address = IPv4Address('0.0.0.0') - return self == unspecified_address + return self == self._constants._unspecified_address @property def is_loopback(self): @@ -1345,8 +1341,7 @@ class IPv4Address(_BaseV4, _BaseAddress): A boolean, True if the address is a loopback per RFC 3330. """ - loopback_network = IPv4Network('127.0.0.0/8') - return self in loopback_network + return self in self._constants._loopback_network @property def is_link_local(self): @@ -1356,8 +1351,7 @@ class IPv4Address(_BaseV4, _BaseAddress): A boolean, True if the address is link-local per RFC 3927. """ - linklocal_network = IPv4Network('169.254.0.0/16') - return self in linklocal_network + return self in self._constants._linklocal_network class IPv4Interface(IPv4Address): @@ -1369,6 +1363,18 @@ class IPv4Interface(IPv4Address): self._prefixlen = self._max_prefixlen return + if isinstance(address, tuple): + IPv4Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + + self.network = IPv4Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return + addr = _split_optional_netmask(address) IPv4Address.__init__(self, addr[0]) @@ -1408,6 +1414,8 @@ class IPv4Interface(IPv4Address): def __hash__(self): return self._ip ^ self._prefixlen ^ int(self.network.network_address) + __reduce__ = _IPAddressBase.__reduce__ + @property def ip(self): return IPv4Address(self._ip) @@ -1480,24 +1488,30 @@ class IPv4Network(_BaseV4, _BaseNetwork): supplied. 
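For context, the tuple form accepted here (and by the network classes further down) mirrors the (address, prefix) pairs the module now uses internally, and the is_* properties are served from the precomputed _IPv4Constants networks. A brief sketch, assuming the 3.5 behaviour shown in these hunks:

    import ipaddress

    print(ipaddress.IPv4Interface(('192.0.2.5', 24)))          # 192.0.2.5/24
    print(ipaddress.IPv4Network(('192.0.2.0', 24)) ==
          ipaddress.ip_network('192.0.2.0/24'))                # True
    print(ipaddress.ip_address('10.0.0.1').is_private)         # True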
""" - - _BaseV4.__init__(self, address) _BaseNetwork.__init__(self, address) - # Constructing from a packed address - if isinstance(address, bytes): + # Constructing from a packed address or integer + if isinstance(address, (int, bytes)): self.network_address = IPv4Address(address) - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ALL_ONES) - #fixme: address/network test here + self.netmask, self._prefixlen = self._make_netmask(self._max_prefixlen) + #fixme: address/network test here. return - # Efficient constructor from integer. - if isinstance(address, int): - self.network_address = IPv4Address(address) - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ALL_ONES) - #fixme: address/network test here. + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + # We weren't given an address[1] + arg = self._max_prefixlen + self.network_address = IPv4Address(address[0]) + self.netmask, self._prefixlen = self._make_netmask(arg) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv4Address(packed & + int(self.netmask)) return # Assume input argument to be string or any object representation @@ -1506,16 +1520,10 @@ class IPv4Network(_BaseV4, _BaseNetwork): self.network_address = IPv4Address(self._ip_int_from_string(addr[0])) if len(addr) == 2: - try: - # Check for a netmask in prefix length form - self._prefixlen = self._prefix_from_prefix_string(addr[1]) - except NetmaskValueError: - # Check for a netmask or hostmask in dotted-quad form. - # This may raise NetmaskValueError. - self._prefixlen = self._prefix_from_ip_string(addr[1]) + arg = addr[1] else: - self._prefixlen = self._max_prefixlen - self.netmask = IPv4Address(self._ip_int_from_prefix(self._prefixlen)) + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) if strict: if (IPv4Address(int(self.network_address) & int(self.netmask)) != @@ -1542,6 +1550,37 @@ class IPv4Network(_BaseV4, _BaseNetwork): not self.is_private) +class _IPv4Constants: + _linklocal_network = IPv4Network('169.254.0.0/16') + + _loopback_network = IPv4Network('127.0.0.0/8') + + _multicast_network = IPv4Network('224.0.0.0/4') + + _private_networks = [ + IPv4Network('0.0.0.0/8'), + IPv4Network('10.0.0.0/8'), + IPv4Network('127.0.0.0/8'), + IPv4Network('169.254.0.0/16'), + IPv4Network('172.16.0.0/12'), + IPv4Network('192.0.0.0/29'), + IPv4Network('192.0.0.170/31'), + IPv4Network('192.0.2.0/24'), + IPv4Network('192.168.0.0/16'), + IPv4Network('198.18.0.0/15'), + IPv4Network('198.51.100.0/24'), + IPv4Network('203.0.113.0/24'), + IPv4Network('240.0.0.0/4'), + IPv4Network('255.255.255.255/32'), + ] + + _reserved_network = IPv4Network('240.0.0.0/4') + + _unspecified_address = IPv4Address('0.0.0.0') + + +IPv4Address._constants = _IPv4Constants + class _BaseV6: @@ -1552,15 +1591,37 @@ class _BaseV6: """ + __slots__ = () + _version = 6 _ALL_ONES = (2**IPV6LENGTH) - 1 _HEXTET_COUNT = 8 _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef') + _max_prefixlen = IPV6LENGTH - def __init__(self, address): - self._version = 6 - self._max_prefixlen = IPV6LENGTH + # There are only a bunch of valid v6 netmasks, so we cache them all + # when constructed (see _make_netmask()). + _netmask_cache = {} - def _ip_int_from_string(self, ip_str): + @classmethod + def _make_netmask(cls, arg): + """Make a (netmask, prefix_len) tuple from the given argument. 
+ + Argument can be: + - an integer (the prefix length) + - a string representing the prefix length (e.g. "24") + - a string representing the prefix netmask (e.g. "255.255.255.0") + """ + if arg not in cls._netmask_cache: + if isinstance(arg, int): + prefixlen = arg + else: + prefixlen = cls._prefix_from_prefix_string(arg) + netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen)) + cls._netmask_cache[arg] = netmask, prefixlen + return cls._netmask_cache[arg] + + @classmethod + def _ip_int_from_string(cls, ip_str): """Turn an IPv6 ip_str into an integer. Args: @@ -1596,7 +1657,7 @@ class _BaseV6: # An IPv6 address can't have more than 8 colons (9 parts). # The extra colon comes from using the "::" notation for a single # leading or trailing zero part. - _max_parts = self._HEXTET_COUNT + 1 + _max_parts = cls._HEXTET_COUNT + 1 if len(parts) > _max_parts: msg = "At most %d colons permitted in %r" % (_max_parts-1, ip_str) raise AddressValueError(msg) @@ -1628,17 +1689,17 @@ class _BaseV6: if parts_lo: msg = "Trailing ':' only permitted as part of '::' in %r" raise AddressValueError(msg % ip_str) # :$ requires ::$ - parts_skipped = self._HEXTET_COUNT - (parts_hi + parts_lo) + parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo) if parts_skipped < 1: msg = "Expected at most %d other parts with '::' in %r" - raise AddressValueError(msg % (self._HEXTET_COUNT-1, ip_str)) + raise AddressValueError(msg % (cls._HEXTET_COUNT-1, ip_str)) else: # Otherwise, allocate the entire address to parts_hi. The # endpoints could still be empty, but _parse_hextet() will check # for that. - if len(parts) != self._HEXTET_COUNT: + if len(parts) != cls._HEXTET_COUNT: msg = "Exactly %d parts expected without '::' in %r" - raise AddressValueError(msg % (self._HEXTET_COUNT, ip_str)) + raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str)) if not parts[0]: msg = "Leading ':' only permitted as part of '::' in %r" raise AddressValueError(msg % ip_str) # ^: requires ^:: @@ -1654,16 +1715,17 @@ class _BaseV6: ip_int = 0 for i in range(parts_hi): ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) + ip_int |= cls._parse_hextet(parts[i]) ip_int <<= 16 * parts_skipped for i in range(-parts_lo, 0): ip_int <<= 16 - ip_int |= self._parse_hextet(parts[i]) + ip_int |= cls._parse_hextet(parts[i]) return ip_int except ValueError as exc: raise AddressValueError("%s in %r" % (exc, ip_str)) from None - def _parse_hextet(self, hextet_str): + @classmethod + def _parse_hextet(cls, hextet_str): """Convert an IPv6 hextet string into an integer. Args: @@ -1678,7 +1740,7 @@ class _BaseV6: """ # Whitelist the characters, since int() allows a lot of bizarre stuff. - if not self._HEX_DIGITS.issuperset(hextet_str): + if not cls._HEX_DIGITS.issuperset(hextet_str): raise ValueError("Only hex digits permitted in %r" % hextet_str) # We do the length check second, since the invalid character error # is likely to be more informative for the user @@ -1688,7 +1750,8 @@ class _BaseV6: # Length check means we can skip checking the integer value return int(hextet_str, 16) - def _compress_hextets(self, hextets): + @classmethod + def _compress_hextets(cls, hextets): """Compresses a list of hextets. Compresses a list of strings, replacing the longest continuous @@ -1735,7 +1798,8 @@ class _BaseV6: return hextets - def _string_from_ip_int(self, ip_int=None): + @classmethod + def _string_from_ip_int(cls, ip_int=None): """Turns a 128-bit integer into hexadecimal notation. 
Args: @@ -1749,15 +1813,15 @@ class _BaseV6: """ if ip_int is None: - ip_int = int(self._ip) + ip_int = int(cls._ip) - if ip_int > self._ALL_ONES: + if ip_int > cls._ALL_ONES: raise ValueError('IPv6 address is too large') hex_str = '%032x' % ip_int hextets = ['%x' % int(hex_str[x:x+4], 16) for x in range(0, 32, 4)] - hextets = self._compress_hextets(hextets) + hextets = cls._compress_hextets(hextets) return ':'.join(hextets) def _explode_shorthand_ip_string(self): @@ -1784,6 +1848,15 @@ class _BaseV6: return '%s/%d' % (':'.join(parts), self._prefixlen) return ':'.join(parts) + def _reverse_pointer(self): + """Return the reverse DNS pointer name for the IPv6 address. + + This implements the method described in RFC3596 2.5. + + """ + reverse_chars = self.exploded[::-1].replace(':', '') + return '.'.join(reverse_chars) + '.ip6.arpa' + @property def max_prefixlen(self): return self._max_prefixlen @@ -1797,6 +1870,8 @@ class IPv6Address(_BaseV6, _BaseAddress): """Represent and manipulate single IPv6 Addresses.""" + __slots__ = ('_ip', '__weakref__') + def __init__(self, address): """Instantiate a new IPv6 address object. @@ -1814,9 +1889,6 @@ class IPv6Address(_BaseV6, _BaseAddress): AddressValueError: If address isn't a valid IPv6 address. """ - _BaseAddress.__init__(self, address) - _BaseV6.__init__(self, address) - # Efficient constructor from integer. if isinstance(address, int): self._check_int_address(address) @@ -1832,6 +1904,8 @@ class IPv6Address(_BaseV6, _BaseAddress): # Assume input argument to be string or any object representation # which converts into a formatted IP string. addr_str = str(address) + if '/' in addr_str: + raise AddressValueError("Unexpected '/' in %r" % address) self._ip = self._ip_int_from_string(addr_str) @property @@ -1848,8 +1922,7 @@ class IPv6Address(_BaseV6, _BaseAddress): See RFC 2373 2.7 for details. """ - multicast_network = IPv6Network('ff00::/8') - return self in multicast_network + return self in self._constants._multicast_network @property def is_reserved(self): @@ -1860,16 +1933,7 @@ class IPv6Address(_BaseV6, _BaseAddress): reserved IPv6 Network ranges. """ - reserved_networks = [IPv6Network('::/8'), IPv6Network('100::/8'), - IPv6Network('200::/7'), IPv6Network('400::/6'), - IPv6Network('800::/5'), IPv6Network('1000::/4'), - IPv6Network('4000::/3'), IPv6Network('6000::/3'), - IPv6Network('8000::/3'), IPv6Network('A000::/3'), - IPv6Network('C000::/3'), IPv6Network('E000::/4'), - IPv6Network('F000::/5'), IPv6Network('F800::/6'), - IPv6Network('FE00::/9')] - - return any(self in x for x in reserved_networks) + return any(self in x for x in self._constants._reserved_networks) @property def is_link_local(self): @@ -1879,8 +1943,7 @@ class IPv6Address(_BaseV6, _BaseAddress): A boolean, True if the address is reserved per RFC 4291. """ - linklocal_network = IPv6Network('fe80::/10') - return self in linklocal_network + return self in self._constants._linklocal_network @property def is_site_local(self): @@ -1894,8 +1957,7 @@ class IPv6Address(_BaseV6, _BaseAddress): A boolean, True if the address is reserved per RFC 3513 2.5.6. """ - sitelocal_network = IPv6Network('fec0::/10') - return self in sitelocal_network + return self in self._constants._sitelocal_network @property @functools.lru_cache() @@ -1907,16 +1969,7 @@ class IPv6Address(_BaseV6, _BaseAddress): iana-ipv6-special-registry. 
""" - return (self in IPv6Network('::1/128') or - self in IPv6Network('::/128') or - self in IPv6Network('::ffff:0:0/96') or - self in IPv6Network('100::/64') or - self in IPv6Network('2001::/23') or - self in IPv6Network('2001:2::/48') or - self in IPv6Network('2001:db8::/32') or - self in IPv6Network('2001:10::/28') or - self in IPv6Network('fc00::/7') or - self in IPv6Network('fe80::/10')) + return any(self in net for net in self._constants._private_networks) @property def is_global(self): @@ -2001,6 +2054,16 @@ class IPv6Interface(IPv6Address): self.network = IPv6Network(self._ip) self._prefixlen = self._max_prefixlen return + if isinstance(address, tuple): + IPv6Address.__init__(self, address[0]) + if len(address) > 1: + self._prefixlen = int(address[1]) + else: + self._prefixlen = self._max_prefixlen + self.network = IPv6Network(address, strict=False) + self.netmask = self.network.netmask + self.hostmask = self.network.hostmask + return addr = _split_optional_netmask(address) IPv6Address.__init__(self, addr[0]) @@ -2039,6 +2102,8 @@ class IPv6Interface(IPv6Address): def __hash__(self): return self._ip ^ self._prefixlen ^ int(self.network.network_address) + __reduce__ = _IPAddressBase.__reduce__ + @property def ip(self): return IPv6Address(self._ip) @@ -2115,21 +2180,28 @@ class IPv6Network(_BaseV6, _BaseNetwork): supplied. """ - _BaseV6.__init__(self, address) _BaseNetwork.__init__(self, address) - # Efficient constructor from integer. - if isinstance(address, int): + # Efficient constructor from integer or packed address + if isinstance(address, (bytes, int)): self.network_address = IPv6Address(address) - self._prefixlen = self._max_prefixlen - self.netmask = IPv6Address(self._ALL_ONES) + self.netmask, self._prefixlen = self._make_netmask(self._max_prefixlen) return - # Constructing from a packed address - if isinstance(address, bytes): - self.network_address = IPv6Address(address) - self._prefixlen = self._max_prefixlen - self.netmask = IPv6Address(self._ALL_ONES) + if isinstance(address, tuple): + if len(address) > 1: + arg = address[1] + else: + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) + self.network_address = IPv6Address(address[0]) + packed = int(self.network_address) + if packed & int(self.netmask) != packed: + if strict: + raise ValueError('%s has host bits set' % self) + else: + self.network_address = IPv6Address(packed & + int(self.netmask)) return # Assume input argument to be string or any object representation @@ -2139,12 +2211,11 @@ class IPv6Network(_BaseV6, _BaseNetwork): self.network_address = IPv6Address(self._ip_int_from_string(addr[0])) if len(addr) == 2: - # This may raise NetmaskValueError - self._prefixlen = self._prefix_from_prefix_string(addr[1]) + arg = addr[1] else: - self._prefixlen = self._max_prefixlen + arg = self._max_prefixlen + self.netmask, self._prefixlen = self._make_netmask(arg) - self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen)) if strict: if (IPv6Address(int(self.network_address) & int(self.netmask)) != self.network_address): @@ -2181,3 +2252,39 @@ class IPv6Network(_BaseV6, _BaseNetwork): """ return (self.network_address.is_site_local and self.broadcast_address.is_site_local) + + +class _IPv6Constants: + + _linklocal_network = IPv6Network('fe80::/10') + + _multicast_network = IPv6Network('ff00::/8') + + _private_networks = [ + IPv6Network('::1/128'), + IPv6Network('::/128'), + IPv6Network('::ffff:0:0/96'), + IPv6Network('100::/64'), + IPv6Network('2001::/23'), + 
IPv6Network('2001:2::/48'), + IPv6Network('2001:db8::/32'), + IPv6Network('2001:10::/28'), + IPv6Network('fc00::/7'), + IPv6Network('fe80::/10'), + ] + + _reserved_networks = [ + IPv6Network('::/8'), IPv6Network('100::/8'), + IPv6Network('200::/7'), IPv6Network('400::/6'), + IPv6Network('800::/5'), IPv6Network('1000::/4'), + IPv6Network('4000::/3'), IPv6Network('6000::/3'), + IPv6Network('8000::/3'), IPv6Network('A000::/3'), + IPv6Network('C000::/3'), IPv6Network('E000::/4'), + IPv6Network('F000::/5'), IPv6Network('F800::/6'), + IPv6Network('FE00::/9'), + ] + + _sitelocal_network = IPv6Network('fec0::/10') + + +IPv6Address._constants = _IPv6Constants diff --git a/Darwin/lib/python3.4/json/__init__.py b/Darwin/lib/python3.5/json/__init__.py similarity index 97% rename from Darwin/lib/python3.4/json/__init__.py rename to Darwin/lib/python3.5/json/__init__.py index 94f7d8c..2612657 100644 --- a/Darwin/lib/python3.4/json/__init__.py +++ b/Darwin/lib/python3.5/json/__init__.py @@ -98,12 +98,12 @@ Using json.tool from the shell to validate and pretty-print:: __version__ = '2.0.9' __all__ = [ 'dump', 'dumps', 'load', 'loads', - 'JSONDecoder', 'JSONEncoder', + 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder', ] __author__ = 'Bob Ippolito ' -from .decoder import JSONDecoder +from .decoder import JSONDecoder, JSONDecodeError from .encoder import JSONEncoder _default_encoder = JSONEncoder( @@ -184,7 +184,7 @@ def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True, default=None, sort_keys=False, **kw): """Serialize ``obj`` to a JSON formatted ``str``. - If ``skipkeys`` is false then ``dict`` keys that are not basic types + If ``skipkeys`` is true then ``dict`` keys that are not basic types (``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped instead of raising a ``TypeError``. 
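The corrected wording matches what skipkeys actually does; a short illustration:

    import json

    print(json.dumps({(1, 2): 'pair', 'ok': True}, skipkeys=True))
    # {"ok": true}   -- the non-string key is silently dropped
    # Without skipkeys=True the same call raises TypeError.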
@@ -311,7 +311,8 @@ def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None, raise TypeError('the JSON object must be str, not {!r}'.format( s.__class__.__name__)) if s.startswith(u'\ufeff'): - raise ValueError("Unexpected UTF-8 BOM (decode using utf-8-sig)") + raise JSONDecodeError("Unexpected UTF-8 BOM (decode using utf-8-sig)", + s, 0) if (cls is None and object_hook is None and parse_int is None and parse_float is None and parse_constant is None and object_pairs_hook is None and not kw): diff --git a/Darwin/lib/python3.4/json/decoder.py b/Darwin/lib/python3.5/json/decoder.py similarity index 84% rename from Darwin/lib/python3.4/json/decoder.py rename to Darwin/lib/python3.5/json/decoder.py index 59e5f41..0f03f20 100644 --- a/Darwin/lib/python3.4/json/decoder.py +++ b/Darwin/lib/python3.5/json/decoder.py @@ -8,7 +8,7 @@ try: except ImportError: c_scanstring = None -__all__ = ['JSONDecoder'] +__all__ = ['JSONDecoder', 'JSONDecodeError'] FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL @@ -17,32 +17,30 @@ PosInf = float('inf') NegInf = float('-inf') -def linecol(doc, pos): - if isinstance(doc, bytes): - newline = b'\n' - else: - newline = '\n' - lineno = doc.count(newline, 0, pos) + 1 - if lineno == 1: - colno = pos + 1 - else: - colno = pos - doc.rindex(newline, 0, pos) - return lineno, colno +class JSONDecodeError(ValueError): + """Subclass of ValueError with the following additional properties: + msg: The unformatted error message + doc: The JSON document being parsed + pos: The start index of doc where parsing failed + lineno: The line corresponding to pos + colno: The column corresponding to pos -def errmsg(msg, doc, pos, end=None): - # Note that this function is called from _json - lineno, colno = linecol(doc, pos) - if end is None: - fmt = '{0}: line {1} column {2} (char {3})' - return fmt.format(msg, lineno, colno, pos) - #fmt = '%s: line %d column %d (char %d)' - #return fmt % (msg, lineno, colno, pos) - endlineno, endcolno = linecol(doc, end) - fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})' - return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end) - #fmt = '%s: line %d column %d - line %d column %d (char %d - %d)' - #return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end) + """ + # Note that this exception is used from _json + def __init__(self, msg, doc, pos): + lineno = doc.count('\n', 0, pos) + 1 + colno = pos - doc.rfind('\n', 0, pos) + errmsg = '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos) + ValueError.__init__(self, errmsg) + self.msg = msg + self.doc = doc + self.pos = pos + self.lineno = lineno + self.colno = colno + + def __reduce__(self): + return self.__class__, (self.msg, self.doc, self.pos) _CONSTANTS = { @@ -66,7 +64,7 @@ def _decode_uXXXX(s, pos): except ValueError: pass msg = "Invalid \\uXXXX escape" - raise ValueError(errmsg(msg, s, pos)) + raise JSONDecodeError(msg, s, pos) def py_scanstring(s, end, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match): @@ -84,8 +82,7 @@ def py_scanstring(s, end, strict=True, while 1: chunk = _m(s, end) if chunk is None: - raise ValueError( - errmsg("Unterminated string starting at", s, begin)) + raise JSONDecodeError("Unterminated string starting at", s, begin) end = chunk.end() content, terminator = chunk.groups() # Content is contains zero or more unescaped string characters @@ -99,22 +96,21 @@ def py_scanstring(s, end, strict=True, if strict: #msg = "Invalid control character %r at" % (terminator,) msg = "Invalid control character {0!r} 
at".format(terminator) - raise ValueError(errmsg(msg, s, end)) + raise JSONDecodeError(msg, s, end) else: _append(terminator) continue try: esc = s[end] except IndexError: - raise ValueError( - errmsg("Unterminated string starting at", s, begin)) + raise JSONDecodeError("Unterminated string starting at", s, begin) # If not a unicode escape sequence, must be in the lookup table if esc != 'u': try: char = _b[esc] except KeyError: msg = "Invalid \\escape: {0!r}".format(esc) - raise ValueError(errmsg(msg, s, end)) + raise JSONDecodeError(msg, s, end) end += 1 else: uni = _decode_uXXXX(s, end) @@ -163,8 +159,8 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, pairs = object_hook(pairs) return pairs, end + 1 elif nextchar != '"': - raise ValueError(errmsg( - "Expecting property name enclosed in double quotes", s, end)) + raise JSONDecodeError( + "Expecting property name enclosed in double quotes", s, end) end += 1 while True: key, end = scanstring(s, end, strict) @@ -174,7 +170,7 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, if s[end:end + 1] != ':': end = _w(s, end).end() if s[end:end + 1] != ':': - raise ValueError(errmsg("Expecting ':' delimiter", s, end)) + raise JSONDecodeError("Expecting ':' delimiter", s, end) end += 1 try: @@ -188,7 +184,7 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, try: value, end = scan_once(s, end) except StopIteration as err: - raise ValueError(errmsg("Expecting value", s, err.value)) from None + raise JSONDecodeError("Expecting value", s, err.value) from None pairs_append((key, value)) try: nextchar = s[end] @@ -202,13 +198,13 @@ def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook, if nextchar == '}': break elif nextchar != ',': - raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1)) + raise JSONDecodeError("Expecting ',' delimiter", s, end - 1) end = _w(s, end).end() nextchar = s[end:end + 1] end += 1 if nextchar != '"': - raise ValueError(errmsg( - "Expecting property name enclosed in double quotes", s, end - 1)) + raise JSONDecodeError( + "Expecting property name enclosed in double quotes", s, end - 1) if object_pairs_hook is not None: result = object_pairs_hook(pairs) return result, end @@ -232,7 +228,7 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): try: value, end = scan_once(s, end) except StopIteration as err: - raise ValueError(errmsg("Expecting value", s, err.value)) from None + raise JSONDecodeError("Expecting value", s, err.value) from None _append(value) nextchar = s[end:end + 1] if nextchar in _ws: @@ -242,7 +238,7 @@ def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR): if nextchar == ']': break elif nextchar != ',': - raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1)) + raise JSONDecodeError("Expecting ',' delimiter", s, end - 1) try: if s[end] in _ws: end += 1 @@ -343,7 +339,7 @@ class JSONDecoder(object): obj, end = self.raw_decode(s, idx=_w(s, 0).end()) end = _w(s, end).end() if end != len(s): - raise ValueError(errmsg("Extra data", s, end, len(s))) + raise JSONDecodeError("Extra data", s, end) return obj def raw_decode(self, s, idx=0): @@ -358,5 +354,5 @@ class JSONDecoder(object): try: obj, end = self.scan_once(s, idx) except StopIteration as err: - raise ValueError(errmsg("Expecting value", s, err.value)) from None + raise JSONDecodeError("Expecting value", s, err.value) from None return obj, end diff --git 
a/Darwin/lib/python3.4/json/encoder.py b/Darwin/lib/python3.5/json/encoder.py similarity index 98% rename from Darwin/lib/python3.4/json/encoder.py rename to Darwin/lib/python3.5/json/encoder.py index 0513838..26e9eb2 100644 --- a/Darwin/lib/python3.4/json/encoder.py +++ b/Darwin/lib/python3.5/json/encoder.py @@ -6,6 +6,10 @@ try: from _json import encode_basestring_ascii as c_encode_basestring_ascii except ImportError: c_encode_basestring_ascii = None +try: + from _json import encode_basestring as c_encode_basestring +except ImportError: + c_encode_basestring = None try: from _json import make_encoder as c_make_encoder except ImportError: @@ -30,7 +34,7 @@ for i in range(0x20): INFINITY = float('inf') FLOAT_REPR = repr -def encode_basestring(s): +def py_encode_basestring(s): """Return a JSON representation of a Python string """ @@ -39,6 +43,9 @@ def encode_basestring(s): return '"' + ESCAPE.sub(replace, s) + '"' +encode_basestring = (c_encode_basestring or py_encode_basestring) + + def py_encode_basestring_ascii(s): """Return an ASCII-only JSON representation of a Python string diff --git a/Darwin/lib/python3.4/json/scanner.py b/Darwin/lib/python3.5/json/scanner.py similarity index 100% rename from Darwin/lib/python3.4/json/scanner.py rename to Darwin/lib/python3.5/json/scanner.py diff --git a/Darwin/lib/python3.5/json/tool.py b/Darwin/lib/python3.5/json/tool.py new file mode 100644 index 0000000..4f3182c --- /dev/null +++ b/Darwin/lib/python3.5/json/tool.py @@ -0,0 +1,50 @@ +r"""Command-line tool to validate and pretty-print JSON + +Usage:: + + $ echo '{"json":"obj"}' | python -m json.tool + { + "json": "obj" + } + $ echo '{ 1.2:3.4}' | python -m json.tool + Expecting property name enclosed in double quotes: line 1 column 3 (char 2) + +""" +import argparse +import collections +import json +import sys + + +def main(): + prog = 'python -m json.tool' + description = ('A simple command line interface for json module ' + 'to validate and pretty-print JSON objects.') + parser = argparse.ArgumentParser(prog=prog, description=description) + parser.add_argument('infile', nargs='?', type=argparse.FileType(), + help='a JSON file to be validated or pretty-printed') + parser.add_argument('outfile', nargs='?', type=argparse.FileType('w'), + help='write the output of infile to outfile') + parser.add_argument('--sort-keys', action='store_true', default=False, + help='sort the output of dictionaries alphabetically by key') + options = parser.parse_args() + + infile = options.infile or sys.stdin + outfile = options.outfile or sys.stdout + sort_keys = options.sort_keys + with infile: + try: + if sort_keys: + obj = json.load(infile) + else: + obj = json.load(infile, + object_pairs_hook=collections.OrderedDict) + except ValueError as e: + raise SystemExit(e) + with outfile: + json.dump(obj, outfile, sort_keys=sort_keys, indent=4) + outfile.write('\n') + + +if __name__ == '__main__': + main() diff --git a/Darwin/lib/python3.4/keyword.py b/Darwin/lib/python3.5/keyword.py similarity index 100% rename from Darwin/lib/python3.4/keyword.py rename to Darwin/lib/python3.5/keyword.py diff --git a/Darwin/lib/python3.5/lib-dynload/_bisect.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_bisect.cpython-35m-darwin.so new file mode 100755 index 0000000..9fa321d Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_bisect.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_bz2.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_bz2.cpython-35m-darwin.so new file mode 100755 
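The encoder.py hunk above renames the pure-Python escaper to py_encode_basestring and binds the public name to the C implementation from _json when that import succeeds, mirroring what encoder.py already did for encode_basestring_ascii. A small sketch of the effect, assuming the stock Python 3.5+ json.encoder module:

from json import encoder

# The public name is whichever implementation is available.
print(encoder.encode_basestring('a "quoted" word'))
# "a \"quoted\" word"   -- same result from the C and pure-Python versions
print(encoder.encode_basestring is encoder.py_encode_basestring)
# False when the _json accelerator imports, True on builds without it

The rewritten json/tool.py above also gains a --sort-keys option, so from a shell something like: echo '{"b": 1, "a": 2}' | python -m json.tool --sort-keys pretty-prints with the keys ordered alphabetically.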
index 0000000..d61bf41 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_bz2.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_cn.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_cn.cpython-35m-darwin.so new file mode 100755 index 0000000..ba49fb2 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_cn.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_hk.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_hk.cpython-35m-darwin.so new file mode 100755 index 0000000..fabec7b Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_hk.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_iso2022.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_iso2022.cpython-35m-darwin.so new file mode 100755 index 0000000..3bce37c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_iso2022.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_jp.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_jp.cpython-35m-darwin.so new file mode 100755 index 0000000..da9aef3 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_jp.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_kr.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_kr.cpython-35m-darwin.so new file mode 100755 index 0000000..2065585 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_kr.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_codecs_tw.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_codecs_tw.cpython-35m-darwin.so new file mode 100755 index 0000000..3ea75ec Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_codecs_tw.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/lib-dynload/_crypt.so b/Darwin/lib/python3.5/lib-dynload/_crypt.cpython-35m-darwin.so similarity index 68% rename from Darwin/lib/python3.4/lib-dynload/_crypt.so rename to Darwin/lib/python3.5/lib-dynload/_crypt.cpython-35m-darwin.so index 6a6420d..773c08b 100755 Binary files a/Darwin/lib/python3.4/lib-dynload/_crypt.so and b/Darwin/lib/python3.5/lib-dynload/_crypt.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_csv.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_csv.cpython-35m-darwin.so new file mode 100755 index 0000000..1460b9f Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_csv.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_ctypes.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_ctypes.cpython-35m-darwin.so new file mode 100755 index 0000000..4fe8625 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_ctypes.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_ctypes_test.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_ctypes_test.cpython-35m-darwin.so new file mode 100755 index 0000000..c2b21c9 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_ctypes_test.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_curses.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_curses.cpython-35m-darwin.so new file mode 100755 index 0000000..c26042c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_curses.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_curses_panel.cpython-35m-darwin.so 
b/Darwin/lib/python3.5/lib-dynload/_curses_panel.cpython-35m-darwin.so new file mode 100755 index 0000000..757efbe Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_curses_panel.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_datetime.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_datetime.cpython-35m-darwin.so new file mode 100755 index 0000000..2afd74d Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_datetime.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_dbm.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_dbm.cpython-35m-darwin.so new file mode 100755 index 0000000..7c27631 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_dbm.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_decimal.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_decimal.cpython-35m-darwin.so new file mode 100755 index 0000000..86f8a1b Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_decimal.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_elementtree.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_elementtree.cpython-35m-darwin.so new file mode 100755 index 0000000..9d5cf83 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_elementtree.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_hashlib.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_hashlib.cpython-35m-darwin.so new file mode 100755 index 0000000..2ee6c8c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_hashlib.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/lib-dynload/_heapq.so b/Darwin/lib/python3.5/lib-dynload/_heapq.cpython-35m-darwin.so similarity index 51% rename from Darwin/lib/python3.4/lib-dynload/_heapq.so rename to Darwin/lib/python3.5/lib-dynload/_heapq.cpython-35m-darwin.so index 4eb7756..de2d508 100755 Binary files a/Darwin/lib/python3.4/lib-dynload/_heapq.so and b/Darwin/lib/python3.5/lib-dynload/_heapq.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_json.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_json.cpython-35m-darwin.so new file mode 100755 index 0000000..2816687 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_json.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_lsprof.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_lsprof.cpython-35m-darwin.so new file mode 100755 index 0000000..e4fd2a6 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_lsprof.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_lzma.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_lzma.cpython-35m-darwin.so new file mode 100755 index 0000000..26477a1 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_lzma.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_md5.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_md5.cpython-35m-darwin.so new file mode 100755 index 0000000..aeed868 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_md5.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_multibytecodec.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_multibytecodec.cpython-35m-darwin.so new file mode 100755 index 0000000..8ee6b5c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_multibytecodec.cpython-35m-darwin.so differ diff --git 
a/Darwin/lib/python3.5/lib-dynload/_multiprocessing.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_multiprocessing.cpython-35m-darwin.so new file mode 100755 index 0000000..ba485a6 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_multiprocessing.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/lib-dynload/_opcode.so b/Darwin/lib/python3.5/lib-dynload/_opcode.cpython-35m-darwin.so similarity index 59% rename from Darwin/lib/python3.4/lib-dynload/_opcode.so rename to Darwin/lib/python3.5/lib-dynload/_opcode.cpython-35m-darwin.so index 442cceb..4b4eec6 100755 Binary files a/Darwin/lib/python3.4/lib-dynload/_opcode.so and b/Darwin/lib/python3.5/lib-dynload/_opcode.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_pickle.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_pickle.cpython-35m-darwin.so new file mode 100755 index 0000000..91f08ce Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_pickle.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_posixsubprocess.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_posixsubprocess.cpython-35m-darwin.so new file mode 100755 index 0000000..0841fc3 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_posixsubprocess.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_random.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_random.cpython-35m-darwin.so new file mode 100755 index 0000000..280d869 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_random.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_scproxy.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_scproxy.cpython-35m-darwin.so new file mode 100755 index 0000000..45d68ae Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_scproxy.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_sha1.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_sha1.cpython-35m-darwin.so new file mode 100755 index 0000000..5a4194c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_sha1.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_sha256.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_sha256.cpython-35m-darwin.so new file mode 100755 index 0000000..a8bf512 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_sha256.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_sha512.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_sha512.cpython-35m-darwin.so new file mode 100755 index 0000000..e5f8491 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_sha512.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_socket.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_socket.cpython-35m-darwin.so new file mode 100755 index 0000000..553fc6c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_socket.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_sqlite3.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_sqlite3.cpython-35m-darwin.so new file mode 100755 index 0000000..d7f54f2 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_sqlite3.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_ssl.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_ssl.cpython-35m-darwin.so new file mode 100755 index 0000000..2e8ee3f Binary files /dev/null and 
b/Darwin/lib/python3.5/lib-dynload/_ssl.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_struct.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_struct.cpython-35m-darwin.so new file mode 100755 index 0000000..3129748 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_struct.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_testbuffer.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_testbuffer.cpython-35m-darwin.so new file mode 100755 index 0000000..2417f49 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_testbuffer.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_testcapi.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_testcapi.cpython-35m-darwin.so new file mode 100755 index 0000000..05dcdec Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_testcapi.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/lib-dynload/_testimportmultiple.so b/Darwin/lib/python3.5/lib-dynload/_testimportmultiple.cpython-35m-darwin.so similarity index 66% rename from Darwin/lib/python3.4/lib-dynload/_testimportmultiple.so rename to Darwin/lib/python3.5/lib-dynload/_testimportmultiple.cpython-35m-darwin.so index cc428f6..6497c20 100755 Binary files a/Darwin/lib/python3.4/lib-dynload/_testimportmultiple.so and b/Darwin/lib/python3.5/lib-dynload/_testimportmultiple.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_testmultiphase.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_testmultiphase.cpython-35m-darwin.so new file mode 100755 index 0000000..200ee12 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_testmultiphase.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/_tkinter.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/_tkinter.cpython-35m-darwin.so new file mode 100755 index 0000000..ef79566 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/_tkinter.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/array.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/array.cpython-35m-darwin.so new file mode 100755 index 0000000..9cd854a Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/array.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/audioop.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/audioop.cpython-35m-darwin.so new file mode 100755 index 0000000..edaaf3d Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/audioop.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/binascii.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/binascii.cpython-35m-darwin.so new file mode 100755 index 0000000..7f7a73b Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/binascii.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/cmath.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/cmath.cpython-35m-darwin.so new file mode 100755 index 0000000..cbb1dfb Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/cmath.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/fcntl.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/fcntl.cpython-35m-darwin.so new file mode 100755 index 0000000..e2bd21b Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/fcntl.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/grp.cpython-35m-darwin.so 
b/Darwin/lib/python3.5/lib-dynload/grp.cpython-35m-darwin.so new file mode 100755 index 0000000..4bb005f Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/grp.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/math.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/math.cpython-35m-darwin.so new file mode 100755 index 0000000..a662139 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/math.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/mmap.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/mmap.cpython-35m-darwin.so new file mode 100755 index 0000000..0306638 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/mmap.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/nis.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/nis.cpython-35m-darwin.so new file mode 100755 index 0000000..dd52bd0 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/nis.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/parser.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/parser.cpython-35m-darwin.so new file mode 100755 index 0000000..63594eb Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/parser.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/pyexpat.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/pyexpat.cpython-35m-darwin.so new file mode 100755 index 0000000..75b950a Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/pyexpat.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/readline.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/readline.cpython-35m-darwin.so new file mode 100755 index 0000000..a7e25f1 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/readline.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/resource.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/resource.cpython-35m-darwin.so new file mode 100755 index 0000000..7ddbcd0 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/resource.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/select.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/select.cpython-35m-darwin.so new file mode 100755 index 0000000..6e86602 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/select.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/syslog.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/syslog.cpython-35m-darwin.so new file mode 100755 index 0000000..1b81d5e Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/syslog.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/termios.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/termios.cpython-35m-darwin.so new file mode 100755 index 0000000..3c20e9c Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/termios.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/unicodedata.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/unicodedata.cpython-35m-darwin.so new file mode 100755 index 0000000..970bf81 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/unicodedata.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/xxlimited.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/xxlimited.cpython-35m-darwin.so new file mode 100755 index 0000000..d39876b Binary files /dev/null and 
b/Darwin/lib/python3.5/lib-dynload/xxlimited.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/lib-dynload/zlib.cpython-35m-darwin.so b/Darwin/lib/python3.5/lib-dynload/zlib.cpython-35m-darwin.so new file mode 100755 index 0000000..77dcff4 Binary files /dev/null and b/Darwin/lib/python3.5/lib-dynload/zlib.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/lib2to3/Grammar.txt b/Darwin/lib/python3.5/lib2to3/Grammar.txt similarity index 96% rename from Darwin/lib/python3.4/lib2to3/Grammar.txt rename to Darwin/lib/python3.5/lib2to3/Grammar.txt index e667bcd..c954669 100644 --- a/Darwin/lib/python3.4/lib2to3/Grammar.txt +++ b/Darwin/lib/python3.5/lib2to3/Grammar.txt @@ -33,7 +33,8 @@ eval_input: testlist NEWLINE* ENDMARKER decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE decorators: decorator+ -decorated: decorators (classdef | funcdef) +decorated: decorators (classdef | funcdef | async_funcdef) +async_funcdef: ASYNC funcdef funcdef: 'def' NAME parameters ['->' test] ':' suite parameters: '(' [typedargslist] ')' typedargslist: ((tfpdef ['=' test] ',')* @@ -82,7 +83,8 @@ global_stmt: ('global' | 'nonlocal') NAME (',' NAME)* exec_stmt: 'exec' expr ['in' test [',' test]] assert_stmt: 'assert' test [',' test] -compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated +compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | classdef | decorated | async_stmt +async_stmt: ASYNC (funcdef | with_stmt | for_stmt) if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] while_stmt: 'while' test ':' suite ['else' ':' suite] for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] @@ -121,7 +123,7 @@ shift_expr: arith_expr (('<<'|'>>') arith_expr)* arith_expr: term (('+'|'-') term)* term: factor (('*'|'@'|'/'|'%'|'//') factor)* factor: ('+'|'-'|'~') factor | power -power: atom trailer* ['**' factor] +power: [AWAIT] atom trailer* ['**' factor] atom: ('(' [yield_expr|testlist_gexp] ')' | '[' [listmaker] ']' | '{' [dictsetmaker] '}' | @@ -142,7 +144,7 @@ dictsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) | classdef: 'class' NAME ['(' [arglist] ')'] ':' suite arglist: (argument ',')* (argument [','] - |'*' test (',' argument)* [',' '**' test] + |'*' test (',' argument)* [',' '**' test] |'**' test) argument: test [comp_for] | test '=' test # Really [keyword '='] test diff --git a/Darwin/lib/python3.5/lib2to3/Grammar3.5.0.final.0.pickle b/Darwin/lib/python3.5/lib2to3/Grammar3.5.0.final.0.pickle new file mode 100644 index 0000000..d9095a6 Binary files /dev/null and b/Darwin/lib/python3.5/lib2to3/Grammar3.5.0.final.0.pickle differ diff --git a/Darwin/lib/python3.4/lib2to3/PatternGrammar.txt b/Darwin/lib/python3.5/lib2to3/PatternGrammar.txt similarity index 100% rename from Darwin/lib/python3.4/lib2to3/PatternGrammar.txt rename to Darwin/lib/python3.5/lib2to3/PatternGrammar.txt diff --git a/Darwin/lib/python3.5/lib2to3/PatternGrammar3.5.0.final.0.pickle b/Darwin/lib/python3.5/lib2to3/PatternGrammar3.5.0.final.0.pickle new file mode 100644 index 0000000..423e684 Binary files /dev/null and b/Darwin/lib/python3.5/lib2to3/PatternGrammar3.5.0.final.0.pickle differ diff --git a/Darwin/lib/python3.4/lib2to3/__init__.py b/Darwin/lib/python3.5/lib2to3/__init__.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/__init__.py rename to Darwin/lib/python3.5/lib2to3/__init__.py diff --git a/Darwin/lib/python3.4/lib2to3/__main__.py 
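The Grammar.txt hunks above add async_funcdef, async_stmt and an optional AWAIT prefix on power, together with the regenerated 3.5.0 grammar pickles, so lib2to3 can parse the async/await syntax introduced by PEP 492. A minimal sketch of feeding such source through the updated grammar, assuming a Python (3.5 through 3.12) that still ships lib2to3:

from lib2to3 import pygram, pytree
from lib2to3.pgen2 import driver

d = driver.Driver(pygram.python_grammar_no_print_statement, convert=pytree.convert)
tree = d.parse_string("async def ping(ws):\n"
                      "    await ws.send('pong')\n")
print(str(tree), end='')   # round-trips the source instead of raising ParseError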
b/Darwin/lib/python3.5/lib2to3/__main__.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/__main__.py rename to Darwin/lib/python3.5/lib2to3/__main__.py diff --git a/Darwin/lib/python3.4/lib2to3/btm_matcher.py b/Darwin/lib/python3.5/lib2to3/btm_matcher.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/btm_matcher.py rename to Darwin/lib/python3.5/lib2to3/btm_matcher.py diff --git a/Darwin/lib/python3.4/lib2to3/btm_utils.py b/Darwin/lib/python3.5/lib2to3/btm_utils.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/btm_utils.py rename to Darwin/lib/python3.5/lib2to3/btm_utils.py diff --git a/Darwin/lib/python3.4/lib2to3/fixer_base.py b/Darwin/lib/python3.5/lib2to3/fixer_base.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixer_base.py rename to Darwin/lib/python3.5/lib2to3/fixer_base.py diff --git a/Darwin/lib/python3.4/lib2to3/fixer_util.py b/Darwin/lib/python3.5/lib2to3/fixer_util.py similarity index 98% rename from Darwin/lib/python3.4/lib2to3/fixer_util.py rename to Darwin/lib/python3.5/lib2to3/fixer_util.py index 6e259c5..44502bf 100644 --- a/Darwin/lib/python3.4/lib2to3/fixer_util.py +++ b/Darwin/lib/python3.5/lib2to3/fixer_util.py @@ -187,8 +187,8 @@ def parenthesize(node): return Node(syms.atom, [LParen(), node, RParen()]) -consuming_calls = set(["sorted", "list", "set", "any", "all", "tuple", "sum", - "min", "max", "enumerate"]) +consuming_calls = {"sorted", "list", "set", "any", "all", "tuple", "sum", + "min", "max", "enumerate"} def attr_chain(obj, attr): """Follow an attribute chain. @@ -359,7 +359,7 @@ def touch_import(package, name, node): root.insert_child(insert_pos, Node(syms.simple_stmt, children)) -_def_syms = set([syms.classdef, syms.funcdef]) +_def_syms = {syms.classdef, syms.funcdef} def find_binding(name, node, package=None): """ Returns the node which binds variable name, otherwise None. 
If optional argument package is supplied, only imports will @@ -402,7 +402,7 @@ def find_binding(name, node, package=None): return ret return None -_block_syms = set([syms.funcdef, syms.classdef, syms.trailer]) +_block_syms = {syms.funcdef, syms.classdef, syms.trailer} def _find(name, node): nodes = [node] while nodes: diff --git a/Darwin/lib/python3.4/lib2to3/fixes/__init__.py b/Darwin/lib/python3.5/lib2to3/fixes/__init__.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/__init__.py rename to Darwin/lib/python3.5/lib2to3/fixes/__init__.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_apply.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_apply.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_apply.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_apply.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_asserts.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_asserts.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_asserts.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_asserts.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_basestring.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_basestring.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_basestring.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_basestring.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_buffer.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_buffer.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_buffer.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_buffer.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_callable.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_callable.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_callable.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_callable.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_dict.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_dict.py similarity index 98% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_dict.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_dict.py index 4cc3717..963f952 100644 --- a/Darwin/lib/python3.4/lib2to3/fixes/fix_dict.py +++ b/Darwin/lib/python3.5/lib2to3/fixes/fix_dict.py @@ -36,7 +36,7 @@ from ..fixer_util import Name, Call, LParen, RParen, ArgList, Dot from .. 
import fixer_util -iter_exempt = fixer_util.consuming_calls | set(["iter"]) +iter_exempt = fixer_util.consuming_calls | {"iter"} class FixDict(fixer_base.BaseFix): diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_except.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_except.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_except.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_except.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_exec.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_exec.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_exec.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_exec.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_execfile.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_execfile.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_execfile.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_execfile.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_exitfunc.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_exitfunc.py similarity index 96% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_exitfunc.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_exitfunc.py index 9afc2fa..2e47887 100644 --- a/Darwin/lib/python3.4/lib2to3/fixes/fix_exitfunc.py +++ b/Darwin/lib/python3.5/lib2to3/fixes/fix_exitfunc.py @@ -35,7 +35,7 @@ class FixExitfunc(fixer_base.BaseFix): self.sys_import = None def transform(self, node, results): - # First, find a the sys import. We'll just hope it's global scope. + # First, find the sys import. We'll just hope it's global scope. if "sys_import" in results: if self.sys_import is None: self.sys_import = results["sys_import"] diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_filter.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_filter.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_filter.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_filter.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_funcattrs.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_funcattrs.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_funcattrs.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_funcattrs.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_future.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_future.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_future.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_future.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_getcwdu.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_getcwdu.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_getcwdu.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_getcwdu.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_has_key.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_has_key.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_has_key.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_has_key.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_idioms.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_idioms.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_idioms.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_idioms.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_import.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_import.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_import.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_import.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_imports.py 
b/Darwin/lib/python3.5/lib2to3/fixes/fix_imports.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_imports.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_imports.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_imports2.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_imports2.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_imports2.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_imports2.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_input.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_input.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_input.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_input.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_intern.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_intern.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_intern.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_intern.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_isinstance.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_isinstance.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_isinstance.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_isinstance.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_itertools.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_itertools.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_itertools.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_itertools.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_itertools_imports.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_itertools_imports.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_itertools_imports.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_itertools_imports.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_long.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_long.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_long.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_long.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_map.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_map.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_map.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_map.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_metaclass.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_metaclass.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_metaclass.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_metaclass.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_methodattrs.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_methodattrs.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_methodattrs.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_methodattrs.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_ne.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_ne.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_ne.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_ne.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_next.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_next.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_next.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_next.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_nonzero.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_nonzero.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_nonzero.py rename to 
Darwin/lib/python3.5/lib2to3/fixes/fix_nonzero.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_numliterals.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_numliterals.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_numliterals.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_numliterals.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_operator.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_operator.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_operator.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_operator.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_paren.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_paren.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_paren.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_paren.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_print.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_print.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_print.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_print.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_raise.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_raise.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_raise.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_raise.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_raw_input.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_raw_input.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_raw_input.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_raw_input.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_reduce.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_reduce.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_reduce.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_reduce.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_reload.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_reload.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_reload.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_reload.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_renames.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_renames.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_renames.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_renames.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_repr.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_repr.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_repr.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_repr.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_set_literal.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_set_literal.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_set_literal.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_set_literal.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_standarderror.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_standarderror.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_standarderror.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_standarderror.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_sys_exc.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_sys_exc.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_sys_exc.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_sys_exc.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_throw.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_throw.py 
similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_throw.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_throw.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_tuple_params.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_tuple_params.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_tuple_params.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_tuple_params.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_types.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_types.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_types.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_types.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_unicode.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_unicode.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_unicode.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_unicode.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_urllib.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_urllib.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_urllib.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_urllib.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_ws_comma.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_ws_comma.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_ws_comma.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_ws_comma.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_xrange.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_xrange.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_xrange.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_xrange.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_xreadlines.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_xreadlines.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_xreadlines.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_xreadlines.py diff --git a/Darwin/lib/python3.4/lib2to3/fixes/fix_zip.py b/Darwin/lib/python3.5/lib2to3/fixes/fix_zip.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/fixes/fix_zip.py rename to Darwin/lib/python3.5/lib2to3/fixes/fix_zip.py diff --git a/Darwin/lib/python3.4/lib2to3/main.py b/Darwin/lib/python3.5/lib2to3/main.py similarity index 99% rename from Darwin/lib/python3.4/lib2to3/main.py rename to Darwin/lib/python3.5/lib2to3/main.py index 93bae90..1a1df01 100644 --- a/Darwin/lib/python3.4/lib2to3/main.py +++ b/Darwin/lib/python3.5/lib2to3/main.py @@ -2,7 +2,7 @@ Main program for 2to3. 
""" -from __future__ import with_statement +from __future__ import with_statement, print_function import sys import os diff --git a/Darwin/lib/python3.4/lib2to3/patcomp.py b/Darwin/lib/python3.5/lib2to3/patcomp.py similarity index 99% rename from Darwin/lib/python3.4/lib2to3/patcomp.py rename to Darwin/lib/python3.5/lib2to3/patcomp.py index 0a259e9..2012ec4 100644 --- a/Darwin/lib/python3.4/lib2to3/patcomp.py +++ b/Darwin/lib/python3.5/lib2to3/patcomp.py @@ -32,7 +32,7 @@ class PatternSyntaxError(Exception): def tokenize_wrapper(input): """Tokenizes a string suppressing significant whitespace.""" - skip = set((token.NEWLINE, token.INDENT, token.DEDENT)) + skip = {token.NEWLINE, token.INDENT, token.DEDENT} tokens = tokenize.generate_tokens(io.StringIO(input).readline) for quintuple in tokens: type, value, start, end, line_text = quintuple diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/__init__.py b/Darwin/lib/python3.5/lib2to3/pgen2/__init__.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/__init__.py rename to Darwin/lib/python3.5/lib2to3/pgen2/__init__.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/conv.py b/Darwin/lib/python3.5/lib2to3/pgen2/conv.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/conv.py rename to Darwin/lib/python3.5/lib2to3/pgen2/conv.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/driver.py b/Darwin/lib/python3.5/lib2to3/pgen2/driver.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/driver.py rename to Darwin/lib/python3.5/lib2to3/pgen2/driver.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/grammar.py b/Darwin/lib/python3.5/lib2to3/pgen2/grammar.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/grammar.py rename to Darwin/lib/python3.5/lib2to3/pgen2/grammar.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/literals.py b/Darwin/lib/python3.5/lib2to3/pgen2/literals.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/literals.py rename to Darwin/lib/python3.5/lib2to3/pgen2/literals.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/parse.py b/Darwin/lib/python3.5/lib2to3/pgen2/parse.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/parse.py rename to Darwin/lib/python3.5/lib2to3/pgen2/parse.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/pgen.py b/Darwin/lib/python3.5/lib2to3/pgen2/pgen.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pgen2/pgen.py rename to Darwin/lib/python3.5/lib2to3/pgen2/pgen.py diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/token.py b/Darwin/lib/python3.5/lib2to3/pgen2/token.py similarity index 95% rename from Darwin/lib/python3.4/lib2to3/pgen2/token.py rename to Darwin/lib/python3.5/lib2to3/pgen2/token.py index 7599396..1a67955 100755 --- a/Darwin/lib/python3.4/lib2to3/pgen2/token.py +++ b/Darwin/lib/python3.5/lib2to3/pgen2/token.py @@ -62,8 +62,10 @@ OP = 52 COMMENT = 53 NL = 54 RARROW = 55 -ERRORTOKEN = 56 -N_TOKENS = 57 +AWAIT = 56 +ASYNC = 57 +ERRORTOKEN = 58 +N_TOKENS = 59 NT_OFFSET = 256 #--end constants-- diff --git a/Darwin/lib/python3.4/lib2to3/pgen2/tokenize.py b/Darwin/lib/python3.5/lib2to3/pgen2/tokenize.py similarity index 88% rename from Darwin/lib/python3.4/lib2to3/pgen2/tokenize.py rename to Darwin/lib/python3.5/lib2to3/pgen2/tokenize.py index 3dd1ee9..1ff1c61 100644 --- a/Darwin/lib/python3.4/lib2to3/pgen2/tokenize.py +++ b/Darwin/lib/python3.5/lib2to3/pgen2/tokenize.py @@ -220,7 +220,7 @@ class Untokenizer: for tok in iterable: toknum, tokval = tok[:2] - if toknum in 
(NAME, NUMBER): + if toknum in (NAME, NUMBER, ASYNC, AWAIT): tokval += ' ' if toknum == INDENT: @@ -366,6 +366,12 @@ def generate_tokens(readline): contline = None indents = [0] + # 'stashed' and 'async_*' are used for async/await parsing + stashed = None + async_def = False + async_def_indent = 0 + async_def_nl = False + while 1: # loop over lines in stream try: line = readline() @@ -406,6 +412,10 @@ def generate_tokens(readline): pos = pos + 1 if pos == max: break + if stashed: + yield stashed + stashed = None + if line[pos] in '#\r\n': # skip comments or blank lines if line[pos] == '#': comment_token = line[pos:].rstrip('\r\n') @@ -428,8 +438,19 @@ def generate_tokens(readline): "unindent does not match any outer indentation level", ("", lnum, pos, line)) indents = indents[:-1] + + if async_def and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + yield (DEDENT, '', (lnum, pos), (lnum, pos), line) + if async_def and async_def_nl and async_def_indent >= indents[-1]: + async_def = False + async_def_nl = False + async_def_indent = 0 + else: # continued statement if not line: raise TokenError("EOF in multi-line statement", (lnum, 0)) @@ -449,9 +470,18 @@ def generate_tokens(readline): newline = NEWLINE if parenlev > 0: newline = NL + elif async_def: + async_def_nl = True + if stashed: + yield stashed + stashed = None yield (newline, token, spos, epos, line) + elif initial == '#': assert not token.endswith("\n") + if stashed: + yield stashed + stashed = None yield (COMMENT, token, spos, epos, line) elif token in triple_quoted: endprog = endprogs[token] @@ -459,6 +489,9 @@ def generate_tokens(readline): if endmatch: # all on one line pos = endmatch.end(0) token = line[start:pos] + if stashed: + yield stashed + stashed = None yield (STRING, token, spos, (lnum, pos), line) else: strstart = (lnum, start) # multiple lines @@ -476,22 +509,63 @@ def generate_tokens(readline): contline = line break else: # ordinary string + if stashed: + yield stashed + stashed = None yield (STRING, token, spos, epos, line) elif initial in namechars: # ordinary name - yield (NAME, token, spos, epos, line) + if token in ('async', 'await'): + if async_def: + yield (ASYNC if token == 'async' else AWAIT, + token, spos, epos, line) + continue + + tok = (NAME, token, spos, epos, line) + if token == 'async' and not stashed: + stashed = tok + continue + + if token == 'def': + if (stashed + and stashed[0] == NAME + and stashed[1] == 'async'): + + async_def = True + async_def_indent = indents[-1] + + yield (ASYNC, stashed[1], + stashed[2], stashed[3], + stashed[4]) + stashed = None + + if stashed: + yield stashed + stashed = None + + yield tok elif initial == '\\': # continued stmt # This yield is new; needed for better idempotency: + if stashed: + yield stashed + stashed = None yield (NL, token, spos, (lnum, pos), line) continued = 1 else: if initial in '([{': parenlev = parenlev + 1 elif initial in ')]}': parenlev = parenlev - 1 + if stashed: + yield stashed + stashed = None yield (OP, token, spos, epos, line) else: yield (ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos+1), line) pos = pos + 1 + if stashed: + yield stashed + stashed = None + for indent in indents[1:]: # pop remaining indent levels yield (DEDENT, '', (lnum, 0), (lnum, 0), '') yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') diff --git a/Darwin/lib/python3.4/lib2to3/pygram.py b/Darwin/lib/python3.5/lib2to3/pygram.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/pygram.py rename to 
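The tokenize.py hunks above implement that grammar at the token level: a NAME token 'async' is stashed until the tokenizer sees what follows, and only inside an async def body (tracked through async_def, async_def_indent and async_def_nl) are 'async' and 'await' re-emitted as the new ASYNC and AWAIT token types added to token.py; everywhere else they remain ordinary NAMEs, so pre-3.5 code tokenizes exactly as before. A rough sketch of observing this, again assuming a Python that still ships lib2to3:

import io
from lib2to3.pgen2 import token, tokenize

SRC = ("async def ping(ws):\n"
       "    await ws.send('pong')\n"
       "async = 3\n")                  # outside an async def, 'async' is just a name

for tok_type, value, _, _, _ in tokenize.generate_tokens(io.StringIO(SRC).readline):
    if value in ('async', 'await'):
        print(token.tok_name[tok_type], value)
# ASYNC async    (prefix of the async def)
# AWAIT await    (inside the async def body)
# NAME  async    (module level, left untouched)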
Darwin/lib/python3.5/lib2to3/pygram.py diff --git a/Darwin/lib/python3.4/lib2to3/pytree.py b/Darwin/lib/python3.5/lib2to3/pytree.py similarity index 99% rename from Darwin/lib/python3.4/lib2to3/pytree.py rename to Darwin/lib/python3.5/lib2to3/pytree.py index c4a1be3..ad3592c 100644 --- a/Darwin/lib/python3.4/lib2to3/pytree.py +++ b/Darwin/lib/python3.5/lib2to3/pytree.py @@ -64,16 +64,6 @@ class Base(object): __hash__ = None # For Py3 compatibility. - def __ne__(self, other): - """ - Compare two nodes for inequality. - - This calls the method _eq(). - """ - if self.__class__ is not other.__class__: - return NotImplemented - return not self._eq(other) - def _eq(self, other): """ Compare two nodes for equality. diff --git a/Darwin/lib/python3.4/lib2to3/refactor.py b/Darwin/lib/python3.5/lib2to3/refactor.py similarity index 99% rename from Darwin/lib/python3.4/lib2to3/refactor.py rename to Darwin/lib/python3.5/lib2to3/refactor.py index 8100317..adf9996 100644 --- a/Darwin/lib/python3.4/lib2to3/refactor.py +++ b/Darwin/lib/python3.5/lib2to3/refactor.py @@ -57,7 +57,7 @@ def _get_head_types(pat): # Always return leafs if pat.type is None: raise _EveryNode - return set([pat.type]) + return {pat.type} if isinstance(pat, pytree.NegatedPattern): if pat.content: @@ -133,7 +133,7 @@ def _detect_future_features(source): def advance(): tok = next(gen) return tok[0], tok[1] - ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT)) + ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT}) features = set() try: while True: @@ -255,7 +255,7 @@ class RefactoringTool(object): fixer = fix_class(self.options, self.fixer_log) if fixer.explicit and self.explicit is not True and \ fix_mod_path not in self.explicit: - self.log_message("Skipping implicit fixer: %s", fix_name) + self.log_message("Skipping optional fixer: %s", fix_name) continue self.log_debug("Adding transformation: %s", fix_name) diff --git a/Darwin/lib/python3.5/lib2to3/tests/__init__.py b/Darwin/lib/python3.5/lib2to3/tests/__init__.py new file mode 100644 index 0000000..c5166fc --- /dev/null +++ b/Darwin/lib/python3.5/lib2to3/tests/__init__.py @@ -0,0 +1,9 @@ +# Author: Collin Winter + +import os +import unittest + +from test.support import load_package_tests + +def load_tests(*args): + return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Darwin/lib/python3.5/lib2to3/tests/__main__.py b/Darwin/lib/python3.5/lib2to3/tests/__main__.py new file mode 100644 index 0000000..40a23a2 --- /dev/null +++ b/Darwin/lib/python3.5/lib2to3/tests/__main__.py @@ -0,0 +1,4 @@ +from . 
import load_tests +import unittest + +unittest.main() diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/README b/Darwin/lib/python3.5/lib2to3/tests/data/README similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/README rename to Darwin/lib/python3.5/lib2to3/tests/data/README diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/bom.py b/Darwin/lib/python3.5/lib2to3/tests/data/bom.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/bom.py rename to Darwin/lib/python3.5/lib2to3/tests/data/bom.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/crlf.py b/Darwin/lib/python3.5/lib2to3/tests/data/crlf.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/crlf.py rename to Darwin/lib/python3.5/lib2to3/tests/data/crlf.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/different_encoding.py b/Darwin/lib/python3.5/lib2to3/tests/data/different_encoding.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/different_encoding.py rename to Darwin/lib/python3.5/lib2to3/tests/data/different_encoding.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/false_encoding.py b/Darwin/lib/python3.5/lib2to3/tests/data/false_encoding.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/false_encoding.py rename to Darwin/lib/python3.5/lib2to3/tests/data/false_encoding.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/bad_order.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/bad_order.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/bad_order.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/bad_order.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/__init__.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/__init__.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/__init__.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/__init__.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_explicit.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_explicit.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_explicit.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_explicit.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_first.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_first.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_first.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_first.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_last.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_last.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_last.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_last.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_parrot.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_parrot.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_parrot.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_parrot.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_preorder.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_preorder.py similarity index 100% rename from 
Darwin/lib/python3.4/lib2to3/tests/data/fixers/myfixes/fix_preorder.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/myfixes/fix_preorder.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/no_fixer_cls.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/no_fixer_cls.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/no_fixer_cls.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/no_fixer_cls.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/fixers/parrot_example.py b/Darwin/lib/python3.5/lib2to3/tests/data/fixers/parrot_example.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/fixers/parrot_example.py rename to Darwin/lib/python3.5/lib2to3/tests/data/fixers/parrot_example.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/infinite_recursion.py b/Darwin/lib/python3.5/lib2to3/tests/data/infinite_recursion.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/infinite_recursion.py rename to Darwin/lib/python3.5/lib2to3/tests/data/infinite_recursion.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/py2_test_grammar.py b/Darwin/lib/python3.5/lib2to3/tests/data/py2_test_grammar.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/py2_test_grammar.py rename to Darwin/lib/python3.5/lib2to3/tests/data/py2_test_grammar.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/data/py3_test_grammar.py b/Darwin/lib/python3.5/lib2to3/tests/data/py3_test_grammar.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/data/py3_test_grammar.py rename to Darwin/lib/python3.5/lib2to3/tests/data/py3_test_grammar.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/pytree_idempotency.py b/Darwin/lib/python3.5/lib2to3/tests/pytree_idempotency.py similarity index 98% rename from Darwin/lib/python3.4/lib2to3/tests/pytree_idempotency.py rename to Darwin/lib/python3.5/lib2to3/tests/pytree_idempotency.py index 731c403..c6359bf 100755 --- a/Darwin/lib/python3.4/lib2to3/tests/pytree_idempotency.py +++ b/Darwin/lib/python3.5/lib2to3/tests/pytree_idempotency.py @@ -4,6 +4,8 @@ """Main program for testing the infrastructure.""" +from __future__ import print_function + __author__ = "Guido van Rossum " # Support imports (need to be imported first) diff --git a/Darwin/lib/python3.4/lib2to3/tests/support.py b/Darwin/lib/python3.5/lib2to3/tests/support.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/support.py rename to Darwin/lib/python3.5/lib2to3/tests/support.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_all_fixers.py b/Darwin/lib/python3.5/lib2to3/tests/test_all_fixers.py similarity index 82% rename from Darwin/lib/python3.4/lib2to3/tests/test_all_fixers.py rename to Darwin/lib/python3.5/lib2to3/tests/test_all_fixers.py index f64b3d9..15079fe 100644 --- a/Darwin/lib/python3.4/lib2to3/tests/test_all_fixers.py +++ b/Darwin/lib/python3.5/lib2to3/tests/test_all_fixers.py @@ -7,12 +7,14 @@ running time. # Python imports import unittest +import test.support # Local imports from lib2to3 import refactor from . 
import support +@test.support.requires_resource('cpu') class Test_all(support.TestCase): def setUp(self): @@ -21,3 +23,6 @@ class Test_all(support.TestCase): def test_all_project_files(self): for filepath in support.all_project_files(): self.refactor.refactor_file(filepath) + +if __name__ == '__main__': + unittest.main() diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_fixers.py b/Darwin/lib/python3.5/lib2to3/tests/test_fixers.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/test_fixers.py rename to Darwin/lib/python3.5/lib2to3/tests/test_fixers.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_main.py b/Darwin/lib/python3.5/lib2to3/tests/test_main.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/test_main.py rename to Darwin/lib/python3.5/lib2to3/tests/test_main.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_parser.py b/Darwin/lib/python3.5/lib2to3/tests/test_parser.py similarity index 82% rename from Darwin/lib/python3.4/lib2to3/tests/test_parser.py rename to Darwin/lib/python3.5/lib2to3/tests/test_parser.py index b64469c..b533c01 100644 --- a/Darwin/lib/python3.4/lib2to3/tests/test_parser.py +++ b/Darwin/lib/python3.5/lib2to3/tests/test_parser.py @@ -55,12 +55,64 @@ class TestMatrixMultiplication(GrammarTest): class TestYieldFrom(GrammarTest): - def test_matrix_multiplication_operator(self): + def test_yield_from(self): self.validate("yield from x") self.validate("(yield from x) + y") self.invalid_syntax("yield from") +class TestAsyncAwait(GrammarTest): + def test_await_expr(self): + self.validate("""async def foo(): + await x + """) + + self.validate("""async def foo(): + + def foo(): pass + + def foo(): pass + + await x + """) + + self.validate("""async def foo(): return await a""") + + self.validate("""def foo(): + def foo(): pass + async def foo(): await x + """) + + self.invalid_syntax("await x") + self.invalid_syntax("""def foo(): + await x""") + + self.invalid_syntax("""def foo(): + def foo(): pass + async def foo(): pass + await x + """) + + def test_async_var(self): + self.validate("""async = 1""") + self.validate("""await = 1""") + self.validate("""def async(): pass""") + + def test_async_with(self): + self.validate("""async def foo(): + async for a in b: pass""") + + self.invalid_syntax("""def foo(): + async for a in b: pass""") + + def test_async_for(self): + self.validate("""async def foo(): + async with a: pass""") + + self.invalid_syntax("""def foo(): + async with a: pass""") + + class TestRaiseChanges(GrammarTest): def test_2x_style_1(self): self.validate("raise") @@ -90,7 +142,7 @@ class TestRaiseChanges(GrammarTest): self.invalid_syntax("raise E from") -# Adaptated from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef +# Adapted from Python 3's Lib/test/test_grammar.py:GrammarTests.testFuncdef class TestFunctionAnnotations(GrammarTest): def test_1(self): self.validate("""def f(x) -> list: pass""") diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_pytree.py b/Darwin/lib/python3.5/lib2to3/tests/test_pytree.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/test_pytree.py rename to Darwin/lib/python3.5/lib2to3/tests/test_pytree.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_refactor.py b/Darwin/lib/python3.5/lib2to3/tests/test_refactor.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/test_refactor.py rename to Darwin/lib/python3.5/lib2to3/tests/test_refactor.py diff --git a/Darwin/lib/python3.4/lib2to3/tests/test_util.py 
b/Darwin/lib/python3.5/lib2to3/tests/test_util.py similarity index 100% rename from Darwin/lib/python3.4/lib2to3/tests/test_util.py rename to Darwin/lib/python3.5/lib2to3/tests/test_util.py diff --git a/Darwin/lib/python3.4/linecache.py b/Darwin/lib/python3.5/linecache.py similarity index 51% rename from Darwin/lib/python3.4/linecache.py rename to Darwin/lib/python3.5/linecache.py index 02a9eb5..3afcce1 100644 --- a/Darwin/lib/python3.4/linecache.py +++ b/Darwin/lib/python3.5/linecache.py @@ -1,10 +1,11 @@ -"""Cache lines from files. +"""Cache lines from Python source files. This is intended to read lines from modules imported -- hence if a filename is not found, it will look down the module search path for a file by that name. """ +import functools import sys import os import tokenize @@ -21,7 +22,9 @@ def getline(filename, lineno, module_globals=None): # The cache -cache = {} # The cache +# The cache. Maps filenames to either a thunk which will provide source code, +# or a tuple (size, mtime, lines, fullname) once loaded. +cache = {} def clearcache(): @@ -32,13 +35,19 @@ def clearcache(): def getlines(filename, module_globals=None): - """Get the lines for a file from the cache. + """Get the lines for a Python source file from the cache. Update the cache if it doesn't contain an entry for this file already.""" if filename in cache: - return cache[filename][2] - else: + entry = cache[filename] + if len(entry) != 1: + return cache[filename][2] + + try: return updatecache(filename, module_globals) + except MemoryError: + clearcache() + return [] def checkcache(filename=None): @@ -54,7 +63,11 @@ def checkcache(filename=None): return for filename in filenames: - size, mtime, lines, fullname = cache[filename] + entry = cache[filename] + if len(entry) == 1: + # lazy cache entry, leave it lazy. + continue + size, mtime, lines, fullname = entry if mtime is None: continue # no-op for files loaded via a __loader__ try: @@ -72,7 +85,8 @@ def updatecache(filename, module_globals=None): and return an empty list.""" if filename in cache: - del cache[filename] + if len(cache[filename]) != 1: + del cache[filename] if not filename or (filename.startswith('<') and filename.endswith('>')): return [] @@ -82,27 +96,23 @@ def updatecache(filename, module_globals=None): except OSError: basename = filename - # Try for a __loader__, if available - if module_globals and '__loader__' in module_globals: - name = module_globals.get('__name__') - loader = module_globals['__loader__'] - get_source = getattr(loader, 'get_source', None) - - if name and get_source: - try: - data = get_source(name) - except (ImportError, OSError): - pass - else: - if data is None: - # No luck, the PEP302 loader cannot find the source - # for this module. - return [] - cache[filename] = ( - len(data), None, - [line+'\n' for line in data.splitlines()], fullname - ) - return cache[filename][2] + # Realise a lazy loader based lookup if there is one + # otherwise try to lookup right now. + if lazycache(filename, module_globals): + try: + data = cache[filename][0]() + except (ImportError, OSError): + pass + else: + if data is None: + # No luck, the PEP302 loader cannot find the source + # for this module. + return [] + cache[filename] = ( + len(data), None, + [line+'\n' for line in data.splitlines()], fullname + ) + return cache[filename][2] # Try looking through the module search path, which is only useful # when handling a relative filename. 
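The updatecache()/getlines() changes above introduce lazy cache entries: a one-element tuple holding a thunk that is only called when the source is actually needed (the lazycache() helper added in the next hunk is what seeds such entries). A minimal sketch of how this might be exercised; the choice of the string module is arbitrary, any pure-Python stdlib module with a get_source-capable loader would do:

import linecache
import string

# Seed a lazy entry: only a (get_lines,) thunk is stored for now.
registered = linecache.lazycache(string.__file__, vars(string))
print(registered)                     # True if a loader with get_source() was found

# checkcache() now skips one-element (lazy) entries instead of unpacking them.
linecache.checkcache(string.__file__)

# getline() realises the thunk on first use and caches the usual
# full entry from then on.
print(linecache.getline(string.__file__, 1))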
@@ -132,3 +142,36 @@ def updatecache(filename, module_globals=None): size, mtime = stat.st_size, stat.st_mtime cache[filename] = size, mtime, lines, fullname return lines + + +def lazycache(filename, module_globals): + """Seed the cache for filename with module_globals. + + The module loader will be asked for the source only when getlines is + called, not immediately. + + If there is an entry in the cache already, it is not altered. + + :return: True if a lazy load is registered in the cache, + otherwise False. To register such a load a module loader with a + get_source method must be found, the filename must be a cachable + filename, and the filename must not be already cached. + """ + if filename in cache: + if len(cache[filename]) == 1: + return True + else: + return False + if not filename or (filename.startswith('<') and filename.endswith('>')): + return False + # Try for a __loader__, if available + if module_globals and '__loader__' in module_globals: + name = module_globals.get('__name__') + loader = module_globals['__loader__'] + get_source = getattr(loader, 'get_source', None) + + if name and get_source: + get_lines = functools.partial(get_source, name) + cache[filename] = (get_lines,) + return True + return False diff --git a/Darwin/lib/python3.4/locale.py b/Darwin/lib/python3.5/locale.py similarity index 90% rename from Darwin/lib/python3.4/locale.py rename to Darwin/lib/python3.5/locale.py index fa24829..ceaa6d8 100644 --- a/Darwin/lib/python3.4/locale.py +++ b/Darwin/lib/python3.5/locale.py @@ -301,8 +301,8 @@ def str(val): """Convert float to integer, taking the locale into account.""" return format("%.12g", val) -def atof(string, func=float): - "Parses a string as a float according to the locale settings." +def delocalize(string): + "Parses a string as a normalized number according to the locale settings." #First, get rid of the grouping ts = localeconv()['thousands_sep'] if ts: @@ -311,12 +311,15 @@ def atof(string, func=float): dd = localeconv()['decimal_point'] if dd: string = string.replace(dd, '.') - #finally, parse the string - return func(string) + return string -def atoi(str): +def atof(string, func=float): + "Parses a string as a float according to the locale settings." + return func(delocalize(string)) + +def atoi(string): "Converts a string to an integer according to the locale settings." - return atof(str, int) + return int(delocalize(string)) def _test(): setlocale(LC_ALL, "") @@ -696,7 +699,9 @@ locale_encoding_alias = { 'euc_kr': 'eucKR', 'utf_8': 'UTF-8', 'koi8_r': 'KOI8-R', + 'koi8_t': 'KOI8-T', 'koi8_u': 'KOI8-U', + 'kz1048': 'RK1048', 'cp1251': 'CP1251', 'cp1255': 'CP1255', 'cp1256': 'CP1256', @@ -817,16 +822,23 @@ for k, v in sorted(locale_encoding_alias.items()): # updated 'sr_cs' -> 'sr_RS.UTF-8' to 'sr_CS.UTF-8' # updated 'sr_cs.utf8@latn' -> 'sr_RS.UTF-8@latin' to 'sr_CS.UTF-8@latin' # updated 'sr_cs@latn' -> 'sr_RS.UTF-8@latin' to 'sr_CS.UTF-8@latin' +# +# SS 2014-10-01: +# Updated alias mapping with glibc 2.19 supported locales. 
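The locale.py refactoring above splits locale-aware parsing into a reusable delocalize() step: atof() and atoi() now both normalise the string first and only then convert it. A small illustrative sketch (the de_DE.UTF-8 locale is an assumption and must be installed for setlocale() to succeed); the expanded glibc 2.19 alias table follows below:

import locale

# Assumes a German locale is available; otherwise setlocale() raises locale.Error.
locale.setlocale(locale.LC_NUMERIC, 'de_DE.UTF-8')

print(locale.delocalize('1.234,56'))   # '1234.56' - grouping and decimal point normalised
print(locale.atof('1.234,56'))         # 1234.56
print(locale.atoi('1.234'))            # 1234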
locale_alias = { 'a3': 'az_AZ.KOI8-C', 'a3_az': 'az_AZ.KOI8-C', 'a3_az.koic': 'az_AZ.KOI8-C', + 'aa_dj': 'aa_DJ.ISO8859-1', + 'aa_er': 'aa_ER.UTF-8', + 'aa_et': 'aa_ET.UTF-8', 'af': 'af_ZA.ISO8859-1', 'af_za': 'af_ZA.ISO8859-1', 'am': 'am_ET.UTF-8', 'am_et': 'am_ET.UTF-8', 'american': 'en_US.ISO8859-1', + 'an_es': 'an_ES.ISO8859-15', 'ar': 'ar_AA.ISO8859-6', 'ar_aa': 'ar_AA.ISO8859-6', 'ar_ae': 'ar_AE.ISO8859-6', @@ -850,44 +862,60 @@ locale_alias = { 'arabic': 'ar_AA.ISO8859-6', 'as': 'as_IN.UTF-8', 'as_in': 'as_IN.UTF-8', + 'ast_es': 'ast_ES.ISO8859-15', + 'ayc_pe': 'ayc_PE.UTF-8', 'az': 'az_AZ.ISO8859-9E', 'az_az': 'az_AZ.ISO8859-9E', 'az_az.iso88599e': 'az_AZ.ISO8859-9E', 'be': 'be_BY.CP1251', 'be@latin': 'be_BY.UTF-8@latin', + 'be_bg.utf8': 'bg_BG.UTF-8', 'be_by': 'be_BY.CP1251', 'be_by@latin': 'be_BY.UTF-8@latin', + 'bem_zm': 'bem_ZM.UTF-8', + 'ber_dz': 'ber_DZ.UTF-8', + 'ber_ma': 'ber_MA.UTF-8', 'bg': 'bg_BG.CP1251', 'bg_bg': 'bg_BG.CP1251', + 'bho_in': 'bho_IN.UTF-8', + 'bn_bd': 'bn_BD.UTF-8', 'bn_in': 'bn_IN.UTF-8', + 'bo_cn': 'bo_CN.UTF-8', 'bo_in': 'bo_IN.UTF-8', 'bokmal': 'nb_NO.ISO8859-1', 'bokm\xe5l': 'nb_NO.ISO8859-1', 'br': 'br_FR.ISO8859-1', 'br_fr': 'br_FR.ISO8859-1', + 'brx_in': 'brx_IN.UTF-8', 'bs': 'bs_BA.ISO8859-2', 'bs_ba': 'bs_BA.ISO8859-2', 'bulgarian': 'bg_BG.CP1251', + 'byn_er': 'byn_ER.UTF-8', 'c': 'C', 'c-french': 'fr_CA.ISO8859-1', 'c.ascii': 'C', 'c.en': 'C', 'c.iso88591': 'en_US.ISO8859-1', + 'c.utf8': 'en_US.UTF-8', 'c_c': 'C', 'c_c.c': 'C', 'ca': 'ca_ES.ISO8859-1', 'ca_ad': 'ca_AD.ISO8859-1', 'ca_es': 'ca_ES.ISO8859-1', + 'ca_es@valencia': 'ca_ES.ISO8859-15@valencia', 'ca_fr': 'ca_FR.ISO8859-1', 'ca_it': 'ca_IT.ISO8859-1', 'catalan': 'ca_ES.ISO8859-1', 'cextend': 'en_US.ISO8859-1', 'chinese-s': 'zh_CN.eucCN', 'chinese-t': 'zh_TW.eucTW', + 'crh_ua': 'crh_UA.UTF-8', 'croatian': 'hr_HR.ISO8859-2', 'cs': 'cs_CZ.ISO8859-2', 'cs_cs': 'cs_CZ.ISO8859-2', 'cs_cz': 'cs_CZ.ISO8859-2', + 'csb_pl': 'csb_PL.UTF-8', + 'cv_ru': 'cv_RU.UTF-8', 'cy': 'cy_GB.ISO8859-1', 'cy_gb': 'cy_GB.ISO8859-1', 'cz': 'cs_CZ.ISO8859-2', @@ -902,25 +930,34 @@ locale_alias = { 'de_be': 'de_BE.ISO8859-1', 'de_ch': 'de_CH.ISO8859-1', 'de_de': 'de_DE.ISO8859-1', + 'de_li.utf8': 'de_LI.UTF-8', 'de_lu': 'de_LU.ISO8859-1', 'deutsch': 'de_DE.ISO8859-1', + 'doi_in': 'doi_IN.UTF-8', 'dutch': 'nl_NL.ISO8859-1', 'dutch.iso88591': 'nl_BE.ISO8859-1', + 'dv_mv': 'dv_MV.UTF-8', + 'dz_bt': 'dz_BT.UTF-8', 'ee': 'ee_EE.ISO8859-4', 'ee_ee': 'ee_EE.ISO8859-4', 'eesti': 'et_EE.ISO8859-1', 'el': 'el_GR.ISO8859-7', + 'el_cy': 'el_CY.ISO8859-7', 'el_gr': 'el_GR.ISO8859-7', 'el_gr@euro': 'el_GR.ISO8859-15', 'en': 'en_US.ISO8859-1', + 'en_ag': 'en_AG.UTF-8', 'en_au': 'en_AU.ISO8859-1', 'en_be': 'en_BE.ISO8859-1', 'en_bw': 'en_BW.ISO8859-1', 'en_ca': 'en_CA.ISO8859-1', + 'en_dk': 'en_DK.ISO8859-1', + 'en_dl.utf8': 'en_DL.UTF-8', 'en_gb': 'en_GB.ISO8859-1', 'en_hk': 'en_HK.ISO8859-1', 'en_ie': 'en_IE.ISO8859-1', 'en_in': 'en_IN.ISO8859-1', + 'en_ng': 'en_NG.UTF-8', 'en_nz': 'en_NZ.ISO8859-1', 'en_ph': 'en_PH.ISO8859-1', 'en_sg': 'en_SG.ISO8859-1', @@ -928,7 +965,9 @@ locale_alias = { 'en_us': 'en_US.ISO8859-1', 'en_us@euro@euro': 'en_US.ISO8859-15', 'en_za': 'en_ZA.ISO8859-1', + 'en_zm': 'en_ZM.UTF-8', 'en_zw': 'en_ZW.ISO8859-1', + 'en_zw.utf8': 'en_ZS.UTF-8', 'eng_gb': 'en_GB.ISO8859-1', 'english': 'en_EN.ISO8859-1', 'english_uk': 'en_GB.ISO8859-1', @@ -936,7 +975,9 @@ locale_alias = { 'english_united-states.437': 'C', 'english_us': 'en_US.ISO8859-1', 'eo': 'eo_XX.ISO8859-3', + 'eo.utf8': 'eo.UTF-8', 'eo_eo': 
'eo_EO.ISO8859-3', + 'eo_us.utf8': 'eo_US.UTF-8', 'eo_xx': 'eo_XX.ISO8859-3', 'es': 'es_ES.ISO8859-1', 'es_ar': 'es_AR.ISO8859-1', @@ -944,6 +985,7 @@ locale_alias = { 'es_cl': 'es_CL.ISO8859-1', 'es_co': 'es_CO.ISO8859-1', 'es_cr': 'es_CR.ISO8859-1', + 'es_cu': 'es_CU.UTF-8', 'es_do': 'es_DO.ISO8859-1', 'es_ec': 'es_EC.ISO8859-1', 'es_es': 'es_ES.ISO8859-1', @@ -964,11 +1006,14 @@ locale_alias = { 'et_ee': 'et_EE.ISO8859-15', 'eu': 'eu_ES.ISO8859-1', 'eu_es': 'eu_ES.ISO8859-1', + 'eu_fr': 'eu_FR.ISO8859-1', 'fa': 'fa_IR.UTF-8', 'fa_ir': 'fa_IR.UTF-8', 'fa_ir.isiri3342': 'fa_IR.ISIRI-3342', + 'ff_sn': 'ff_SN.UTF-8', 'fi': 'fi_FI.ISO8859-15', 'fi_fi': 'fi_FI.ISO8859-15', + 'fil_ph': 'fil_PH.UTF-8', 'finnish': 'fi_FI.ISO8859-1', 'fo': 'fo_FO.ISO8859-1', 'fo_fo': 'fo_FO.ISO8859-1', @@ -983,6 +1028,9 @@ locale_alias = { 'french': 'fr_FR.ISO8859-1', 'french.iso88591': 'fr_CH.ISO8859-1', 'french_france': 'fr_FR.ISO8859-1', + 'fur_it': 'fur_IT.UTF-8', + 'fy_de': 'fy_DE.UTF-8', + 'fy_nl': 'fy_NL.UTF-8', 'ga': 'ga_IE.ISO8859-1', 'ga_ie': 'ga_IE.ISO8859-1', 'galego': 'gl_ES.ISO8859-1', @@ -993,12 +1041,15 @@ locale_alias = { 'german': 'de_DE.ISO8859-1', 'german.iso88591': 'de_CH.ISO8859-1', 'german_germany': 'de_DE.ISO8859-1', + 'gez_er': 'gez_ER.UTF-8', + 'gez_et': 'gez_ET.UTF-8', 'gl': 'gl_ES.ISO8859-1', 'gl_es': 'gl_ES.ISO8859-1', 'greek': 'el_GR.ISO8859-7', 'gu_in': 'gu_IN.UTF-8', 'gv': 'gv_GB.ISO8859-1', 'gv_gb': 'gv_GB.ISO8859-1', + 'ha_ng': 'ha_NG.UTF-8', 'he': 'he_IL.ISO8859-8', 'he_il': 'he_IL.ISO8859-8', 'hebrew': 'he_IL.ISO8859-8', @@ -1010,12 +1061,20 @@ locale_alias = { 'hr': 'hr_HR.ISO8859-2', 'hr_hr': 'hr_HR.ISO8859-2', 'hrvatski': 'hr_HR.ISO8859-2', + 'hsb_de': 'hsb_DE.ISO8859-2', + 'ht_ht': 'ht_HT.UTF-8', 'hu': 'hu_HU.ISO8859-2', 'hu_hu': 'hu_HU.ISO8859-2', 'hungarian': 'hu_HU.ISO8859-2', + 'hy_am': 'hy_AM.UTF-8', + 'hy_am.armscii8': 'hy_AM.ARMSCII_8', + 'ia': 'ia.UTF-8', + 'ia_fr': 'ia_FR.UTF-8', 'icelandic': 'is_IS.ISO8859-1', 'id': 'id_ID.ISO8859-1', 'id_id': 'id_ID.ISO8859-1', + 'ig_ng': 'ig_NG.UTF-8', + 'ik_ca': 'ik_CA.UTF-8', 'in': 'id_ID.ISO8859-1', 'in_id': 'id_ID.ISO8859-1', 'is': 'is_IS.ISO8859-1', @@ -1035,6 +1094,7 @@ locale_alias = { 'iu_ca.nunacom8': 'iu_CA.NUNACOM-8', 'iw': 'he_IL.ISO8859-8', 'iw_il': 'he_IL.ISO8859-8', + 'iw_il.utf8': 'iw_IL.UTF-8', 'ja': 'ja_JP.eucJP', 'ja_jp': 'ja_JP.eucJP', 'ja_jp.euc': 'ja_JP.eucJP', @@ -1050,6 +1110,7 @@ locale_alias = { 'ka_ge.georgianacademy': 'ka_GE.GEORGIAN-ACADEMY', 'ka_ge.georgianps': 'ka_GE.GEORGIAN-PS', 'ka_ge.georgianrs': 'ka_GE.GEORGIAN-ACADEMY', + 'kk_kz': 'kk_KZ.RK1048', 'kl': 'kl_GL.ISO8859-1', 'kl_gl': 'kl_GL.ISO8859-1', 'km_kh': 'km_KH.UTF-8', @@ -1058,14 +1119,22 @@ locale_alias = { 'ko': 'ko_KR.eucKR', 'ko_kr': 'ko_KR.eucKR', 'ko_kr.euc': 'ko_KR.eucKR', + 'kok_in': 'kok_IN.UTF-8', 'korean': 'ko_KR.eucKR', 'korean.euc': 'ko_KR.eucKR', 'ks': 'ks_IN.UTF-8', 'ks_in': 'ks_IN.UTF-8', + 'ks_in@devanagari.utf8': 'ks_IN.UTF-8@devanagari', + 'ku_tr': 'ku_TR.ISO8859-9', 'kw': 'kw_GB.ISO8859-1', 'kw_gb': 'kw_GB.ISO8859-1', 'ky': 'ky_KG.UTF-8', 'ky_kg': 'ky_KG.UTF-8', + 'lb_lu': 'lb_LU.UTF-8', + 'lg_ug': 'lg_UG.ISO8859-10', + 'li_be': 'li_BE.UTF-8', + 'li_nl': 'li_NL.UTF-8', + 'lij_it': 'lij_IT.UTF-8', 'lithuanian': 'lt_LT.ISO8859-13', 'lo': 'lo_LA.MULELAO-1', 'lo_la': 'lo_LA.MULELAO-1', @@ -1076,24 +1145,37 @@ locale_alias = { 'lt_lt': 'lt_LT.ISO8859-13', 'lv': 'lv_LV.ISO8859-13', 'lv_lv': 'lv_LV.ISO8859-13', + 'mag_in': 'mag_IN.UTF-8', 'mai': 'mai_IN.UTF-8', 'mai_in': 'mai_IN.UTF-8', + 'mg_mg': 'mg_MG.ISO8859-15', + 
'mhr_ru': 'mhr_RU.UTF-8', 'mi': 'mi_NZ.ISO8859-1', 'mi_nz': 'mi_NZ.ISO8859-1', 'mk': 'mk_MK.ISO8859-5', 'mk_mk': 'mk_MK.ISO8859-5', 'ml': 'ml_IN.UTF-8', 'ml_in': 'ml_IN.UTF-8', + 'mn_mn': 'mn_MN.UTF-8', + 'mni_in': 'mni_IN.UTF-8', 'mr': 'mr_IN.UTF-8', 'mr_in': 'mr_IN.UTF-8', 'ms': 'ms_MY.ISO8859-1', 'ms_my': 'ms_MY.ISO8859-1', 'mt': 'mt_MT.ISO8859-3', 'mt_mt': 'mt_MT.ISO8859-3', + 'my_mm': 'my_MM.UTF-8', + 'nan_tw@latin': 'nan_TW.UTF-8@latin', 'nb': 'nb_NO.ISO8859-1', 'nb_no': 'nb_NO.ISO8859-1', + 'nds_de': 'nds_DE.UTF-8', + 'nds_nl': 'nds_NL.UTF-8', 'ne_np': 'ne_NP.UTF-8', + 'nhn_mx': 'nhn_MX.UTF-8', + 'niu_nu': 'niu_NU.UTF-8', + 'niu_nz': 'niu_NZ.UTF-8', 'nl': 'nl_NL.ISO8859-1', + 'nl_aw': 'nl_AW.UTF-8', 'nl_be': 'nl_BE.ISO8859-1', 'nl_nl': 'nl_NL.ISO8859-1', 'nn': 'nn_NO.ISO8859-1', @@ -1113,10 +1195,15 @@ locale_alias = { 'nynorsk': 'nn_NO.ISO8859-1', 'oc': 'oc_FR.ISO8859-1', 'oc_fr': 'oc_FR.ISO8859-1', + 'om_et': 'om_ET.UTF-8', + 'om_ke': 'om_KE.ISO8859-1', 'or': 'or_IN.UTF-8', 'or_in': 'or_IN.UTF-8', + 'os_ru': 'os_RU.UTF-8', 'pa': 'pa_IN.UTF-8', 'pa_in': 'pa_IN.UTF-8', + 'pa_pk': 'pa_PK.UTF-8', + 'pap_an': 'pap_AN.UTF-8', 'pd': 'pd_US.ISO8859-1', 'pd_de': 'pd_DE.ISO8859-1', 'pd_us': 'pd_US.ISO8859-1', @@ -1131,6 +1218,7 @@ locale_alias = { 'posix-utf2': 'C', 'pp': 'pp_AN.ISO8859-1', 'pp_an': 'pp_AN.ISO8859-1', + 'ps_af': 'ps_AF.UTF-8', 'pt': 'pt_PT.ISO8859-1', 'pt_br': 'pt_BR.ISO8859-1', 'pt_pt': 'pt_PT.ISO8859-1', @@ -1144,8 +1232,13 @@ locale_alias = { 'russian': 'ru_RU.ISO8859-5', 'rw': 'rw_RW.ISO8859-1', 'rw_rw': 'rw_RW.ISO8859-1', + 'sa_in': 'sa_IN.UTF-8', + 'sat_in': 'sat_IN.UTF-8', + 'sc_it': 'sc_IT.UTF-8', 'sd': 'sd_IN.UTF-8', 'sd_in': 'sd_IN.UTF-8', + 'sd_in@devanagari.utf8': 'sd_IN.UTF-8@devanagari', + 'sd_pk': 'sd_PK.UTF-8', 'se_no': 'se_NO.UTF-8', 'serbocroatian': 'sr_RS.UTF-8@latin', 'sh': 'sr_RS.UTF-8@latin', @@ -1154,8 +1247,10 @@ locale_alias = { 'sh_hr.iso88592': 'hr_HR.ISO8859-2', 'sh_sp': 'sr_CS.ISO8859-2', 'sh_yu': 'sr_RS.UTF-8@latin', + 'shs_ca': 'shs_CA.UTF-8', 'si': 'si_LK.UTF-8', 'si_lk': 'si_LK.UTF-8', + 'sid_et': 'sid_ET.UTF-8', 'sinhala': 'si_LK.UTF-8', 'sk': 'sk_SK.ISO8859-2', 'sk_sk': 'sk_SK.ISO8859-2', @@ -1165,12 +1260,17 @@ locale_alias = { 'slovak': 'sk_SK.ISO8859-2', 'slovene': 'sl_SI.ISO8859-2', 'slovenian': 'sl_SI.ISO8859-2', + 'so_dj': 'so_DJ.ISO8859-1', + 'so_et': 'so_ET.UTF-8', + 'so_ke': 'so_KE.ISO8859-1', + 'so_so': 'so_SO.ISO8859-1', 'sp': 'sr_CS.ISO8859-5', 'sp_yu': 'sr_CS.ISO8859-5', 'spanish': 'es_ES.ISO8859-1', 'spanish_spain': 'es_ES.ISO8859-1', 'sq': 'sq_AL.ISO8859-2', 'sq_al': 'sq_AL.ISO8859-2', + 'sq_mk': 'sq_MK.UTF-8', 'sr': 'sr_RS.UTF-8', 'sr@cyrillic': 'sr_RS.UTF-8', 'sr@latn': 'sr_CS.UTF-8@latin', @@ -1187,6 +1287,8 @@ locale_alias = { 'sr_yu.iso88595': 'sr_CS.ISO8859-5', 'sr_yu.iso88595@cyrillic': 'sr_CS.ISO8859-5', 'sr_yu.microsoftcp1251@cyrillic': 'sr_CS.CP1251', + 'sr_yu.utf8': 'sr_RS.UTF-8', + 'sr_yu.utf8@cyrillic': 'sr_RS.UTF-8', 'sr_yu@cyrillic': 'sr_RS.UTF-8', 'ss': 'ss_ZA.ISO8859-1', 'ss_za': 'ss_ZA.ISO8859-1', @@ -1195,12 +1297,17 @@ locale_alias = { 'sv': 'sv_SE.ISO8859-1', 'sv_fi': 'sv_FI.ISO8859-1', 'sv_se': 'sv_SE.ISO8859-1', + 'sw_ke': 'sw_KE.UTF-8', + 'sw_tz': 'sw_TZ.UTF-8', 'swedish': 'sv_SE.ISO8859-1', + 'szl_pl': 'szl_PL.UTF-8', 'ta': 'ta_IN.TSCII-0', 'ta_in': 'ta_IN.TSCII-0', 'ta_in.tscii': 'ta_IN.TSCII-0', 'ta_in.tscii0': 'ta_IN.TSCII-0', + 'ta_lk': 'ta_LK.UTF-8', 'te': 'te_IN.UTF-8', + 'te_in': 'te_IN.UTF-8', 'tg': 'tg_TJ.KOI8-C', 'tg_tj': 'tg_TJ.KOI8-C', 'th': 'th_TH.ISO8859-11', @@ -1208,23 +1315,31 @@ 
locale_alias = { 'th_th.tactis': 'th_TH.TIS620', 'th_th.tis620': 'th_TH.TIS620', 'thai': 'th_TH.ISO8859-11', + 'ti_er': 'ti_ER.UTF-8', + 'ti_et': 'ti_ET.UTF-8', + 'tig_er': 'tig_ER.UTF-8', + 'tk_tm': 'tk_TM.UTF-8', 'tl': 'tl_PH.ISO8859-1', 'tl_ph': 'tl_PH.ISO8859-1', 'tn': 'tn_ZA.ISO8859-15', 'tn_za': 'tn_ZA.ISO8859-15', 'tr': 'tr_TR.ISO8859-9', + 'tr_cy': 'tr_CY.ISO8859-9', 'tr_tr': 'tr_TR.ISO8859-9', 'ts': 'ts_ZA.ISO8859-1', 'ts_za': 'ts_ZA.ISO8859-1', 'tt': 'tt_RU.TATAR-CYR', 'tt_ru': 'tt_RU.TATAR-CYR', 'tt_ru.tatarcyr': 'tt_RU.TATAR-CYR', + 'tt_ru@iqtelif': 'tt_RU.UTF-8@iqtelif', 'turkish': 'tr_TR.ISO8859-9', + 'ug_cn': 'ug_CN.UTF-8', 'uk': 'uk_UA.KOI8-U', 'uk_ua': 'uk_UA.KOI8-U', 'univ': 'en_US.utf', 'universal': 'en_US.utf', 'universal.utf8@ucs4': 'en_US.UTF-8', + 'unm_us': 'unm_US.UTF-8', 'ur': 'ur_PK.CP1256', 'ur_in': 'ur_IN.UTF-8', 'ur_pk': 'ur_PK.CP1256', @@ -1241,16 +1356,23 @@ locale_alias = { 'vi_vn.viscii111': 'vi_VN.VISCII', 'wa': 'wa_BE.ISO8859-1', 'wa_be': 'wa_BE.ISO8859-1', + 'wae_ch': 'wae_CH.UTF-8', + 'wal_et': 'wal_ET.UTF-8', + 'wo_sn': 'wo_SN.UTF-8', 'xh': 'xh_ZA.ISO8859-1', 'xh_za': 'xh_ZA.ISO8859-1', 'yi': 'yi_US.CP1255', 'yi_us': 'yi_US.CP1255', + 'yo_ng': 'yo_NG.UTF-8', + 'yue_hk': 'yue_HK.UTF-8', 'zh': 'zh_CN.eucCN', 'zh_cn': 'zh_CN.gb2312', 'zh_cn.big5': 'zh_TW.big5', 'zh_cn.euc': 'zh_CN.eucCN', 'zh_hk': 'zh_HK.big5hkscs', 'zh_hk.big5hk': 'zh_HK.big5hkscs', + 'zh_sg': 'zh_SG.GB2312', + 'zh_sg.gbk': 'zh_SG.GBK', 'zh_tw': 'zh_TW.big5', 'zh_tw.euc': 'zh_TW.eucTW', 'zh_tw.euctw': 'zh_TW.eucTW', diff --git a/Darwin/lib/python3.4/logging/__init__.py b/Darwin/lib/python3.5/logging/__init__.py similarity index 96% rename from Darwin/lib/python3.4/logging/__init__.py rename to Darwin/lib/python3.5/logging/__init__.py index dcfd9f6..104b0be 100644 --- a/Darwin/lib/python3.4/logging/__init__.py +++ b/Darwin/lib/python3.5/logging/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2001-2014 by Vinay Sajip. All Rights Reserved. +# Copyright 2001-2015 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, @@ -18,7 +18,7 @@ Logging package for Python. Based on PEP 282 and comments thereto in comp.lang.python. -Copyright (C) 2001-2014 Vinay Sajip. All Rights Reserved. +Copyright (C) 2001-2015 Vinay Sajip. All Rights Reserved. To use, simply 'import logging' and log away! """ @@ -51,34 +51,6 @@ __date__ = "07 February 2010" # Miscellaneous module data #--------------------------------------------------------------------------- -# -# _srcfile is used when walking the stack to check when we've got the first -# caller stack frame. -# -if hasattr(sys, 'frozen'): #support for py2exe - _srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:]) -else: - _srcfile = __file__ -_srcfile = os.path.normcase(_srcfile) - - -if hasattr(sys, '_getframe'): - currentframe = lambda: sys._getframe(3) -else: #pragma: no cover - def currentframe(): - """Return the frame object for the caller's stack frame.""" - try: - raise Exception - except Exception: - return sys.exc_info()[2].tb_frame.f_back - -# _srcfile is only used in conjunction with sys._getframe(). -# To provide compatibility with older versions of Python, set _srcfile -# to None if _getframe() is not available; this value will prevent -# findCaller() from being called. 
-#if not hasattr(sys, "_getframe"): -# _srcfile = None - # #_startTime is used as the base when calculating the relative time of events # @@ -157,7 +129,8 @@ def getLevelName(level): Otherwise, the string "Level %s" % level is returned. """ - return _levelToName.get(level, ("Level %s" % level)) + # See Issue #22386 for the reason for this convoluted expression + return _levelToName.get(level, _nameToLevel.get(level, ("Level %s" % level))) def addLevelName(level, levelName): """ @@ -172,6 +145,40 @@ def addLevelName(level, levelName): finally: _releaseLock() +if hasattr(sys, '_getframe'): + currentframe = lambda: sys._getframe(3) +else: #pragma: no cover + def currentframe(): + """Return the frame object for the caller's stack frame.""" + try: + raise Exception + except Exception: + return sys.exc_info()[2].tb_frame.f_back + +# +# _srcfile is used when walking the stack to check when we've got the first +# caller stack frame, by skipping frames whose filename is that of this +# module's source. It therefore should contain the filename of this module's +# source file. +# +# Ordinarily we would use __file__ for this, but frozen modules don't always +# have __file__ set, for some reason (see Issue #21736). Thus, we get the +# filename from a handy code object from a function defined in this module. +# (There's no particular reason for picking addLevelName.) +# + +_srcfile = os.path.normcase(addLevelName.__code__.co_filename) + +# _srcfile is only used in conjunction with sys._getframe(). +# To provide compatibility with older versions of Python, set _srcfile +# to None if _getframe() is not available; this value will prevent +# findCaller() from being called. You can also do this if you want to avoid +# the overhead of fetching caller information, even when _getframe() is +# available. +#if not hasattr(sys, '_getframe'): +# _srcfile = None + + def _checkLevel(level): if isinstance(level, int): rv = level @@ -309,6 +316,8 @@ class LogRecord(object): return ''%(self.name, self.levelno, self.pathname, self.lineno, self.msg) + __repr__ = __str__ + def getMessage(self): """ Return the message for this LogRecord. @@ -1004,14 +1013,19 @@ class FileHandler(StreamHandler): """ self.acquire() try: - if self.stream: - self.flush() - if hasattr(self.stream, "close"): - self.stream.close() - self.stream = None - # Issue #19523: call unconditionally to - # prevent a handler leak when delay is set - StreamHandler.close(self) + try: + if self.stream: + try: + self.flush() + finally: + stream = self.stream + self.stream = None + if hasattr(stream, "close"): + stream.close() + finally: + # Issue #19523: call unconditionally to + # prevent a handler leak when delay is set + StreamHandler.close(self) finally: self.release() @@ -1079,7 +1093,6 @@ class PlaceHolder(object): # # Determine which class to use when instantiating loggers. # -_loggerClass = None def setLoggerClass(klass): """ @@ -1098,7 +1111,6 @@ def getLoggerClass(): """ Return the class to be used when instantiating a logger. """ - return _loggerClass class Manager(object): @@ -1295,12 +1307,11 @@ class Logger(Filterer): if self.isEnabledFor(ERROR): self._log(ERROR, msg, args, **kwargs) - def exception(self, msg, *args, **kwargs): + def exception(self, msg, *args, exc_info=True, **kwargs): """ Convenience method for logging an ERROR with exception information. 
""" - kwargs['exc_info'] = True - self.error(msg, *args, **kwargs) + self.error(msg, *args, exc_info=exc_info, **kwargs) def critical(self, msg, *args, **kwargs): """ @@ -1395,7 +1406,9 @@ class Logger(Filterer): else: # pragma: no cover fn, lno, func = "(unknown file)", 0, "(unknown function)" if exc_info: - if not isinstance(exc_info, tuple): + if isinstance(exc_info, BaseException): + exc_info = (type(exc_info), exc_info, exc_info.__traceback__) + elif not isinstance(exc_info, tuple): exc_info = sys.exc_info() record = self.makeRecord(self.name, level, fn, lno, msg, args, exc_info, func, extra, sinfo) @@ -1605,12 +1618,11 @@ class LoggerAdapter(object): """ self.log(ERROR, msg, *args, **kwargs) - def exception(self, msg, *args, **kwargs): + def exception(self, msg, *args, exc_info=True, **kwargs): """ Delegate an exception call to the underlying logger. """ - kwargs["exc_info"] = True - self.log(ERROR, msg, *args, **kwargs) + self.log(ERROR, msg, *args, exc_info=exc_info, **kwargs) def critical(self, msg, *args, **kwargs): """ @@ -1716,7 +1728,7 @@ def basicConfig(**kwargs): _acquireLock() try: if len(root.handlers) == 0: - handlers = kwargs.get("handlers") + handlers = kwargs.pop("handlers", None) if handlers is None: if "stream" in kwargs and "filename" in kwargs: raise ValueError("'stream' and 'filename' should not be " @@ -1726,28 +1738,31 @@ def basicConfig(**kwargs): raise ValueError("'stream' or 'filename' should not be " "specified together with 'handlers'") if handlers is None: - filename = kwargs.get("filename") + filename = kwargs.pop("filename", None) + mode = kwargs.pop("filemode", 'a') if filename: - mode = kwargs.get("filemode", 'a') h = FileHandler(filename, mode) else: - stream = kwargs.get("stream") + stream = kwargs.pop("stream", None) h = StreamHandler(stream) handlers = [h] - dfs = kwargs.get("datefmt", None) - style = kwargs.get("style", '%') + dfs = kwargs.pop("datefmt", None) + style = kwargs.pop("style", '%') if style not in _STYLES: raise ValueError('Style must be one of: %s' % ','.join( _STYLES.keys())) - fs = kwargs.get("format", _STYLES[style][1]) + fs = kwargs.pop("format", _STYLES[style][1]) fmt = Formatter(fs, dfs, style) for h in handlers: if h.formatter is None: h.setFormatter(fmt) root.addHandler(h) - level = kwargs.get("level") + level = kwargs.pop("level", None) if level is not None: root.setLevel(level) + if kwargs: + keys = ', '.join(kwargs.keys()) + raise ValueError('Unrecognised argument(s): %s' % keys) finally: _releaseLock() @@ -1789,14 +1804,13 @@ def error(msg, *args, **kwargs): basicConfig() root.error(msg, *args, **kwargs) -def exception(msg, *args, **kwargs): +def exception(msg, *args, exc_info=True, **kwargs): """ Log a message with severity 'ERROR' on the root logger, with exception information. If the logger has no handlers, basicConfig() is called to add a console handler with a pre-defined format. 
""" - kwargs['exc_info'] = True - error(msg, *args, **kwargs) + error(msg, *args, exc_info=exc_info, **kwargs) def warning(msg, *args, **kwargs): """ diff --git a/Darwin/lib/python3.4/logging/config.py b/Darwin/lib/python3.5/logging/config.py similarity index 99% rename from Darwin/lib/python3.4/logging/config.py rename to Darwin/lib/python3.5/logging/config.py index 895fb26..8a99923 100644 --- a/Darwin/lib/python3.4/logging/config.py +++ b/Darwin/lib/python3.5/logging/config.py @@ -116,11 +116,12 @@ def _create_formatters(cp): sectname = "formatter_%s" % form fs = cp.get(sectname, "format", raw=True, fallback=None) dfs = cp.get(sectname, "datefmt", raw=True, fallback=None) + stl = cp.get(sectname, "style", raw=True, fallback='%') c = logging.Formatter class_name = cp[sectname].get("class") if class_name: c = _resolve(class_name) - f = c(fs, dfs) + f = c(fs, dfs, stl) formatters[form] = f return formatters @@ -660,7 +661,12 @@ class DictConfigurator(BaseConfigurator): fmt = config.get('format', None) dfmt = config.get('datefmt', None) style = config.get('style', '%') - result = logging.Formatter(fmt, dfmt, style) + cname = config.get('class', None) + if not cname: + c = logging.Formatter + else: + c = _resolve(cname) + result = c(fmt, dfmt, style) return result def configure_filter(self, config): diff --git a/Darwin/lib/python3.4/logging/handlers.py b/Darwin/lib/python3.5/logging/handlers.py similarity index 96% rename from Darwin/lib/python3.4/logging/handlers.py rename to Darwin/lib/python3.5/logging/handlers.py index f547d17..02a5fc1 100644 --- a/Darwin/lib/python3.4/logging/handlers.py +++ b/Darwin/lib/python3.5/logging/handlers.py @@ -1,4 +1,4 @@ -# Copyright 2001-2013 by Vinay Sajip. All Rights Reserved. +# Copyright 2001-2015 by Vinay Sajip. All Rights Reserved. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose and without fee is hereby granted, @@ -18,7 +18,7 @@ Additional handlers for the logging package for Python. The core package is based on PEP 282 and comments thereto in comp.lang.python. -Copyright (C) 2001-2013 Vinay Sajip. All Rights Reserved. +Copyright (C) 2001-2015 Vinay Sajip. All Rights Reserved. To use, simply 'import logging.handlers' and log away! """ @@ -463,6 +463,7 @@ class WatchedFileHandler(logging.FileHandler): # we have an open file handle, clean it up self.stream.flush() self.stream.close() + self.stream = None # See Issue #21742: _open () might fail. # open a new file handle and get new stat info from that fd self.stream = self._open() self._statstream() @@ -626,9 +627,10 @@ class SocketHandler(logging.Handler): """ self.acquire() try: - if self.sock: - self.sock.close() + sock = self.sock + if sock: self.sock = None + sock.close() logging.Handler.close(self) finally: self.release() @@ -878,21 +880,21 @@ class SysLogHandler(logging.Handler): The record is formatted, and then sent to the syslog server. If exception information is present, it is NOT sent to the server. """ - msg = self.format(record) - if self.ident: - msg = self.ident + msg - if self.append_nul: - msg += '\000' - - # We need to convert record level to lowercase, maybe this will - # change in the future. - prio = '<%d>' % self.encodePriority(self.facility, - self.mapPriority(record.levelname)) - prio = prio.encode('utf-8') - # Message is a string. 
Convert to bytes as required by RFC 5424 - msg = msg.encode('utf-8') - msg = prio + msg try: + msg = self.format(record) + if self.ident: + msg = self.ident + msg + if self.append_nul: + msg += '\000' + + # We need to convert record level to lowercase, maybe this will + # change in the future. + prio = '<%d>' % self.encodePriority(self.facility, + self.mapPriority(record.levelname)) + prio = prio.encode('utf-8') + # Message is a string. Convert to bytes as required by RFC 5424 + msg = msg.encode('utf-8') + msg = prio + msg if self.unixsocket: try: self.socket.send(msg) @@ -930,11 +932,11 @@ class SMTPHandler(logging.Handler): default is one second). """ logging.Handler.__init__(self) - if isinstance(mailhost, tuple): + if isinstance(mailhost, (list, tuple)): self.mailhost, self.mailport = mailhost else: self.mailhost, self.mailport = mailhost, None - if isinstance(credentials, tuple): + if isinstance(credentials, (list, tuple)): self.username, self.password = credentials else: self.username = None @@ -1088,7 +1090,8 @@ class HTTPHandler(logging.Handler): A class which sends records to a Web server, using either GET or POST semantics. """ - def __init__(self, host, url, method="GET", secure=False, credentials=None): + def __init__(self, host, url, method="GET", secure=False, credentials=None, + context=None): """ Initialize the instance with the host, the request URL, and the method ("GET" or "POST") @@ -1097,11 +1100,15 @@ class HTTPHandler(logging.Handler): method = method.upper() if method not in ["GET", "POST"]: raise ValueError("method must be GET or POST") + if not secure and context is not None: + raise ValueError("context parameter only makes sense " + "with secure=True") self.host = host self.url = url self.method = method self.secure = secure self.credentials = credentials + self.context = context def mapLogRecord(self, record): """ @@ -1121,7 +1128,7 @@ class HTTPHandler(logging.Handler): import http.client, urllib.parse host = self.host if self.secure: - h = http.client.HTTPSConnection(host) + h = http.client.HTTPSConnection(host, context=self.context) else: h = http.client.HTTPConnection(host) url = self.url @@ -1207,8 +1214,10 @@ class BufferingHandler(logging.Handler): This version just flushes and chains to the parent class' close(). """ - self.flush() - logging.Handler.close(self) + try: + self.flush() + finally: + logging.Handler.close(self) class MemoryHandler(BufferingHandler): """ @@ -1262,13 +1271,15 @@ class MemoryHandler(BufferingHandler): """ Flush, set the target to None and lose the buffer. """ - self.flush() - self.acquire() try: - self.target = None - BufferingHandler.close(self) + self.flush() finally: - self.release() + self.acquire() + try: + self.target = None + BufferingHandler.close(self) + finally: + self.release() class QueueHandler(logging.Handler): @@ -1344,7 +1355,7 @@ if threading: """ _sentinel = None - def __init__(self, queue, *handlers): + def __init__(self, queue, *handlers, respect_handler_level=False): """ Initialise an instance with the specified queue and handlers. 
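The new keyword-only respect_handler_level flag in the signature above (wired through to handle() in the next hunk) lets a QueueListener honour each handler's own level instead of forwarding every queued record. A minimal usage sketch:

import logging
import logging.handlers
import queue

q = queue.Queue()
console = logging.StreamHandler()
console.setLevel(logging.ERROR)          # only ERROR and above should reach it

listener = logging.handlers.QueueListener(q, console,
                                          respect_handler_level=True)
listener.start()

logger = logging.getLogger('demo')
logger.addHandler(logging.handlers.QueueHandler(q))
logger.warning('filtered out by the ERROR-level handler')
logger.error('delivered to the console handler')

listener.stop()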
@@ -1353,6 +1364,7 @@ if threading: self.handlers = handlers self._stop = threading.Event() self._thread = None + self.respect_handler_level = respect_handler_level def dequeue(self, block): """ @@ -1393,7 +1405,12 @@ if threading: """ record = self.prepare(record) for handler in self.handlers: - handler.handle(record) + if not self.respect_handler_level: + process = True + else: + process = record.levelno >= handler.level + if process: + handler.handle(record) def _monitor(self): """ diff --git a/Darwin/lib/python3.4/lzma.py b/Darwin/lib/python3.5/lzma.py similarity index 60% rename from Darwin/lib/python3.4/lzma.py rename to Darwin/lib/python3.5/lzma.py index f1d3958..89528b6 100644 --- a/Darwin/lib/python3.4/lzma.py +++ b/Darwin/lib/python3.5/lzma.py @@ -25,17 +25,16 @@ import builtins import io from _lzma import * from _lzma import _encode_filter_properties, _decode_filter_properties +import _compression _MODE_CLOSED = 0 _MODE_READ = 1 -_MODE_READ_EOF = 2 +# Value 2 no longer used _MODE_WRITE = 3 -_BUFFER_SIZE = 8192 - -class LZMAFile(io.BufferedIOBase): +class LZMAFile(_compression.BaseStream): """A file object providing transparent LZMA (de)compression. @@ -92,8 +91,6 @@ class LZMAFile(io.BufferedIOBase): self._fp = None self._closefp = False self._mode = _MODE_CLOSED - self._pos = 0 - self._size = -1 if mode in ("r", "rb"): if check != -1: @@ -105,19 +102,13 @@ class LZMAFile(io.BufferedIOBase): if format is None: format = FORMAT_AUTO mode_code = _MODE_READ - # Save the args to pass to the LZMADecompressor initializer. - # If the file contains multiple compressed streams, each - # stream will need a separate decompressor object. - self._init_args = {"format":format, "filters":filters} - self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = b"" - self._buffer_offset = 0 elif mode in ("w", "wb", "a", "ab", "x", "xb"): if format is None: format = FORMAT_XZ mode_code = _MODE_WRITE self._compressor = LZMACompressor(format=format, check=check, preset=preset, filters=filters) + self._pos = 0 else: raise ValueError("Invalid mode: {!r}".format(mode)) @@ -133,6 +124,11 @@ class LZMAFile(io.BufferedIOBase): else: raise TypeError("filename must be a str or bytes object, or a file") + if self._mode == _MODE_READ: + raw = _compression.DecompressReader(self._fp, LZMADecompressor, + trailing_error=LZMAError, format=format, filters=filters) + self._buffer = io.BufferedReader(raw) + def close(self): """Flush and close the file. @@ -142,9 +138,9 @@ class LZMAFile(io.BufferedIOBase): if self._mode == _MODE_CLOSED: return try: - if self._mode in (_MODE_READ, _MODE_READ_EOF): - self._decompressor = None - self._buffer = b"" + if self._mode == _MODE_READ: + self._buffer.close() + self._buffer = None elif self._mode == _MODE_WRITE: self._fp.write(self._compressor.flush()) self._compressor = None @@ -169,123 +165,18 @@ class LZMAFile(io.BufferedIOBase): def seekable(self): """Return whether the file supports seeking.""" - return self.readable() and self._fp.seekable() + return self.readable() and self._buffer.seekable() def readable(self): """Return whether the file was opened for reading.""" self._check_not_closed() - return self._mode in (_MODE_READ, _MODE_READ_EOF) + return self._mode == _MODE_READ def writable(self): """Return whether the file was opened for writing.""" self._check_not_closed() return self._mode == _MODE_WRITE - # Mode-checking helper functions. 
- - def _check_not_closed(self): - if self.closed: - raise ValueError("I/O operation on closed file") - - def _check_can_read(self): - if self._mode not in (_MODE_READ, _MODE_READ_EOF): - self._check_not_closed() - raise io.UnsupportedOperation("File not open for reading") - - def _check_can_write(self): - if self._mode != _MODE_WRITE: - self._check_not_closed() - raise io.UnsupportedOperation("File not open for writing") - - def _check_can_seek(self): - if self._mode not in (_MODE_READ, _MODE_READ_EOF): - self._check_not_closed() - raise io.UnsupportedOperation("Seeking is only supported " - "on files open for reading") - if not self._fp.seekable(): - raise io.UnsupportedOperation("The underlying file object " - "does not support seeking") - - # Fill the readahead buffer if it is empty. Returns False on EOF. - def _fill_buffer(self): - if self._mode == _MODE_READ_EOF: - return False - # Depending on the input data, our call to the decompressor may not - # return any data. In this case, try again after reading another block. - while self._buffer_offset == len(self._buffer): - rawblock = (self._decompressor.unused_data or - self._fp.read(_BUFFER_SIZE)) - - if not rawblock: - if self._decompressor.eof: - self._mode = _MODE_READ_EOF - self._size = self._pos - return False - else: - raise EOFError("Compressed file ended before the " - "end-of-stream marker was reached") - - if self._decompressor.eof: - # Continue to next stream. - self._decompressor = LZMADecompressor(**self._init_args) - try: - self._buffer = self._decompressor.decompress(rawblock) - except LZMAError: - # Trailing data isn't a valid compressed stream; ignore it. - self._mode = _MODE_READ_EOF - self._size = self._pos - return False - else: - self._buffer = self._decompressor.decompress(rawblock) - self._buffer_offset = 0 - return True - - # Read data until EOF. - # If return_data is false, consume the data without returning it. - def _read_all(self, return_data=True): - # The loop assumes that _buffer_offset is 0. Ensure that this is true. - self._buffer = self._buffer[self._buffer_offset:] - self._buffer_offset = 0 - - blocks = [] - while self._fill_buffer(): - if return_data: - blocks.append(self._buffer) - self._pos += len(self._buffer) - self._buffer = b"" - if return_data: - return b"".join(blocks) - - # Read a block of up to n bytes. - # If return_data is false, consume the data without returning it. - def _read_block(self, n, return_data=True): - # If we have enough data buffered, return immediately. - end = self._buffer_offset + n - if end <= len(self._buffer): - data = self._buffer[self._buffer_offset : end] - self._buffer_offset = end - self._pos += len(data) - return data if return_data else None - - # The loop assumes that _buffer_offset is 0. Ensure that this is true. - self._buffer = self._buffer[self._buffer_offset:] - self._buffer_offset = 0 - - blocks = [] - while n > 0 and self._fill_buffer(): - if n < len(self._buffer): - data = self._buffer[:n] - self._buffer_offset = n - else: - data = self._buffer - self._buffer = b"" - if return_data: - blocks.append(data) - self._pos += len(data) - n -= len(data) - if return_data: - return b"".join(blocks) - def peek(self, size=-1): """Return buffered data without advancing the file position. @@ -293,9 +184,9 @@ class LZMAFile(io.BufferedIOBase): The exact number of bytes returned is unspecified. 
""" self._check_can_read() - if not self._fill_buffer(): - return b"" - return self._buffer[self._buffer_offset:] + # Relies on the undocumented fact that BufferedReader.peek() always + # returns at least one byte (except at EOF) + return self._buffer.peek(size) def read(self, size=-1): """Read up to size uncompressed bytes from the file. @@ -304,38 +195,19 @@ class LZMAFile(io.BufferedIOBase): Returns b"" if the file is already at EOF. """ self._check_can_read() - if size == 0: - return b"" - elif size < 0: - return self._read_all() - else: - return self._read_block(size) + return self._buffer.read(size) def read1(self, size=-1): """Read up to size uncompressed bytes, while trying to avoid - making multiple reads from the underlying stream. + making multiple reads from the underlying stream. Reads up to a + buffer's worth of data if size is negative. Returns b"" if the file is at EOF. """ - # Usually, read1() calls _fp.read() at most once. However, sometimes - # this does not give enough data for the decompressor to make progress. - # In this case we make multiple reads, to avoid returning b"". self._check_can_read() - if (size == 0 or - # Only call _fill_buffer() if the buffer is actually empty. - # This gives a significant speedup if *size* is small. - (self._buffer_offset == len(self._buffer) and not self._fill_buffer())): - return b"" - if size > 0: - data = self._buffer[self._buffer_offset : - self._buffer_offset + size] - self._buffer_offset += len(data) - else: - data = self._buffer[self._buffer_offset:] - self._buffer = b"" - self._buffer_offset = 0 - self._pos += len(data) - return data + if size < 0: + size = io.DEFAULT_BUFFER_SIZE + return self._buffer.read1(size) def readline(self, size=-1): """Read a line of uncompressed bytes from the file. @@ -345,15 +217,7 @@ class LZMAFile(io.BufferedIOBase): case the line may be incomplete). Returns b'' if already at EOF. """ self._check_can_read() - # Shortcut for the common case - the whole line is in the buffer. - if size < 0: - end = self._buffer.find(b"\n", self._buffer_offset) + 1 - if end > 0: - line = self._buffer[self._buffer_offset : end] - self._buffer_offset = end - self._pos += len(line) - return line - return io.BufferedIOBase.readline(self, size) + return self._buffer.readline(size) def write(self, data): """Write a bytes object to the file. @@ -368,16 +232,7 @@ class LZMAFile(io.BufferedIOBase): self._pos += len(data) return len(data) - # Rewind the file to the beginning of the data stream. - def _rewind(self): - self._fp.seek(0, 0) - self._mode = _MODE_READ - self._pos = 0 - self._decompressor = LZMADecompressor(**self._init_args) - self._buffer = b"" - self._buffer_offset = 0 - - def seek(self, offset, whence=0): + def seek(self, offset, whence=io.SEEK_SET): """Change the file position. The new position is specified by offset, relative to the @@ -389,38 +244,17 @@ class LZMAFile(io.BufferedIOBase): Returns the new file position. - Note that seeking is emulated, sp depending on the parameters, + Note that seeking is emulated, so depending on the parameters, this operation may be extremely slow. """ self._check_can_seek() - - # Recalculate offset as an absolute file position. - if whence == 0: - pass - elif whence == 1: - offset = self._pos + offset - elif whence == 2: - # Seeking relative to EOF - we need to know the file's size. 
- if self._size < 0: - self._read_all(return_data=False) - offset = self._size + offset - else: - raise ValueError("Invalid value for whence: {}".format(whence)) - - # Make it so that offset is the number of bytes to skip forward. - if offset < self._pos: - self._rewind() - else: - offset -= self._pos - - # Read and discard data until we reach the desired position. - self._read_block(offset, return_data=False) - - return self._pos + return self._buffer.seek(offset, whence) def tell(self): """Return the current file position.""" self._check_not_closed() + if self._mode == _MODE_READ: + return self._buffer.tell() return self._pos diff --git a/Darwin/lib/python3.4/macpath.py b/Darwin/lib/python3.5/macpath.py similarity index 89% rename from Darwin/lib/python3.4/macpath.py rename to Darwin/lib/python3.5/macpath.py index d34f9e9..a90d105 100644 --- a/Darwin/lib/python3.4/macpath.py +++ b/Darwin/lib/python3.5/macpath.py @@ -50,20 +50,26 @@ def isabs(s): def join(s, *p): - colon = _get_colon(s) - path = s - for t in p: - if (not s) or isabs(t): - path = t - continue - if t[:1] == colon: - t = t[1:] - if colon not in path: - path = colon + path - if path[-1:] != colon: - path = path + colon - path = path + t - return path + try: + colon = _get_colon(s) + path = s + if not p: + path[:0] + colon #23780: Ensure compatible data type even if p is null. + for t in p: + if (not path) or isabs(t): + path = t + continue + if t[:1] == colon: + t = t[1:] + if colon not in path: + path = colon + path + if path[-1:] != colon: + path = path + colon + path = path + t + return path + except (TypeError, AttributeError, BytesWarning): + genericpath._check_arg_types('join', s, *p) + raise def split(s): diff --git a/Darwin/lib/python3.4/macurl2path.py b/Darwin/lib/python3.5/macurl2path.py similarity index 100% rename from Darwin/lib/python3.4/macurl2path.py rename to Darwin/lib/python3.5/macurl2path.py diff --git a/Darwin/lib/python3.4/mailbox.py b/Darwin/lib/python3.5/mailbox.py similarity index 99% rename from Darwin/lib/python3.4/mailbox.py rename to Darwin/lib/python3.5/mailbox.py index 8a25a19..24d4aec 100644 --- a/Darwin/lib/python3.4/mailbox.py +++ b/Darwin/lib/python3.5/mailbox.py @@ -103,7 +103,7 @@ class Mailbox: def itervalues(self): """Return an iterator over all messages.""" - for key in self.keys(): + for key in self.iterkeys(): try: value = self[key] except KeyError: @@ -119,7 +119,7 @@ class Mailbox: def iteritems(self): """Return an iterator over (key, message) tuples.""" - for key in self.keys(): + for key in self.iterkeys(): try: value = self[key] except KeyError: @@ -154,7 +154,7 @@ class Mailbox: def popitem(self): """Delete an arbitrary (key, message) pair and return it.""" - for key in self.keys(): + for key in self.iterkeys(): return (key, self.pop(key)) # This is only run once. 
else: raise KeyError('No messages in mailbox') @@ -162,7 +162,7 @@ class Mailbox: def update(self, arg=None): """Change the messages that correspond to certain keys.""" if hasattr(arg, 'iteritems'): - source = arg.items() + source = arg.iteritems() elif hasattr(arg, 'items'): source = arg.items() else: @@ -559,7 +559,7 @@ class Maildir(Mailbox): def next(self): """Return the next message in a one-time iteration.""" if not hasattr(self, '_onetime_keys'): - self._onetime_keys = iter(self.keys()) + self._onetime_keys = self.iterkeys() while True: try: return self[next(self._onetime_keys)] @@ -722,10 +722,14 @@ class _singlefileMailbox(Mailbox): def close(self): """Flush and close the mailbox.""" - self.flush() - if self._locked: - self.unlock() - self._file.close() # Sync has been done by self.flush() above. + try: + self.flush() + finally: + try: + if self._locked: + self.unlock() + finally: + self._file.close() # Sync has been done by self.flush() above. def _lookup(self, key=None): """Return (start, stop) or raise KeyError.""" @@ -1078,7 +1082,7 @@ class MH(Mailbox): def __len__(self): """Return a count of messages in the mailbox.""" - return len(list(self.keys())) + return len(list(self.iterkeys())) def lock(self): """Lock the mailbox.""" @@ -1192,7 +1196,7 @@ class MH(Mailbox): sequences = self.get_sequences() prev = 0 changes = [] - for key in self.keys(): + for key in self.iterkeys(): if key - 1 != prev: changes.append((key, prev + 1)) if hasattr(os, 'link'): @@ -1230,8 +1234,8 @@ class MH(Mailbox): class Babyl(_singlefileMailbox): """An Rmail-style Babyl mailbox.""" - _special_labels = frozenset(('unseen', 'deleted', 'filed', 'answered', - 'forwarded', 'edited', 'resent')) + _special_labels = frozenset({'unseen', 'deleted', 'filed', 'answered', + 'forwarded', 'edited', 'resent'}) def __init__(self, path, factory=None, create=True): """Initialize a Babyl mailbox.""" @@ -1949,7 +1953,7 @@ class _ProxyFile: while True: line = self.readline() if not line: - raise StopIteration + return yield line def tell(self): @@ -1966,9 +1970,11 @@ class _ProxyFile: def close(self): """Close the file.""" if hasattr(self, '_file'): - if hasattr(self._file, 'close'): - self._file.close() - del self._file + try: + if hasattr(self._file, 'close'): + self._file.close() + finally: + del self._file def _read(self, size, read_method): """Read size bytes using read_method.""" @@ -1980,7 +1986,7 @@ class _ProxyFile: return result def __enter__(self): - """Context manager protocol support.""" + """Context management protocol support.""" return self def __exit__(self, *exc): diff --git a/Darwin/lib/python3.4/mailcap.py b/Darwin/lib/python3.5/mailcap.py similarity index 100% rename from Darwin/lib/python3.4/mailcap.py rename to Darwin/lib/python3.5/mailcap.py diff --git a/Darwin/lib/python3.4/mimetypes.py b/Darwin/lib/python3.5/mimetypes.py similarity index 99% rename from Darwin/lib/python3.4/mimetypes.py rename to Darwin/lib/python3.5/mimetypes.py index b98c874..d64726b 100644 --- a/Darwin/lib/python3.4/mimetypes.py +++ b/Darwin/lib/python3.5/mimetypes.py @@ -246,7 +246,8 @@ class MimeTypes: except EnvironmentError: break else: - yield ctype + if '\0' not in ctype: + yield ctype i += 1 with _winreg.OpenKey(_winreg.HKEY_CLASSES_ROOT, '') as hkcr: diff --git a/Darwin/lib/python3.4/modulefinder.py b/Darwin/lib/python3.5/modulefinder.py similarity index 97% rename from Darwin/lib/python3.4/modulefinder.py rename to Darwin/lib/python3.5/modulefinder.py index cc5b8cc..50f2462 100644 --- 
a/Darwin/lib/python3.4/modulefinder.py +++ b/Darwin/lib/python3.5/modulefinder.py @@ -1,7 +1,7 @@ """Find modules used by a script, using introspection.""" import dis -import importlib._bootstrap +import importlib._bootstrap_external import importlib.machinery import marshal import os @@ -223,7 +223,7 @@ class ModuleFinder: if not m.__path__: return modules = {} - # 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"]. + # 'suffixes' used to be a list hardcoded to [".py", ".pyc"]. # But we must also collect Python extension modules - although # we cannot separate normal dlls from Python extensions. suffixes = [] @@ -289,7 +289,7 @@ class ModuleFinder: co = compile(fp.read()+'\n', pathname, 'exec') elif type == imp.PY_COMPILED: try: - marshal_data = importlib._bootstrap._validate_bytecode_header(fp.read()) + marshal_data = importlib._bootstrap_external._validate_bytecode_header(fp.read()) except ImportError as exc: self.msgout(2, "raise ImportError: " + str(exc), pathname) raise @@ -568,11 +568,12 @@ class ModuleFinder: if isinstance(consts[i], type(co)): consts[i] = self.replace_paths_in_code(consts[i]) - return types.CodeType(co.co_argcount, co.co_nlocals, co.co_stacksize, - co.co_flags, co.co_code, tuple(consts), co.co_names, - co.co_varnames, new_filename, co.co_name, - co.co_firstlineno, co.co_lnotab, - co.co_freevars, co.co_cellvars) + return types.CodeType(co.co_argcount, co.co_kwonlyargcount, + co.co_nlocals, co.co_stacksize, co.co_flags, + co.co_code, tuple(consts), co.co_names, + co.co_varnames, new_filename, co.co_name, + co.co_firstlineno, co.co_lnotab, co.co_freevars, + co.co_cellvars) def test(): diff --git a/Darwin/lib/python3.4/multiprocessing/__init__.py b/Darwin/lib/python3.5/multiprocessing/__init__.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/__init__.py rename to Darwin/lib/python3.5/multiprocessing/__init__.py diff --git a/Darwin/lib/python3.4/multiprocessing/connection.py b/Darwin/lib/python3.5/multiprocessing/connection.py similarity index 95% rename from Darwin/lib/python3.4/multiprocessing/connection.py rename to Darwin/lib/python3.5/multiprocessing/connection.py index 3bc716f..4c32237 100644 --- a/Darwin/lib/python3.4/multiprocessing/connection.py +++ b/Darwin/lib/python3.5/multiprocessing/connection.py @@ -220,7 +220,7 @@ class _ConnectionBase: def recv_bytes_into(self, buf, offset=0): """ - Receive bytes data into a writeable buffer-like object. + Receive bytes data into a writeable bytes-like object. Return the number of bytes read. """ self._check_closed() @@ -365,10 +365,7 @@ class Connection(_ConnectionBase): def _send(self, buf, write=_write): remaining = len(buf) while True: - try: - n = write(self._handle, buf) - except InterruptedError: - continue + n = write(self._handle, buf) remaining -= n if remaining == 0: break @@ -379,10 +376,7 @@ class Connection(_ConnectionBase): handle = self._handle remaining = size while remaining > 0: - try: - chunk = read(handle, remaining) - except InterruptedError: - continue + chunk = read(handle, remaining) n = len(chunk) if n == 0: if remaining == size: @@ -400,17 +394,14 @@ class Connection(_ConnectionBase): if n > 16384: # The payload is large so Nagle's algorithm won't be triggered # and we'd better avoid the cost of concatenation. - chunks = [header, buf] - elif n > 0: + self._send(header) + self._send(buf) + else: # Issue # 20540: concatenate before sending, to avoid delays due # to Nagle's algorithm on a TCP socket. 
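# Illustrative sketch (not from the upstream files): the _send/_recv hunks
# above delete retry loops like the one below. Since Python 3.5 (PEP 475),
# os.read(), os.write(), socket.accept() and related calls restart
# automatically after a signal handler runs, so catching InterruptedError by
# hand is redundant; this helper only names the removed idiom.
import os

def write_all_pre_pep475(fd, data):
    """Python 3.4-era idiom: manually retry writes interrupted by a signal."""
    while data:
        try:
            n = os.write(fd, data)
        except InterruptedError:   # EINTR: the call did nothing, try again
            continue
        data = data[n:]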
- chunks = [header + buf] - else: - # This code path is necessary to avoid "broken pipe" errors - # when sending a 0-length buffer if the other end closed the pipe. - chunks = [header] - for chunk in chunks: - self._send(chunk) + # Also note we want to avoid sending a 0-length buffer separately, + # to avoid "broken pipe" errors if the other end closed the pipe. + self._send(header + buf) def _recv_bytes(self, maxsize=None): buf = self._recv(4) @@ -469,9 +460,10 @@ class Listener(object): ''' Close the bound socket or named pipe of `self`. ''' - if self._listener is not None: - self._listener.close() + listener = self._listener + if listener is not None: self._listener = None + listener.close() address = property(lambda self: self._listener._address) last_accepted = property(lambda self: self._listener._last_accepted) @@ -598,20 +590,18 @@ class SocketListener(object): self._unlink = None def accept(self): - while True: - try: - s, self._last_accepted = self._socket.accept() - except InterruptedError: - pass - else: - break + s, self._last_accepted = self._socket.accept() s.setblocking(True) return Connection(s.detach()) def close(self): - self._socket.close() - if self._unlink is not None: - self._unlink() + try: + self._socket.close() + finally: + unlink = self._unlink + if unlink is not None: + self._unlink = None + unlink() def SocketClient(address): @@ -844,7 +834,7 @@ if sys.platform == 'win32': try: ov, err = _winapi.ReadFile(fileno(), 0, True) except OSError as e: - err = e.winerror + ov, err = None, e.winerror if err not in _ready_errors: raise if err == _winapi.ERROR_IO_PENDING: @@ -853,7 +843,16 @@ if sys.platform == 'win32': else: # If o.fileno() is an overlapped pipe handle and # err == 0 then there is a zero length message - # in the pipe, but it HAS NOT been consumed. + # in the pipe, but it HAS NOT been consumed... + if ov and sys.getwindowsversion()[:2] >= (6, 2): + # ... except on Windows 8 and later, where + # the message HAS been consumed. 
+ try: + _, err = ov.GetOverlappedResult(False) + except OSError as e: + err = e.winerror + if not err and hasattr(o, '_got_empty_message'): + o._got_empty_message = True ready_objects.add(o) timeout = 0 diff --git a/Darwin/lib/python3.4/multiprocessing/context.py b/Darwin/lib/python3.5/multiprocessing/context.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/context.py rename to Darwin/lib/python3.5/multiprocessing/context.py diff --git a/Darwin/lib/python3.4/multiprocessing/dummy/__init__.py b/Darwin/lib/python3.5/multiprocessing/dummy/__init__.py similarity index 95% rename from Darwin/lib/python3.4/multiprocessing/dummy/__init__.py rename to Darwin/lib/python3.5/multiprocessing/dummy/__init__.py index 97f7af7..1abea64 100644 --- a/Darwin/lib/python3.4/multiprocessing/dummy/__init__.py +++ b/Darwin/lib/python3.5/multiprocessing/dummy/__init__.py @@ -86,7 +86,7 @@ class Namespace(object): if not name.startswith('_'): temp.append('%s=%r' % (name, value)) temp.sort() - return 'Namespace(%s)' % str.join(', ', temp) + return '%s(%s)' % (self.__class__.__name__, ', '.join(temp)) dict = dict list = list @@ -104,7 +104,7 @@ class Value(object): self._value = value value = property(_get, _set) def __repr__(self): - return '<%r(%r, %r)>'%(type(self).__name__,self._typecode,self._value) + return '<%s(%r, %r)>'%(type(self).__name__,self._typecode,self._value) def Manager(): return sys.modules[__name__] diff --git a/Darwin/lib/python3.4/multiprocessing/dummy/connection.py b/Darwin/lib/python3.5/multiprocessing/dummy/connection.py similarity index 92% rename from Darwin/lib/python3.4/multiprocessing/dummy/connection.py rename to Darwin/lib/python3.5/multiprocessing/dummy/connection.py index 694ef96..1984375 100644 --- a/Darwin/lib/python3.4/multiprocessing/dummy/connection.py +++ b/Darwin/lib/python3.5/multiprocessing/dummy/connection.py @@ -59,9 +59,8 @@ class Connection(object): return True if timeout <= 0.0: return False - self._in.not_empty.acquire() - self._in.not_empty.wait(timeout) - self._in.not_empty.release() + with self._in.not_empty: + self._in.not_empty.wait(timeout) return self._in.qsize() > 0 def close(self): diff --git a/Darwin/lib/python3.4/multiprocessing/forkserver.py b/Darwin/lib/python3.5/multiprocessing/forkserver.py similarity index 94% rename from Darwin/lib/python3.4/multiprocessing/forkserver.py rename to Darwin/lib/python3.5/multiprocessing/forkserver.py index 387517e..b27cba5 100644 --- a/Darwin/lib/python3.4/multiprocessing/forkserver.py +++ b/Darwin/lib/python3.5/multiprocessing/forkserver.py @@ -107,7 +107,7 @@ class ForkServer(object): address = connection.arbitrary_address('AF_UNIX') listener.bind(address) os.chmod(address, 0o600) - listener.listen(100) + listener.listen() # all client processes own the write end of the "alive" pipe; # when they all terminate the read end becomes ready. 
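# Illustrative sketch (POSIX-only, not from the upstream files): the comment
# above describes the forkserver's "alive" pipe. Every client keeps the write
# end open; the watcher selects on the read end, which only becomes readable
# (returning b'', i.e. EOF) once the last writer has exited. A minimal
# standalone demonstration of that liveness trick:
import os
import select
import time

alive_r, alive_w = os.pipe()
children = []
for _ in range(3):
    pid = os.fork()
    if pid == 0:                     # child: inherits alive_w, never writes
        os.close(alive_r)
        time.sleep(0.5)              # pretend to do some work
        os._exit(0)                  # exiting closes its copy of alive_w
    children.append(pid)

os.close(alive_w)                    # parent must not hold a write end itself
select.select([alive_r], [], [])     # blocks until every child has exited
assert os.read(alive_r, 1) == b''    # EOF: no writers remain
for pid in children:
    os.waitpid(pid, 0)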
@@ -188,8 +188,6 @@ def main(listener_fd, alive_r, preload, main_path=None, sys_path=None): finally: os._exit(code) - except InterruptedError: - pass except OSError as e: if e.errno != errno.ECONNABORTED: raise @@ -230,13 +228,7 @@ def read_unsigned(fd): data = b'' length = UNSIGNED_STRUCT.size while len(data) < length: - while True: - try: - s = os.read(fd, length - len(data)) - except InterruptedError: - pass - else: - break + s = os.read(fd, length - len(data)) if not s: raise EOFError('unexpected EOF') data += s @@ -245,13 +237,7 @@ def read_unsigned(fd): def write_unsigned(fd, n): msg = UNSIGNED_STRUCT.pack(n) while msg: - while True: - try: - nbytes = os.write(fd, msg) - except InterruptedError: - pass - else: - break + nbytes = os.write(fd, msg) if nbytes == 0: raise RuntimeError('should not get here') msg = msg[nbytes:] diff --git a/Darwin/lib/python3.4/multiprocessing/heap.py b/Darwin/lib/python3.5/multiprocessing/heap.py similarity index 92% rename from Darwin/lib/python3.4/multiprocessing/heap.py rename to Darwin/lib/python3.5/multiprocessing/heap.py index 344a45f..44d9638 100644 --- a/Darwin/lib/python3.4/multiprocessing/heap.py +++ b/Darwin/lib/python3.5/multiprocessing/heap.py @@ -54,7 +54,9 @@ if sys.platform == 'win32': def __setstate__(self, state): self.size, self.name = self._state = state self.buffer = mmap.mmap(-1, self.size, tagname=self.name) - assert _winapi.GetLastError() == _winapi.ERROR_ALREADY_EXISTS + # XXX Temporarily preventing buildbot failures while determining + # XXX the correct long-term fix. See issue 23060 + #assert _winapi.GetLastError() == _winapi.ERROR_ALREADY_EXISTS else: @@ -69,7 +71,14 @@ else: os.unlink(name) util.Finalize(self, os.close, (self.fd,)) with open(self.fd, 'wb', closefd=False) as f: - f.write(b'\0'*size) + bs = 1024 * 1024 + if size >= bs: + zeros = b'\0' * bs + for _ in range(size // bs): + f.write(zeros) + del zeros + f.write(b'\0' * (size % bs)) + assert f.tell() == size self.buffer = mmap.mmap(self.fd, self.size) def reduce_arena(a): @@ -216,9 +225,8 @@ class Heap(object): assert 0 <= size < sys.maxsize if os.getpid() != self._lastpid: self.__init__() # reinitialize after fork - self._lock.acquire() - self._free_pending_blocks() - try: + with self._lock: + self._free_pending_blocks() size = self._roundup(max(size,1), self._alignment) (arena, start, stop) = self._malloc(size) new_stop = start + size @@ -227,8 +235,6 @@ class Heap(object): block = (arena, start, new_stop) self._allocated_blocks.add(block) return block - finally: - self._lock.release() # # Class representing a chunk of an mmap -- can be inherited by child process diff --git a/Darwin/lib/python3.4/multiprocessing/managers.py b/Darwin/lib/python3.5/multiprocessing/managers.py similarity index 97% rename from Darwin/lib/python3.4/multiprocessing/managers.py rename to Darwin/lib/python3.5/multiprocessing/managers.py index 66d46fc..776656e 100644 --- a/Darwin/lib/python3.4/multiprocessing/managers.py +++ b/Darwin/lib/python3.5/multiprocessing/managers.py @@ -65,8 +65,8 @@ class Token(object): (self.typeid, self.address, self.id) = state def __repr__(self): - return 'Token(typeid=%r, address=%r, id=%r)' % \ - (self.typeid, self.address, self.id) + return '%s(typeid=%r, address=%r, id=%r)' % \ + (self.__class__.__name__, self.typeid, self.address, self.id) # # Function for communication with a manager's server process @@ -306,8 +306,7 @@ class Server(object): ''' Return some info --- useful to spot problems with refcounting ''' - self.mutex.acquire() - try: + with 
self.mutex: result = [] keys = list(self.id_to_obj.keys()) keys.sort() @@ -317,8 +316,6 @@ class Server(object): (ident, self.id_to_refcount[ident], str(self.id_to_obj[ident][0])[:75])) return '\n'.join(result) - finally: - self.mutex.release() def number_of_objects(self, c): ''' @@ -343,8 +340,7 @@ class Server(object): ''' Create a new shared object and return its id ''' - self.mutex.acquire() - try: + with self.mutex: callable, exposed, method_to_typeid, proxytype = \ self.registry[typeid] @@ -374,8 +370,6 @@ class Server(object): # has been created. self.incref(c, ident) return ident, tuple(exposed) - finally: - self.mutex.release() def get_methods(self, c, token): ''' @@ -392,22 +386,16 @@ class Server(object): self.serve_client(c) def incref(self, c, ident): - self.mutex.acquire() - try: + with self.mutex: self.id_to_refcount[ident] += 1 - finally: - self.mutex.release() def decref(self, c, ident): - self.mutex.acquire() - try: + with self.mutex: assert self.id_to_refcount[ident] >= 1 self.id_to_refcount[ident] -= 1 if self.id_to_refcount[ident] == 0: del self.id_to_obj[ident], self.id_to_refcount[ident] util.debug('disposing of obj with id %r', ident) - finally: - self.mutex.release() # # Class to represent state of a manager @@ -671,14 +659,11 @@ class BaseProxy(object): def __init__(self, token, serializer, manager=None, authkey=None, exposed=None, incref=True): - BaseProxy._mutex.acquire() - try: + with BaseProxy._mutex: tls_idset = BaseProxy._address_to_local.get(token.address, None) if tls_idset is None: tls_idset = util.ForkAwareLocal(), ProcessLocalSet() BaseProxy._address_to_local[token.address] = tls_idset - finally: - BaseProxy._mutex.release() # self._tls is used to record the connection used by this # thread to communicate with the manager at token.address @@ -818,8 +803,8 @@ class BaseProxy(object): return self._getvalue() def __repr__(self): - return '<%s object, typeid %r at %s>' % \ - (type(self).__name__, self._token.typeid, '0x%x' % id(self)) + return '<%s object, typeid %r at %#x>' % \ + (type(self).__name__, self._token.typeid, id(self)) def __str__(self): ''' @@ -916,7 +901,7 @@ class Namespace(object): if not name.startswith('_'): temp.append('%s=%r' % (name, value)) temp.sort() - return 'Namespace(%s)' % str.join(', ', temp) + return '%s(%s)' % (self.__class__.__name__, ', '.join(temp)) class Value(object): def __init__(self, typecode, value, lock=True): diff --git a/Darwin/lib/python3.4/multiprocessing/pool.py b/Darwin/lib/python3.5/multiprocessing/pool.py similarity index 95% rename from Darwin/lib/python3.4/multiprocessing/pool.py rename to Darwin/lib/python3.5/multiprocessing/pool.py index 8832a5c..6d25469 100644 --- a/Darwin/lib/python3.4/multiprocessing/pool.py +++ b/Darwin/lib/python3.5/multiprocessing/pool.py @@ -87,7 +87,7 @@ class MaybeEncodingError(Exception): self.exc) def __repr__(self): - return "" % str(self) + return "<%s: %s>" % (self.__class__.__name__, self) def worker(inqueue, outqueue, initializer=None, initargs=(), maxtasks=None, @@ -374,25 +374,34 @@ class Pool(object): thread = threading.current_thread() for taskseq, set_length in iter(taskqueue.get, None): + task = None i = -1 - for i, task in enumerate(taskseq): - if thread._state: - util.debug('task handler found thread._state != RUN') - break - try: - put(task) - except Exception as e: - job, ind = task[:2] + try: + for i, task in enumerate(taskseq): + if thread._state: + util.debug('task handler found thread._state != RUN') + break try: - cache[job]._set(ind, (False, e)) - except 
KeyError: - pass - else: + put(task) + except Exception as e: + job, ind = task[:2] + try: + cache[job]._set(ind, (False, e)) + except KeyError: + pass + else: + if set_length: + util.debug('doing set_length()') + set_length(i+1) + continue + break + except Exception as ex: + job, ind = task[:2] if task else (0, 0) + if job in cache: + cache[job]._set(ind + 1, (False, ex)) if set_length: util.debug('doing set_length()') set_length(i+1) - continue - break else: util.debug('task handler got sentinel') @@ -666,8 +675,7 @@ class IMapIterator(object): return self def next(self, timeout=None): - self._cond.acquire() - try: + with self._cond: try: item = self._items.popleft() except IndexError: @@ -680,8 +688,6 @@ class IMapIterator(object): if self._index == self._length: raise StopIteration raise TimeoutError - finally: - self._cond.release() success, value = item if success: @@ -691,8 +697,7 @@ class IMapIterator(object): __next__ = next # XXX def _set(self, i, obj): - self._cond.acquire() - try: + with self._cond: if self._index == i: self._items.append(obj) self._index += 1 @@ -706,18 +711,13 @@ class IMapIterator(object): if self._index == self._length: del self._cache[self._job] - finally: - self._cond.release() def _set_length(self, length): - self._cond.acquire() - try: + with self._cond: self._length = length if self._index == self._length: self._cond.notify() del self._cache[self._job] - finally: - self._cond.release() # # Class whose instances are returned by `Pool.imap_unordered()` @@ -726,15 +726,12 @@ class IMapIterator(object): class IMapUnorderedIterator(IMapIterator): def _set(self, i, obj): - self._cond.acquire() - try: + with self._cond: self._items.append(obj) self._index += 1 self._cond.notify() if self._index == self._length: del self._cache[self._job] - finally: - self._cond.release() # # @@ -760,10 +757,7 @@ class ThreadPool(Pool): @staticmethod def _help_stuff_finish(inqueue, task_handler, size): # put sentinels at head of inqueue to make workers finish - inqueue.not_empty.acquire() - try: + with inqueue.not_empty: inqueue.queue.clear() inqueue.queue.extend([None] * size) inqueue.not_empty.notify_all() - finally: - inqueue.not_empty.release() diff --git a/Darwin/lib/python3.4/multiprocessing/popen_fork.py b/Darwin/lib/python3.5/multiprocessing/popen_fork.py similarity index 96% rename from Darwin/lib/python3.4/multiprocessing/popen_fork.py rename to Darwin/lib/python3.5/multiprocessing/popen_fork.py index 367e72e..d2ebd7c 100644 --- a/Darwin/lib/python3.4/multiprocessing/popen_fork.py +++ b/Darwin/lib/python3.5/multiprocessing/popen_fork.py @@ -1,7 +1,6 @@ import os import sys import signal -import errno from . import util @@ -29,8 +28,6 @@ class Popen(object): try: pid, sts = os.waitpid(self.pid, flag) except OSError as e: - if e.errno == errno.EINTR: - continue # Child process not yet created. 
See #1731717 # e.errno == errno.ECHILD == 10 return None diff --git a/Darwin/lib/python3.4/multiprocessing/popen_forkserver.py b/Darwin/lib/python3.5/multiprocessing/popen_forkserver.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/popen_forkserver.py rename to Darwin/lib/python3.5/multiprocessing/popen_forkserver.py diff --git a/Darwin/lib/python3.4/multiprocessing/popen_spawn_posix.py b/Darwin/lib/python3.5/multiprocessing/popen_spawn_posix.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/popen_spawn_posix.py rename to Darwin/lib/python3.5/multiprocessing/popen_spawn_posix.py diff --git a/Darwin/lib/python3.4/multiprocessing/popen_spawn_win32.py b/Darwin/lib/python3.5/multiprocessing/popen_spawn_win32.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/popen_spawn_win32.py rename to Darwin/lib/python3.5/multiprocessing/popen_spawn_win32.py diff --git a/Darwin/lib/python3.4/multiprocessing/process.py b/Darwin/lib/python3.5/multiprocessing/process.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/process.py rename to Darwin/lib/python3.5/multiprocessing/process.py diff --git a/Darwin/lib/python3.4/multiprocessing/queues.py b/Darwin/lib/python3.5/multiprocessing/queues.py similarity index 94% rename from Darwin/lib/python3.4/multiprocessing/queues.py rename to Darwin/lib/python3.5/multiprocessing/queues.py index f650771..786a303 100644 --- a/Darwin/lib/python3.4/multiprocessing/queues.py +++ b/Darwin/lib/python3.5/multiprocessing/queues.py @@ -35,7 +35,8 @@ class Queue(object): def __init__(self, maxsize=0, *, ctx): if maxsize <= 0: - maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX + # Can raise ImportError (see issues #3770 and #23400) + from .synchronize import SEM_VALUE_MAX as maxsize self._maxsize = maxsize self._reader, self._writer = connection.Pipe(duplex=False) self._rlock = ctx.Lock() @@ -81,14 +82,11 @@ class Queue(object): if not self._sem.acquire(block, timeout): raise Full - self._notempty.acquire() - try: + with self._notempty: if self._thread is None: self._start_thread() self._buffer.append(obj) self._notempty.notify() - finally: - self._notempty.release() def get(self, block=True, timeout=None): if block and timeout is None: @@ -132,9 +130,13 @@ class Queue(object): def close(self): self._closed = True - self._reader.close() - if self._close: - self._close() + try: + self._reader.close() + finally: + close = self._close + if close: + self._close = None + close() def join_thread(self): debug('Queue.join_thread()') @@ -201,12 +203,9 @@ class Queue(object): @staticmethod def _finalize_close(buffer, notempty): debug('telling queue thread to quit') - notempty.acquire() - try: + with notempty: buffer.append(_sentinel) notempty.notify() - finally: - notempty.release() @staticmethod def _feed(buffer, notempty, send_bytes, writelock, close, ignore_epipe): @@ -295,35 +294,24 @@ class JoinableQueue(Queue): if not self._sem.acquire(block, timeout): raise Full - self._notempty.acquire() - self._cond.acquire() - try: + with self._notempty, self._cond: if self._thread is None: self._start_thread() self._buffer.append(obj) self._unfinished_tasks.release() self._notempty.notify() - finally: - self._cond.release() - self._notempty.release() def task_done(self): - self._cond.acquire() - try: + with self._cond: if not self._unfinished_tasks.acquire(False): raise ValueError('task_done() called too many times') if self._unfinished_tasks._semlock._is_zero(): self._cond.notify_all() - finally: - 
self._cond.release() def join(self): - self._cond.acquire() - try: + with self._cond: if not self._unfinished_tasks._semlock._is_zero(): self._cond.wait() - finally: - self._cond.release() # # Simplified Queue type -- really just a locked pipe diff --git a/Darwin/lib/python3.4/multiprocessing/reduction.py b/Darwin/lib/python3.5/multiprocessing/reduction.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/reduction.py rename to Darwin/lib/python3.5/multiprocessing/reduction.py diff --git a/Darwin/lib/python3.4/multiprocessing/resource_sharer.py b/Darwin/lib/python3.5/multiprocessing/resource_sharer.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/resource_sharer.py rename to Darwin/lib/python3.5/multiprocessing/resource_sharer.py diff --git a/Darwin/lib/python3.4/multiprocessing/semaphore_tracker.py b/Darwin/lib/python3.5/multiprocessing/semaphore_tracker.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/semaphore_tracker.py rename to Darwin/lib/python3.5/multiprocessing/semaphore_tracker.py diff --git a/Darwin/lib/python3.4/multiprocessing/sharedctypes.py b/Darwin/lib/python3.5/multiprocessing/sharedctypes.py similarity index 94% rename from Darwin/lib/python3.4/multiprocessing/sharedctypes.py rename to Darwin/lib/python3.5/multiprocessing/sharedctypes.py index 0c17825..4258f59 100644 --- a/Darwin/lib/python3.4/multiprocessing/sharedctypes.py +++ b/Darwin/lib/python3.5/multiprocessing/sharedctypes.py @@ -188,6 +188,12 @@ class SynchronizedBase(object): self.acquire = self._lock.acquire self.release = self._lock.release + def __enter__(self): + return self._lock.__enter__() + + def __exit__(self, *args): + return self._lock.__exit__(*args) + def __reduce__(self): assert_spawning(self) return synchronized, (self._obj, self._lock) @@ -212,32 +218,20 @@ class SynchronizedArray(SynchronizedBase): return len(self._obj) def __getitem__(self, i): - self.acquire() - try: + with self: return self._obj[i] - finally: - self.release() def __setitem__(self, i, value): - self.acquire() - try: + with self: self._obj[i] = value - finally: - self.release() def __getslice__(self, start, stop): - self.acquire() - try: + with self: return self._obj[start:stop] - finally: - self.release() def __setslice__(self, start, stop, values): - self.acquire() - try: + with self: self._obj[start:stop] = values - finally: - self.release() class SynchronizedString(SynchronizedArray): diff --git a/Darwin/lib/python3.4/multiprocessing/spawn.py b/Darwin/lib/python3.5/multiprocessing/spawn.py similarity index 100% rename from Darwin/lib/python3.4/multiprocessing/spawn.py rename to Darwin/lib/python3.5/multiprocessing/spawn.py diff --git a/Darwin/lib/python3.4/multiprocessing/synchronize.py b/Darwin/lib/python3.5/multiprocessing/synchronize.py similarity index 94% rename from Darwin/lib/python3.4/multiprocessing/synchronize.py rename to Darwin/lib/python3.5/multiprocessing/synchronize.py index dea1cbd..d4bdf0e 100644 --- a/Darwin/lib/python3.4/multiprocessing/synchronize.py +++ b/Darwin/lib/python3.5/multiprocessing/synchronize.py @@ -134,7 +134,7 @@ class Semaphore(SemLock): value = self._semlock._get_value() except Exception: value = 'unknown' - return '' % value + return '<%s(value=%s)>' % (self.__class__.__name__, value) # # Bounded semaphore @@ -150,8 +150,8 @@ class BoundedSemaphore(Semaphore): value = self._semlock._get_value() except Exception: value = 'unknown' - return '' % \ - (value, self._semlock.maxvalue) + return '<%s(value=%s, maxvalue=%s)>' % 
\ + (self.__class__.__name__, value, self._semlock.maxvalue) # # Non-recursive lock @@ -176,7 +176,7 @@ class Lock(SemLock): name = 'SomeOtherProcess' except Exception: name = 'unknown' - return '' % name + return '<%s(owner=%s)>' % (self.__class__.__name__, name) # # Recursive lock @@ -202,7 +202,7 @@ class RLock(SemLock): name, count = 'SomeOtherProcess', 'nonzero' except Exception: name, count = 'unknown', 'unknown' - return '' % (name, count) + return '<%s(%s, %s)>' % (self.__class__.__name__, name, count) # # Condition variable @@ -243,7 +243,7 @@ class Condition(object): self._woken_count._semlock._get_value()) except Exception: num_waiters = 'unknown' - return '' % (self._lock, num_waiters) + return '<%s(%s, %s)>' % (self.__class__.__name__, self._lock, num_waiters) def wait(self, timeout=None): assert self._lock._semlock._is_mine(), \ @@ -337,34 +337,24 @@ class Event(object): self._flag = ctx.Semaphore(0) def is_set(self): - self._cond.acquire() - try: + with self._cond: if self._flag.acquire(False): self._flag.release() return True return False - finally: - self._cond.release() def set(self): - self._cond.acquire() - try: + with self._cond: self._flag.acquire(False) self._flag.release() self._cond.notify_all() - finally: - self._cond.release() def clear(self): - self._cond.acquire() - try: + with self._cond: self._flag.acquire(False) - finally: - self._cond.release() def wait(self, timeout=None): - self._cond.acquire() - try: + with self._cond: if self._flag.acquire(False): self._flag.release() else: @@ -374,8 +364,6 @@ class Event(object): self._flag.release() return True return False - finally: - self._cond.release() # # Barrier diff --git a/Darwin/lib/python3.4/multiprocessing/util.py b/Darwin/lib/python3.5/multiprocessing/util.py similarity index 96% rename from Darwin/lib/python3.4/multiprocessing/util.py rename to Darwin/lib/python3.5/multiprocessing/util.py index 0b695e4..ea5443d 100644 --- a/Darwin/lib/python3.4/multiprocessing/util.py +++ b/Darwin/lib/python3.5/multiprocessing/util.py @@ -212,10 +212,11 @@ class Finalize(object): obj = None if obj is None: - return '' + return '<%s object, dead>' % self.__class__.__name__ - x = ' 0 and s[:1] in _get_bothseps(s) + return len(s) > 0 and s[0] in _get_bothseps(s) # Join two (or more) paths. def join(path, *paths): - sep = _get_sep(path) - seps = _get_bothseps(path) - colon = _get_colon(path) - result_drive, result_path = splitdrive(path) - for p in paths: - p_drive, p_path = splitdrive(p) - if p_path and p_path[0] in seps: - # Second path is absolute - if p_drive or not result_drive: - result_drive = p_drive - result_path = p_path - continue - elif p_drive and p_drive != result_drive: - if p_drive.lower() != result_drive.lower(): - # Different drives => ignore the first path entirely - result_drive = p_drive + if isinstance(path, bytes): + sep = b'\\' + seps = b'\\/' + colon = b':' + else: + sep = '\\' + seps = '\\/' + colon = ':' + try: + if not paths: + path[:0] + sep #23780: Ensure compatible data type even if p is null. 
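# Illustrative sketch (not from the upstream files): the "path[:0] + sep"
# probe above concatenates an empty slice of the caller's argument with a
# literal of the type this flavour expects, so mixing str and bytes (or
# passing a non-path type) fails immediately even when *paths is empty. The
# resulting error is then rephrased by genericpath._check_arg_types; a
# simplified stand-in for that helper and the overall shape of the pattern:
def _check_arg_types_sketch(funcname, *args):
    hasstr = hasbytes = False
    for arg in args:
        if isinstance(arg, str):
            hasstr = True
        elif isinstance(arg, bytes):
            hasbytes = True
        else:
            raise TypeError('%s() argument must be str or bytes, not %r'
                            % (funcname, type(arg).__name__))
    if hasstr and hasbytes:
        raise TypeError("Can't mix strings and bytes in path components")

def join_sketch(path, *paths):
    # ignores drives/absolute components that the real join() handles
    sep = b'\\' if isinstance(path, bytes) else '\\'
    try:
        if not paths:
            path[:0] + sep           # type probe, result is discarded
        return sep.join((path,) + paths) if paths else path
    except (TypeError, AttributeError):
        _check_arg_types_sketch('join', path, *paths)
        raise                        # fall through if the types were fine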
+ result_drive, result_path = splitdrive(path) + for p in paths: + p_drive, p_path = splitdrive(p) + if p_path and p_path[0] in seps: + # Second path is absolute + if p_drive or not result_drive: + result_drive = p_drive result_path = p_path continue - # Same drive in different case - result_drive = p_drive - # Second path is relative to the first - if result_path and result_path[-1] not in seps: - result_path = result_path + sep - result_path = result_path + p_path - ## add separator between UNC and non-absolute path - if (result_path and result_path[0] not in seps and - result_drive and result_drive[-1:] != colon): - return result_drive + sep + result_path - return result_drive + result_path + elif p_drive and p_drive != result_drive: + if p_drive.lower() != result_drive.lower(): + # Different drives => ignore the first path entirely + result_drive = p_drive + result_path = p_path + continue + # Same drive in different case + result_drive = p_drive + # Second path is relative to the first + if result_path and result_path[-1] not in seps: + result_path = result_path + sep + result_path = result_path + p_path + ## add separator between UNC and non-absolute path + if (result_path and result_path[0] not in seps and + result_drive and result_drive[-1:] != colon): + return result_drive + sep + result_path + return result_drive + result_path + except (TypeError, AttributeError, BytesWarning): + genericpath._check_arg_types('join', path, *paths) + raise # Split a path in a drive specification (a drive letter followed by a @@ -155,10 +136,16 @@ def splitdrive(p): Paths cannot contain both a drive letter and a UNC path. """ - empty = _get_empty(p) - if len(p) > 1: - sep = _get_sep(p) - normp = p.replace(_get_altsep(p), sep) + if len(p) >= 2: + if isinstance(p, bytes): + sep = b'\\' + altsep = b'/' + colon = b':' + else: + sep = '\\' + altsep = '/' + colon = ':' + normp = p.replace(altsep, sep) if (normp[0:2] == sep*2) and (normp[2:3] != sep): # is a UNC path: # vvvvvvvvvvvvvvvvvvvv drive letter or UNC path @@ -166,18 +153,18 @@ def splitdrive(p): # directory ^^^^^^^^^^^^^^^ index = normp.find(sep, 2) if index == -1: - return empty, p + return p[:0], p index2 = normp.find(sep, index + 1) # a UNC path can't have two slashes in a row # (after the initial two) if index2 == index + 1: - return empty, p + return p[:0], p if index2 == -1: index2 = len(p) return p[:index2], p[index2:] - if normp[1:2] == _get_colon(p): + if normp[1:2] == colon: return p[:2], p[2:] - return empty, p + return p[:0], p # Parse UNC paths @@ -221,10 +208,7 @@ def split(p): i -= 1 head, tail = p[:i], p[i:] # now tail has no slashes # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1:] in seps: - head2 = head2[:-1] - head = head2 or head + head = head.rstrip(seps) or head return d + head, tail @@ -234,8 +218,10 @@ def split(p): # It is always true that root + ext == p. 
def splitext(p): - return genericpath._splitext(p, _get_sep(p), _get_altsep(p), - _get_dot(p)) + if isinstance(p, bytes): + return genericpath._splitext(p, b'\\', b'/', b'.') + else: + return genericpath._splitext(p, '\\', '/', '.') splitext.__doc__ = genericpath._splitext.__doc__ @@ -343,7 +329,7 @@ def expanduser(path): userhome = join(drive, os.environ['HOMEPATH']) if isinstance(path, bytes): - userhome = userhome.encode(sys.getfilesystemencoding()) + userhome = os.fsencode(userhome) if i != 1: #~user userhome = join(dirname(userhome), path[1:i]) @@ -369,13 +355,14 @@ def expandvars(path): Unknown variables are left unchanged.""" if isinstance(path, bytes): - if ord('$') not in path and ord('%') not in path: + if b'$' not in path and b'%' not in path: return path import string varchars = bytes(string.ascii_letters + string.digits + '_-', 'ascii') quote = b'\'' percent = b'%' brace = b'{' + rbrace = b'}' dollar = b'$' environ = getattr(os, 'environb', None) else: @@ -386,6 +373,7 @@ def expandvars(path): quote = '\'' percent = '%' brace = '{' + rbrace = '}' dollar = '$' environ = os.environ res = path[:0] @@ -400,7 +388,7 @@ def expandvars(path): index = path.index(c) res += c + path[:index + 1] except ValueError: - res += path + res += c + path index = pathlen - 1 elif c == percent: # variable or '%' if path[index + 1:index + 2] == percent: @@ -432,15 +420,9 @@ def expandvars(path): path = path[index+2:] pathlen = len(path) try: - if isinstance(path, bytes): - index = path.index(b'}') - else: - index = path.index('}') + index = path.index(rbrace) except ValueError: - if isinstance(path, bytes): - res += b'${' + path - else: - res += '${' + path + res += dollar + brace + path index = pathlen - 1 else: var = path[:index] @@ -450,10 +432,7 @@ def expandvars(path): else: value = environ[var] except KeyError: - if isinstance(path, bytes): - value = b'${' + var + b'}' - else: - value = '${' + var + '}' + value = dollar + brace + var + rbrace res += value else: var = path[:0] @@ -485,16 +464,25 @@ def expandvars(path): def normpath(path): """Normalize path, eliminating double slashes, etc.""" - sep = _get_sep(path) - dotdot = _get_dot(path) * 2 - special_prefixes = _get_special(path) + if isinstance(path, bytes): + sep = b'\\' + altsep = b'/' + curdir = b'.' + pardir = b'..' + special_prefixes = (b'\\\\.\\', b'\\\\?\\') + else: + sep = '\\' + altsep = '/' + curdir = '.' + pardir = '..' + special_prefixes = ('\\\\.\\', '\\\\?\\') if path.startswith(special_prefixes): # in the case of paths with these prefixes: # \\.\ -> device names # \\?\ -> literal paths # do not do any normalization, but return the path unchanged return path - path = path.replace(_get_altsep(path), sep) + path = path.replace(altsep, sep) prefix, path = splitdrive(path) # collapse initial backslashes @@ -505,13 +493,13 @@ def normpath(path): comps = path.split(sep) i = 0 while i < len(comps): - if not comps[i] or comps[i] == _get_dot(path): + if not comps[i] or comps[i] == curdir: del comps[i] - elif comps[i] == dotdot: - if i > 0 and comps[i-1] != dotdot: + elif comps[i] == pardir: + if i > 0 and comps[i-1] != pardir: del comps[i-1:i+1] i -= 1 - elif i == 0 and prefix.endswith(_get_sep(path)): + elif i == 0 and prefix.endswith(sep): del comps[i] else: i += 1 @@ -519,7 +507,7 @@ def normpath(path): i += 1 # If the path is now empty, substitute '.' 
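# Illustrative sketch (not from the upstream files): besides the ${...} /
# rbrace cleanup, the expandvars hunk above fixes handling of an unterminated
# "'" quote -- previously the opening quote character was dropped from the
# result. On Python 3.5+ (the version this tree vendors) the public API
# behaves as asserted below; no expansion happens inside single quotes.
import ntpath

assert ntpath.expandvars("'no closing quote") == "'no closing quote"
assert ntpath.expandvars("'$HOME' is not expanded") == "'$HOME' is not expanded"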
if not prefix and not comps: - comps.append(_get_dot(path)) + comps.append(curdir) return prefix + sep.join(comps) @@ -559,42 +547,109 @@ realpath = abspath supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and sys.getwindowsversion()[3] >= 2) -def relpath(path, start=curdir): +def relpath(path, start=None): """Return a relative version of a path""" - sep = _get_sep(path) + if isinstance(path, bytes): + sep = b'\\' + curdir = b'.' + pardir = b'..' + else: + sep = '\\' + curdir = '.' + pardir = '..' - if start is curdir: - start = _get_dot(path) + if start is None: + start = curdir if not path: raise ValueError("no path specified") - start_abs = abspath(normpath(start)) - path_abs = abspath(normpath(path)) - start_drive, start_rest = splitdrive(start_abs) - path_drive, path_rest = splitdrive(path_abs) - if normcase(start_drive) != normcase(path_drive): - error = "path is on mount '{0}', start on mount '{1}'".format( - path_drive, start_drive) - raise ValueError(error) + try: + start_abs = abspath(normpath(start)) + path_abs = abspath(normpath(path)) + start_drive, start_rest = splitdrive(start_abs) + path_drive, path_rest = splitdrive(path_abs) + if normcase(start_drive) != normcase(path_drive): + raise ValueError("path is on mount %r, start on mount %r" % ( + path_drive, start_drive)) - start_list = [x for x in start_rest.split(sep) if x] - path_list = [x for x in path_rest.split(sep) if x] - # Work out how much of the filepath is shared by start and path. - i = 0 - for e1, e2 in zip(start_list, path_list): - if normcase(e1) != normcase(e2): - break - i += 1 + start_list = [x for x in start_rest.split(sep) if x] + path_list = [x for x in path_rest.split(sep) if x] + # Work out how much of the filepath is shared by start and path. + i = 0 + for e1, e2 in zip(start_list, path_list): + if normcase(e1) != normcase(e2): + break + i += 1 - if isinstance(path, bytes): - pardir = b'..' + rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + if not rel_list: + return curdir + return join(*rel_list) + except (TypeError, ValueError, AttributeError, BytesWarning, DeprecationWarning): + genericpath._check_arg_types('relpath', path, start) + raise + + +# Return the longest common sub-path of the sequence of paths given as input. +# The function is case-insensitive and 'separator-insensitive', i.e. if the +# only difference between two paths is the use of '\' versus '/' as separator, +# they are deemed to be equal. +# +# However, the returned path will have the standard '\' separator (even if the +# given paths had the alternative '/' separator) and will have the case of the +# first path given in the sequence. Additionally, any trailing separator is +# stripped from the returned path. + +def commonpath(paths): + """Given a sequence of path names, returns the longest common sub-path.""" + + if not paths: + raise ValueError('commonpath() arg is an empty sequence') + + if isinstance(paths[0], bytes): + sep = b'\\' + altsep = b'/' + curdir = b'.' else: - pardir = '..' - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return _get_dot(path) - return join(*rel_list) + sep = '\\' + altsep = '/' + curdir = '.' + + try: + drivesplits = [splitdrive(p.replace(altsep, sep).lower()) for p in paths] + split_paths = [p.split(sep) for d, p in drivesplits] + + try: + isabs, = set(p[:1] == sep for d, p in drivesplits) + except ValueError: + raise ValueError("Can't mix absolute and relative paths") from None + + # Check that all drive letters or UNC paths match. 
The check is made only + # now otherwise type errors for mixing strings and bytes would not be + # caught. + if len(set(d for d, p in drivesplits)) != 1: + raise ValueError("Paths don't have the same drive") + + drive, path = splitdrive(paths[0].replace(altsep, sep)) + common = path.split(sep) + common = [c for c in common if c and c != curdir] + + split_paths = [[c for c in s if c and c != curdir] for s in split_paths] + s1 = min(split_paths) + s2 = max(split_paths) + for i, c in enumerate(s1): + if c != s2[i]: + common = common[:i] + break + else: + common = common[:len(s1)] + + prefix = drive + sep if isabs else drive + return prefix + sep.join(common) + except (TypeError, AttributeError): + genericpath._check_arg_types('commonpath', *paths) + raise # determine if two files are in fact the same file diff --git a/Darwin/lib/python3.4/nturl2path.py b/Darwin/lib/python3.5/nturl2path.py similarity index 100% rename from Darwin/lib/python3.4/nturl2path.py rename to Darwin/lib/python3.5/nturl2path.py diff --git a/Darwin/lib/python3.4/numbers.py b/Darwin/lib/python3.5/numbers.py similarity index 98% rename from Darwin/lib/python3.4/numbers.py rename to Darwin/lib/python3.5/numbers.py index b206457..7eedc63 100644 --- a/Darwin/lib/python3.4/numbers.py +++ b/Darwin/lib/python3.5/numbers.py @@ -141,11 +141,6 @@ class Complex(Number): """self == other""" raise NotImplementedError - def __ne__(self, other): - """self != other""" - # The default __ne__ doesn't negate __eq__ until 3.0. - return not (self == other) - Complex.register(complex) diff --git a/Darwin/lib/python3.4/opcode.py b/Darwin/lib/python3.5/opcode.py similarity index 91% rename from Darwin/lib/python3.4/opcode.py rename to Darwin/lib/python3.5/opcode.py index 0bd1ee6..4c826a7 100644 --- a/Darwin/lib/python3.4/opcode.py +++ b/Darwin/lib/python3.5/opcode.py @@ -70,6 +70,9 @@ def_op('UNARY_NOT', 12) def_op('UNARY_INVERT', 15) +def_op('BINARY_MATRIX_MULTIPLY', 16) +def_op('INPLACE_MATRIX_MULTIPLY', 17) + def_op('BINARY_POWER', 19) def_op('BINARY_MULTIPLY', 20) @@ -82,7 +85,10 @@ def_op('BINARY_TRUE_DIVIDE', 27) def_op('INPLACE_FLOOR_DIVIDE', 28) def_op('INPLACE_TRUE_DIVIDE', 29) -def_op('STORE_MAP', 54) +def_op('GET_AITER', 50) +def_op('GET_ANEXT', 51) +def_op('BEFORE_ASYNC_WITH', 52) + def_op('INPLACE_ADD', 55) def_op('INPLACE_SUBTRACT', 56) def_op('INPLACE_MULTIPLY', 57) @@ -97,10 +103,12 @@ def_op('BINARY_XOR', 65) def_op('BINARY_OR', 66) def_op('INPLACE_POWER', 67) def_op('GET_ITER', 68) +def_op('GET_YIELD_FROM_ITER', 69) def_op('PRINT_EXPR', 70) def_op('LOAD_BUILD_CLASS', 71) def_op('YIELD_FROM', 72) +def_op('GET_AWAITABLE', 73) def_op('INPLACE_LSHIFT', 75) def_op('INPLACE_RSHIFT', 76) @@ -108,7 +116,8 @@ def_op('INPLACE_AND', 77) def_op('INPLACE_XOR', 78) def_op('INPLACE_OR', 79) def_op('BREAK_LOOP', 80) -def_op('WITH_CLEANUP', 81) +def_op('WITH_CLEANUP_START', 81) +def_op('WITH_CLEANUP_FINISH', 82) def_op('RETURN_VALUE', 83) def_op('IMPORT_STAR', 84) @@ -194,7 +203,15 @@ def_op('MAP_ADD', 147) def_op('LOAD_CLASSDEREF', 148) hasfree.append(148) +jrel_op('SETUP_ASYNC_WITH', 154) + def_op('EXTENDED_ARG', 144) EXTENDED_ARG = 144 +def_op('BUILD_LIST_UNPACK', 149) +def_op('BUILD_MAP_UNPACK', 150) +def_op('BUILD_MAP_UNPACK_WITH_CALL', 151) +def_op('BUILD_TUPLE_UNPACK', 152) +def_op('BUILD_SET_UNPACK', 153) + del def_op, name_op, jrel_op, jabs_op diff --git a/Darwin/lib/python3.4/operator.py b/Darwin/lib/python3.5/operator.py similarity index 80% rename from Darwin/lib/python3.4/operator.py rename to Darwin/lib/python3.5/operator.py index 
b60349f..0e2e53e 100644 --- a/Darwin/lib/python3.4/operator.py +++ b/Darwin/lib/python3.5/operator.py @@ -12,12 +12,12 @@ This is the pure Python implementation of the module. __all__ = ['abs', 'add', 'and_', 'attrgetter', 'concat', 'contains', 'countOf', 'delitem', 'eq', 'floordiv', 'ge', 'getitem', 'gt', 'iadd', 'iand', - 'iconcat', 'ifloordiv', 'ilshift', 'imod', 'imul', 'index', - 'indexOf', 'inv', 'invert', 'ior', 'ipow', 'irshift', 'is_', - 'is_not', 'isub', 'itemgetter', 'itruediv', 'ixor', 'le', - 'length_hint', 'lshift', 'lt', 'methodcaller', 'mod', 'mul', 'ne', - 'neg', 'not_', 'or_', 'pos', 'pow', 'rshift', 'setitem', 'sub', - 'truediv', 'truth', 'xor'] + 'iconcat', 'ifloordiv', 'ilshift', 'imatmul', 'imod', 'imul', + 'index', 'indexOf', 'inv', 'invert', 'ior', 'ipow', 'irshift', + 'is_', 'is_not', 'isub', 'itemgetter', 'itruediv', 'ixor', 'le', + 'length_hint', 'lshift', 'lt', 'matmul', 'methodcaller', 'mod', + 'mul', 'ne', 'neg', 'not_', 'or_', 'pos', 'pow', 'rshift', + 'setitem', 'sub', 'truediv', 'truth', 'xor'] from builtins import abs as _abs @@ -105,6 +105,10 @@ def mul(a, b): "Same as a * b." return a * b +def matmul(a, b): + "Same as a @ b." + return a @ b + def neg(a): "Same as -a." return -a @@ -227,10 +231,13 @@ class attrgetter: After h = attrgetter('name.first', 'name.last'), the call h(r) returns (r.name.first, r.name.last). """ + __slots__ = ('_attrs', '_call') + def __init__(self, attr, *attrs): if not attrs: if not isinstance(attr, str): raise TypeError('attribute name must be a string') + self._attrs = (attr,) names = attr.split('.') def func(obj): for name in names: @@ -238,7 +245,8 @@ class attrgetter: return obj self._call = func else: - getters = tuple(map(attrgetter, (attr,) + attrs)) + self._attrs = (attr,) + attrs + getters = tuple(map(attrgetter, self._attrs)) def func(obj): return tuple(getter(obj) for getter in getters) self._call = func @@ -246,19 +254,30 @@ class attrgetter: def __call__(self, obj): return self._call(obj) + def __repr__(self): + return '%s.%s(%s)' % (self.__class__.__module__, + self.__class__.__qualname__, + ', '.join(map(repr, self._attrs))) + + def __reduce__(self): + return self.__class__, self._attrs + class itemgetter: """ Return a callable object that fetches the given item(s) from its operand. After f = itemgetter(2), the call f(r) returns r[2]. After g = itemgetter(2, 5, 3), the call g(r) returns (r[2], r[5], r[3]) """ + __slots__ = ('_items', '_call') + def __init__(self, item, *items): if not items: + self._items = (item,) def func(obj): return obj[item] self._call = func else: - items = (item,) + items + self._items = items = (item,) + items def func(obj): return tuple(obj[i] for i in items) self._call = func @@ -266,6 +285,14 @@ class itemgetter: def __call__(self, obj): return self._call(obj) + def __repr__(self): + return '%s.%s(%s)' % (self.__class__.__module__, + self.__class__.__name__, + ', '.join(map(repr, self._items))) + + def __reduce__(self): + return self.__class__, self._items + class methodcaller: """ Return a callable object that calls the given method on its operand. @@ -273,6 +300,7 @@ class methodcaller: After g = methodcaller('name', 'date', foo=1), the call g(r) returns r.name('date', foo=1). 
""" + __slots__ = ('_name', '_args', '_kwargs') def __init__(*args, **kwargs): if len(args) < 2: @@ -280,12 +308,30 @@ class methodcaller: raise TypeError(msg) self = args[0] self._name = args[1] + if not isinstance(self._name, str): + raise TypeError('method name must be a string') self._args = args[2:] self._kwargs = kwargs def __call__(self, obj): return getattr(obj, self._name)(*self._args, **self._kwargs) + def __repr__(self): + args = [repr(self._name)] + args.extend(map(repr, self._args)) + args.extend('%s=%r' % (k, v) for k, v in self._kwargs.items()) + return '%s.%s(%s)' % (self.__class__.__module__, + self.__class__.__name__, + ', '.join(args)) + + def __reduce__(self): + if not self._kwargs: + return self.__class__, (self._name,) + self._args + else: + from functools import partial + return partial(self.__class__, self._name, **self._kwargs), self._args + + # In-place Operations *********************************************************# def iadd(a, b): @@ -326,6 +372,11 @@ def imul(a, b): a *= b return a +def imatmul(a, b): + "Same as a @= b." + a @= b + return a + def ior(a, b): "Same as a |= b." a |= b @@ -383,6 +434,7 @@ __invert__ = invert __lshift__ = lshift __mod__ = mod __mul__ = mul +__matmul__ = matmul __neg__ = neg __or__ = or_ __pos__ = pos @@ -403,6 +455,7 @@ __ifloordiv__ = ifloordiv __ilshift__ = ilshift __imod__ = imod __imul__ = imul +__imatmul__ = imatmul __ior__ = ior __ipow__ = ipow __irshift__ = irshift diff --git a/Darwin/lib/python3.4/optparse.py b/Darwin/lib/python3.5/optparse.py similarity index 100% rename from Darwin/lib/python3.4/optparse.py rename to Darwin/lib/python3.5/optparse.py diff --git a/Darwin/lib/python3.4/os.py b/Darwin/lib/python3.5/os.py similarity index 92% rename from Darwin/lib/python3.4/os.py rename to Darwin/lib/python3.5/os.py index 8567f50..3d2c6d3 100644 --- a/Darwin/lib/python3.4/os.py +++ b/Darwin/lib/python3.5/os.py @@ -1,4 +1,4 @@ -r"""OS routines for Mac, NT, or Posix depending on what system we're on. +r"""OS routines for NT or Posix depending on what system we're on. This exports: - all functions from posix, nt or ce, e.g. unlink, stat, etc. @@ -61,6 +61,10 @@ if 'posix' in _names: except ImportError: pass + import posix + __all__.extend(_get_exports_list(posix)) + del posix + elif 'nt' in _names: name = 'nt' linesep = '\r\n' @@ -268,7 +272,7 @@ def renames(old, new): empty. Works like rename, except creation of any intermediate directories needed to make the new pathname good is attempted first. After the rename, directories corresponding to rightmost - path segments of the old name will be pruned way until either the + path segments of the old name will be pruned until either the whole path is consumed or a nonempty directory is found. Note: this function can fail with the new directory structure made @@ -312,13 +316,14 @@ def walk(top, topdown=True, onerror=None, followlinks=False): When topdown is true, the caller can modify the dirnames list in-place (e.g., via del or slice assignment), and walk will only recurse into the - subdirectories whose names remain in dirnames; this can be used to prune - the search, or to impose a specific order of visiting. Modifying - dirnames when topdown is false is ineffective, since the directories in - dirnames have already been generated by the time dirnames itself is - generated. + subdirectories whose names remain in dirnames; this can be used to prune the + search, or to impose a specific order of visiting. 
Modifying dirnames when + topdown is false is ineffective, since the directories in dirnames have + already been generated by the time dirnames itself is generated. No matter + the value of topdown, the list of subdirectories is retrieved before the + tuples for the directory and its subdirectories are generated. - By default errors from the os.listdir() call are ignored. If + By default errors from the os.scandir() call are ignored. If optional arg 'onerror' is specified, it should be a function; it will be called with one argument, an OSError instance. It can report the error to continue with the walk, or raise the exception @@ -344,9 +349,11 @@ def walk(top, topdown=True, onerror=None, followlinks=False): print("bytes in", len(files), "non-directory files") if 'CVS' in dirs: dirs.remove('CVS') # don't visit CVS directories + """ - islink, join, isdir = path.islink, path.join, path.isdir + dirs = [] + nondirs = [] # We may not have read permission for top, in which case we can't # get a list of the files the directory contains. os.walk @@ -354,28 +361,71 @@ def walk(top, topdown=True, onerror=None, followlinks=False): # minor reason when (say) a thousand readable directories are still # left to visit. That logic is copied here. try: - # Note that listdir is global in this module due + # Note that scandir is global in this module due # to earlier import-*. - names = listdir(top) - except OSError as err: + scandir_it = scandir(top) + except OSError as error: if onerror is not None: - onerror(err) + onerror(error) return - dirs, nondirs = [], [] - for name in names: - if isdir(join(top, name)): - dirs.append(name) - else: - nondirs.append(name) + while True: + try: + try: + entry = next(scandir_it) + except StopIteration: + break + except OSError as error: + if onerror is not None: + onerror(error) + return + try: + is_dir = entry.is_dir() + except OSError: + # If is_dir() raises an OSError, consider that the entry is not + # a directory, same behaviour than os.path.isdir(). + is_dir = False + + if is_dir: + dirs.append(entry.name) + else: + nondirs.append(entry.name) + + if not topdown and is_dir: + # Bottom-up: recurse into sub-directory, but exclude symlinks to + # directories if followlinks is False + if followlinks: + walk_into = True + else: + try: + is_symlink = entry.is_symlink() + except OSError: + # If is_symlink() raises an OSError, consider that the + # entry is not a symbolic link, same behaviour than + # os.path.islink(). + is_symlink = False + walk_into = not is_symlink + + if walk_into: + yield from walk(entry.path, topdown, onerror, followlinks) + + # Yield before recursion if going top down if topdown: yield top, dirs, nondirs - for name in dirs: - new_path = join(top, name) - if followlinks or not islink(new_path): - yield from walk(new_path, topdown, onerror, followlinks) - if not topdown: + + # Recurse into sub-directories + islink, join = path.islink, path.join + for name in dirs: + new_path = join(top, name) + # Issue #23605: os.path.islink() is used instead of caching + # entry.is_symlink() result during the loop on os.scandir() because + # the caller can replace the directory entry during the "yield" + # above. 
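# Illustrative sketch (not from the upstream files): the walk() rewrite above
# is built on os.scandir(), whose DirEntry objects can usually answer
# is_dir() / is_symlink() from data the directory read already returned,
# avoiding one stat() call per entry. The per-directory split it performs
# looks roughly like this (os.scandir() is new in Python 3.5):
import os

def split_entries(top):
    """Rough sketch of the dirs/nondirs split done by the new walk()."""
    dirs, nondirs = [], []
    for entry in os.scandir(top):
        try:
            is_dir = entry.is_dir()
        except OSError:                  # mirror walk(): treat errors as "not a dir"
            is_dir = False
        (dirs if is_dir else nondirs).append(entry.name)
    return dirs, nondirs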
+ if followlinks or not islink(new_path): + yield from walk(new_path, topdown, onerror, followlinks) + else: + # Yield after recursion if going bottom up yield top, dirs, nondirs __all__.append("walk") diff --git a/Darwin/lib/python3.4/pathlib.py b/Darwin/lib/python3.5/pathlib.py similarity index 88% rename from Darwin/lib/python3.4/pathlib.py rename to Darwin/lib/python3.5/pathlib.py index d3d1af8..01e66a0 100644 --- a/Darwin/lib/python3.4/pathlib.py +++ b/Darwin/lib/python3.5/pathlib.py @@ -8,23 +8,22 @@ import re import sys from collections import Sequence from contextlib import contextmanager -from errno import EINVAL, ENOENT +from errno import EINVAL, ENOENT, ENOTDIR from operator import attrgetter from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO from urllib.parse import quote_from_bytes as urlquote_from_bytes supports_symlinks = True -try: +if os.name == 'nt': import nt -except ImportError: - nt = None -else: if sys.getwindowsversion()[:2] >= (6, 0): from nt import _getfinalpathname else: supports_symlinks = False _getfinalpathname = None +else: + nt = None __all__ = [ @@ -74,6 +73,10 @@ class _Flavour(object): # parts. This makes the result of parsing e.g. # ("C:", "/", "a") reasonably intuitive. for part in it: + if not part: + continue + if altsep: + part = part.replace(altsep, sep) drv = self.splitroot(part)[0] if drv: break @@ -110,7 +113,7 @@ class _WindowsFlavour(_Flavour): has_drv = True pathmod = ntpath - is_supported = (nt is not None) + is_supported = (os.name == 'nt') drive_letters = ( set(chr(x) for x in range(ord('a'), ord('z') + 1)) | @@ -222,6 +225,36 @@ class _WindowsFlavour(_Flavour): # It's a path on a network drive => 'file://host/share/a/b' return 'file:' + urlquote_from_bytes(path.as_posix().encode('utf-8')) + def gethomedir(self, username): + if 'HOME' in os.environ: + userhome = os.environ['HOME'] + elif 'USERPROFILE' in os.environ: + userhome = os.environ['USERPROFILE'] + elif 'HOMEPATH' in os.environ: + try: + drv = os.environ['HOMEDRIVE'] + except KeyError: + drv = '' + userhome = drv + os.environ['HOMEPATH'] + else: + raise RuntimeError("Can't determine home directory") + + if username: + # Try to guess user home directory. By default all users + # directories are located in the same place and are named by + # corresponding usernames. If current user home directory points + # to nonstandard place, this guess is likely wrong. 
+ if os.environ['USERNAME'] != username: + drv, root, parts = self.parse_parts((userhome,)) + if parts[-1] != os.environ['USERNAME']: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + parts[-1] = username + if drv or root: + userhome = drv + root + self.join(parts[1:]) + else: + userhome = self.join(parts) + return userhome class _PosixFlavour(_Flavour): sep = '/' @@ -305,6 +338,21 @@ class _PosixFlavour(_Flavour): bpath = bytes(path) return 'file://' + urlquote_from_bytes(bpath) + def gethomedir(self, username): + if not username: + try: + return os.environ['HOME'] + except KeyError: + import pwd + return pwd.getpwuid(os.getuid()).pw_dir + else: + import pwd + try: + return pwd.getpwnam(username).pw_dir + except KeyError: + raise RuntimeError("Can't determine home directory " + "for %r" % username) + _windows_flavour = _WindowsFlavour() _posix_flavour = _PosixFlavour() @@ -666,9 +714,6 @@ class PurePath(object): return NotImplemented return self._cparts == other._cparts and self._flavour is other._flavour - def __ne__(self, other): - return not self == other - def __hash__(self): try: return self._hash @@ -749,17 +794,20 @@ class PurePath(object): """Return a new path with the file name changed.""" if not self.name: raise ValueError("%r has an empty name" % (self,)) + drv, root, parts = self._flavour.parse_parts((name,)) + if (not name or name[-1] in [self._flavour.sep, self._flavour.altsep] + or drv or root or len(parts) != 1): + raise ValueError("Invalid name %r" % (name)) return self._from_parsed_parts(self._drv, self._root, self._parts[:-1] + [name]) def with_suffix(self, suffix): """Return a new path with the file suffix changed (or added, if none).""" # XXX if suffix is None, should the current suffix be removed? - drv, root, parts = self._flavour.parse_parts((suffix,)) - if drv or root or len(parts) != 1: + f = self._flavour + if f.sep in suffix or f.altsep and f.altsep in suffix: raise ValueError("Invalid suffix %r" % (suffix)) - suffix = parts[0] - if not suffix.startswith('.'): + if suffix and not suffix.startswith('.') or suffix == '.': raise ValueError("Invalid suffix %r" % (suffix)) name = self.name if not name: @@ -961,6 +1009,24 @@ class Path(PurePath): """ return cls(os.getcwd()) + @classmethod + def home(cls): + """Return a new path pointing to the user's home directory (as + returned by os.path.expanduser('~')). + """ + return cls(cls()._flavour.gethomedir(None)) + + def samefile(self, other_path): + """Return whether `other_file` is the same or not as this file. + (as returned by os.path.samefile(file, other_file)). + """ + st = self.stat() + try: + other_st = other_path.stat() + except AttributeError: + other_st = os.stat(other_path) + return os.path.samestat(st, other_st) + def iterdir(self): """Iterate over the files in this directory. Does not yield any result for the special paths '.' and '..'. @@ -1069,6 +1135,39 @@ class Path(PurePath): return io.open(str(self), mode, buffering, encoding, errors, newline, opener=self._opener) + def read_bytes(self): + """ + Open the file in bytes mode, read it, and close the file. + """ + with self.open(mode='rb') as f: + return f.read() + + def read_text(self, encoding=None, errors=None): + """ + Open the file in text mode, read it, and close the file. + """ + with self.open(mode='r', encoding=encoding, errors=errors) as f: + return f.read() + + def write_bytes(self, data): + """ + Open the file in bytes mode, write to it, and close the file. 
+ """ + # type-check for the buffer interface before truncating the file + view = memoryview(data) + with self.open(mode='wb') as f: + return f.write(view) + + def write_text(self, data, encoding=None, errors=None): + """ + Open the file in text mode, write to it, and close the file. + """ + if not isinstance(data, str): + raise TypeError('data must be str, not %s' % + data.__class__.__name__) + with self.open(mode='w', encoding=encoding, errors=errors) as f: + return f.write(data) + def touch(self, mode=0o666, exist_ok=True): """ Create this file with the given access mode, if it doesn't exist. @@ -1092,14 +1191,21 @@ class Path(PurePath): fd = self._raw_open(flags, mode) os.close(fd) - def mkdir(self, mode=0o777, parents=False): + def mkdir(self, mode=0o777, parents=False, exist_ok=False): if self._closed: self._raise_closed() if not parents: - self._accessor.mkdir(self, mode) + try: + self._accessor.mkdir(self, mode) + except FileExistsError: + if not exist_ok or not self.is_dir(): + raise else: try: self._accessor.mkdir(self, mode) + except FileExistsError: + if not exist_ok or not self.is_dir(): + raise except OSError as e: if e.errno != ENOENT: raise @@ -1184,7 +1290,7 @@ class Path(PurePath): try: self.stat() except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise return False return True @@ -1196,7 +1302,7 @@ class Path(PurePath): try: return S_ISDIR(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) @@ -1210,7 +1316,7 @@ class Path(PurePath): try: return S_ISREG(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) @@ -1223,7 +1329,7 @@ class Path(PurePath): try: return S_ISLNK(self.lstat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist return False @@ -1235,7 +1341,7 @@ class Path(PurePath): try: return S_ISBLK(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) @@ -1248,7 +1354,7 @@ class Path(PurePath): try: return S_ISCHR(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) @@ -1261,7 +1367,7 @@ class Path(PurePath): try: return S_ISFIFO(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) @@ -1274,12 +1380,23 @@ class Path(PurePath): try: return S_ISSOCK(self.stat().st_mode) except OSError as e: - if e.errno != ENOENT: + if e.errno not in (ENOENT, ENOTDIR): raise # Path doesn't exist or is a broken symlink # (see https://bitbucket.org/pitrou/pathlib/issue/12/) return False + def expanduser(self): + """ Return a new path with expanded ~ and ~user constructs + (as returned by os.path.expanduser) + """ + if (not (self._drv or self._root) and + self._parts and self._parts[0][:1] == '~'): + homedir = self._flavour.gethomedir(self._parts[0][1:]) + return self._from_parts([homedir] + self._parts[1:]) + + 
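Taken together, the 3.5 additions in this file (Path.home(), mkdir(exist_ok=...), write_text()/read_text(), expanduser(), samefile()) make small file chores one-liners; a usage sketch, where the '.myapp' directory is a hypothetical example:

from pathlib import Path

cfg = Path.home() / '.myapp' / 'config.txt'      # hypothetical app directory
cfg.parent.mkdir(parents=True, exist_ok=True)    # no error if it already exists
cfg.write_text('answer = 42\n')                  # returns the character count (12)
print(cfg.read_text())                           # -> answer = 42
print(Path('~/.myapp').expanduser().samefile(cfg.parent))   # -> True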
return self + class PosixPath(Path, PurePosixPath): __slots__ = () diff --git a/Darwin/lib/python3.4/pdb.py b/Darwin/lib/python3.5/pdb.py similarity index 99% rename from Darwin/lib/python3.4/pdb.py rename to Darwin/lib/python3.5/pdb.py index dd7ceb8..cf2edbf 100755 --- a/Darwin/lib/python3.4/pdb.py +++ b/Darwin/lib/python3.5/pdb.py @@ -301,7 +301,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): # An 'Internal StopIteration' exception is an exception debug event # issued by the interpreter when handling a subgenerator run with # 'yield from' or a generator controled by a for loop. No exception has - # actually occured in this case. The debugger uses this debug event to + # actually occurred in this case. The debugger uses this debug event to # stop when the debuggee is returning from such generators. prefix = 'Internal ' if (not exc_traceback and exc_type is StopIteration) else '' @@ -673,7 +673,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): # now set the break point err = self.set_break(filename, line, temporary, cond, funcname) if err: - self.error(err, file=self.stdout) + self.error(err) else: bp = self.get_breaks(filename, line)[-1] self.message("Breakpoint %d at %s:%d" % @@ -1316,7 +1316,7 @@ class Pdb(bdb.Bdb, cmd.Cmd): return # Is it a class? if value.__class__ is type: - self.message('Class %s.%s' % (value.__module__, value.__name__)) + self.message('Class %s.%s' % (value.__module__, value.__qualname__)) return # None of the above... self.message(type(value)) diff --git a/Darwin/lib/python3.4/pickle.py b/Darwin/lib/python3.5/pickle.py similarity index 98% rename from Darwin/lib/python3.4/pickle.py rename to Darwin/lib/python3.5/pickle.py index 3fc2596..6c26c5e 100644 --- a/Darwin/lib/python3.4/pickle.py +++ b/Darwin/lib/python3.5/pickle.py @@ -242,7 +242,7 @@ class _Unframer: if not data: self.current_frame = None return self.file_readline() - if data[-1] != b'\n': + if data[-1] != b'\n'[0]: raise UnpicklingError( "pickle exhausted before end of frame") return data @@ -258,33 +258,31 @@ class _Unframer: # Tools used for pickling. -def _getattribute(obj, name, allow_qualname=False): - dotted_path = name.split(".") - if not allow_qualname and len(dotted_path) > 1: - raise AttributeError("Can't get qualified attribute {!r} on {!r}; " + - "use protocols >= 4 to enable support" - .format(name, obj)) - for subpath in dotted_path: +def _getattribute(obj, name): + for subpath in name.split('.'): if subpath == '': raise AttributeError("Can't get local attribute {!r} on {!r}" .format(name, obj)) try: + parent = obj obj = getattr(obj, subpath) except AttributeError: raise AttributeError("Can't get attribute {!r} on {!r}" .format(name, obj)) - return obj + return obj, parent -def whichmodule(obj, name, allow_qualname=False): +def whichmodule(obj, name): """Find the module an object belong to.""" module_name = getattr(obj, '__module__', None) if module_name is not None: return module_name - for module_name, module in sys.modules.items(): + # Protect the iteration by using a list copy of sys.modules against dynamic + # modules that trigger imports of other modules upon calls to getattr. 
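A toy illustration of the failure mode described in the comment above, with a plain dict standing in for sys.modules: mutating the mapping while iterating it raises RuntimeError, which is why the loop that follows iterates over a list copy.

modules = {'a': object(), 'b': object()}

try:
    for name in modules:                     # iterate the live mapping
        modules['lazy_' + name] = object()   # simulated import during getattr()
except RuntimeError as exc:
    print(exc)                               # dictionary changed size during iteration

for name in list(modules):                   # snapshot, as the patched loop does
    modules['again_' + name] = object()      # safe: the snapshot does not change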
+ for module_name, module in list(sys.modules.items()): if module_name == '__main__' or module is None: continue try: - if _getattribute(module, name, allow_qualname) is obj: + if _getattribute(module, name)[0] is obj: return module_name except AttributeError: pass @@ -897,16 +895,16 @@ class _Pickler: write = self.write memo = self.memo - if name is None and self.proto >= 4: + if name is None: name = getattr(obj, '__qualname__', None) if name is None: name = obj.__name__ - module_name = whichmodule(obj, name, allow_qualname=self.proto >= 4) + module_name = whichmodule(obj, name) try: __import__(module_name, level=0) module = sys.modules[module_name] - obj2 = _getattribute(module, name, allow_qualname=self.proto >= 4) + obj2, parent = _getattribute(module, name) except (ImportError, KeyError, AttributeError): raise PicklingError( "Can't pickle %r: it's not found as %s.%s" % @@ -928,11 +926,16 @@ class _Pickler: else: write(EXT4 + pack("= 3. if self.proto >= 4: self.save(module_name) self.save(name) write(STACK_GLOBAL) + elif parent is not module: + self.save_reduce(getattr, (parent, lastname)) elif self.proto >= 3: write(GLOBAL + bytes(module_name, "utf-8") + b'\n' + bytes(name, "utf-8") + b'\n') @@ -942,7 +945,7 @@ class _Pickler: r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING if (module_name, name) in r_name_mapping: module_name, name = r_name_mapping[(module_name, name)] - if module_name in r_import_mapping: + elif module_name in r_import_mapping: module_name = r_import_mapping[module_name] try: write(GLOBAL + bytes(module_name, "ascii") + b'\n' + @@ -1368,11 +1371,13 @@ class _Unpickler: if self.proto < 3 and self.fix_imports: if (module, name) in _compat_pickle.NAME_MAPPING: module, name = _compat_pickle.NAME_MAPPING[(module, name)] - if module in _compat_pickle.IMPORT_MAPPING: + elif module in _compat_pickle.IMPORT_MAPPING: module = _compat_pickle.IMPORT_MAPPING[module] __import__(module, level=0) - return _getattribute(sys.modules[module], name, - allow_qualname=self.proto >= 4) + if self.proto >= 4: + return _getattribute(sys.modules[module], name)[0] + else: + return getattr(sys.modules[module], name) def load_reduce(self): stack = self.stack diff --git a/Darwin/lib/python3.4/pickletools.py b/Darwin/lib/python3.5/pickletools.py similarity index 98% rename from Darwin/lib/python3.4/pickletools.py rename to Darwin/lib/python3.5/pickletools.py index 71c2aa1..6b86723 100644 --- a/Darwin/lib/python3.4/pickletools.py +++ b/Darwin/lib/python3.5/pickletools.py @@ -2282,40 +2282,61 @@ def genops(pickle): def optimize(p): 'Optimize a pickle string by removing unused PUT opcodes' - not_a_put = object() - gets = { not_a_put } # set of args used by a GET opcode - opcodes = [] # (startpos, stoppos, putid) + put = 'PUT' + get = 'GET' + oldids = set() # set of all PUT ids + newids = {} # set of ids used by a GET opcode + opcodes = [] # (op, idx) or (pos, end_pos) proto = 0 + protoheader = b'' for opcode, arg, pos, end_pos in _genops(p, yield_end_pos=True): if 'PUT' in opcode.name: - opcodes.append((pos, end_pos, arg)) + oldids.add(arg) + opcodes.append((put, arg)) + elif opcode.name == 'MEMOIZE': + idx = len(oldids) + oldids.add(idx) + opcodes.append((put, idx)) elif 'FRAME' in opcode.name: pass - else: - if 'GET' in opcode.name: - gets.add(arg) - elif opcode.name == 'PROTO': - assert pos == 0, pos + elif 'GET' in opcode.name: + if opcode.proto > proto: + proto = opcode.proto + newids[arg] = None + opcodes.append((get, arg)) + elif opcode.name == 'PROTO': + if arg > proto: proto = arg - 
opcodes.append((pos, end_pos, not_a_put)) - prevpos, prevarg = pos, None + if pos == 0: + protoheader = p[pos: end_pos] + else: + opcodes.append((pos, end_pos)) + else: + opcodes.append((pos, end_pos)) + del oldids # Copy the opcodes except for PUTS without a corresponding GET out = io.BytesIO() - opcodes = iter(opcodes) - if proto >= 2: - # Write the PROTO header before any framing - start, stop, _ = next(opcodes) - out.write(p[start:stop]) - buf = pickle._Framer(out.write) + # Write the PROTO header before any framing + out.write(protoheader) + pickler = pickle._Pickler(out, proto) if proto >= 4: - buf.start_framing() - for start, stop, putid in opcodes: - if putid in gets: - buf.commit_frame() - buf.write(p[start:stop]) - if proto >= 4: - buf.end_framing() + pickler.framer.start_framing() + idx = 0 + for op, arg in opcodes: + if op is put: + if arg not in newids: + continue + data = pickler.put(idx) + newids[arg] = idx + idx += 1 + elif op is get: + data = pickler.get(newids[arg]) + else: + data = p[op:arg] + pickler.framer.commit_frame() + pickler.write(data) + pickler.framer.end_framing() return out.getvalue() ############################################################################## diff --git a/Darwin/lib/python3.4/pipes.py b/Darwin/lib/python3.5/pipes.py similarity index 100% rename from Darwin/lib/python3.4/pipes.py rename to Darwin/lib/python3.5/pipes.py diff --git a/Darwin/lib/python3.4/pkgutil.py b/Darwin/lib/python3.5/pkgutil.py similarity index 99% rename from Darwin/lib/python3.4/pkgutil.py rename to Darwin/lib/python3.5/pkgutil.py index e42b6eb..fc4a074 100644 --- a/Darwin/lib/python3.4/pkgutil.py +++ b/Darwin/lib/python3.5/pkgutil.py @@ -456,6 +456,8 @@ def get_loader(module_or_name): """ if module_or_name in sys.modules: module_or_name = sys.modules[module_or_name] + if module_or_name is None: + return None if isinstance(module_or_name, ModuleType): module = module_or_name loader = getattr(module, '__loader__', None) @@ -487,7 +489,7 @@ def find_loader(fullname): # pkgutil previously raised ImportError msg = "Error while finding loader for {!r} ({}: {})" raise ImportError(msg.format(fullname, type(ex), ex)) from ex - return spec.loader + return spec.loader if spec is not None else None def extend_path(path, name): @@ -614,7 +616,7 @@ def get_data(package, resource): return None # XXX needs test mod = (sys.modules.get(package) or - importlib._bootstrap._SpecMethods(spec).load()) + importlib._bootstrap._load(spec)) if mod is None or not hasattr(mod, '__file__'): return None diff --git a/Darwin/lib/python3.4/plat-darwin/IN.py b/Darwin/lib/python3.5/plat-darwin/IN.py similarity index 100% rename from Darwin/lib/python3.4/plat-darwin/IN.py rename to Darwin/lib/python3.5/plat-darwin/IN.py diff --git a/Darwin/lib/python3.4/plat-darwin/regen b/Darwin/lib/python3.5/plat-darwin/regen similarity index 100% rename from Darwin/lib/python3.4/plat-darwin/regen rename to Darwin/lib/python3.5/plat-darwin/regen diff --git a/Darwin/lib/python3.4/platform.py b/Darwin/lib/python3.5/platform.py similarity index 95% rename from Darwin/lib/python3.4/platform.py rename to Darwin/lib/python3.5/platform.py index c4ffe95..9096696 100755 --- a/Darwin/lib/python3.4/platform.py +++ b/Darwin/lib/python3.5/platform.py @@ -114,6 +114,8 @@ __version__ = '1.0.7' import collections import sys, os, re, subprocess +import warnings + ### Globals & Constants # Determine the platform's /dev/null device @@ -163,40 +165,39 @@ def libc_ver(executable=sys.executable, lib='', version='', # here to work around 
problems with Cygwin not being # able to open symlinks for reading executable = os.path.realpath(executable) - f = open(executable, 'rb') - binary = f.read(chunksize) - pos = 0 - while 1: - if b'libc' in binary or b'GLIBC' in binary: - m = _libc_search.search(binary, pos) - else: - m = None - if not m: - binary = f.read(chunksize) - if not binary: - break - pos = 0 - continue - libcinit, glibc, glibcversion, so, threads, soversion = [ - s.decode('latin1') if s is not None else s - for s in m.groups()] - if libcinit and not lib: - lib = 'libc' - elif glibc: - if lib != 'glibc': - lib = 'glibc' - version = glibcversion - elif glibcversion > version: - version = glibcversion - elif so: - if lib != 'glibc': + with open(executable, 'rb') as f: + binary = f.read(chunksize) + pos = 0 + while 1: + if b'libc' in binary or b'GLIBC' in binary: + m = _libc_search.search(binary, pos) + else: + m = None + if not m: + binary = f.read(chunksize) + if not binary: + break + pos = 0 + continue + libcinit, glibc, glibcversion, so, threads, soversion = [ + s.decode('latin1') if s is not None else s + for s in m.groups()] + if libcinit and not lib: lib = 'libc' - if soversion and soversion > version: - version = soversion - if threads and version[-len(threads):] != threads: - version = version + threads - pos = m.end() - f.close() + elif glibc: + if lib != 'glibc': + lib = 'glibc' + version = glibcversion + elif glibcversion > version: + version = glibcversion + elif so: + if lib != 'glibc': + lib = 'libc' + if soversion and soversion > version: + version = soversion + if threads and version[-len(threads):] != threads: + version = version + threads + pos = m.end() return lib, version def _dist_try_harder(distname, version, id): @@ -298,6 +299,15 @@ def linux_distribution(distname='', version='', id='', supported_dists=_supported_dists, full_distribution_name=1): + import warnings + warnings.warn("dist() and linux_distribution() functions are deprecated " + "in Python 3.5 and will be removed in Python 3.7", + PendingDeprecationWarning, stacklevel=2) + return _linux_distribution(distname, version, id, supported_dists, + full_distribution_name) + +def _linux_distribution(distname, version, id, supported_dists, + full_distribution_name): """ Tries to determine the name of the Linux OS distribution name. @@ -364,9 +374,13 @@ def dist(distname='', version='', id='', args given as parameters. 
""" - return linux_distribution(distname, version, id, - supported_dists=supported_dists, - full_distribution_name=0) + import warnings + warnings.warn("dist() and linux_distribution() functions are deprecated " + "in Python 3.5 and will be removed in Python 3.7", + PendingDeprecationWarning, stacklevel=2) + return _linux_distribution(distname, version, id, + supported_dists=supported_dists, + full_distribution_name=0) def popen(cmd, mode='r', bufsize=-1): @@ -426,7 +440,7 @@ def _syscmd_ver(system='', release='', version='', # Try some common cmd strings for cmd in ('ver', 'command /c ver', 'cmd /c ver'): try: - pipe = popen(cmd) + pipe = os.popen(cmd) info = pipe.read() if pipe.close(): raise OSError('command failed') @@ -1426,7 +1440,15 @@ def platform(aliased=0, terse=0): elif system in ('Linux',): # Linux based systems - distname, distversion, distid = dist('') + with warnings.catch_warnings(): + # see issue #1322 for more information + warnings.filterwarnings( + 'ignore', + 'dist\(\) and linux_distribution\(\) ' + 'functions are deprecated .*', + PendingDeprecationWarning, + ) + distname, distversion, distid = dist('') if distname and not terse: platform = _platform(system, release, machine, processor, 'with', diff --git a/Darwin/lib/python3.4/plistlib.py b/Darwin/lib/python3.5/plistlib.py similarity index 98% rename from Darwin/lib/python3.4/plistlib.py rename to Darwin/lib/python3.5/plistlib.py index dcb0f9c..b9946fd 100644 --- a/Darwin/lib/python3.4/plistlib.py +++ b/Darwin/lib/python3.5/plistlib.py @@ -619,10 +619,7 @@ class _BinaryPlistParser: offset_table_offset ) = struct.unpack('>6xBBQQQ', trailer) self._fp.seek(offset_table_offset) - offset_format = '>' + _BINARY_FORMAT[offset_size] * num_objects - self._ref_format = _BINARY_FORMAT[self._ref_size] - self._object_offsets = struct.unpack( - offset_format, self._fp.read(offset_size * num_objects)) + self._object_offsets = self._read_ints(num_objects, offset_size) return self._read_object(self._object_offsets[top_object]) except (OSError, IndexError, struct.error): @@ -638,9 +635,16 @@ class _BinaryPlistParser: return tokenL + def _read_ints(self, n, size): + data = self._fp.read(size * n) + if size in _BINARY_FORMAT: + return struct.unpack('>' + _BINARY_FORMAT[size] * n, data) + else: + return tuple(int.from_bytes(data[i: i + size], 'big') + for i in range(0, size * n, size)) + def _read_refs(self, n): - return struct.unpack( - '>' + self._ref_format * n, self._fp.read(n * self._ref_size)) + return self._read_ints(n, self._ref_size) def _read_object(self, offset): """ @@ -980,18 +984,16 @@ def load(fp, *, fmt=None, use_builtin_types=True, dict_type=dict): fp.seek(0) for info in _FORMATS.values(): if info['detect'](header): - p = info['parser']( - use_builtin_types=use_builtin_types, - dict_type=dict_type, - ) + P = info['parser'] break else: raise InvalidFileException() else: - p = _FORMATS[fmt]['parser'](use_builtin_types=use_builtin_types) + P = _FORMATS[fmt]['parser'] + p = P(use_builtin_types=use_builtin_types, dict_type=dict_type) return p.parse(fp) diff --git a/Darwin/lib/python3.4/poplib.py b/Darwin/lib/python3.5/poplib.py similarity index 93% rename from Darwin/lib/python3.4/poplib.py rename to Darwin/lib/python3.5/poplib.py index 23a3517..f672390 100644 --- a/Darwin/lib/python3.4/poplib.py +++ b/Darwin/lib/python3.5/poplib.py @@ -41,7 +41,7 @@ LF = b'\n' CRLF = CR+LF # maximal line length when calling readline(). This is to prevent -# reading arbitrary lenght lines. 
RFC 1939 limits POP3 line length to +# reading arbitrary length lines. RFC 1939 limits POP3 line length to # 512 characters, including CRLF. We have selected 2048 just to be on # the safe side. _MAXLINE = 2048 @@ -71,6 +71,7 @@ class POP3: UIDL [msg] uidl(msg = None) CAPA capa() STLS stls() + UTF8 utf8() Raises one exception: 'error_proto'. @@ -136,7 +137,7 @@ class POP3: # so only possibilities are ...LF, ...CRLF, CR...LF if line[-2:] == CRLF: return line[:-2], octets - if line[0] == CR: + if line[:1] == CR: return line[1:-1], octets return line[:-1], octets @@ -276,18 +277,23 @@ class POP3: def close(self): """Close the connection without assuming anything about it.""" - if self.file is not None: - self.file.close() - if self.sock is not None: - try: - self.sock.shutdown(socket.SHUT_RDWR) - except OSError as e: - # The server might already have closed the connection - if e.errno != errno.ENOTCONN: - raise - finally: - self.sock.close() - self.file = self.sock = None + try: + file = self.file + self.file = None + if file is not None: + file.close() + finally: + sock = self.sock + self.sock = None + if sock is not None: + try: + sock.shutdown(socket.SHUT_RDWR) + except OSError as e: + # The server might already have closed the connection + if e.errno != errno.ENOTCONN: + raise + finally: + sock.close() #__del__ = quit @@ -343,6 +349,12 @@ class POP3: return self._longcmd('UIDL') + def utf8(self): + """Try to enter UTF-8 mode (see RFC 6856). Returns server response. + """ + return self._shortcmd('UTF8') + + def capa(self): """Return server capabilities (RFC 2449) as a dictionary >>> c=poplib.POP3('localhost') @@ -387,9 +399,8 @@ class POP3: if context is None: context = ssl._create_stdlib_context() resp = self._shortcmd('STLS') - server_hostname = self.host if ssl.HAS_SNI else None self.sock = context.wrap_socket(self.sock, - server_hostname=server_hostname) + server_hostname=self.host) self.file = self.sock.makefile('rb') self._tls_established = True return resp @@ -430,9 +441,8 @@ if HAVE_SSL: def _create_socket(self, timeout): sock = POP3._create_socket(self, timeout) - server_hostname = self.host if ssl.HAS_SNI else None sock = self.context.wrap_socket(sock, - server_hostname=server_hostname) + server_hostname=self.host) return sock def stls(self, keyfile=None, certfile=None, context=None): diff --git a/Darwin/lib/python3.4/posixpath.py b/Darwin/lib/python3.5/posixpath.py similarity index 86% rename from Darwin/lib/python3.4/posixpath.py rename to Darwin/lib/python3.5/posixpath.py index 3e13239..09b8897 100644 --- a/Darwin/lib/python3.4/posixpath.py +++ b/Darwin/lib/python3.5/posixpath.py @@ -22,7 +22,8 @@ __all__ = ["normcase","isabs","join","splitdrive","split","splitext", "ismount", "expanduser","expandvars","normpath","abspath", "samefile","sameopenfile","samestat", "curdir","pardir","sep","pathsep","defpath","altsep","extsep", - "devnull","realpath","supports_unicode_filenames","relpath"] + "devnull","realpath","supports_unicode_filenames","relpath", + "commonpath"] # Strings representing various path-related bits and pieces. # These are primarily for export; internally, they are hardcoded. @@ -48,7 +49,6 @@ def _get_sep(path): def normcase(s): """Normalize case of pathname. Has no effect under Posix""" - # TODO: on Mac OS X, this should really return s.lower(). 
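commonpath(), newly added to __all__ above and implemented further down in this file, returns the longest common sub-path, unlike commonprefix(), which compares character by character; a quick sketch (requires Python 3.5):

import posixpath

paths = ['/usr/lib/python3.5/os.py',
         '/usr/lib/python3.5/re.py',
         '/usr/local/bin/python3']
print(posixpath.commonpath(paths))     # -> /usr
print(posixpath.commonprefix(paths))   # -> /usr/l  (not a real path)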
if not isinstance(s, (bytes, str)): raise TypeError("normcase() argument must be str or bytes, " "not '{}'".format(s.__class__.__name__)) @@ -76,6 +76,8 @@ def join(a, *p): sep = _get_sep(a) path = a try: + if not p: + path[:0] + sep #23780: Ensure compatible data type even if p is null. for b in p: if b.startswith(sep): path = b @@ -83,13 +85,8 @@ def join(a, *p): path += b else: path += sep + b - except TypeError: - valid_types = all(isinstance(s, (str, bytes, bytearray)) - for s in (a, ) + p) - if valid_types: - # Must have a mixture of text and binary data - raise TypeError("Can't mix strings and bytes in path " - "components.") from None + except (TypeError, AttributeError, BytesWarning): + genericpath._check_arg_types('join', a, *p) raise return path @@ -448,13 +445,58 @@ def relpath(path, start=None): if start is None: start = curdir - start_list = [x for x in abspath(start).split(sep) if x] - path_list = [x for x in abspath(path).split(sep) if x] + try: + start_list = [x for x in abspath(start).split(sep) if x] + path_list = [x for x in abspath(path).split(sep) if x] + # Work out how much of the filepath is shared by start and path. + i = len(commonprefix([start_list, path_list])) - # Work out how much of the filepath is shared by start and path. - i = len(commonprefix([start_list, path_list])) + rel_list = [pardir] * (len(start_list)-i) + path_list[i:] + if not rel_list: + return curdir + return join(*rel_list) + except (TypeError, AttributeError, BytesWarning, DeprecationWarning): + genericpath._check_arg_types('relpath', path, start) + raise - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) + +# Return the longest common sub-path of the sequence of paths given as input. +# The paths are not normalized before comparing them (this is the +# responsibility of the caller). Any trailing separator is stripped from the +# returned path. + +def commonpath(paths): + """Given a sequence of path names, returns the longest common sub-path.""" + + if not paths: + raise ValueError('commonpath() arg is an empty sequence') + + if isinstance(paths[0], bytes): + sep = b'/' + curdir = b'.' + else: + sep = '/' + curdir = '.' + + try: + split_paths = [path.split(sep) for path in paths] + + try: + isabs, = set(p[:1] == sep for p in paths) + except ValueError: + raise ValueError("Can't mix absolute and relative paths") from None + + split_paths = [[c for c in s if c and c != curdir] for s in split_paths] + s1 = min(split_paths) + s2 = max(split_paths) + common = s1 + for i, c in enumerate(s1): + if c != s2[i]: + common = s1[:i] + break + + prefix = sep if isabs else sep[:0] + return prefix + sep.join(common) + except (TypeError, AttributeError): + genericpath._check_arg_types('commonpath', *paths) + raise diff --git a/Darwin/lib/python3.5/pprint.py b/Darwin/lib/python3.5/pprint.py new file mode 100644 index 0000000..87649b4 --- /dev/null +++ b/Darwin/lib/python3.5/pprint.py @@ -0,0 +1,597 @@ +# Author: Fred L. Drake, Jr. +# fdrake@acm.org +# +# This is a simple little module I wrote to make life easier. I didn't +# see anything quite like it in the library, though I may have overlooked +# something. I wrote this when I was trying to read some heavily nested +# tuples with fairly non-descriptive content. This is modeled very much +# after Lisp/Scheme - style pretty-printing of lists. If you find it +# useful, thank small children who sleep at night. + +"""Support to pretty-print lists, tuples, & dictionaries recursively. 
+ +Very simple, but useful, especially in debugging data structures. + +Classes +------- + +PrettyPrinter() + Handle pretty-printing operations onto a stream using a configured + set of formatting parameters. + +Functions +--------- + +pformat() + Format a Python object into a pretty-printed representation. + +pprint() + Pretty-print a Python object to a stream [default is sys.stdout]. + +saferepr() + Generate a 'standard' repr()-like value, but protect against recursive + data structures. + +""" + +import collections as _collections +import re +import sys as _sys +import types as _types +from io import StringIO as _StringIO + +__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr", + "PrettyPrinter"] + + +def pprint(object, stream=None, indent=1, width=80, depth=None, *, + compact=False): + """Pretty-print a Python object to a stream [default is sys.stdout].""" + printer = PrettyPrinter( + stream=stream, indent=indent, width=width, depth=depth, + compact=compact) + printer.pprint(object) + +def pformat(object, indent=1, width=80, depth=None, *, compact=False): + """Format a Python object into a pretty-printed representation.""" + return PrettyPrinter(indent=indent, width=width, depth=depth, + compact=compact).pformat(object) + +def saferepr(object): + """Version of repr() which can handle recursive data structures.""" + return _safe_repr(object, {}, None, 0)[0] + +def isreadable(object): + """Determine if saferepr(object) is readable by eval().""" + return _safe_repr(object, {}, None, 0)[1] + +def isrecursive(object): + """Determine if object requires a recursive representation.""" + return _safe_repr(object, {}, None, 0)[2] + +class _safe_key: + """Helper function for key functions when sorting unorderable objects. + + The wrapped-object will fallback to an Py2.x style comparison for + unorderable types (sorting first comparing the type name and then by + the obj ids). Does not work recursively, so dict.items() must have + _safe_key applied to both the key and the value. + + """ + + __slots__ = ['obj'] + + def __init__(self, obj): + self.obj = obj + + def __lt__(self, other): + try: + return self.obj < other.obj + except TypeError: + return ((str(type(self.obj)), id(self.obj)) < \ + (str(type(other.obj)), id(other.obj))) + +def _safe_tuple(t): + "Helper function for comparing 2-tuples" + return _safe_key(t[0]), _safe_key(t[1]) + +class PrettyPrinter: + def __init__(self, indent=1, width=80, depth=None, stream=None, *, + compact=False): + """Handle pretty printing operations onto a stream using a set of + configured parameters. + + indent + Number of spaces to indent for each level of nesting. + + width + Attempted maximum number of columns in the output. + + depth + The maximum depth to print out nested structures. + + stream + The desired output stream. If omitted (or false), the standard + output stream available at construction will be used. + + compact + If true, several items will be combined in one line. 
+ + """ + indent = int(indent) + width = int(width) + if indent < 0: + raise ValueError('indent must be >= 0') + if depth is not None and depth <= 0: + raise ValueError('depth must be > 0') + if not width: + raise ValueError('width must be != 0') + self._depth = depth + self._indent_per_level = indent + self._width = width + if stream is not None: + self._stream = stream + else: + self._stream = _sys.stdout + self._compact = bool(compact) + + def pprint(self, object): + self._format(object, self._stream, 0, 0, {}, 0) + self._stream.write("\n") + + def pformat(self, object): + sio = _StringIO() + self._format(object, sio, 0, 0, {}, 0) + return sio.getvalue() + + def isrecursive(self, object): + return self.format(object, {}, 0, 0)[2] + + def isreadable(self, object): + s, readable, recursive = self.format(object, {}, 0, 0) + return readable and not recursive + + def _format(self, object, stream, indent, allowance, context, level): + objid = id(object) + if objid in context: + stream.write(_recursion(object)) + self._recursive = True + self._readable = False + return + rep = self._repr(object, context, level) + max_width = self._width - indent - allowance + if len(rep) > max_width: + p = self._dispatch.get(type(object).__repr__, None) + if p is not None: + context[objid] = 1 + p(self, object, stream, indent, allowance, context, level + 1) + del context[objid] + return + elif isinstance(object, dict): + context[objid] = 1 + self._pprint_dict(object, stream, indent, allowance, + context, level + 1) + del context[objid] + return + stream.write(rep) + + _dispatch = {} + + def _pprint_dict(self, object, stream, indent, allowance, context, level): + write = stream.write + write('{') + if self._indent_per_level > 1: + write((self._indent_per_level - 1) * ' ') + length = len(object) + if length: + items = sorted(object.items(), key=_safe_tuple) + self._format_dict_items(items, stream, indent, allowance + 1, + context, level) + write('}') + + _dispatch[dict.__repr__] = _pprint_dict + + def _pprint_ordered_dict(self, object, stream, indent, allowance, context, level): + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + '(') + self._format(list(object.items()), stream, + indent + len(cls.__name__) + 1, allowance + 1, + context, level) + stream.write(')') + + _dispatch[_collections.OrderedDict.__repr__] = _pprint_ordered_dict + + def _pprint_list(self, object, stream, indent, allowance, context, level): + stream.write('[') + self._format_items(object, stream, indent, allowance + 1, + context, level) + stream.write(']') + + _dispatch[list.__repr__] = _pprint_list + + def _pprint_tuple(self, object, stream, indent, allowance, context, level): + stream.write('(') + endchar = ',)' if len(object) == 1 else ')' + self._format_items(object, stream, indent, allowance + len(endchar), + context, level) + stream.write(endchar) + + _dispatch[tuple.__repr__] = _pprint_tuple + + def _pprint_set(self, object, stream, indent, allowance, context, level): + if not len(object): + stream.write(repr(object)) + return + typ = object.__class__ + if typ is set: + stream.write('{') + endchar = '}' + else: + stream.write(typ.__name__ + '({') + endchar = '})' + indent += len(typ.__name__) + 1 + object = sorted(object, key=_safe_key) + self._format_items(object, stream, indent, allowance + len(endchar), + context, level) + stream.write(endchar) + + _dispatch[set.__repr__] = _pprint_set + _dispatch[frozenset.__repr__] = _pprint_set + + def _pprint_str(self, object, 
stream, indent, allowance, context, level): + write = stream.write + if not len(object): + write(repr(object)) + return + chunks = [] + lines = object.splitlines(True) + if level == 1: + indent += 1 + allowance += 1 + max_width1 = max_width = self._width - indent + for i, line in enumerate(lines): + rep = repr(line) + if i == len(lines) - 1: + max_width1 -= allowance + if len(rep) <= max_width1: + chunks.append(rep) + else: + # A list of alternating (non-space, space) strings + parts = re.findall(r'\S*\s*', line) + assert parts + assert not parts[-1] + parts.pop() # drop empty last part + max_width2 = max_width + current = '' + for j, part in enumerate(parts): + candidate = current + part + if j == len(parts) - 1 and i == len(lines) - 1: + max_width2 -= allowance + if len(repr(candidate)) > max_width2: + if current: + chunks.append(repr(current)) + current = part + else: + current = candidate + if current: + chunks.append(repr(current)) + if len(chunks) == 1: + write(rep) + return + if level == 1: + write('(') + for i, rep in enumerate(chunks): + if i > 0: + write('\n' + ' '*indent) + write(rep) + if level == 1: + write(')') + + _dispatch[str.__repr__] = _pprint_str + + def _pprint_bytes(self, object, stream, indent, allowance, context, level): + write = stream.write + if len(object) <= 4: + write(repr(object)) + return + parens = level == 1 + if parens: + indent += 1 + allowance += 1 + write('(') + delim = '' + for rep in _wrap_bytes_repr(object, self._width - indent, allowance): + write(delim) + write(rep) + if not delim: + delim = '\n' + ' '*indent + if parens: + write(')') + + _dispatch[bytes.__repr__] = _pprint_bytes + + def _pprint_bytearray(self, object, stream, indent, allowance, context, level): + write = stream.write + write('bytearray(') + self._pprint_bytes(bytes(object), stream, indent + 10, + allowance + 1, context, level + 1) + write(')') + + _dispatch[bytearray.__repr__] = _pprint_bytearray + + def _pprint_mappingproxy(self, object, stream, indent, allowance, context, level): + stream.write('mappingproxy(') + self._format(object.copy(), stream, indent + 13, allowance + 1, + context, level) + stream.write(')') + + _dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy + + def _format_dict_items(self, items, stream, indent, allowance, context, + level): + write = stream.write + indent += self._indent_per_level + delimnl = ',\n' + ' ' * indent + last_index = len(items) - 1 + for i, (key, ent) in enumerate(items): + last = i == last_index + rep = self._repr(key, context, level) + write(rep) + write(': ') + self._format(ent, stream, indent + len(rep) + 2, + allowance if last else 1, + context, level) + if not last: + write(delimnl) + + def _format_items(self, items, stream, indent, allowance, context, level): + write = stream.write + indent += self._indent_per_level + if self._indent_per_level > 1: + write((self._indent_per_level - 1) * ' ') + delimnl = ',\n' + ' ' * indent + delim = '' + width = max_width = self._width - indent + 1 + it = iter(items) + try: + next_ent = next(it) + except StopIteration: + return + last = False + while not last: + ent = next_ent + try: + next_ent = next(it) + except StopIteration: + last = True + max_width -= allowance + width -= allowance + if self._compact: + rep = self._repr(ent, context, level) + w = len(rep) + 2 + if width < w: + width = max_width + if delim: + delim = delimnl + if width >= w: + width -= w + write(delim) + delim = ', ' + write(rep) + continue + write(delim) + delim = delimnl + self._format(ent, stream, indent, + 
allowance if last else 1, + context, level) + + def _repr(self, object, context, level): + repr, readable, recursive = self.format(object, context.copy(), + self._depth, level) + if not readable: + self._readable = False + if recursive: + self._recursive = True + return repr + + def format(self, object, context, maxlevels, level): + """Format object for a specific context, returning a string + and flags indicating whether the representation is 'readable' + and whether the object represents a recursive construct. + """ + return _safe_repr(object, context, maxlevels, level) + + def _pprint_default_dict(self, object, stream, indent, allowance, context, level): + if not len(object): + stream.write(repr(object)) + return + rdf = self._repr(object.default_factory, context, level) + cls = object.__class__ + indent += len(cls.__name__) + 1 + stream.write('%s(%s,\n%s' % (cls.__name__, rdf, ' ' * indent)) + self._pprint_dict(object, stream, indent, allowance + 1, context, level) + stream.write(')') + + _dispatch[_collections.defaultdict.__repr__] = _pprint_default_dict + + def _pprint_counter(self, object, stream, indent, allowance, context, level): + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + '({') + if self._indent_per_level > 1: + stream.write((self._indent_per_level - 1) * ' ') + items = object.most_common() + self._format_dict_items(items, stream, + indent + len(cls.__name__) + 1, allowance + 2, + context, level) + stream.write('})') + + _dispatch[_collections.Counter.__repr__] = _pprint_counter + + def _pprint_chain_map(self, object, stream, indent, allowance, context, level): + if not len(object.maps): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + '(') + indent += len(cls.__name__) + 1 + for i, m in enumerate(object.maps): + if i == len(object.maps) - 1: + self._format(m, stream, indent, allowance + 1, context, level) + stream.write(')') + else: + self._format(m, stream, indent, 1, context, level) + stream.write(',\n' + ' ' * indent) + + _dispatch[_collections.ChainMap.__repr__] = _pprint_chain_map + + def _pprint_deque(self, object, stream, indent, allowance, context, level): + if not len(object): + stream.write(repr(object)) + return + cls = object.__class__ + stream.write(cls.__name__ + '(') + indent += len(cls.__name__) + 1 + stream.write('[') + if object.maxlen is None: + self._format_items(object, stream, indent, allowance + 2, + context, level) + stream.write('])') + else: + self._format_items(object, stream, indent, 2, + context, level) + rml = self._repr(object.maxlen, context, level) + stream.write('],\n%smaxlen=%s)' % (' ' * indent, rml)) + + _dispatch[_collections.deque.__repr__] = _pprint_deque + + def _pprint_user_dict(self, object, stream, indent, allowance, context, level): + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserDict.__repr__] = _pprint_user_dict + + def _pprint_user_list(self, object, stream, indent, allowance, context, level): + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserList.__repr__] = _pprint_user_list + + def _pprint_user_string(self, object, stream, indent, allowance, context, level): + self._format(object.data, stream, indent, allowance, context, level - 1) + + _dispatch[_collections.UserString.__repr__] = _pprint_user_string + +# Return triple (repr_string, isreadable, isrecursive). 
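A short usage sketch of the rewritten module: compact=True (new in 3.5) packs several short items per line instead of one per line, and isreadable()/isrecursive() expose two fields of the (repr_string, isreadable, isrecursive) triple that _safe_repr() below computes. The sample data is arbitrary.

import pprint

nested = [list(range(5))] * 5
print(pprint.pformat(nested, width=40))                 # one inner list per line
print(pprint.pformat(nested, width=40, compact=True))   # several items per line

looped = []
looped.append(looped)                 # self-referencing list
print(pprint.isrecursive(looped))     # True
print(pprint.isreadable(looped))      # False: its repr cannot round-trip via eval()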
+ +def _safe_repr(object, context, maxlevels, level): + typ = type(object) + if typ in _builtin_scalars: + return repr(object), True, False + + r = getattr(typ, "__repr__", None) + if issubclass(typ, dict) and r is dict.__repr__: + if not object: + return "{}", True, False + objid = id(object) + if maxlevels and level >= maxlevels: + return "{...}", False, objid in context + if objid in context: + return _recursion(object), False, True + context[objid] = 1 + readable = True + recursive = False + components = [] + append = components.append + level += 1 + saferepr = _safe_repr + items = sorted(object.items(), key=_safe_tuple) + for k, v in items: + krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) + vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) + append("%s: %s" % (krepr, vrepr)) + readable = readable and kreadable and vreadable + if krecur or vrecur: + recursive = True + del context[objid] + return "{%s}" % ", ".join(components), readable, recursive + + if (issubclass(typ, list) and r is list.__repr__) or \ + (issubclass(typ, tuple) and r is tuple.__repr__): + if issubclass(typ, list): + if not object: + return "[]", True, False + format = "[%s]" + elif len(object) == 1: + format = "(%s,)" + else: + if not object: + return "()", True, False + format = "(%s)" + objid = id(object) + if maxlevels and level >= maxlevels: + return format % "...", False, objid in context + if objid in context: + return _recursion(object), False, True + context[objid] = 1 + readable = True + recursive = False + components = [] + append = components.append + level += 1 + for o in object: + orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) + append(orepr) + if not oreadable: + readable = False + if orecur: + recursive = True + del context[objid] + return format % ", ".join(components), readable, recursive + + rep = repr(object) + return rep, (rep and not rep.startswith('<')), False + +_builtin_scalars = frozenset({str, bytes, bytearray, int, float, complex, + bool, type(None)}) + +def _recursion(object): + return ("" + % (type(object).__name__, id(object))) + + +def _perfcheck(object=None): + import time + if object is None: + object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000 + p = PrettyPrinter() + t1 = time.time() + _safe_repr(object, {}, None, 0) + t2 = time.time() + p.pformat(object) + t3 = time.time() + print("_safe_repr:", t2 - t1) + print("pformat:", t3 - t2) + +def _wrap_bytes_repr(object, width, allowance): + current = b'' + last = len(object) // 4 * 4 + for i in range(0, len(object), 4): + part = object[i: i+4] + candidate = current + part + if i == last: + width -= allowance + if len(repr(candidate)) > width: + if current: + yield repr(current) + current = part + else: + current = candidate + if current: + yield repr(current) + +if __name__ == "__main__": + _perfcheck() diff --git a/Darwin/lib/python3.4/profile.py b/Darwin/lib/python3.5/profile.py similarity index 100% rename from Darwin/lib/python3.4/profile.py rename to Darwin/lib/python3.5/profile.py diff --git a/Darwin/lib/python3.4/pstats.py b/Darwin/lib/python3.5/pstats.py similarity index 100% rename from Darwin/lib/python3.4/pstats.py rename to Darwin/lib/python3.5/pstats.py diff --git a/Darwin/lib/python3.4/pty.py b/Darwin/lib/python3.5/pty.py similarity index 100% rename from Darwin/lib/python3.4/pty.py rename to Darwin/lib/python3.5/pty.py diff --git a/Darwin/lib/python3.4/py_compile.py b/Darwin/lib/python3.5/py_compile.py similarity index 91% rename from 
Darwin/lib/python3.4/py_compile.py rename to Darwin/lib/python3.5/py_compile.py index 1277b93..11c5b50 100644 --- a/Darwin/lib/python3.4/py_compile.py +++ b/Darwin/lib/python3.5/py_compile.py @@ -1,9 +1,9 @@ -"""Routine to "compile" a .py file to a .pyc (or .pyo) file. +"""Routine to "compile" a .py file to a .pyc file. This module has intimate knowledge of the format of .pyc files. """ -import importlib._bootstrap +import importlib._bootstrap_external import importlib.machinery import importlib.util import os @@ -67,7 +67,7 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1): :param file: The source file name. :param cfile: The target byte compiled file name. When not given, this - defaults to the PEP 3147 location. + defaults to the PEP 3147/PEP 488 location. :param dfile: Purported file name, i.e. the file name that shows up in error messages. Defaults to the source file name. :param doraise: Flag indicating whether or not an exception should be @@ -85,12 +85,12 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1): Note that it isn't necessary to byte-compile Python modules for execution efficiency -- Python itself byte-compiles a module when it is loaded, and if it can, writes out the bytecode to the - corresponding .pyc (or .pyo) file. + corresponding .pyc file. However, if a Python installation is shared between users, it is a good idea to byte-compile all modules upon installation, since other users may not be able to write in the source directories, - and thus they won't be able to write the .pyc/.pyo file, and then + and thus they won't be able to write the .pyc file, and then they would be byte-compiling every module each time it is loaded. This can slow down program start-up considerably. @@ -105,8 +105,9 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1): """ if cfile is None: if optimize >= 0: + optimization = optimize if optimize >= 1 else '' cfile = importlib.util.cache_from_source(file, - debug_override=not optimize) + optimization=optimization) else: cfile = importlib.util.cache_from_source(file) if os.path.islink(cfile): @@ -136,10 +137,10 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1): except FileExistsError: pass source_stats = loader.path_stats(file) - bytecode = importlib._bootstrap._code_to_bytecode( + bytecode = importlib._bootstrap_external._code_to_bytecode( code, source_stats['mtime'], source_stats['size']) - mode = importlib._bootstrap._calc_mode(file) - importlib._bootstrap._write_atomic(cfile, bytecode, mode) + mode = importlib._bootstrap_external._calc_mode(file) + importlib._bootstrap_external._write_atomic(cfile, bytecode, mode) return cfile @@ -178,7 +179,7 @@ def main(args=None): except PyCompileError as error: # return value to indicate at least one failure rv = 1 - sys.stderr.write(error.msg) + sys.stderr.write("%s\n" % error.msg) return rv if __name__ == "__main__": diff --git a/Darwin/lib/python3.4/pyclbr.py b/Darwin/lib/python3.5/pyclbr.py similarity index 100% rename from Darwin/lib/python3.4/pyclbr.py rename to Darwin/lib/python3.5/pyclbr.py diff --git a/Darwin/lib/python3.4/pydoc.py b/Darwin/lib/python3.5/pydoc.py similarity index 96% rename from Darwin/lib/python3.4/pydoc.py rename to Darwin/lib/python3.5/pydoc.py index 42f48e1..ee558bf 100755 --- a/Darwin/lib/python3.4/pydoc.py +++ b/Darwin/lib/python3.5/pydoc.py @@ -53,6 +53,7 @@ Richard Chamberlain, for the first implementation of textdoc. 
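The py_compile changes above switch from the old .pyo scheme to PEP 488 cache names via importlib.util.cache_from_source(); a quick sketch of that API (the '-35' tag shown in the comments depends on the interpreter version):

import importlib.util

print(importlib.util.cache_from_source('spam.py'))
# e.g. __pycache__/spam.cpython-35.pyc
print(importlib.util.cache_from_source('spam.py', optimization=''))
# same name: an empty optimization string means "no optimization suffix"
print(importlib.util.cache_from_source('spam.py', optimization=2))
# e.g. __pycache__/spam.cpython-35.opt-2.pyc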
import builtins import importlib._bootstrap +import importlib._bootstrap_external import importlib.machinery import importlib.util import inspect @@ -64,6 +65,7 @@ import re import sys import time import tokenize +import urllib.parse import warnings from collections import deque from reprlib import Repr @@ -212,7 +214,7 @@ def classify_class_attrs(object): def ispackage(path): """Guess whether a path refers to a package directory.""" if os.path.isdir(path): - for ext in ('.py', '.pyc', '.pyo'): + for ext in ('.py', '.pyc'): if os.path.isfile(os.path.join(path, '__init__' + ext)): return True return False @@ -263,13 +265,12 @@ def synopsis(filename, cache={}): # XXX We probably don't need to pass in the loader here. spec = importlib.util.spec_from_file_location('__temp__', filename, loader=loader) - _spec = importlib._bootstrap._SpecMethods(spec) try: - module = _spec.load() + module = importlib._bootstrap._load(spec) except: return None del sys.modules['__temp__'] - result = (module.__doc__ or '').splitlines()[0] + result = module.__doc__.splitlines()[0] if module.__doc__ else None # Cache the result. cache[filename] = (mtime, result) return result @@ -292,14 +293,13 @@ def importfile(path): filename = os.path.basename(path) name, ext = os.path.splitext(filename) if is_bytecode: - loader = importlib._bootstrap.SourcelessFileLoader(name, path) + loader = importlib._bootstrap_external.SourcelessFileLoader(name, path) else: - loader = importlib._bootstrap.SourceFileLoader(name, path) + loader = importlib._bootstrap_external.SourceFileLoader(name, path) # XXX We probably don't need to pass in the loader here. spec = importlib.util.spec_from_file_location(name, path, loader=loader) - _spec = importlib._bootstrap._SpecMethods(spec) try: - return _spec.load() + return importlib._bootstrap._load(spec) except: raise ErrorDuringImport(path, sys.exc_info()) @@ -595,10 +595,15 @@ class HTMLDoc(Doc): elif pep: url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep) results.append('%s' % (url, escape(all))) + elif selfdot: + # Create a link for methods like 'self.method(...)' + # and use for attributes like 'self.attr' + if text[end:end+1] == '(': + results.append('self.' 
+ self.namelink(name, methods)) + else: + results.append('self.%s' % name) elif text[end:end+1] == '(': results.append(self.namelink(name, methods, funcs, classes)) - elif selfdot: - results.append('self.%s' % name) else: results.append(self.namelink(name, classes)) here = end @@ -643,10 +648,7 @@ class HTMLDoc(Doc): head = '%s' % linkedname try: path = inspect.getabsfile(object) - url = path - if sys.platform == 'win32': - import nturl2path - url = nturl2path.pathname2url(path) + url = urllib.parse.quote(path) filelink = self.filelink(url, path) except TypeError: filelink = '(built-in)' @@ -953,7 +955,7 @@ class HTMLDoc(Doc): if not argspec: argspec = '(...)' - decl = title + argspec + (note and self.grey( + decl = title + self.escape(argspec) + (note and self.grey( '%s' % note)) if skipdocs: @@ -1404,14 +1406,13 @@ class _PlainTextDoc(TextDoc): def pager(text): """The first time this is called, determine what kind of pager to use.""" global pager - # Escape non-encodable characters to avoid encoding errors later - encoding = sys.getfilesystemencoding() - text = text.encode(encoding, 'backslashreplace').decode(encoding) pager = getpager() pager(text) def getpager(): """Decide what method to use for paging through text.""" + if not hasattr(sys.stdin, "isatty"): + return plainpager if not hasattr(sys.stdout, "isatty"): return plainpager if not sys.stdin.isatty() or not sys.stdout.isatty(): @@ -1447,39 +1448,64 @@ def plain(text): def pipepager(text, cmd): """Page through text by feeding it to another program.""" - pipe = os.popen(cmd, 'w') + import subprocess + proc = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE) try: - pipe.write(text) - pipe.close() + with io.TextIOWrapper(proc.stdin, errors='backslashreplace') as pipe: + try: + pipe.write(text) + except KeyboardInterrupt: + # We've hereby abandoned whatever text hasn't been written, + # but the pager is still in control of the terminal. + pass except OSError: pass # Ignore broken pipes caused by quitting the pager program. + while True: + try: + proc.wait() + break + except KeyboardInterrupt: + # Ignore ctl-c like the pager itself does. Otherwise the pager is + # left running and the terminal is in raw mode and unusable. 
+ pass def tempfilepager(text, cmd): """Page through text by invoking a program on a temporary file.""" import tempfile filename = tempfile.mktemp() - with open(filename, 'w') as file: + with open(filename, 'w', errors='backslashreplace') as file: file.write(text) try: os.system(cmd + ' "' + filename + '"') finally: os.unlink(filename) +def _escape_stdout(text): + # Escape non-encodable characters to avoid encoding errors later + encoding = getattr(sys.stdout, 'encoding', None) or 'utf-8' + return text.encode(encoding, 'backslashreplace').decode(encoding) + def ttypager(text): """Page through text on a text terminal.""" - lines = plain(text).split('\n') + lines = plain(_escape_stdout(text)).split('\n') try: import tty fd = sys.stdin.fileno() old = tty.tcgetattr(fd) tty.setcbreak(fd) getchar = lambda: sys.stdin.read(1) - except (ImportError, AttributeError): + except (ImportError, AttributeError, io.UnsupportedOperation): tty = None getchar = lambda: sys.stdin.readline()[:-1][:1] try: - r = inc = os.environ.get('LINES', 25) - 1 + try: + h = int(os.environ.get('LINES', 0)) + except ValueError: + h = 0 + if h <= 1: + h = 25 + r = inc = h - 1 sys.stdout.write('\n'.join(lines[:inc]) + '\n') while lines[r:]: sys.stdout.write('-- more --') @@ -1505,7 +1531,7 @@ def ttypager(text): def plainpager(text): """Simply print unformatted text. This is the ultimate fallback.""" - sys.stdout.write(plain(text)) + sys.stdout.write(plain(_escape_stdout(text))) def describe(thing): """Produce a short description of the given thing.""" @@ -1563,8 +1589,11 @@ def resolve(thing, forceload=0): """Given an object or a path to an object, get the object and its name.""" if isinstance(thing, str): object = locate(thing, forceload) - if not object: - raise ImportError('no Python documentation found for %r' % thing) + if object is None: + raise ImportError('''\ +No Python documentation found for %r. +Use help() to get the interactive help utility. +Use help(str) for help on the str class.''' % thing) return object, thing else: name = getattr(thing, '__name__', None) @@ -1611,9 +1640,8 @@ def writedoc(thing, forceload=0): try: object, name = resolve(thing, forceload) page = html.page(describe(object), html.document(object, name)) - file = open(name + '.html', 'w', encoding='utf-8') - file.write(page) - file.close() + with open(name + '.html', 'w', encoding='utf-8') as file: + file.write(page) print('wrote', name + '.html') except (ImportError, ErrorDuringImport) as value: print(value) @@ -1633,7 +1661,7 @@ class Helper: # in pydoc_data/topics.py. # # CAUTION: if you change one of these dictionaries, be sure to adapt the - # list of needed labels in Doc/tools/sphinxext/pyspecific.py and + # list of needed labels in Doc/tools/pyspecific.py and # regenerate the pydoc_data/topics.py file by running # make pydoc-topics # in Doc/ and copying the output file into the Lib/ directory. 
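The ttypager hunk above replaces the bare LINES lookup with a tolerant parse that falls back to 25 rows; an equivalent standalone sketch (the function name is mine):

import os

def terminal_height(default=25):
    # Unset, empty, or non-numeric LINES values fall back to the default
    # instead of raising, matching the patched ttypager() logic.
    try:
        height = int(os.environ.get('LINES', 0))
    except ValueError:
        height = 0
    return height if height > 1 else default

print(terminal_height())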
@@ -1733,7 +1761,6 @@ class Helper: 'TRACEBACKS': 'TYPES', 'NONE': ('bltin-null-object', ''), 'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'), - 'FILES': ('bltin-file-objects', ''), 'SPECIALATTRIBUTES': ('specialattrs', ''), 'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'), 'MODULES': ('typesmodules', 'import'), @@ -1809,7 +1836,8 @@ class Helper: if inspect.stack()[1][3] == '?': self() return '' - return '' + return '<%s.%s instance>' % (self.__class__.__module__, + self.__class__.__qualname__) _GoInteractive = object() def __call__(self, request=_GoInteractive): @@ -1835,7 +1863,10 @@ has the same effect as typing a particular string at the help> prompt. break request = replace(request, '"', '', "'", '').strip() if request.lower() in ('q', 'quit'): break - self.help(request) + if request == 'help': + self.intro() + else: + self.help(request) def getline(self, prompt): """Read one line, using input() when appropriate.""" @@ -1849,8 +1880,7 @@ has the same effect as typing a particular string at the help> prompt. def help(self, request): if type(request) is type(''): request = request.strip() - if request == 'help': self.intro() - elif request == 'keywords': self.listkeywords() + if request == 'keywords': self.listkeywords() elif request == 'symbols': self.listsymbols() elif request == 'topics': self.listtopics() elif request == 'modules': self.listmodules() @@ -1863,6 +1893,7 @@ has the same effect as typing a particular string at the help> prompt. elif request in self.keywords: self.showtopic(request) elif request in self.topics: self.showtopic(request) elif request: doc(request, 'Help on %s:', output=self._output) + else: doc(str, 'Help on %s:', output=self._output) elif isinstance(request, Helper): self() else: doc(request, 'Help on %s:', output=self._output) self.output.write('\n') @@ -2058,14 +2089,13 @@ class ModuleScanner: else: path = None else: - _spec = importlib._bootstrap._SpecMethods(spec) try: - module = _spec.load() + module = importlib._bootstrap._load(spec) except ImportError: if onerror: onerror(modname) continue - desc = (module.__doc__ or '').splitlines()[0] + desc = module.__doc__.splitlines()[0] if module.__doc__ else '' path = getattr(module,'__file__',None) name = modname + ' - ' + desc if name.lower().find(key) >= 0: @@ -2174,8 +2204,8 @@ def _start_server(urlhandler, port): class DocServer(http.server.HTTPServer): def __init__(self, port, callback): - self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost' - self.address = ('', port) + self.host = 'localhost' + self.address = (self.host, port) self.callback = callback self.base.__init__(self, self.address, self.handler) self.quit = False @@ -2347,7 +2377,7 @@ def _url_handler(url, content_type="text/html"): def html_getfile(path): """Get and display a source file listing safely.""" - path = path.replace('%20', ' ') + path = urllib.parse.unquote(path) with tokenize.open(path) as fp: lines = html.escape(fp.read()) body = '
<pre>%s</pre>
' % lines diff --git a/Darwin/lib/python3.4/pydoc_data/__init__.py b/Darwin/lib/python3.5/pydoc_data/__init__.py similarity index 100% rename from Darwin/lib/python3.4/pydoc_data/__init__.py rename to Darwin/lib/python3.5/pydoc_data/__init__.py diff --git a/Darwin/lib/python3.4/pydoc_data/_pydoc.css b/Darwin/lib/python3.5/pydoc_data/_pydoc.css similarity index 100% rename from Darwin/lib/python3.4/pydoc_data/_pydoc.css rename to Darwin/lib/python3.5/pydoc_data/_pydoc.css diff --git a/Darwin/lib/python3.5/pydoc_data/topics.py b/Darwin/lib/python3.5/pydoc_data/topics.py new file mode 100644 index 0000000..f66dd62 --- /dev/null +++ b/Darwin/lib/python3.5/pydoc_data/topics.py @@ -0,0 +1,12881 @@ +# -*- coding: utf-8 -*- +# Autogenerated by Sphinx on Sat Sep 12 17:22:24 2015 +topics = {'assert': '\n' + 'The "assert" statement\n' + '**********************\n' + '\n' + 'Assert statements are a convenient way to insert debugging ' + 'assertions\n' + 'into a program:\n' + '\n' + ' assert_stmt ::= "assert" expression ["," expression]\n' + '\n' + 'The simple form, "assert expression", is equivalent to\n' + '\n' + ' if __debug__:\n' + ' if not expression: raise AssertionError\n' + '\n' + 'The extended form, "assert expression1, expression2", is ' + 'equivalent to\n' + '\n' + ' if __debug__:\n' + ' if not expression1: raise AssertionError(expression2)\n' + '\n' + 'These equivalences assume that "__debug__" and "AssertionError" ' + 'refer\n' + 'to the built-in variables with those names. In the current\n' + 'implementation, the built-in variable "__debug__" is "True" ' + 'under\n' + 'normal circumstances, "False" when optimization is requested ' + '(command\n' + 'line option -O). The current code generator emits no code for ' + 'an\n' + 'assert statement when optimization is requested at compile ' + 'time. Note\n' + 'that it is unnecessary to include the source code for the ' + 'expression\n' + 'that failed in the error message; it will be displayed as part ' + 'of the\n' + 'stack trace.\n' + '\n' + 'Assignments to "__debug__" are illegal. The value for the ' + 'built-in\n' + 'variable is determined when the interpreter starts.\n', + 'assignment': '\n' + 'Assignment statements\n' + '*********************\n' + '\n' + 'Assignment statements are used to (re)bind names to values ' + 'and to\n' + 'modify attributes or items of mutable objects:\n' + '\n' + ' assignment_stmt ::= (target_list "=")+ (expression_list | ' + 'yield_expression)\n' + ' target_list ::= target ("," target)* [","]\n' + ' target ::= identifier\n' + ' | "(" target_list ")"\n' + ' | "[" target_list "]"\n' + ' | attributeref\n' + ' | subscription\n' + ' | slicing\n' + ' | "*" target\n' + '\n' + '(See section *Primaries* for the syntax definitions for\n' + '*attributeref*, *subscription*, and *slicing*.)\n' + '\n' + 'An assignment statement evaluates the expression list ' + '(remember that\n' + 'this can be a single expression or a comma-separated list, ' + 'the latter\n' + 'yielding a tuple) and assigns the single resulting object to ' + 'each of\n' + 'the target lists, from left to right.\n' + '\n' + 'Assignment is defined recursively depending on the form of ' + 'the target\n' + '(list). When a target is part of a mutable object (an ' + 'attribute\n' + 'reference, subscription or slicing), the mutable object ' + 'must\n' + 'ultimately perform the assignment and decide about its ' + 'validity, and\n' + 'may raise an exception if the assignment is unacceptable. 
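A small example of the point just made, that the target object itself decides whether an item assignment is acceptable and may raise:

    t = (1, 2, 3)
    try:
        t[0] = 99                # tuples reject item assignment
    except TypeError as exc:
        print(exc)               # 'tuple' object does not support item assignment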
' + 'The rules\n' + 'observed by various types and the exceptions raised are ' + 'given with the\n' + 'definition of the object types (see section *The standard ' + 'type\n' + 'hierarchy*).\n' + '\n' + 'Assignment of an object to a target list, optionally ' + 'enclosed in\n' + 'parentheses or square brackets, is recursively defined as ' + 'follows.\n' + '\n' + '* If the target list is a single target: The object is ' + 'assigned to\n' + ' that target.\n' + '\n' + '* If the target list is a comma-separated list of targets: ' + 'The\n' + ' object must be an iterable with the same number of items ' + 'as there\n' + ' are targets in the target list, and the items are ' + 'assigned, from\n' + ' left to right, to the corresponding targets.\n' + '\n' + ' * If the target list contains one target prefixed with an\n' + ' asterisk, called a "starred" target: The object must be ' + 'a sequence\n' + ' with at least as many items as there are targets in the ' + 'target\n' + ' list, minus one. The first items of the sequence are ' + 'assigned,\n' + ' from left to right, to the targets before the starred ' + 'target. The\n' + ' final items of the sequence are assigned to the targets ' + 'after the\n' + ' starred target. A list of the remaining items in the ' + 'sequence is\n' + ' then assigned to the starred target (the list can be ' + 'empty).\n' + '\n' + ' * Else: The object must be a sequence with the same number ' + 'of\n' + ' items as there are targets in the target list, and the ' + 'items are\n' + ' assigned, from left to right, to the corresponding ' + 'targets.\n' + '\n' + 'Assignment of an object to a single target is recursively ' + 'defined as\n' + 'follows.\n' + '\n' + '* If the target is an identifier (name):\n' + '\n' + ' * If the name does not occur in a "global" or "nonlocal" ' + 'statement\n' + ' in the current code block: the name is bound to the ' + 'object in the\n' + ' current local namespace.\n' + '\n' + ' * Otherwise: the name is bound to the object in the ' + 'global\n' + ' namespace or the outer namespace determined by ' + '"nonlocal",\n' + ' respectively.\n' + '\n' + ' The name is rebound if it was already bound. This may ' + 'cause the\n' + ' reference count for the object previously bound to the ' + 'name to reach\n' + ' zero, causing the object to be deallocated and its ' + 'destructor (if it\n' + ' has one) to be called.\n' + '\n' + '* If the target is a target list enclosed in parentheses or ' + 'in\n' + ' square brackets: The object must be an iterable with the ' + 'same number\n' + ' of items as there are targets in the target list, and its ' + 'items are\n' + ' assigned, from left to right, to the corresponding ' + 'targets.\n' + '\n' + '* If the target is an attribute reference: The primary ' + 'expression in\n' + ' the reference is evaluated. It should yield an object ' + 'with\n' + ' assignable attributes; if this is not the case, ' + '"TypeError" is\n' + ' raised. That object is then asked to assign the assigned ' + 'object to\n' + ' the given attribute; if it cannot perform the assignment, ' + 'it raises\n' + ' an exception (usually but not necessarily ' + '"AttributeError").\n' + '\n' + ' Note: If the object is a class instance and the attribute ' + 'reference\n' + ' occurs on both sides of the assignment operator, the RHS ' + 'expression,\n' + ' "a.x" can access either an instance attribute or (if no ' + 'instance\n' + ' attribute exists) a class attribute. The LHS target "a.x" ' + 'is always\n' + ' set as an instance attribute, creating it if necessary. 
' + 'Thus, the\n' + ' two occurrences of "a.x" do not necessarily refer to the ' + 'same\n' + ' attribute: if the RHS expression refers to a class ' + 'attribute, the\n' + ' LHS creates a new instance attribute as the target of the\n' + ' assignment:\n' + '\n' + ' class Cls:\n' + ' x = 3 # class variable\n' + ' inst = Cls()\n' + ' inst.x = inst.x + 1 # writes inst.x as 4 leaving ' + 'Cls.x as 3\n' + '\n' + ' This description does not necessarily apply to descriptor\n' + ' attributes, such as properties created with "property()".\n' + '\n' + '* If the target is a subscription: The primary expression in ' + 'the\n' + ' reference is evaluated. It should yield either a mutable ' + 'sequence\n' + ' object (such as a list) or a mapping object (such as a ' + 'dictionary).\n' + ' Next, the subscript expression is evaluated.\n' + '\n' + ' If the primary is a mutable sequence object (such as a ' + 'list), the\n' + ' subscript must yield an integer. If it is negative, the ' + "sequence's\n" + ' length is added to it. The resulting value must be a ' + 'nonnegative\n' + " integer less than the sequence's length, and the sequence " + 'is asked\n' + ' to assign the assigned object to its item with that ' + 'index. If the\n' + ' index is out of range, "IndexError" is raised (assignment ' + 'to a\n' + ' subscripted sequence cannot add new items to a list).\n' + '\n' + ' If the primary is a mapping object (such as a dictionary), ' + 'the\n' + " subscript must have a type compatible with the mapping's " + 'key type,\n' + ' and the mapping is then asked to create a key/datum pair ' + 'which maps\n' + ' the subscript to the assigned object. This can either ' + 'replace an\n' + ' existing key/value pair with the same key value, or insert ' + 'a new\n' + ' key/value pair (if no key with the same value existed).\n' + '\n' + ' For user-defined objects, the "__setitem__()" method is ' + 'called with\n' + ' appropriate arguments.\n' + '\n' + '* If the target is a slicing: The primary expression in the\n' + ' reference is evaluated. It should yield a mutable ' + 'sequence object\n' + ' (such as a list). The assigned object should be a ' + 'sequence object\n' + ' of the same type. Next, the lower and upper bound ' + 'expressions are\n' + ' evaluated, insofar they are present; defaults are zero and ' + 'the\n' + " sequence's length. The bounds should evaluate to " + 'integers. If\n' + " either bound is negative, the sequence's length is added " + 'to it. The\n' + ' resulting bounds are clipped to lie between zero and the ' + "sequence's\n" + ' length, inclusive. Finally, the sequence object is asked ' + 'to replace\n' + ' the slice with the items of the assigned sequence. The ' + 'length of\n' + ' the slice may be different from the length of the assigned ' + 'sequence,\n' + ' thus changing the length of the target sequence, if the ' + 'target\n' + ' sequence allows it.\n' + '\n' + '**CPython implementation detail:** In the current ' + 'implementation, the\n' + 'syntax for targets is taken to be the same as for ' + 'expressions, and\n' + 'invalid syntax is rejected during the code generation phase, ' + 'causing\n' + 'less detailed error messages.\n' + '\n' + 'Although the definition of assignment implies that overlaps ' + 'between\n' + 'the left-hand side and the right-hand side are ' + "'simultanenous' (for\n" + 'example "a, b = b, a" swaps two variables), overlaps ' + '*within* the\n' + 'collection of assigned-to variables occur left-to-right, ' + 'sometimes\n' + 'resulting in confusion. 
For instance, the following program ' + 'prints\n' + '"[0, 2]":\n' + '\n' + ' x = [0, 1]\n' + ' i = 0\n' + ' i, x[i] = 1, 2 # i is updated, then x[i] is ' + 'updated\n' + ' print(x)\n' + '\n' + 'See also: **PEP 3132** - Extended Iterable Unpacking\n' + '\n' + ' The specification for the "*target" feature.\n' + '\n' + '\n' + 'Augmented assignment statements\n' + '===============================\n' + '\n' + 'Augmented assignment is the combination, in a single ' + 'statement, of a\n' + 'binary operation and an assignment statement:\n' + '\n' + ' augmented_assignment_stmt ::= augtarget augop ' + '(expression_list | yield_expression)\n' + ' augtarget ::= identifier | attributeref | ' + 'subscription | slicing\n' + ' augop ::= "+=" | "-=" | "*=" | "@=" | ' + '"/=" | "//=" | "%=" | "**="\n' + ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' + '\n' + '(See section *Primaries* for the syntax definitions of the ' + 'last three\n' + 'symbols.)\n' + '\n' + 'An augmented assignment evaluates the target (which, unlike ' + 'normal\n' + 'assignment statements, cannot be an unpacking) and the ' + 'expression\n' + 'list, performs the binary operation specific to the type of ' + 'assignment\n' + 'on the two operands, and assigns the result to the original ' + 'target.\n' + 'The target is only evaluated once.\n' + '\n' + 'An augmented assignment expression like "x += 1" can be ' + 'rewritten as\n' + '"x = x + 1" to achieve a similar, but not exactly equal ' + 'effect. In the\n' + 'augmented version, "x" is only evaluated once. Also, when ' + 'possible,\n' + 'the actual operation is performed *in-place*, meaning that ' + 'rather than\n' + 'creating a new object and assigning that to the target, the ' + 'old object\n' + 'is modified instead.\n' + '\n' + 'Unlike normal assignments, augmented assignments evaluate ' + 'the left-\n' + 'hand side *before* evaluating the right-hand side. For ' + 'example, "a[i]\n' + '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' + 'performs\n' + 'the addition, and lastly, it writes the result back to ' + '"a[i]".\n' + '\n' + 'With the exception of assigning to tuples and multiple ' + 'targets in a\n' + 'single statement, the assignment done by augmented ' + 'assignment\n' + 'statements is handled the same way as normal assignments. ' + 'Similarly,\n' + 'with the exception of the possible *in-place* behavior, the ' + 'binary\n' + 'operation performed by augmented assignment is the same as ' + 'the normal\n' + 'binary operations.\n' + '\n' + 'For targets which are attribute references, the same *caveat ' + 'about\n' + 'class and instance attributes* applies as for regular ' + 'assignments.\n', + 'atom-identifiers': '\n' + 'Identifiers (Names)\n' + '*******************\n' + '\n' + 'An identifier occurring as an atom is a name. See ' + 'section\n' + '*Identifiers and keywords* for lexical definition and ' + 'section *Naming\n' + 'and binding* for documentation of naming and binding.\n' + '\n' + 'When the name is bound to an object, evaluation of the ' + 'atom yields\n' + 'that object. When a name is not bound, an attempt to ' + 'evaluate it\n' + 'raises a "NameError" exception.\n' + '\n' + '**Private name mangling:** When an identifier that ' + 'textually occurs in\n' + 'a class definition begins with two or more underscore ' + 'characters and\n' + 'does not end in two or more underscores, it is ' + 'considered a *private\n' + 'name* of that class. Private names are transformed to ' + 'a longer form\n' + 'before code is generated for them. 
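A short demonstration of the private-name mangling just described, reusing the Ham/__spam names from the text:

    class Ham:
        __spam = 1                        # stored under the mangled name _Ham__spam

    print('_Ham__spam' in Ham.__dict__)   # True
    print(Ham._Ham__spam)                 # 1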
The transformation ' + 'inserts the\n' + 'class name, with leading underscores removed and a ' + 'single underscore\n' + 'inserted, in front of the name. For example, the ' + 'identifier "__spam"\n' + 'occurring in a class named "Ham" will be transformed ' + 'to "_Ham__spam".\n' + 'This transformation is independent of the syntactical ' + 'context in which\n' + 'the identifier is used. If the transformed name is ' + 'extremely long\n' + '(longer than 255 characters), implementation defined ' + 'truncation may\n' + 'happen. If the class name consists only of ' + 'underscores, no\n' + 'transformation is done.\n', + 'atom-literals': '\n' + 'Literals\n' + '********\n' + '\n' + 'Python supports string and bytes literals and various ' + 'numeric\n' + 'literals:\n' + '\n' + ' literal ::= stringliteral | bytesliteral\n' + ' | integer | floatnumber | imagnumber\n' + '\n' + 'Evaluation of a literal yields an object of the given ' + 'type (string,\n' + 'bytes, integer, floating point number, complex number) ' + 'with the given\n' + 'value. The value may be approximated in the case of ' + 'floating point\n' + 'and imaginary (complex) literals. See section *Literals* ' + 'for details.\n' + '\n' + 'All literals correspond to immutable data types, and ' + 'hence the\n' + "object's identity is less important than its value. " + 'Multiple\n' + 'evaluations of literals with the same value (either the ' + 'same\n' + 'occurrence in the program text or a different occurrence) ' + 'may obtain\n' + 'the same object or a different object with the same ' + 'value.\n', + 'attribute-access': '\n' + 'Customizing attribute access\n' + '****************************\n' + '\n' + 'The following methods can be defined to customize the ' + 'meaning of\n' + 'attribute access (use of, assignment to, or deletion ' + 'of "x.name") for\n' + 'class instances.\n' + '\n' + 'object.__getattr__(self, name)\n' + '\n' + ' Called when an attribute lookup has not found the ' + 'attribute in the\n' + ' usual places (i.e. it is not an instance attribute ' + 'nor is it found\n' + ' in the class tree for "self"). "name" is the ' + 'attribute name. This\n' + ' method should return the (computed) attribute value ' + 'or raise an\n' + ' "AttributeError" exception.\n' + '\n' + ' Note that if the attribute is found through the ' + 'normal mechanism,\n' + ' "__getattr__()" is not called. (This is an ' + 'intentional asymmetry\n' + ' between "__getattr__()" and "__setattr__()".) This ' + 'is done both for\n' + ' efficiency reasons and because otherwise ' + '"__getattr__()" would have\n' + ' no way to access other attributes of the instance. ' + 'Note that at\n' + ' least for instance variables, you can fake total ' + 'control by not\n' + ' inserting any values in the instance attribute ' + 'dictionary (but\n' + ' instead inserting them in another object). See ' + 'the\n' + ' "__getattribute__()" method below for a way to ' + 'actually get total\n' + ' control over attribute access.\n' + '\n' + 'object.__getattribute__(self, name)\n' + '\n' + ' Called unconditionally to implement attribute ' + 'accesses for\n' + ' instances of the class. If the class also defines ' + '"__getattr__()",\n' + ' the latter will not be called unless ' + '"__getattribute__()" either\n' + ' calls it explicitly or raises an "AttributeError". ' + 'This method\n' + ' should return the (computed) attribute value or ' + 'raise an\n' + ' "AttributeError" exception. 
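A tiny sketch of the __getattr__ fallback described above, using an illustrative class name (Default):

    class Default:
        def __getattr__(self, name):      # only called when normal lookup fails
            return 'missing: ' + name

    obj = Default()
    obj.x = 1
    print(obj.x)     # 1 -- found normally, __getattr__ is not called
    print(obj.y)     # missing: y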
In order to avoid ' + 'infinite recursion in\n' + ' this method, its implementation should always call ' + 'the base class\n' + ' method with the same name to access any attributes ' + 'it needs, for\n' + ' example, "object.__getattribute__(self, name)".\n' + '\n' + ' Note: This method may still be bypassed when ' + 'looking up special\n' + ' methods as the result of implicit invocation via ' + 'language syntax\n' + ' or built-in functions. See *Special method ' + 'lookup*.\n' + '\n' + 'object.__setattr__(self, name, value)\n' + '\n' + ' Called when an attribute assignment is attempted. ' + 'This is called\n' + ' instead of the normal mechanism (i.e. store the ' + 'value in the\n' + ' instance dictionary). *name* is the attribute name, ' + '*value* is the\n' + ' value to be assigned to it.\n' + '\n' + ' If "__setattr__()" wants to assign to an instance ' + 'attribute, it\n' + ' should call the base class method with the same ' + 'name, for example,\n' + ' "object.__setattr__(self, name, value)".\n' + '\n' + 'object.__delattr__(self, name)\n' + '\n' + ' Like "__setattr__()" but for attribute deletion ' + 'instead of\n' + ' assignment. This should only be implemented if ' + '"del obj.name" is\n' + ' meaningful for the object.\n' + '\n' + 'object.__dir__(self)\n' + '\n' + ' Called when "dir()" is called on the object. A ' + 'sequence must be\n' + ' returned. "dir()" converts the returned sequence to ' + 'a list and\n' + ' sorts it.\n' + '\n' + '\n' + 'Implementing Descriptors\n' + '========================\n' + '\n' + 'The following methods only apply when an instance of ' + 'the class\n' + 'containing the method (a so-called *descriptor* class) ' + 'appears in an\n' + '*owner* class (the descriptor must be in either the ' + "owner's class\n" + 'dictionary or in the class dictionary for one of its ' + 'parents). In the\n' + 'examples below, "the attribute" refers to the ' + 'attribute whose name is\n' + "the key of the property in the owner class' " + '"__dict__".\n' + '\n' + 'object.__get__(self, instance, owner)\n' + '\n' + ' Called to get the attribute of the owner class ' + '(class attribute\n' + ' access) or of an instance of that class (instance ' + 'attribute\n' + ' access). *owner* is always the owner class, while ' + '*instance* is the\n' + ' instance that the attribute was accessed through, ' + 'or "None" when\n' + ' the attribute is accessed through the *owner*. ' + 'This method should\n' + ' return the (computed) attribute value or raise an ' + '"AttributeError"\n' + ' exception.\n' + '\n' + 'object.__set__(self, instance, value)\n' + '\n' + ' Called to set the attribute on an instance ' + '*instance* of the owner\n' + ' class to a new value, *value*.\n' + '\n' + 'object.__delete__(self, instance)\n' + '\n' + ' Called to delete the attribute on an instance ' + '*instance* of the\n' + ' owner class.\n' + '\n' + 'The attribute "__objclass__" is interpreted by the ' + '"inspect" module as\n' + 'specifying the class where this object was defined ' + '(setting this\n' + 'appropriately can assist in runtime introspection of ' + 'dynamic class\n' + 'attributes). 
For callables, it may indicate that an ' + 'instance of the\n' + 'given type (or a subclass) is expected or required as ' + 'the first\n' + 'positional argument (for example, CPython sets this ' + 'attribute for\n' + 'unbound methods that are implemented in C).\n' + '\n' + '\n' + 'Invoking Descriptors\n' + '====================\n' + '\n' + 'In general, a descriptor is an object attribute with ' + '"binding\n' + 'behavior", one whose attribute access has been ' + 'overridden by methods\n' + 'in the descriptor protocol: "__get__()", "__set__()", ' + 'and\n' + '"__delete__()". If any of those methods are defined ' + 'for an object, it\n' + 'is said to be a descriptor.\n' + '\n' + 'The default behavior for attribute access is to get, ' + 'set, or delete\n' + "the attribute from an object's dictionary. For " + 'instance, "a.x" has a\n' + 'lookup chain starting with "a.__dict__[\'x\']", then\n' + '"type(a).__dict__[\'x\']", and continuing through the ' + 'base classes of\n' + '"type(a)" excluding metaclasses.\n' + '\n' + 'However, if the looked-up value is an object defining ' + 'one of the\n' + 'descriptor methods, then Python may override the ' + 'default behavior and\n' + 'invoke the descriptor method instead. Where this ' + 'occurs in the\n' + 'precedence chain depends on which descriptor methods ' + 'were defined and\n' + 'how they were called.\n' + '\n' + 'The starting point for descriptor invocation is a ' + 'binding, "a.x". How\n' + 'the arguments are assembled depends on "a":\n' + '\n' + 'Direct Call\n' + ' The simplest and least common call is when user ' + 'code directly\n' + ' invokes a descriptor method: "x.__get__(a)".\n' + '\n' + 'Instance Binding\n' + ' If binding to an object instance, "a.x" is ' + 'transformed into the\n' + ' call: "type(a).__dict__[\'x\'].__get__(a, ' + 'type(a))".\n' + '\n' + 'Class Binding\n' + ' If binding to a class, "A.x" is transformed into ' + 'the call:\n' + ' "A.__dict__[\'x\'].__get__(None, A)".\n' + '\n' + 'Super Binding\n' + ' If "a" is an instance of "super", then the binding ' + '"super(B,\n' + ' obj).m()" searches "obj.__class__.__mro__" for the ' + 'base class "A"\n' + ' immediately preceding "B" and then invokes the ' + 'descriptor with the\n' + ' call: "A.__dict__[\'m\'].__get__(obj, ' + 'obj.__class__)".\n' + '\n' + 'For instance bindings, the precedence of descriptor ' + 'invocation depends\n' + 'on the which descriptor methods are defined. A ' + 'descriptor can define\n' + 'any combination of "__get__()", "__set__()" and ' + '"__delete__()". If it\n' + 'does not define "__get__()", then accessing the ' + 'attribute will return\n' + 'the descriptor object itself unless there is a value ' + "in the object's\n" + 'instance dictionary. If the descriptor defines ' + '"__set__()" and/or\n' + '"__delete__()", it is a data descriptor; if it defines ' + 'neither, it is\n' + 'a non-data descriptor. Normally, data descriptors ' + 'define both\n' + '"__get__()" and "__set__()", while non-data ' + 'descriptors have just the\n' + '"__get__()" method. Data descriptors with "__set__()" ' + 'and "__get__()"\n' + 'defined always override a redefinition in an instance ' + 'dictionary. In\n' + 'contrast, non-data descriptors can be overridden by ' + 'instances.\n' + '\n' + 'Python methods (including "staticmethod()" and ' + '"classmethod()") are\n' + 'implemented as non-data descriptors. Accordingly, ' + 'instances can\n' + 'redefine and override methods. 
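A compact illustration of the data/non-data distinction above, with a made-up non-data descriptor (it defines only __get__), so an instance attribute can shadow it:

    class NonData:
        def __get__(self, instance, owner):
            return 'from descriptor'

    class C:
        attr = NonData()

    c = C()
    print(c.attr)                # from descriptor
    c.attr = 'from instance'     # stored in the instance dictionary
    print(c.attr)                # from instance -- non-data descriptors lose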
This allows individual ' + 'instances to\n' + 'acquire behaviors that differ from other instances of ' + 'the same class.\n' + '\n' + 'The "property()" function is implemented as a data ' + 'descriptor.\n' + 'Accordingly, instances cannot override the behavior of ' + 'a property.\n' + '\n' + '\n' + '__slots__\n' + '=========\n' + '\n' + 'By default, instances of classes have a dictionary for ' + 'attribute\n' + 'storage. This wastes space for objects having very ' + 'few instance\n' + 'variables. The space consumption can become acute ' + 'when creating large\n' + 'numbers of instances.\n' + '\n' + 'The default can be overridden by defining *__slots__* ' + 'in a class\n' + 'definition. The *__slots__* declaration takes a ' + 'sequence of instance\n' + 'variables and reserves just enough space in each ' + 'instance to hold a\n' + 'value for each variable. Space is saved because ' + '*__dict__* is not\n' + 'created for each instance.\n' + '\n' + 'object.__slots__\n' + '\n' + ' This class variable can be assigned a string, ' + 'iterable, or sequence\n' + ' of strings with variable names used by instances. ' + '*__slots__*\n' + ' reserves space for the declared variables and ' + 'prevents the\n' + ' automatic creation of *__dict__* and *__weakref__* ' + 'for each\n' + ' instance.\n' + '\n' + '\n' + 'Notes on using *__slots__*\n' + '--------------------------\n' + '\n' + '* When inheriting from a class without *__slots__*, ' + 'the *__dict__*\n' + ' attribute of that class will always be accessible, ' + 'so a *__slots__*\n' + ' definition in the subclass is meaningless.\n' + '\n' + '* Without a *__dict__* variable, instances cannot be ' + 'assigned new\n' + ' variables not listed in the *__slots__* definition. ' + 'Attempts to\n' + ' assign to an unlisted variable name raises ' + '"AttributeError". If\n' + ' dynamic assignment of new variables is desired, then ' + 'add\n' + ' "\'__dict__\'" to the sequence of strings in the ' + '*__slots__*\n' + ' declaration.\n' + '\n' + '* Without a *__weakref__* variable for each instance, ' + 'classes\n' + ' defining *__slots__* do not support weak references ' + 'to its\n' + ' instances. If weak reference support is needed, then ' + 'add\n' + ' "\'__weakref__\'" to the sequence of strings in the ' + '*__slots__*\n' + ' declaration.\n' + '\n' + '* *__slots__* are implemented at the class level by ' + 'creating\n' + ' descriptors (*Implementing Descriptors*) for each ' + 'variable name. As\n' + ' a result, class attributes cannot be used to set ' + 'default values for\n' + ' instance variables defined by *__slots__*; ' + 'otherwise, the class\n' + ' attribute would overwrite the descriptor ' + 'assignment.\n' + '\n' + '* The action of a *__slots__* declaration is limited ' + 'to the class\n' + ' where it is defined. As a result, subclasses will ' + 'have a *__dict__*\n' + ' unless they also define *__slots__* (which must only ' + 'contain names\n' + ' of any *additional* slots).\n' + '\n' + '* If a class defines a slot also defined in a base ' + 'class, the\n' + ' instance variable defined by the base class slot is ' + 'inaccessible\n' + ' (except by retrieving its descriptor directly from ' + 'the base class).\n' + ' This renders the meaning of the program undefined. 
' + 'In the future, a\n' + ' check may be added to prevent this.\n' + '\n' + '* Nonempty *__slots__* does not work for classes ' + 'derived from\n' + ' "variable-length" built-in types such as "int", ' + '"bytes" and "tuple".\n' + '\n' + '* Any non-string iterable may be assigned to ' + '*__slots__*. Mappings\n' + ' may also be used; however, in the future, special ' + 'meaning may be\n' + ' assigned to the values corresponding to each key.\n' + '\n' + '* *__class__* assignment works only if both classes ' + 'have the same\n' + ' *__slots__*.\n', + 'attribute-references': '\n' + 'Attribute references\n' + '********************\n' + '\n' + 'An attribute reference is a primary followed by a ' + 'period and a name:\n' + '\n' + ' attributeref ::= primary "." identifier\n' + '\n' + 'The primary must evaluate to an object of a type ' + 'that supports\n' + 'attribute references, which most objects do. This ' + 'object is then\n' + 'asked to produce the attribute whose name is the ' + 'identifier. This\n' + 'production can be customized by overriding the ' + '"__getattr__()" method.\n' + 'If this attribute is not available, the exception ' + '"AttributeError" is\n' + 'raised. Otherwise, the type and value of the ' + 'object produced is\n' + 'determined by the object. Multiple evaluations of ' + 'the same attribute\n' + 'reference may yield different objects.\n', + 'augassign': '\n' + 'Augmented assignment statements\n' + '*******************************\n' + '\n' + 'Augmented assignment is the combination, in a single ' + 'statement, of a\n' + 'binary operation and an assignment statement:\n' + '\n' + ' augmented_assignment_stmt ::= augtarget augop ' + '(expression_list | yield_expression)\n' + ' augtarget ::= identifier | attributeref | ' + 'subscription | slicing\n' + ' augop ::= "+=" | "-=" | "*=" | "@=" | ' + '"/=" | "//=" | "%=" | "**="\n' + ' | ">>=" | "<<=" | "&=" | "^=" | "|="\n' + '\n' + '(See section *Primaries* for the syntax definitions of the ' + 'last three\n' + 'symbols.)\n' + '\n' + 'An augmented assignment evaluates the target (which, unlike ' + 'normal\n' + 'assignment statements, cannot be an unpacking) and the ' + 'expression\n' + 'list, performs the binary operation specific to the type of ' + 'assignment\n' + 'on the two operands, and assigns the result to the original ' + 'target.\n' + 'The target is only evaluated once.\n' + '\n' + 'An augmented assignment expression like "x += 1" can be ' + 'rewritten as\n' + '"x = x + 1" to achieve a similar, but not exactly equal ' + 'effect. In the\n' + 'augmented version, "x" is only evaluated once. Also, when ' + 'possible,\n' + 'the actual operation is performed *in-place*, meaning that ' + 'rather than\n' + 'creating a new object and assigning that to the target, the ' + 'old object\n' + 'is modified instead.\n' + '\n' + 'Unlike normal assignments, augmented assignments evaluate the ' + 'left-\n' + 'hand side *before* evaluating the right-hand side. For ' + 'example, "a[i]\n' + '+= f(x)" first looks-up "a[i]", then it evaluates "f(x)" and ' + 'performs\n' + 'the addition, and lastly, it writes the result back to ' + '"a[i]".\n' + '\n' + 'With the exception of assigning to tuples and multiple ' + 'targets in a\n' + 'single statement, the assignment done by augmented ' + 'assignment\n' + 'statements is handled the same way as normal assignments. 
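A brief example of the in-place behavior noted above: for lists, "+=" mutates the existing object, while "x = x + y" rebinds the name to a new one.

    a = [1, 2]
    alias = a
    a += [3]            # in-place: extends the object both names refer to
    print(alias)        # [1, 2, 3]
    a = a + [4]         # builds a new list; alias still refers to the old one
    print(alias)        # [1, 2, 3]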
' + 'Similarly,\n' + 'with the exception of the possible *in-place* behavior, the ' + 'binary\n' + 'operation performed by augmented assignment is the same as ' + 'the normal\n' + 'binary operations.\n' + '\n' + 'For targets which are attribute references, the same *caveat ' + 'about\n' + 'class and instance attributes* applies as for regular ' + 'assignments.\n', + 'binary': '\n' + 'Binary arithmetic operations\n' + '****************************\n' + '\n' + 'The binary arithmetic operations have the conventional priority\n' + 'levels. Note that some of these operations also apply to ' + 'certain non-\n' + 'numeric types. Apart from the power operator, there are only ' + 'two\n' + 'levels, one for multiplicative operators and one for additive\n' + 'operators:\n' + '\n' + ' m_expr ::= u_expr | m_expr "*" u_expr | m_expr "@" m_expr |\n' + ' m_expr "//" u_expr| m_expr "/" u_expr |\n' + ' m_expr "%" u_expr\n' + ' a_expr ::= m_expr | a_expr "+" m_expr | a_expr "-" m_expr\n' + '\n' + 'The "*" (multiplication) operator yields the product of its ' + 'arguments.\n' + 'The arguments must either both be numbers, or one argument must ' + 'be an\n' + 'integer and the other must be a sequence. In the former case, ' + 'the\n' + 'numbers are converted to a common type and then multiplied ' + 'together.\n' + 'In the latter case, sequence repetition is performed; a ' + 'negative\n' + 'repetition factor yields an empty sequence.\n' + '\n' + 'The "@" (at) operator is intended to be used for matrix\n' + 'multiplication. No builtin Python types implement this ' + 'operator.\n' + '\n' + 'New in version 3.5.\n' + '\n' + 'The "/" (division) and "//" (floor division) operators yield ' + 'the\n' + 'quotient of their arguments. The numeric arguments are first\n' + 'converted to a common type. Division of integers yields a float, ' + 'while\n' + 'floor division of integers results in an integer; the result is ' + 'that\n' + "of mathematical division with the 'floor' function applied to " + 'the\n' + 'result. Division by zero raises the "ZeroDivisionError" ' + 'exception.\n' + '\n' + 'The "%" (modulo) operator yields the remainder from the division ' + 'of\n' + 'the first argument by the second. The numeric arguments are ' + 'first\n' + 'converted to a common type. A zero right argument raises the\n' + '"ZeroDivisionError" exception. The arguments may be floating ' + 'point\n' + 'numbers, e.g., "3.14%0.7" equals "0.34" (since "3.14" equals ' + '"4*0.7 +\n' + '0.34".) The modulo operator always yields a result with the ' + 'same sign\n' + 'as its second operand (or zero); the absolute value of the ' + 'result is\n' + 'strictly smaller than the absolute value of the second operand ' + '[1].\n' + '\n' + 'The floor division and modulo operators are connected by the ' + 'following\n' + 'identity: "x == (x//y)*y + (x%y)". Floor division and modulo ' + 'are also\n' + 'connected with the built-in function "divmod()": "divmod(x, y) ' + '==\n' + '(x//y, x%y)". [2].\n' + '\n' + 'In addition to performing the modulo operation on numbers, the ' + '"%"\n' + 'operator is also overloaded by string objects to perform ' + 'old-style\n' + 'string formatting (also known as interpolation). The syntax ' + 'for\n' + 'string formatting is described in the Python Library Reference,\n' + 'section *printf-style String Formatting*.\n' + '\n' + 'The floor division operator, the modulo operator, and the ' + '"divmod()"\n' + 'function are not defined for complex numbers. 
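A quick check of the identity "x == (x//y)*y + (x%y)" and the sign rule for "%" described above:

    x, y = 7, -3
    print(x // y, x % y)           # -3 -2  (the remainder takes the sign of y)
    print((x // y) * y + x % y)    # 7
    print(divmod(x, y))            # (-3, -2)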
Instead, convert ' + 'to a\n' + 'floating point number using the "abs()" function if ' + 'appropriate.\n' + '\n' + 'The "+" (addition) operator yields the sum of its arguments. ' + 'The\n' + 'arguments must either both be numbers or both be sequences of ' + 'the same\n' + 'type. In the former case, the numbers are converted to a common ' + 'type\n' + 'and then added together. In the latter case, the sequences are\n' + 'concatenated.\n' + '\n' + 'The "-" (subtraction) operator yields the difference of its ' + 'arguments.\n' + 'The numeric arguments are first converted to a common type.\n', + 'bitwise': '\n' + 'Binary bitwise operations\n' + '*************************\n' + '\n' + 'Each of the three bitwise operations has a different priority ' + 'level:\n' + '\n' + ' and_expr ::= shift_expr | and_expr "&" shift_expr\n' + ' xor_expr ::= and_expr | xor_expr "^" and_expr\n' + ' or_expr ::= xor_expr | or_expr "|" xor_expr\n' + '\n' + 'The "&" operator yields the bitwise AND of its arguments, which ' + 'must\n' + 'be integers.\n' + '\n' + 'The "^" operator yields the bitwise XOR (exclusive OR) of its\n' + 'arguments, which must be integers.\n' + '\n' + 'The "|" operator yields the bitwise (inclusive) OR of its ' + 'arguments,\n' + 'which must be integers.\n', + 'bltin-code-objects': '\n' + 'Code Objects\n' + '************\n' + '\n' + 'Code objects are used by the implementation to ' + 'represent "pseudo-\n' + 'compiled" executable Python code such as a function ' + 'body. They differ\n' + "from function objects because they don't contain a " + 'reference to their\n' + 'global execution environment. Code objects are ' + 'returned by the built-\n' + 'in "compile()" function and can be extracted from ' + 'function objects\n' + 'through their "__code__" attribute. See also the ' + '"code" module.\n' + '\n' + 'A code object can be executed or evaluated by ' + 'passing it (instead of a\n' + 'source string) to the "exec()" or "eval()" built-in ' + 'functions.\n' + '\n' + 'See *The standard type hierarchy* for more ' + 'information.\n', + 'bltin-ellipsis-object': '\n' + 'The Ellipsis Object\n' + '*******************\n' + '\n' + 'This object is commonly used by slicing (see ' + '*Slicings*). It supports\n' + 'no special operations. There is exactly one ' + 'ellipsis object, named\n' + '"Ellipsis" (a built-in name). "type(Ellipsis)()" ' + 'produces the\n' + '"Ellipsis" singleton.\n' + '\n' + 'It is written as "Ellipsis" or "...".\n', + 'bltin-null-object': '\n' + 'The Null Object\n' + '***************\n' + '\n' + "This object is returned by functions that don't " + 'explicitly return a\n' + 'value. It supports no special operations. There is ' + 'exactly one null\n' + 'object, named "None" (a built-in name). ' + '"type(None)()" produces the\n' + 'same singleton.\n' + '\n' + 'It is written as "None".\n', + 'bltin-type-objects': '\n' + 'Type Objects\n' + '************\n' + '\n' + 'Type objects represent the various object types. An ' + "object's type is\n" + 'accessed by the built-in function "type()". There ' + 'are no special\n' + 'operations on types. 
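A few concrete values for the bitwise operators covered above:

    print(0b1100 & 0b1010)   # 8  (0b1000)
    print(0b1100 ^ 0b1010)   # 6  (0b0110)
    print(0b1100 | 0b1010)   # 14 (0b1110)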
The standard module "types" ' + 'defines names for\n' + 'all standard built-in types.\n' + '\n' + 'Types are written like this: "".\n', + 'booleans': '\n' + 'Boolean operations\n' + '******************\n' + '\n' + ' or_test ::= and_test | or_test "or" and_test\n' + ' and_test ::= not_test | and_test "and" not_test\n' + ' not_test ::= comparison | "not" not_test\n' + '\n' + 'In the context of Boolean operations, and also when ' + 'expressions are\n' + 'used by control flow statements, the following values are ' + 'interpreted\n' + 'as false: "False", "None", numeric zero of all types, and ' + 'empty\n' + 'strings and containers (including strings, tuples, lists,\n' + 'dictionaries, sets and frozensets). All other values are ' + 'interpreted\n' + 'as true. User-defined objects can customize their truth value ' + 'by\n' + 'providing a "__bool__()" method.\n' + '\n' + 'The operator "not" yields "True" if its argument is false, ' + '"False"\n' + 'otherwise.\n' + '\n' + 'The expression "x and y" first evaluates *x*; if *x* is false, ' + 'its\n' + 'value is returned; otherwise, *y* is evaluated and the ' + 'resulting value\n' + 'is returned.\n' + '\n' + 'The expression "x or y" first evaluates *x*; if *x* is true, ' + 'its value\n' + 'is returned; otherwise, *y* is evaluated and the resulting ' + 'value is\n' + 'returned.\n' + '\n' + '(Note that neither "and" nor "or" restrict the value and type ' + 'they\n' + 'return to "False" and "True", but rather return the last ' + 'evaluated\n' + 'argument. This is sometimes useful, e.g., if "s" is a string ' + 'that\n' + 'should be replaced by a default value if it is empty, the ' + 'expression\n' + '"s or \'foo\'" yields the desired value. Because "not" has to ' + 'create a\n' + 'new value, it returns a boolean value regardless of the type ' + 'of its\n' + 'argument (for example, "not \'foo\'" produces "False" rather ' + 'than "\'\'".)\n', + 'break': '\n' + 'The "break" statement\n' + '*********************\n' + '\n' + ' break_stmt ::= "break"\n' + '\n' + '"break" may only occur syntactically nested in a "for" or ' + '"while"\n' + 'loop, but not nested in a function or class definition within ' + 'that\n' + 'loop.\n' + '\n' + 'It terminates the nearest enclosing loop, skipping the optional ' + '"else"\n' + 'clause if the loop has one.\n' + '\n' + 'If a "for" loop is terminated by "break", the loop control ' + 'target\n' + 'keeps its current value.\n' + '\n' + 'When "break" passes control out of a "try" statement with a ' + '"finally"\n' + 'clause, that "finally" clause is executed before really leaving ' + 'the\n' + 'loop.\n', + 'callable-types': '\n' + 'Emulating callable objects\n' + '**************************\n' + '\n' + 'object.__call__(self[, args...])\n' + '\n' + ' Called when the instance is "called" as a function; ' + 'if this method\n' + ' is defined, "x(arg1, arg2, ...)" is a shorthand for\n' + ' "x.__call__(arg1, arg2, ...)".\n', + 'calls': '\n' + 'Calls\n' + '*****\n' + '\n' + 'A call calls a callable object (e.g., a *function*) with a ' + 'possibly\n' + 'empty series of *arguments*:\n' + '\n' + ' call ::= primary "(" [argument_list [","] | ' + 'comprehension] ")"\n' + ' argument_list ::= positional_arguments ["," ' + 'keyword_arguments]\n' + ' ["," "*" expression] ["," ' + 'keyword_arguments]\n' + ' ["," "**" expression]\n' + ' | keyword_arguments ["," "*" expression]\n' + ' ["," keyword_arguments] ["," "**" ' + 'expression]\n' + ' | "*" expression ["," keyword_arguments] ' + '["," "**" expression]\n' + ' | "**" expression\n' + ' 
positional_arguments ::= expression ("," expression)*\n' + ' keyword_arguments ::= keyword_item ("," keyword_item)*\n' + ' keyword_item ::= identifier "=" expression\n' + '\n' + 'An optional trailing comma may be present after the positional ' + 'and\n' + 'keyword arguments but does not affect the semantics.\n' + '\n' + 'The primary must evaluate to a callable object (user-defined\n' + 'functions, built-in functions, methods of built-in objects, ' + 'class\n' + 'objects, methods of class instances, and all objects having a\n' + '"__call__()" method are callable). All argument expressions are\n' + 'evaluated before the call is attempted. Please refer to section\n' + '*Function definitions* for the syntax of formal *parameter* ' + 'lists.\n' + '\n' + 'If keyword arguments are present, they are first converted to\n' + 'positional arguments, as follows. First, a list of unfilled ' + 'slots is\n' + 'created for the formal parameters. If there are N positional\n' + 'arguments, they are placed in the first N slots. Next, for each\n' + 'keyword argument, the identifier is used to determine the\n' + 'corresponding slot (if the identifier is the same as the first ' + 'formal\n' + 'parameter name, the first slot is used, and so on). If the slot ' + 'is\n' + 'already filled, a "TypeError" exception is raised. Otherwise, ' + 'the\n' + 'value of the argument is placed in the slot, filling it (even if ' + 'the\n' + 'expression is "None", it fills the slot). When all arguments ' + 'have\n' + 'been processed, the slots that are still unfilled are filled with ' + 'the\n' + 'corresponding default value from the function definition. ' + '(Default\n' + 'values are calculated, once, when the function is defined; thus, ' + 'a\n' + 'mutable object such as a list or dictionary used as default value ' + 'will\n' + "be shared by all calls that don't specify an argument value for " + 'the\n' + 'corresponding slot; this should usually be avoided.) If there ' + 'are any\n' + 'unfilled slots for which no default value is specified, a ' + '"TypeError"\n' + 'exception is raised. Otherwise, the list of filled slots is used ' + 'as\n' + 'the argument list for the call.\n' + '\n' + '**CPython implementation detail:** An implementation may provide\n' + 'built-in functions whose positional parameters do not have names, ' + 'even\n' + "if they are 'named' for the purpose of documentation, and which\n" + 'therefore cannot be supplied by keyword. 
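A classic consequence of the rule above that default values are evaluated once, at function definition time (illustrative function name):

    def append_to(item, bucket=[]):    # the default list is created only once
        bucket.append(item)
        return bucket

    print(append_to(1))    # [1]
    print(append_to(2))    # [1, 2] -- the same list is shared across calls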
In CPython, this is the ' + 'case\n' + 'for functions implemented in C that use "PyArg_ParseTuple()" to ' + 'parse\n' + 'their arguments.\n' + '\n' + 'If there are more positional arguments than there are formal ' + 'parameter\n' + 'slots, a "TypeError" exception is raised, unless a formal ' + 'parameter\n' + 'using the syntax "*identifier" is present; in this case, that ' + 'formal\n' + 'parameter receives a tuple containing the excess positional ' + 'arguments\n' + '(or an empty tuple if there were no excess positional ' + 'arguments).\n' + '\n' + 'If any keyword argument does not correspond to a formal ' + 'parameter\n' + 'name, a "TypeError" exception is raised, unless a formal ' + 'parameter\n' + 'using the syntax "**identifier" is present; in this case, that ' + 'formal\n' + 'parameter receives a dictionary containing the excess keyword\n' + 'arguments (using the keywords as keys and the argument values as\n' + 'corresponding values), or a (new) empty dictionary if there were ' + 'no\n' + 'excess keyword arguments.\n' + '\n' + 'If the syntax "*expression" appears in the function call, ' + '"expression"\n' + 'must evaluate to an iterable. Elements from this iterable are ' + 'treated\n' + 'as if they were additional positional arguments; if there are\n' + 'positional arguments *x1*, ..., *xN*, and "expression" evaluates ' + 'to a\n' + 'sequence *y1*, ..., *yM*, this is equivalent to a call with M+N\n' + 'positional arguments *x1*, ..., *xN*, *y1*, ..., *yM*.\n' + '\n' + 'A consequence of this is that although the "*expression" syntax ' + 'may\n' + 'appear *after* some keyword arguments, it is processed *before* ' + 'the\n' + 'keyword arguments (and the "**expression" argument, if any -- ' + 'see\n' + 'below). So:\n' + '\n' + ' >>> def f(a, b):\n' + ' ... print(a, b)\n' + ' ...\n' + ' >>> f(b=1, *(2,))\n' + ' 2 1\n' + ' >>> f(a=1, *(2,))\n' + ' Traceback (most recent call last):\n' + ' File "", line 1, in ?\n' + " TypeError: f() got multiple values for keyword argument 'a'\n" + ' >>> f(1, *(2,))\n' + ' 1 2\n' + '\n' + 'It is unusual for both keyword arguments and the "*expression" ' + 'syntax\n' + 'to be used in the same call, so in practice this confusion does ' + 'not\n' + 'arise.\n' + '\n' + 'If the syntax "**expression" appears in the function call,\n' + '"expression" must evaluate to a mapping, the contents of which ' + 'are\n' + 'treated as additional keyword arguments. In the case of a ' + 'keyword\n' + 'appearing in both "expression" and as an explicit keyword ' + 'argument, a\n' + '"TypeError" exception is raised.\n' + '\n' + 'Formal parameters using the syntax "*identifier" or ' + '"**identifier"\n' + 'cannot be used as positional argument slots or as keyword ' + 'argument\n' + 'names.\n' + '\n' + 'A call always returns some value, possibly "None", unless it ' + 'raises an\n' + 'exception. How this value is computed depends on the type of ' + 'the\n' + 'callable object.\n' + '\n' + 'If it is---\n' + '\n' + 'a user-defined function:\n' + ' The code block for the function is executed, passing it the\n' + ' argument list. The first thing the code block will do is bind ' + 'the\n' + ' formal parameters to the arguments; this is described in ' + 'section\n' + ' *Function definitions*. 
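A short example of the "**expression" form described above, with an illustrative function and mapping:

    def greet(name, punct='!'):
        return 'Hello, ' + name + punct

    options = {'name': 'world', 'punct': '?'}
    print(greet(**options))    # Hello, world?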
When the code block executes a ' + '"return"\n' + ' statement, this specifies the return value of the function ' + 'call.\n' + '\n' + 'a built-in function or method:\n' + ' The result is up to the interpreter; see *Built-in Functions* ' + 'for\n' + ' the descriptions of built-in functions and methods.\n' + '\n' + 'a class object:\n' + ' A new instance of that class is returned.\n' + '\n' + 'a class instance method:\n' + ' The corresponding user-defined function is called, with an ' + 'argument\n' + ' list that is one longer than the argument list of the call: ' + 'the\n' + ' instance becomes the first argument.\n' + '\n' + 'a class instance:\n' + ' The class must define a "__call__()" method; the effect is ' + 'then the\n' + ' same as if that method was called.\n', + 'class': '\n' + 'Class definitions\n' + '*****************\n' + '\n' + 'A class definition defines a class object (see section *The ' + 'standard\n' + 'type hierarchy*):\n' + '\n' + ' classdef ::= [decorators] "class" classname [inheritance] ' + '":" suite\n' + ' inheritance ::= "(" [parameter_list] ")"\n' + ' classname ::= identifier\n' + '\n' + 'A class definition is an executable statement. The inheritance ' + 'list\n' + 'usually gives a list of base classes (see *Customizing class ' + 'creation*\n' + 'for more advanced uses), so each item in the list should evaluate ' + 'to a\n' + 'class object which allows subclassing. Classes without an ' + 'inheritance\n' + 'list inherit, by default, from the base class "object"; hence,\n' + '\n' + ' class Foo:\n' + ' pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' class Foo(object):\n' + ' pass\n' + '\n' + "The class's suite is then executed in a new execution frame (see\n" + '*Naming and binding*), using a newly created local namespace and ' + 'the\n' + 'original global namespace. (Usually, the suite contains mostly\n' + "function definitions.) When the class's suite finishes " + 'execution, its\n' + 'execution frame is discarded but its local namespace is saved. ' + '[4] A\n' + 'class object is then created using the inheritance list for the ' + 'base\n' + 'classes and the saved local namespace for the attribute ' + 'dictionary.\n' + 'The class name is bound to this class object in the original ' + 'local\n' + 'namespace.\n' + '\n' + 'Class creation can be customized heavily using *metaclasses*.\n' + '\n' + 'Classes can also be decorated: just like when decorating ' + 'functions,\n' + '\n' + ' @f1(arg)\n' + ' @f2\n' + ' class Foo: pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' class Foo: pass\n' + ' Foo = f1(arg)(f2(Foo))\n' + '\n' + 'The evaluation rules for the decorator expressions are the same ' + 'as for\n' + 'function decorators. The result must be a class object, which is ' + 'then\n' + 'bound to the class name.\n' + '\n' + "**Programmer's note:** Variables defined in the class definition " + 'are\n' + 'class attributes; they are shared by instances. Instance ' + 'attributes\n' + 'can be set in a method with "self.name = value". Both class and\n' + 'instance attributes are accessible through the notation ' + '""self.name"",\n' + 'and an instance attribute hides a class attribute with the same ' + 'name\n' + 'when accessed in this way. Class attributes can be used as ' + 'defaults\n' + 'for instance attributes, but using mutable values there can lead ' + 'to\n' + 'unexpected results. 
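A minimal sketch of the warning above about mutable values in class attributes used as shared defaults (made-up class):

    class Bag:
        items = []                     # class attribute, shared by all instances

        def add(self, x):
            self.items.append(x)       # mutates the shared list

    a, b = Bag(), Bag()
    a.add(1)
    print(b.items)                     # [1] -- the change is visible via b too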
*Descriptors* can be used to create ' + 'instance\n' + 'variables with different implementation details.\n' + '\n' + 'See also: **PEP 3115** - Metaclasses in Python 3 **PEP 3129** -\n' + ' Class Decorators\n', + 'comparisons': '\n' + 'Comparisons\n' + '***********\n' + '\n' + 'Unlike C, all comparison operations in Python have the same ' + 'priority,\n' + 'which is lower than that of any arithmetic, shifting or ' + 'bitwise\n' + 'operation. Also unlike C, expressions like "a < b < c" ' + 'have the\n' + 'interpretation that is conventional in mathematics:\n' + '\n' + ' comparison ::= or_expr ( comp_operator or_expr )*\n' + ' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n' + ' | "is" ["not"] | ["not"] "in"\n' + '\n' + 'Comparisons yield boolean values: "True" or "False".\n' + '\n' + 'Comparisons can be chained arbitrarily, e.g., "x < y <= z" ' + 'is\n' + 'equivalent to "x < y and y <= z", except that "y" is ' + 'evaluated only\n' + 'once (but in both cases "z" is not evaluated at all when "x ' + '< y" is\n' + 'found to be false).\n' + '\n' + 'Formally, if *a*, *b*, *c*, ..., *y*, *z* are expressions ' + 'and *op1*,\n' + '*op2*, ..., *opN* are comparison operators, then "a op1 b ' + 'op2 c ... y\n' + 'opN z" is equivalent to "a op1 b and b op2 c and ... y opN ' + 'z", except\n' + 'that each expression is evaluated at most once.\n' + '\n' + 'Note that "a op1 b op2 c" doesn\'t imply any kind of ' + 'comparison between\n' + '*a* and *c*, so that, e.g., "x < y > z" is perfectly legal ' + '(though\n' + 'perhaps not pretty).\n' + '\n' + 'The operators "<", ">", "==", ">=", "<=", and "!=" compare ' + 'the values\n' + 'of two objects. The objects need not have the same type. ' + 'If both are\n' + 'numbers, they are converted to a common type. Otherwise, ' + 'the "==" and\n' + '"!=" operators *always* consider objects of different types ' + 'to be\n' + 'unequal, while the "<", ">", ">=" and "<=" operators raise ' + 'a\n' + '"TypeError" when comparing objects of different types that ' + 'do not\n' + 'implement these operators for the given pair of types. You ' + 'can\n' + 'control comparison behavior of objects of non-built-in ' + 'types by\n' + 'defining rich comparison methods like "__gt__()", described ' + 'in section\n' + '*Basic customization*.\n' + '\n' + 'Comparison of objects of the same type depends on the ' + 'type:\n' + '\n' + '* Numbers are compared arithmetically.\n' + '\n' + '* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are ' + 'special. They\n' + ' are identical to themselves, "x is x" but are not equal ' + 'to\n' + ' themselves, "x != x". Additionally, comparing any value ' + 'to a\n' + ' not-a-number value will return "False". For example, ' + 'both "3 <\n' + ' float(\'NaN\')" and "float(\'NaN\') < 3" will return ' + '"False".\n' + '\n' + '* Bytes objects are compared lexicographically using the ' + 'numeric\n' + ' values of their elements.\n' + '\n' + '* Strings are compared lexicographically using the numeric\n' + ' equivalents (the result of the built-in function "ord()") ' + 'of their\n' + " characters. [3] String and bytes object can't be " + 'compared!\n' + '\n' + '* Tuples and lists are compared lexicographically using ' + 'comparison\n' + ' of corresponding elements. This means that to compare ' + 'equal, each\n' + ' element must compare equal and the two sequences must be ' + 'of the same\n' + ' type and have the same length.\n' + '\n' + ' If not equal, the sequences are ordered the same as their ' + 'first\n' + ' differing elements. 
For example, "[1,2,x] <= [1,2,y]" ' + 'has the same\n' + ' value as "x <= y". If the corresponding element does not ' + 'exist, the\n' + ' shorter sequence is ordered first (for example, "[1,2] < ' + '[1,2,3]").\n' + '\n' + '* Mappings (dictionaries) compare equal if and only if they ' + 'have the\n' + ' same "(key, value)" pairs. Order comparisons "(\'<\', ' + "'<=', '>=',\n" + ' \'>\')" raise "TypeError".\n' + '\n' + '* Sets and frozensets define comparison operators to mean ' + 'subset and\n' + ' superset tests. Those relations do not define total ' + 'orderings (the\n' + ' two sets "{1,2}" and "{2,3}" are not equal, nor subsets ' + 'of one\n' + ' another, nor supersets of one another). Accordingly, ' + 'sets are not\n' + ' appropriate arguments for functions which depend on total ' + 'ordering.\n' + ' For example, "min()", "max()", and "sorted()" produce ' + 'undefined\n' + ' results given a list of sets as inputs.\n' + '\n' + '* Most other objects of built-in types compare unequal ' + 'unless they\n' + ' are the same object; the choice whether one object is ' + 'considered\n' + ' smaller or larger than another one is made arbitrarily ' + 'but\n' + ' consistently within one execution of a program.\n' + '\n' + 'Comparison of objects of differing types depends on whether ' + 'either of\n' + 'the types provide explicit support for the comparison. ' + 'Most numeric\n' + 'types can be compared with one another. When cross-type ' + 'comparison is\n' + 'not supported, the comparison method returns ' + '"NotImplemented".\n' + '\n' + 'The operators "in" and "not in" test for membership. "x in ' + 's"\n' + 'evaluates to true if *x* is a member of *s*, and false ' + 'otherwise. "x\n' + 'not in s" returns the negation of "x in s". All built-in ' + 'sequences\n' + 'and set types support this as well as dictionary, for which ' + '"in" tests\n' + 'whether the dictionary has a given key. For container types ' + 'such as\n' + 'list, tuple, set, frozenset, dict, or collections.deque, ' + 'the\n' + 'expression "x in y" is equivalent to "any(x is e or x == e ' + 'for e in\n' + 'y)".\n' + '\n' + 'For the string and bytes types, "x in y" is true if and ' + 'only if *x* is\n' + 'a substring of *y*. An equivalent test is "y.find(x) != ' + '-1". Empty\n' + 'strings are always considered to be a substring of any ' + 'other string,\n' + 'so """ in "abc"" will return "True".\n' + '\n' + 'For user-defined classes which define the "__contains__()" ' + 'method, "x\n' + 'in y" is true if and only if "y.__contains__(x)" is true.\n' + '\n' + 'For user-defined classes which do not define ' + '"__contains__()" but do\n' + 'define "__iter__()", "x in y" is true if some value "z" ' + 'with "x == z"\n' + 'is produced while iterating over "y". If an exception is ' + 'raised\n' + 'during the iteration, it is as if "in" raised that ' + 'exception.\n' + '\n' + 'Lastly, the old-style iteration protocol is tried: if a ' + 'class defines\n' + '"__getitem__()", "x in y" is true if and only if there is a ' + 'non-\n' + 'negative integer index *i* such that "x == y[i]", and all ' + 'lower\n' + 'integer indices do not raise "IndexError" exception. (If ' + 'any other\n' + 'exception is raised, it is as if "in" raised that ' + 'exception).\n' + '\n' + 'The operator "not in" is defined to have the inverse true ' + 'value of\n' + '"in".\n' + '\n' + 'The operators "is" and "is not" test for object identity: ' + '"x is y" is\n' + 'true if and only if *x* and *y* are the same object. "x is ' + 'not y"\n' + 'yields the inverse truth value. 
[4]\n', + 'compound': '\n' + 'Compound statements\n' + '*******************\n' + '\n' + 'Compound statements contain (groups of) other statements; they ' + 'affect\n' + 'or control the execution of those other statements in some ' + 'way. In\n' + 'general, compound statements span multiple lines, although in ' + 'simple\n' + 'incarnations a whole compound statement may be contained in ' + 'one line.\n' + '\n' + 'The "if", "while" and "for" statements implement traditional ' + 'control\n' + 'flow constructs. "try" specifies exception handlers and/or ' + 'cleanup\n' + 'code for a group of statements, while the "with" statement ' + 'allows the\n' + 'execution of initialization and finalization code around a ' + 'block of\n' + 'code. Function and class definitions are also syntactically ' + 'compound\n' + 'statements.\n' + '\n' + "A compound statement consists of one or more 'clauses.' A " + 'clause\n' + "consists of a header and a 'suite.' The clause headers of a\n" + 'particular compound statement are all at the same indentation ' + 'level.\n' + 'Each clause header begins with a uniquely identifying keyword ' + 'and ends\n' + 'with a colon. A suite is a group of statements controlled by ' + 'a\n' + 'clause. A suite can be one or more semicolon-separated ' + 'simple\n' + 'statements on the same line as the header, following the ' + "header's\n" + 'colon, or it can be one or more indented statements on ' + 'subsequent\n' + 'lines. Only the latter form of a suite can contain nested ' + 'compound\n' + 'statements; the following is illegal, mostly because it ' + "wouldn't be\n" + 'clear to which "if" clause a following "else" clause would ' + 'belong:\n' + '\n' + ' if test1: if test2: print(x)\n' + '\n' + 'Also note that the semicolon binds tighter than the colon in ' + 'this\n' + 'context, so that in the following example, either all or none ' + 'of the\n' + '"print()" calls are executed:\n' + '\n' + ' if x < y < z: print(x); print(y); print(z)\n' + '\n' + 'Summarizing:\n' + '\n' + ' compound_stmt ::= if_stmt\n' + ' | while_stmt\n' + ' | for_stmt\n' + ' | try_stmt\n' + ' | with_stmt\n' + ' | funcdef\n' + ' | classdef\n' + ' | async_with_stmt\n' + ' | async_for_stmt\n' + ' | async_funcdef\n' + ' suite ::= stmt_list NEWLINE | NEWLINE INDENT ' + 'statement+ DEDENT\n' + ' statement ::= stmt_list NEWLINE | compound_stmt\n' + ' stmt_list ::= simple_stmt (";" simple_stmt)* [";"]\n' + '\n' + 'Note that statements always end in a "NEWLINE" possibly ' + 'followed by a\n' + '"DEDENT". 
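As a small sketch of the two suite forms (assuming some variable "x" is already defined), the same "if" statement can carry its suite on the header line or as indented statements:

    if x: print('x is true'); print('same one-line suite')

    if x:
        print('x is true')
        print('same suite, written as indented statements')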
Also note that optional continuation clauses always ' + 'begin\n' + 'with a keyword that cannot start a statement, thus there are ' + 'no\n' + 'ambiguities (the \'dangling "else"\' problem is solved in ' + 'Python by\n' + 'requiring nested "if" statements to be indented).\n' + '\n' + 'The formatting of the grammar rules in the following sections ' + 'places\n' + 'each clause on a separate line for clarity.\n' + '\n' + '\n' + 'The "if" statement\n' + '==================\n' + '\n' + 'The "if" statement is used for conditional execution:\n' + '\n' + ' if_stmt ::= "if" expression ":" suite\n' + ' ( "elif" expression ":" suite )*\n' + ' ["else" ":" suite]\n' + '\n' + 'It selects exactly one of the suites by evaluating the ' + 'expressions one\n' + 'by one until one is found to be true (see section *Boolean ' + 'operations*\n' + 'for the definition of true and false); then that suite is ' + 'executed\n' + '(and no other part of the "if" statement is executed or ' + 'evaluated).\n' + 'If all expressions are false, the suite of the "else" clause, ' + 'if\n' + 'present, is executed.\n' + '\n' + '\n' + 'The "while" statement\n' + '=====================\n' + '\n' + 'The "while" statement is used for repeated execution as long ' + 'as an\n' + 'expression is true:\n' + '\n' + ' while_stmt ::= "while" expression ":" suite\n' + ' ["else" ":" suite]\n' + '\n' + 'This repeatedly tests the expression and, if it is true, ' + 'executes the\n' + 'first suite; if the expression is false (which may be the ' + 'first time\n' + 'it is tested) the suite of the "else" clause, if present, is ' + 'executed\n' + 'and the loop terminates.\n' + '\n' + 'A "break" statement executed in the first suite terminates the ' + 'loop\n' + 'without executing the "else" clause\'s suite. A "continue" ' + 'statement\n' + 'executed in the first suite skips the rest of the suite and ' + 'goes back\n' + 'to testing the expression.\n' + '\n' + '\n' + 'The "for" statement\n' + '===================\n' + '\n' + 'The "for" statement is used to iterate over the elements of a ' + 'sequence\n' + '(such as a string, tuple or list) or other iterable object:\n' + '\n' + ' for_stmt ::= "for" target_list "in" expression_list ":" ' + 'suite\n' + ' ["else" ":" suite]\n' + '\n' + 'The expression list is evaluated once; it should yield an ' + 'iterable\n' + 'object. An iterator is created for the result of the\n' + '"expression_list". The suite is then executed once for each ' + 'item\n' + 'provided by the iterator, in the order returned by the ' + 'iterator. Each\n' + 'item in turn is assigned to the target list using the standard ' + 'rules\n' + 'for assignments (see *Assignment statements*), and then the ' + 'suite is\n' + 'executed. When the items are exhausted (which is immediately ' + 'when the\n' + 'sequence is empty or an iterator raises a "StopIteration" ' + 'exception),\n' + 'the suite in the "else" clause, if present, is executed, and ' + 'the loop\n' + 'terminates.\n' + '\n' + 'A "break" statement executed in the first suite terminates the ' + 'loop\n' + 'without executing the "else" clause\'s suite. 
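A minimal sketch, with made-up data, of how "break" suppresses the loop's "else" suite:

    >>> for n in [1, 3, 4]:
    ...     if n % 2 == 0:
    ...         print('found an even number:', n)
    ...         break
    ... else:
    ...     print('no even number found')
    ...
    found an even number: 4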
A "continue" ' + 'statement\n' + 'executed in the first suite skips the rest of the suite and ' + 'continues\n' + 'with the next item, or with the "else" clause if there is no ' + 'next\n' + 'item.\n' + '\n' + 'The for-loop makes assignments to the variables(s) in the ' + 'target list.\n' + 'This overwrites all previous assignments to those variables ' + 'including\n' + 'those made in the suite of the for-loop:\n' + '\n' + ' for i in range(10):\n' + ' print(i)\n' + ' i = 5 # this will not affect the for-loop\n' + ' # because i will be overwritten with ' + 'the next\n' + ' # index in the range\n' + '\n' + 'Names in the target list are not deleted when the loop is ' + 'finished,\n' + 'but if the sequence is empty, they will not have been assigned ' + 'to at\n' + 'all by the loop. Hint: the built-in function "range()" ' + 'returns an\n' + 'iterator of integers suitable to emulate the effect of ' + 'Pascal\'s "for i\n' + ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, ' + '2]".\n' + '\n' + 'Note: There is a subtlety when the sequence is being modified ' + 'by the\n' + ' loop (this can only occur for mutable sequences, i.e. ' + 'lists). An\n' + ' internal counter is used to keep track of which item is used ' + 'next,\n' + ' and this is incremented on each iteration. When this ' + 'counter has\n' + ' reached the length of the sequence the loop terminates. ' + 'This means\n' + ' that if the suite deletes the current (or a previous) item ' + 'from the\n' + ' sequence, the next item will be skipped (since it gets the ' + 'index of\n' + ' the current item which has already been treated). Likewise, ' + 'if the\n' + ' suite inserts an item in the sequence before the current ' + 'item, the\n' + ' current item will be treated again the next time through the ' + 'loop.\n' + ' This can lead to nasty bugs that can be avoided by making a\n' + ' temporary copy using a slice of the whole sequence, e.g.,\n' + '\n' + ' for x in a[:]:\n' + ' if x < 0: a.remove(x)\n' + '\n' + '\n' + 'The "try" statement\n' + '===================\n' + '\n' + 'The "try" statement specifies exception handlers and/or ' + 'cleanup code\n' + 'for a group of statements:\n' + '\n' + ' try_stmt ::= try1_stmt | try2_stmt\n' + ' try1_stmt ::= "try" ":" suite\n' + ' ("except" [expression ["as" identifier]] ":" ' + 'suite)+\n' + ' ["else" ":" suite]\n' + ' ["finally" ":" suite]\n' + ' try2_stmt ::= "try" ":" suite\n' + ' "finally" ":" suite\n' + '\n' + 'The "except" clause(s) specify one or more exception handlers. ' + 'When no\n' + 'exception occurs in the "try" clause, no exception handler is\n' + 'executed. When an exception occurs in the "try" suite, a ' + 'search for an\n' + 'exception handler is started. This search inspects the except ' + 'clauses\n' + 'in turn until one is found that matches the exception. An ' + 'expression-\n' + 'less except clause, if present, must be last; it matches any\n' + 'exception. For an except clause with an expression, that ' + 'expression\n' + 'is evaluated, and the clause matches the exception if the ' + 'resulting\n' + 'object is "compatible" with the exception. 
An object is ' + 'compatible\n' + 'with an exception if it is the class or a base class of the ' + 'exception\n' + 'object or a tuple containing an item compatible with the ' + 'exception.\n' + '\n' + 'If no except clause matches the exception, the search for an ' + 'exception\n' + 'handler continues in the surrounding code and on the ' + 'invocation stack.\n' + '[1]\n' + '\n' + 'If the evaluation of an expression in the header of an except ' + 'clause\n' + 'raises an exception, the original search for a handler is ' + 'canceled and\n' + 'a search starts for the new exception in the surrounding code ' + 'and on\n' + 'the call stack (it is treated as if the entire "try" statement ' + 'raised\n' + 'the exception).\n' + '\n' + 'When a matching except clause is found, the exception is ' + 'assigned to\n' + 'the target specified after the "as" keyword in that except ' + 'clause, if\n' + "present, and the except clause's suite is executed. All " + 'except\n' + 'clauses must have an executable block. When the end of this ' + 'block is\n' + 'reached, execution continues normally after the entire try ' + 'statement.\n' + '(This means that if two nested handlers exist for the same ' + 'exception,\n' + 'and the exception occurs in the try clause of the inner ' + 'handler, the\n' + 'outer handler will not handle the exception.)\n' + '\n' + 'When an exception has been assigned using "as target", it is ' + 'cleared\n' + 'at the end of the except clause. This is as if\n' + '\n' + ' except E as N:\n' + ' foo\n' + '\n' + 'was translated to\n' + '\n' + ' except E as N:\n' + ' try:\n' + ' foo\n' + ' finally:\n' + ' del N\n' + '\n' + 'This means the exception must be assigned to a different name ' + 'to be\n' + 'able to refer to it after the except clause. Exceptions are ' + 'cleared\n' + 'because with the traceback attached to them, they form a ' + 'reference\n' + 'cycle with the stack frame, keeping all locals in that frame ' + 'alive\n' + 'until the next garbage collection occurs.\n' + '\n' + "Before an except clause's suite is executed, details about " + 'the\n' + 'exception are stored in the "sys" module and can be accessed ' + 'via\n' + '"sys.exc_info()". "sys.exc_info()" returns a 3-tuple ' + 'consisting of the\n' + 'exception class, the exception instance and a traceback object ' + '(see\n' + 'section *The standard type hierarchy*) identifying the point ' + 'in the\n' + 'program where the exception occurred. "sys.exc_info()" values ' + 'are\n' + 'restored to their previous values (before the call) when ' + 'returning\n' + 'from a function that handled an exception.\n' + '\n' + 'The optional "else" clause is executed if and when control ' + 'flows off\n' + 'the end of the "try" clause. [2] Exceptions in the "else" ' + 'clause are\n' + 'not handled by the preceding "except" clauses.\n' + '\n' + 'If "finally" is present, it specifies a \'cleanup\' handler. ' + 'The "try"\n' + 'clause is executed, including any "except" and "else" ' + 'clauses. If an\n' + 'exception occurs in any of the clauses and is not handled, ' + 'the\n' + 'exception is temporarily saved. The "finally" clause is ' + 'executed. If\n' + 'there is a saved exception it is re-raised at the end of the ' + '"finally"\n' + 'clause. If the "finally" clause raises another exception, the ' + 'saved\n' + 'exception is set as the context of the new exception. If the ' + '"finally"\n' + 'clause executes a "return" or "break" statement, the saved ' + 'exception\n' + 'is discarded:\n' + '\n' + ' >>> def f():\n' + ' ... try:\n' + ' ... 1/0\n' + ' ... 
finally:\n' + ' ... return 42\n' + ' ...\n' + ' >>> f()\n' + ' 42\n' + '\n' + 'The exception information is not available to the program ' + 'during\n' + 'execution of the "finally" clause.\n' + '\n' + 'When a "return", "break" or "continue" statement is executed ' + 'in the\n' + '"try" suite of a "try"..."finally" statement, the "finally" ' + 'clause is\n' + 'also executed \'on the way out.\' A "continue" statement is ' + 'illegal in\n' + 'the "finally" clause. (The reason is a problem with the ' + 'current\n' + 'implementation --- this restriction may be lifted in the ' + 'future).\n' + '\n' + 'The return value of a function is determined by the last ' + '"return"\n' + 'statement executed. Since the "finally" clause always ' + 'executes, a\n' + '"return" statement executed in the "finally" clause will ' + 'always be the\n' + 'last one executed:\n' + '\n' + ' >>> def foo():\n' + ' ... try:\n' + " ... return 'try'\n" + ' ... finally:\n' + " ... return 'finally'\n" + ' ...\n' + ' >>> foo()\n' + " 'finally'\n" + '\n' + 'Additional information on exceptions can be found in section\n' + '*Exceptions*, and information on using the "raise" statement ' + 'to\n' + 'generate exceptions may be found in section *The raise ' + 'statement*.\n' + '\n' + '\n' + 'The "with" statement\n' + '====================\n' + '\n' + 'The "with" statement is used to wrap the execution of a block ' + 'with\n' + 'methods defined by a context manager (see section *With ' + 'Statement\n' + 'Context Managers*). This allows common ' + '"try"..."except"..."finally"\n' + 'usage patterns to be encapsulated for convenient reuse.\n' + '\n' + ' with_stmt ::= "with" with_item ("," with_item)* ":" suite\n' + ' with_item ::= expression ["as" target]\n' + '\n' + 'The execution of the "with" statement with one "item" proceeds ' + 'as\n' + 'follows:\n' + '\n' + '1. The context expression (the expression given in the ' + '"with_item")\n' + ' is evaluated to obtain a context manager.\n' + '\n' + '2. The context manager\'s "__exit__()" is loaded for later ' + 'use.\n' + '\n' + '3. The context manager\'s "__enter__()" method is invoked.\n' + '\n' + '4. If a target was included in the "with" statement, the ' + 'return\n' + ' value from "__enter__()" is assigned to it.\n' + '\n' + ' Note: The "with" statement guarantees that if the ' + '"__enter__()"\n' + ' method returns without an error, then "__exit__()" will ' + 'always be\n' + ' called. Thus, if an error occurs during the assignment to ' + 'the\n' + ' target list, it will be treated the same as an error ' + 'occurring\n' + ' within the suite would be. See step 6 below.\n' + '\n' + '5. The suite is executed.\n' + '\n' + '6. The context manager\'s "__exit__()" method is invoked. If ' + 'an\n' + ' exception caused the suite to be exited, its type, value, ' + 'and\n' + ' traceback are passed as arguments to "__exit__()". 
' + 'Otherwise, three\n' + ' "None" arguments are supplied.\n' + '\n' + ' If the suite was exited due to an exception, and the return ' + 'value\n' + ' from the "__exit__()" method was false, the exception is ' + 'reraised.\n' + ' If the return value was true, the exception is suppressed, ' + 'and\n' + ' execution continues with the statement following the ' + '"with"\n' + ' statement.\n' + '\n' + ' If the suite was exited for any reason other than an ' + 'exception, the\n' + ' return value from "__exit__()" is ignored, and execution ' + 'proceeds\n' + ' at the normal location for the kind of exit that was ' + 'taken.\n' + '\n' + 'With more than one item, the context managers are processed as ' + 'if\n' + 'multiple "with" statements were nested:\n' + '\n' + ' with A() as a, B() as b:\n' + ' suite\n' + '\n' + 'is equivalent to\n' + '\n' + ' with A() as a:\n' + ' with B() as b:\n' + ' suite\n' + '\n' + 'Changed in version 3.1: Support for multiple context ' + 'expressions.\n' + '\n' + 'See also: **PEP 0343** - The "with" statement\n' + '\n' + ' The specification, background, and examples for the ' + 'Python "with"\n' + ' statement.\n' + '\n' + '\n' + 'Function definitions\n' + '====================\n' + '\n' + 'A function definition defines a user-defined function object ' + '(see\n' + 'section *The standard type hierarchy*):\n' + '\n' + ' funcdef ::= [decorators] "def" funcname "(" ' + '[parameter_list] ")" ["->" expression] ":" suite\n' + ' decorators ::= decorator+\n' + ' decorator ::= "@" dotted_name ["(" [parameter_list ' + '[","]] ")"] NEWLINE\n' + ' dotted_name ::= identifier ("." identifier)*\n' + ' parameter_list ::= (defparameter ",")*\n' + ' | "*" [parameter] ("," defparameter)* ' + '["," "**" parameter]\n' + ' | "**" parameter\n' + ' | defparameter [","] )\n' + ' parameter ::= identifier [":" expression]\n' + ' defparameter ::= parameter ["=" expression]\n' + ' funcname ::= identifier\n' + '\n' + 'A function definition is an executable statement. Its ' + 'execution binds\n' + 'the function name in the current local namespace to a function ' + 'object\n' + '(a wrapper around the executable code for the function). ' + 'This\n' + 'function object contains a reference to the current global ' + 'namespace\n' + 'as the global namespace to be used when the function is ' + 'called.\n' + '\n' + 'The function definition does not execute the function body; ' + 'this gets\n' + 'executed only when the function is called. [3]\n' + '\n' + 'A function definition may be wrapped by one or more ' + '*decorator*\n' + 'expressions. Decorator expressions are evaluated when the ' + 'function is\n' + 'defined, in the scope that contains the function definition. ' + 'The\n' + 'result must be a callable, which is invoked with the function ' + 'object\n' + 'as the only argument. The returned value is bound to the ' + 'function name\n' + 'instead of the function object. Multiple decorators are ' + 'applied in\n' + 'nested fashion. For example, the following code\n' + '\n' + ' @f1(arg)\n' + ' @f2\n' + ' def func(): pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' def func(): pass\n' + ' func = f1(arg)(f2(func))\n' + '\n' + 'When one or more *parameters* have the form *parameter* "="\n' + '*expression*, the function is said to have "default parameter ' + 'values."\n' + 'For a parameter with a default value, the corresponding ' + '*argument* may\n' + "be omitted from a call, in which case the parameter's default " + 'value is\n' + 'substituted. 
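Returning to the decorator equivalence shown above, a runnable sketch with hypothetical decorators that merely announce themselves; the innermost decorator, "f2", is applied first:

    >>> def f1(arg):
    ...     def wrap(func):
    ...         print('applying f1 with', arg)
    ...         return func
    ...     return wrap
    ...
    >>> def f2(func):
    ...     print('applying f2')
    ...     return func
    ...
    >>> @f1('arg')
    ... @f2
    ... def func(): pass
    ...
    applying f2
    applying f1 with arg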
If a parameter has a default value, all ' + 'following\n' + 'parameters up until the ""*"" must also have a default value ' + '--- this\n' + 'is a syntactic restriction that is not expressed by the ' + 'grammar.\n' + '\n' + '**Default parameter values are evaluated from left to right ' + 'when the\n' + 'function definition is executed.** This means that the ' + 'expression is\n' + 'evaluated once, when the function is defined, and that the ' + 'same "pre-\n' + 'computed" value is used for each call. This is especially ' + 'important\n' + 'to understand when a default parameter is a mutable object, ' + 'such as a\n' + 'list or a dictionary: if the function modifies the object ' + '(e.g. by\n' + 'appending an item to a list), the default value is in effect ' + 'modified.\n' + 'This is generally not what was intended. A way around this is ' + 'to use\n' + '"None" as the default, and explicitly test for it in the body ' + 'of the\n' + 'function, e.g.:\n' + '\n' + ' def whats_on_the_telly(penguin=None):\n' + ' if penguin is None:\n' + ' penguin = []\n' + ' penguin.append("property of the zoo")\n' + ' return penguin\n' + '\n' + 'Function call semantics are described in more detail in ' + 'section\n' + '*Calls*. A function call always assigns values to all ' + 'parameters\n' + 'mentioned in the parameter list, either from position ' + 'arguments, from\n' + 'keyword arguments, or from default values. If the form\n' + '""*identifier"" is present, it is initialized to a tuple ' + 'receiving any\n' + 'excess positional parameters, defaulting to the empty tuple. ' + 'If the\n' + 'form ""**identifier"" is present, it is initialized to a new\n' + 'dictionary receiving any excess keyword arguments, defaulting ' + 'to a new\n' + 'empty dictionary. Parameters after ""*"" or ""*identifier"" ' + 'are\n' + 'keyword-only parameters and may only be passed used keyword ' + 'arguments.\n' + '\n' + 'Parameters may have annotations of the form "": expression"" ' + 'following\n' + 'the parameter name. Any parameter may have an annotation even ' + 'those\n' + 'of the form "*identifier" or "**identifier". Functions may ' + 'have\n' + '"return" annotation of the form ""-> expression"" after the ' + 'parameter\n' + 'list. These annotations can be any valid Python expression ' + 'and are\n' + 'evaluated when the function definition is executed. ' + 'Annotations may\n' + 'be evaluated in a different order than they appear in the ' + 'source code.\n' + 'The presence of annotations does not change the semantics of ' + 'a\n' + 'function. The annotation values are available as values of a\n' + "dictionary keyed by the parameters' names in the " + '"__annotations__"\n' + 'attribute of the function object.\n' + '\n' + 'It is also possible to create anonymous functions (functions ' + 'not bound\n' + 'to a name), for immediate use in expressions. This uses ' + 'lambda\n' + 'expressions, described in section *Lambdas*. Note that the ' + 'lambda\n' + 'expression is merely a shorthand for a simplified function ' + 'definition;\n' + 'a function defined in a ""def"" statement can be passed around ' + 'or\n' + 'assigned to another name just like a function defined by a ' + 'lambda\n' + 'expression. The ""def"" form is actually more powerful since ' + 'it\n' + 'allows the execution of multiple statements and annotations.\n' + '\n' + "**Programmer's note:** Functions are first-class objects. A " + '""def""\n' + 'statement executed inside a function definition defines a ' + 'local\n' + 'function that can be returned or passed around. 
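For instance, a minimal sketch (with hypothetical names) of such a local function being created and returned:

    >>> def make_counter():
    ...     count = 0
    ...     def bump():               # a local function object
    ...         nonlocal count        # "count" is a free variable of bump()
    ...         count += 1
    ...         return count
    ...     return bump               # returned like any other object
    ...
    >>> counter = make_counter()
    >>> counter(), counter()
    (1, 2)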
Free ' + 'variables used\n' + 'in the nested function can access the local variables of the ' + 'function\n' + 'containing the def. See section *Naming and binding* for ' + 'details.\n' + '\n' + 'See also: **PEP 3107** - Function Annotations\n' + '\n' + ' The original specification for function annotations.\n' + '\n' + '\n' + 'Class definitions\n' + '=================\n' + '\n' + 'A class definition defines a class object (see section *The ' + 'standard\n' + 'type hierarchy*):\n' + '\n' + ' classdef ::= [decorators] "class" classname ' + '[inheritance] ":" suite\n' + ' inheritance ::= "(" [parameter_list] ")"\n' + ' classname ::= identifier\n' + '\n' + 'A class definition is an executable statement. The ' + 'inheritance list\n' + 'usually gives a list of base classes (see *Customizing class ' + 'creation*\n' + 'for more advanced uses), so each item in the list should ' + 'evaluate to a\n' + 'class object which allows subclassing. Classes without an ' + 'inheritance\n' + 'list inherit, by default, from the base class "object"; ' + 'hence,\n' + '\n' + ' class Foo:\n' + ' pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' class Foo(object):\n' + ' pass\n' + '\n' + "The class's suite is then executed in a new execution frame " + '(see\n' + '*Naming and binding*), using a newly created local namespace ' + 'and the\n' + 'original global namespace. (Usually, the suite contains ' + 'mostly\n' + "function definitions.) When the class's suite finishes " + 'execution, its\n' + 'execution frame is discarded but its local namespace is saved. ' + '[4] A\n' + 'class object is then created using the inheritance list for ' + 'the base\n' + 'classes and the saved local namespace for the attribute ' + 'dictionary.\n' + 'The class name is bound to this class object in the original ' + 'local\n' + 'namespace.\n' + '\n' + 'Class creation can be customized heavily using *metaclasses*.\n' + '\n' + 'Classes can also be decorated: just like when decorating ' + 'functions,\n' + '\n' + ' @f1(arg)\n' + ' @f2\n' + ' class Foo: pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' class Foo: pass\n' + ' Foo = f1(arg)(f2(Foo))\n' + '\n' + 'The evaluation rules for the decorator expressions are the ' + 'same as for\n' + 'function decorators. The result must be a class object, which ' + 'is then\n' + 'bound to the class name.\n' + '\n' + "**Programmer's note:** Variables defined in the class " + 'definition are\n' + 'class attributes; they are shared by instances. Instance ' + 'attributes\n' + 'can be set in a method with "self.name = value". Both class ' + 'and\n' + 'instance attributes are accessible through the notation ' + '""self.name"",\n' + 'and an instance attribute hides a class attribute with the ' + 'same name\n' + 'when accessed in this way. Class attributes can be used as ' + 'defaults\n' + 'for instance attributes, but using mutable values there can ' + 'lead to\n' + 'unexpected results. *Descriptors* can be used to create ' + 'instance\n' + 'variables with different implementation details.\n' + '\n' + 'See also: **PEP 3115** - Metaclasses in Python 3 **PEP 3129** ' + '-\n' + ' Class Decorators\n' + '\n' + '\n' + 'Coroutines\n' + '==========\n' + '\n' + 'New in version 3.5.\n' + '\n' + '\n' + 'Coroutine function definition\n' + '-----------------------------\n' + '\n' + ' async_funcdef ::= [decorators] "async" "def" funcname "(" ' + '[parameter_list] ")" ["->" expression] ":" suite\n' + '\n' + 'Execution of Python coroutines can be suspended and resumed at ' + 'many\n' + 'points (see *coroutine*). 
In the body of a coroutine, any ' + '"await" and\n' + '"async" identifiers become reserved keywords; "await" ' + 'expressions,\n' + '"async for" and "async with" can only be used in coroutine ' + 'bodies.\n' + '\n' + 'Functions defined with "async def" syntax are always ' + 'coroutine\n' + 'functions, even if they do not contain "await" or "async" ' + 'keywords.\n' + '\n' + 'It is a "SyntaxError" to use "yield" expressions in "async ' + 'def"\n' + 'coroutines.\n' + '\n' + 'An example of a coroutine function:\n' + '\n' + ' async def func(param1, param2):\n' + ' do_stuff()\n' + ' await some_coroutine()\n' + '\n' + '\n' + 'The "async for" statement\n' + '-------------------------\n' + '\n' + ' async_for_stmt ::= "async" for_stmt\n' + '\n' + 'An *asynchronous iterable* is able to call asynchronous code ' + 'in its\n' + '*iter* implementation, and *asynchronous iterator* can call\n' + 'asynchronous code in its *next* method.\n' + '\n' + 'The "async for" statement allows convenient iteration over\n' + 'asynchronous iterators.\n' + '\n' + 'The following code:\n' + '\n' + ' async for TARGET in ITER:\n' + ' BLOCK\n' + ' else:\n' + ' BLOCK2\n' + '\n' + 'Is semantically equivalent to:\n' + '\n' + ' iter = (ITER)\n' + ' iter = await type(iter).__aiter__(iter)\n' + ' running = True\n' + ' while running:\n' + ' try:\n' + ' TARGET = await type(iter).__anext__(iter)\n' + ' except StopAsyncIteration:\n' + ' running = False\n' + ' else:\n' + ' BLOCK\n' + ' else:\n' + ' BLOCK2\n' + '\n' + 'See also "__aiter__()" and "__anext__()" for details.\n' + '\n' + 'It is a "SyntaxError" to use "async for" statement outside of ' + 'an\n' + '"async def" function.\n' + '\n' + '\n' + 'The "async with" statement\n' + '--------------------------\n' + '\n' + ' async_with_stmt ::= "async" with_stmt\n' + '\n' + 'An *asynchronous context manager* is a *context manager* that ' + 'is able\n' + 'to suspend execution in its *enter* and *exit* methods.\n' + '\n' + 'The following code:\n' + '\n' + ' async with EXPR as VAR:\n' + ' BLOCK\n' + '\n' + 'Is semantically equivalent to:\n' + '\n' + ' mgr = (EXPR)\n' + ' aexit = type(mgr).__aexit__\n' + ' aenter = type(mgr).__aenter__(mgr)\n' + ' exc = True\n' + '\n' + ' VAR = await aenter\n' + ' try:\n' + ' BLOCK\n' + ' except:\n' + ' if not await aexit(mgr, *sys.exc_info()):\n' + ' raise\n' + ' else:\n' + ' await aexit(mgr, None, None, None)\n' + '\n' + 'See also "__aenter__()" and "__aexit__()" for details.\n' + '\n' + 'It is a "SyntaxError" to use "async with" statement outside of ' + 'an\n' + '"async def" function.\n' + '\n' + 'See also: **PEP 492** - Coroutines with async and await ' + 'syntax\n' + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] The exception is propagated to the invocation stack ' + 'unless\n' + ' there is a "finally" clause which happens to raise ' + 'another\n' + ' exception. 
That new exception causes the old one to be ' + 'lost.\n' + '\n' + '[2] Currently, control "flows off the end" except in the case ' + 'of\n' + ' an exception or the execution of a "return", "continue", ' + 'or\n' + ' "break" statement.\n' + '\n' + '[3] A string literal appearing as the first statement in the\n' + " function body is transformed into the function's " + '"__doc__"\n' + " attribute and therefore the function's *docstring*.\n" + '\n' + '[4] A string literal appearing as the first statement in the ' + 'class\n' + ' body is transformed into the namespace\'s "__doc__" item ' + 'and\n' + " therefore the class's *docstring*.\n", + 'context-managers': '\n' + 'With Statement Context Managers\n' + '*******************************\n' + '\n' + 'A *context manager* is an object that defines the ' + 'runtime context to\n' + 'be established when executing a "with" statement. The ' + 'context manager\n' + 'handles the entry into, and the exit from, the desired ' + 'runtime context\n' + 'for the execution of the block of code. Context ' + 'managers are normally\n' + 'invoked using the "with" statement (described in ' + 'section *The with\n' + 'statement*), but can also be used by directly invoking ' + 'their methods.\n' + '\n' + 'Typical uses of context managers include saving and ' + 'restoring various\n' + 'kinds of global state, locking and unlocking ' + 'resources, closing opened\n' + 'files, etc.\n' + '\n' + 'For more information on context managers, see *Context ' + 'Manager Types*.\n' + '\n' + 'object.__enter__(self)\n' + '\n' + ' Enter the runtime context related to this object. ' + 'The "with"\n' + " statement will bind this method's return value to " + 'the target(s)\n' + ' specified in the "as" clause of the statement, if ' + 'any.\n' + '\n' + 'object.__exit__(self, exc_type, exc_value, traceback)\n' + '\n' + ' Exit the runtime context related to this object. ' + 'The parameters\n' + ' describe the exception that caused the context to ' + 'be exited. If the\n' + ' context was exited without an exception, all three ' + 'arguments will\n' + ' be "None".\n' + '\n' + ' If an exception is supplied, and the method wishes ' + 'to suppress the\n' + ' exception (i.e., prevent it from being propagated), ' + 'it should\n' + ' return a true value. Otherwise, the exception will ' + 'be processed\n' + ' normally upon exit from this method.\n' + '\n' + ' Note that "__exit__()" methods should not reraise ' + 'the passed-in\n' + " exception; this is the caller's responsibility.\n" + '\n' + 'See also: **PEP 0343** - The "with" statement\n' + '\n' + ' The specification, background, and examples for ' + 'the Python "with"\n' + ' statement.\n', + 'continue': '\n' + 'The "continue" statement\n' + '************************\n' + '\n' + ' continue_stmt ::= "continue"\n' + '\n' + '"continue" may only occur syntactically nested in a "for" or ' + '"while"\n' + 'loop, but not nested in a function or class definition or ' + '"finally"\n' + 'clause within that loop. 
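Referring back to the "__enter__()"/"__exit__()" protocol described in the context-manager entry above, a minimal hand-written context manager might look like this sketch (hypothetical class):

    >>> import time
    >>> class Timer:
    ...     def __enter__(self):
    ...         self.start = time.monotonic()
    ...         return self         # bound to the "as" target, if any
    ...     def __exit__(self, exc_type, exc_value, traceback):
    ...         self.elapsed = time.monotonic() - self.start
    ...         return False        # a false value does not suppress exceptions
    ...
    >>> with Timer() as t:
    ...     sum(range(1000))
    ...
    499500
    >>> t.elapsed >= 0
    True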
It continues with the next cycle of ' + 'the\n' + 'nearest enclosing loop.\n' + '\n' + 'When "continue" passes control out of a "try" statement with ' + 'a\n' + '"finally" clause, that "finally" clause is executed before ' + 'really\n' + 'starting the next loop cycle.\n', + 'conversions': '\n' + 'Arithmetic conversions\n' + '**********************\n' + '\n' + 'When a description of an arithmetic operator below uses the ' + 'phrase\n' + '"the numeric arguments are converted to a common type," ' + 'this means\n' + 'that the operator implementation for built-in types works ' + 'as follows:\n' + '\n' + '* If either argument is a complex number, the other is ' + 'converted to\n' + ' complex;\n' + '\n' + '* otherwise, if either argument is a floating point number, ' + 'the\n' + ' other is converted to floating point;\n' + '\n' + '* otherwise, both must be integers and no conversion is ' + 'necessary.\n' + '\n' + 'Some additional rules apply for certain operators (e.g., a ' + 'string as a\n' + "left argument to the '%' operator). Extensions must define " + 'their own\n' + 'conversion behavior.\n', + 'customization': '\n' + 'Basic customization\n' + '*******************\n' + '\n' + 'object.__new__(cls[, ...])\n' + '\n' + ' Called to create a new instance of class *cls*. ' + '"__new__()" is a\n' + ' static method (special-cased so you need not declare ' + 'it as such)\n' + ' that takes the class of which an instance was ' + 'requested as its\n' + ' first argument. The remaining arguments are those ' + 'passed to the\n' + ' object constructor expression (the call to the ' + 'class). The return\n' + ' value of "__new__()" should be the new object instance ' + '(usually an\n' + ' instance of *cls*).\n' + '\n' + ' Typical implementations create a new instance of the ' + 'class by\n' + ' invoking the superclass\'s "__new__()" method using\n' + ' "super(currentclass, cls).__new__(cls[, ...])" with ' + 'appropriate\n' + ' arguments and then modifying the newly-created ' + 'instance as\n' + ' necessary before returning it.\n' + '\n' + ' If "__new__()" returns an instance of *cls*, then the ' + 'new\n' + ' instance\'s "__init__()" method will be invoked like\n' + ' "__init__(self[, ...])", where *self* is the new ' + 'instance and the\n' + ' remaining arguments are the same as were passed to ' + '"__new__()".\n' + '\n' + ' If "__new__()" does not return an instance of *cls*, ' + 'then the new\n' + ' instance\'s "__init__()" method will not be invoked.\n' + '\n' + ' "__new__()" is intended mainly to allow subclasses of ' + 'immutable\n' + ' types (like int, str, or tuple) to customize instance ' + 'creation. It\n' + ' is also commonly overridden in custom metaclasses in ' + 'order to\n' + ' customize class creation.\n' + '\n' + 'object.__init__(self[, ...])\n' + '\n' + ' Called after the instance has been created (by ' + '"__new__()"), but\n' + ' before it is returned to the caller. The arguments ' + 'are those\n' + ' passed to the class constructor expression. 
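As a sketch of the "__new__()" use case mentioned above, customizing creation of an immutable subclass (hypothetical class):

    >>> class UpperStr(str):
    ...     def __new__(cls, value):
    ...         # str is immutable, so the stored value must be chosen in
    ...         # __new__(); __init__() could not change it afterwards
    ...         return super().__new__(cls, value.upper())
    ...
    >>> UpperStr('spam')
    'SPAM'
    >>> isinstance(UpperStr('spam'), str)
    True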
If a base ' + 'class has an\n' + ' "__init__()" method, the derived class\'s "__init__()" ' + 'method, if\n' + ' any, must explicitly call it to ensure proper ' + 'initialization of the\n' + ' base class part of the instance; for example:\n' + ' "BaseClass.__init__(self, [args...])".\n' + '\n' + ' Because "__new__()" and "__init__()" work together in ' + 'constructing\n' + ' objects ("__new__()" to create it, and "__init__()" to ' + 'customise\n' + ' it), no non-"None" value may be returned by ' + '"__init__()"; doing so\n' + ' will cause a "TypeError" to be raised at runtime.\n' + '\n' + 'object.__del__(self)\n' + '\n' + ' Called when the instance is about to be destroyed. ' + 'This is also\n' + ' called a destructor. If a base class has a ' + '"__del__()" method, the\n' + ' derived class\'s "__del__()" method, if any, must ' + 'explicitly call it\n' + ' to ensure proper deletion of the base class part of ' + 'the instance.\n' + ' Note that it is possible (though not recommended!) for ' + 'the\n' + ' "__del__()" method to postpone destruction of the ' + 'instance by\n' + ' creating a new reference to it. It may then be called ' + 'at a later\n' + ' time when this new reference is deleted. It is not ' + 'guaranteed that\n' + ' "__del__()" methods are called for objects that still ' + 'exist when\n' + ' the interpreter exits.\n' + '\n' + ' Note: "del x" doesn\'t directly call "x.__del__()" --- ' + 'the former\n' + ' decrements the reference count for "x" by one, and ' + 'the latter is\n' + ' only called when "x"\'s reference count reaches ' + 'zero. Some common\n' + ' situations that may prevent the reference count of ' + 'an object from\n' + ' going to zero include: circular references between ' + 'objects (e.g.,\n' + ' a doubly-linked list or a tree data structure with ' + 'parent and\n' + ' child pointers); a reference to the object on the ' + 'stack frame of\n' + ' a function that caught an exception (the traceback ' + 'stored in\n' + ' "sys.exc_info()[2]" keeps the stack frame alive); or ' + 'a reference\n' + ' to the object on the stack frame that raised an ' + 'unhandled\n' + ' exception in interactive mode (the traceback stored ' + 'in\n' + ' "sys.last_traceback" keeps the stack frame alive). ' + 'The first\n' + ' situation can only be remedied by explicitly ' + 'breaking the cycles;\n' + ' the second can be resolved by freeing the reference ' + 'to the\n' + ' traceback object when it is no longer useful, and ' + 'the third can\n' + ' be resolved by storing "None" in ' + '"sys.last_traceback". Circular\n' + ' references which are garbage are detected and ' + 'cleaned up when the\n' + " cyclic garbage collector is enabled (it's on by " + 'default). Refer\n' + ' to the documentation for the "gc" module for more ' + 'information\n' + ' about this topic.\n' + '\n' + ' Warning: Due to the precarious circumstances under ' + 'which\n' + ' "__del__()" methods are invoked, exceptions that ' + 'occur during\n' + ' their execution are ignored, and a warning is ' + 'printed to\n' + ' "sys.stderr" instead. Also, when "__del__()" is ' + 'invoked in\n' + ' response to a module being deleted (e.g., when ' + 'execution of the\n' + ' program is done), other globals referenced by the ' + '"__del__()"\n' + ' method may already have been deleted or in the ' + 'process of being\n' + ' torn down (e.g. the import machinery shutting ' + 'down). For this\n' + ' reason, "__del__()" methods should do the absolute ' + 'minimum needed\n' + ' to maintain external invariants. 
Starting with ' + 'version 1.5,\n' + ' Python guarantees that globals whose name begins ' + 'with a single\n' + ' underscore are deleted from their module before ' + 'other globals are\n' + ' deleted; if no other references to such globals ' + 'exist, this may\n' + ' help in assuring that imported modules are still ' + 'available at the\n' + ' time when the "__del__()" method is called.\n' + '\n' + 'object.__repr__(self)\n' + '\n' + ' Called by the "repr()" built-in function to compute ' + 'the "official"\n' + ' string representation of an object. If at all ' + 'possible, this\n' + ' should look like a valid Python expression that could ' + 'be used to\n' + ' recreate an object with the same value (given an ' + 'appropriate\n' + ' environment). If this is not possible, a string of ' + 'the form\n' + ' "<...some useful description...>" should be returned. ' + 'The return\n' + ' value must be a string object. If a class defines ' + '"__repr__()" but\n' + ' not "__str__()", then "__repr__()" is also used when ' + 'an "informal"\n' + ' string representation of instances of that class is ' + 'required.\n' + '\n' + ' This is typically used for debugging, so it is ' + 'important that the\n' + ' representation is information-rich and unambiguous.\n' + '\n' + 'object.__str__(self)\n' + '\n' + ' Called by "str(object)" and the built-in functions ' + '"format()" and\n' + ' "print()" to compute the "informal" or nicely ' + 'printable string\n' + ' representation of an object. The return value must be ' + 'a *string*\n' + ' object.\n' + '\n' + ' This method differs from "object.__repr__()" in that ' + 'there is no\n' + ' expectation that "__str__()" return a valid Python ' + 'expression: a\n' + ' more convenient or concise representation can be ' + 'used.\n' + '\n' + ' The default implementation defined by the built-in ' + 'type "object"\n' + ' calls "object.__repr__()".\n' + '\n' + 'object.__bytes__(self)\n' + '\n' + ' Called by "bytes()" to compute a byte-string ' + 'representation of an\n' + ' object. This should return a "bytes" object.\n' + '\n' + 'object.__format__(self, format_spec)\n' + '\n' + ' Called by the "format()" built-in function (and by ' + 'extension, the\n' + ' "str.format()" method of class "str") to produce a ' + '"formatted"\n' + ' string representation of an object. The "format_spec" ' + 'argument is a\n' + ' string that contains a description of the formatting ' + 'options\n' + ' desired. The interpretation of the "format_spec" ' + 'argument is up to\n' + ' the type implementing "__format__()", however most ' + 'classes will\n' + ' either delegate formatting to one of the built-in ' + 'types, or use a\n' + ' similar formatting option syntax.\n' + '\n' + ' See *Format Specification Mini-Language* for a ' + 'description of the\n' + ' standard formatting syntax.\n' + '\n' + ' The return value must be a string object.\n' + '\n' + ' Changed in version 3.4: The __format__ method of ' + '"object" itself\n' + ' raises a "TypeError" if passed any non-empty string.\n' + '\n' + 'object.__lt__(self, other)\n' + 'object.__le__(self, other)\n' + 'object.__eq__(self, other)\n' + 'object.__ne__(self, other)\n' + 'object.__gt__(self, other)\n' + 'object.__ge__(self, other)\n' + '\n' + ' These are the so-called "rich comparison" methods. 
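A short sketch of a user-defined type implementing two of these methods (hypothetical class); returning "NotImplemented" for unsupported operand types lets Python try the reflected operation or raise "TypeError":

    >>> class Version:
    ...     def __init__(self, major, minor):
    ...         self.major, self.minor = major, minor
    ...     def __eq__(self, other):
    ...         if not isinstance(other, Version):
    ...             return NotImplemented
    ...         return (self.major, self.minor) == (other.major, other.minor)
    ...     def __lt__(self, other):
    ...         if not isinstance(other, Version):
    ...             return NotImplemented
    ...         return (self.major, self.minor) < (other.major, other.minor)
    ...
    >>> Version(3, 4) < Version(3, 5)
    True
    >>> Version(3, 5) == Version(3, 5)
    True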
' + 'The\n' + ' correspondence between operator symbols and method ' + 'names is as\n' + ' follows: "xy" calls\n' + ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' + '\n' + ' A rich comparison method may return the singleton ' + '"NotImplemented"\n' + ' if it does not implement the operation for a given ' + 'pair of\n' + ' arguments. By convention, "False" and "True" are ' + 'returned for a\n' + ' successful comparison. However, these methods can ' + 'return any value,\n' + ' so if the comparison operator is used in a Boolean ' + 'context (e.g.,\n' + ' in the condition of an "if" statement), Python will ' + 'call "bool()"\n' + ' on the value to determine if the result is true or ' + 'false.\n' + '\n' + ' By default, "__ne__()" delegates to "__eq__()" and ' + 'inverts the\n' + ' result unless it is "NotImplemented". There are no ' + 'other implied\n' + ' relationships among the comparison operators, for ' + 'example, the\n' + ' truth of "(x.__hash__".\n' + '\n' + ' If a class that does not override "__eq__()" wishes to ' + 'suppress\n' + ' hash support, it should include "__hash__ = None" in ' + 'the class\n' + ' definition. A class which defines its own "__hash__()" ' + 'that\n' + ' explicitly raises a "TypeError" would be incorrectly ' + 'identified as\n' + ' hashable by an "isinstance(obj, collections.Hashable)" ' + 'call.\n' + '\n' + ' Note: By default, the "__hash__()" values of str, ' + 'bytes and\n' + ' datetime objects are "salted" with an unpredictable ' + 'random value.\n' + ' Although they remain constant within an individual ' + 'Python\n' + ' process, they are not predictable between repeated ' + 'invocations of\n' + ' Python.This is intended to provide protection ' + 'against a denial-\n' + ' of-service caused by carefully-chosen inputs that ' + 'exploit the\n' + ' worst case performance of a dict insertion, O(n^2) ' + 'complexity.\n' + ' See ' + 'http://www.ocert.org/advisories/ocert-2011-003.html for\n' + ' details.Changing hash values affects the iteration ' + 'order of\n' + ' dicts, sets and other mappings. Python has never ' + 'made guarantees\n' + ' about this ordering (and it typically varies between ' + '32-bit and\n' + ' 64-bit builds).See also "PYTHONHASHSEED".\n' + '\n' + ' Changed in version 3.3: Hash randomization is enabled ' + 'by default.\n' + '\n' + 'object.__bool__(self)\n' + '\n' + ' Called to implement truth value testing and the ' + 'built-in operation\n' + ' "bool()"; should return "False" or "True". When this ' + 'method is not\n' + ' defined, "__len__()" is called, if it is defined, and ' + 'the object is\n' + ' considered true if its result is nonzero. If a class ' + 'defines\n' + ' neither "__len__()" nor "__bool__()", all its ' + 'instances are\n' + ' considered true.\n', + 'debugger': '\n' + '"pdb" --- The Python Debugger\n' + '*****************************\n' + '\n' + '**Source code:** Lib/pdb.py\n' + '\n' + '======================================================================\n' + '\n' + 'The module "pdb" defines an interactive source code debugger ' + 'for\n' + 'Python programs. It supports setting (conditional) ' + 'breakpoints and\n' + 'single stepping at the source line level, inspection of stack ' + 'frames,\n' + 'source code listing, and evaluation of arbitrary Python code ' + 'in the\n' + 'context of any stack frame. It also supports post-mortem ' + 'debugging\n' + 'and can be called under program control.\n' + '\n' + 'The debugger is extensible -- it is actually defined as the ' + 'class\n' + '"Pdb". 
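Going back to the truth-testing rules in the customization entry above, a container-like sketch whose truth value falls back to "__len__()" (hypothetical class):

    >>> class Bag:
    ...     def __init__(self, items=()):
    ...         self.items = list(items)
    ...     def __len__(self):
    ...         return len(self.items)    # used by bool() when __bool__() is absent
    ...
    >>> bool(Bag()), bool(Bag([1, 2]))
    (False, True)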
This is currently undocumented but easily understood by ' + 'reading\n' + 'the source. The extension interface uses the modules "bdb" ' + 'and "cmd".\n' + '\n' + 'The debugger\'s prompt is "(Pdb)". Typical usage to run a ' + 'program under\n' + 'control of the debugger is:\n' + '\n' + ' >>> import pdb\n' + ' >>> import mymodule\n' + " >>> pdb.run('mymodule.test()')\n" + ' > (0)?()\n' + ' (Pdb) continue\n' + ' > (1)?()\n' + ' (Pdb) continue\n' + " NameError: 'spam'\n" + ' > (1)?()\n' + ' (Pdb)\n' + '\n' + 'Changed in version 3.3: Tab-completion via the "readline" ' + 'module is\n' + 'available for commands and command arguments, e.g. the current ' + 'global\n' + 'and local names are offered as arguments of the "p" command.\n' + '\n' + '"pdb.py" can also be invoked as a script to debug other ' + 'scripts. For\n' + 'example:\n' + '\n' + ' python3 -m pdb myscript.py\n' + '\n' + 'When invoked as a script, pdb will automatically enter ' + 'post-mortem\n' + 'debugging if the program being debugged exits abnormally. ' + 'After post-\n' + 'mortem debugging (or after normal exit of the program), pdb ' + 'will\n' + "restart the program. Automatic restarting preserves pdb's " + 'state (such\n' + 'as breakpoints) and in most cases is more useful than quitting ' + 'the\n' + "debugger upon program's exit.\n" + '\n' + 'New in version 3.2: "pdb.py" now accepts a "-c" option that ' + 'executes\n' + 'commands as if given in a ".pdbrc" file, see *Debugger ' + 'Commands*.\n' + '\n' + 'The typical usage to break into the debugger from a running ' + 'program is\n' + 'to insert\n' + '\n' + ' import pdb; pdb.set_trace()\n' + '\n' + 'at the location you want to break into the debugger. You can ' + 'then\n' + 'step through the code following this statement, and continue ' + 'running\n' + 'without the debugger using the "continue" command.\n' + '\n' + 'The typical usage to inspect a crashed program is:\n' + '\n' + ' >>> import pdb\n' + ' >>> import mymodule\n' + ' >>> mymodule.test()\n' + ' Traceback (most recent call last):\n' + ' File "", line 1, in ?\n' + ' File "./mymodule.py", line 4, in test\n' + ' test2()\n' + ' File "./mymodule.py", line 3, in test2\n' + ' print(spam)\n' + ' NameError: spam\n' + ' >>> pdb.pm()\n' + ' > ./mymodule.py(3)test2()\n' + ' -> print(spam)\n' + ' (Pdb)\n' + '\n' + 'The module defines the following functions; each enters the ' + 'debugger\n' + 'in a slightly different way:\n' + '\n' + 'pdb.run(statement, globals=None, locals=None)\n' + '\n' + ' Execute the *statement* (given as a string or a code ' + 'object) under\n' + ' debugger control. The debugger prompt appears before any ' + 'code is\n' + ' executed; you can set breakpoints and type "continue", or ' + 'you can\n' + ' step through the statement using "step" or "next" (all ' + 'these\n' + ' commands are explained below). The optional *globals* and ' + '*locals*\n' + ' arguments specify the environment in which the code is ' + 'executed; by\n' + ' default the dictionary of the module "__main__" is used. ' + '(See the\n' + ' explanation of the built-in "exec()" or "eval()" ' + 'functions.)\n' + '\n' + 'pdb.runeval(expression, globals=None, locals=None)\n' + '\n' + ' Evaluate the *expression* (given as a string or a code ' + 'object)\n' + ' under debugger control. When "runeval()" returns, it ' + 'returns the\n' + ' value of the expression. 
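For instance, a sketch of "run()" and "runeval()" driving the hypothetical "mymodule" used earlier:

    import pdb
    import mymodule

    pdb.run('mymodule.test()')       # the (Pdb) prompt appears before any code runs
    answer = pdb.runeval('2 ** 10')  # returns 1024 once the debugger is continued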
Otherwise this function is ' + 'similar to\n' + ' "run()".\n' + '\n' + 'pdb.runcall(function, *args, **kwds)\n' + '\n' + ' Call the *function* (a function or method object, not a ' + 'string)\n' + ' with the given arguments. When "runcall()" returns, it ' + 'returns\n' + ' whatever the function call returned. The debugger prompt ' + 'appears\n' + ' as soon as the function is entered.\n' + '\n' + 'pdb.set_trace()\n' + '\n' + ' Enter the debugger at the calling stack frame. This is ' + 'useful to\n' + ' hard-code a breakpoint at a given point in a program, even ' + 'if the\n' + ' code is not otherwise being debugged (e.g. when an ' + 'assertion\n' + ' fails).\n' + '\n' + 'pdb.post_mortem(traceback=None)\n' + '\n' + ' Enter post-mortem debugging of the given *traceback* ' + 'object. If no\n' + ' *traceback* is given, it uses the one of the exception that ' + 'is\n' + ' currently being handled (an exception must be being handled ' + 'if the\n' + ' default is to be used).\n' + '\n' + 'pdb.pm()\n' + '\n' + ' Enter post-mortem debugging of the traceback found in\n' + ' "sys.last_traceback".\n' + '\n' + 'The "run*" functions and "set_trace()" are aliases for ' + 'instantiating\n' + 'the "Pdb" class and calling the method of the same name. If ' + 'you want\n' + 'to access further features, you have to do this yourself:\n' + '\n' + "class class pdb.Pdb(completekey='tab', stdin=None, " + 'stdout=None, skip=None, nosigint=False)\n' + '\n' + ' "Pdb" is the debugger class.\n' + '\n' + ' The *completekey*, *stdin* and *stdout* arguments are ' + 'passed to the\n' + ' underlying "cmd.Cmd" class; see the description there.\n' + '\n' + ' The *skip* argument, if given, must be an iterable of ' + 'glob-style\n' + ' module name patterns. The debugger will not step into ' + 'frames that\n' + ' originate in a module that matches one of these patterns. ' + '[1]\n' + '\n' + ' By default, Pdb sets a handler for the SIGINT signal (which ' + 'is sent\n' + ' when the user presses Ctrl-C on the console) when you give ' + 'a\n' + ' "continue" command. This allows you to break into the ' + 'debugger\n' + ' again by pressing Ctrl-C. If you want Pdb not to touch the ' + 'SIGINT\n' + ' handler, set *nosigint* tot true.\n' + '\n' + ' Example call to enable tracing with *skip*:\n' + '\n' + " import pdb; pdb.Pdb(skip=['django.*']).set_trace()\n" + '\n' + ' New in version 3.1: The *skip* argument.\n' + '\n' + ' New in version 3.2: The *nosigint* argument. Previously, a ' + 'SIGINT\n' + ' handler was never set by Pdb.\n' + '\n' + ' run(statement, globals=None, locals=None)\n' + ' runeval(expression, globals=None, locals=None)\n' + ' runcall(function, *args, **kwds)\n' + ' set_trace()\n' + '\n' + ' See the documentation for the functions explained ' + 'above.\n' + '\n' + '\n' + 'Debugger Commands\n' + '=================\n' + '\n' + 'The commands recognized by the debugger are listed below. ' + 'Most\n' + 'commands can be abbreviated to one or two letters as ' + 'indicated; e.g.\n' + '"h(elp)" means that either "h" or "help" can be used to enter ' + 'the help\n' + 'command (but not "he" or "hel", nor "H" or "Help" or "HELP").\n' + 'Arguments to commands must be separated by whitespace (spaces ' + 'or\n' + 'tabs). Optional arguments are enclosed in square brackets ' + '("[]") in\n' + 'the command syntax; the square brackets must not be typed.\n' + 'Alternatives in the command syntax are separated by a vertical ' + 'bar\n' + '("|").\n' + '\n' + 'Entering a blank line repeats the last command entered. 
' + 'Exception: if\n' + 'the last command was a "list" command, the next 11 lines are ' + 'listed.\n' + '\n' + "Commands that the debugger doesn't recognize are assumed to be " + 'Python\n' + 'statements and are executed in the context of the program ' + 'being\n' + 'debugged. Python statements can also be prefixed with an ' + 'exclamation\n' + 'point ("!"). This is a powerful way to inspect the program ' + 'being\n' + 'debugged; it is even possible to change a variable or call a ' + 'function.\n' + 'When an exception occurs in such a statement, the exception ' + 'name is\n' + "printed but the debugger's state is not changed.\n" + '\n' + 'The debugger supports *aliases*. Aliases can have parameters ' + 'which\n' + 'allows one a certain level of adaptability to the context ' + 'under\n' + 'examination.\n' + '\n' + 'Multiple commands may be entered on a single line, separated ' + 'by ";;".\n' + '(A single ";" is not used as it is the separator for multiple ' + 'commands\n' + 'in a line that is passed to the Python parser.) No ' + 'intelligence is\n' + 'applied to separating the commands; the input is split at the ' + 'first\n' + '";;" pair, even if it is in the middle of a quoted string.\n' + '\n' + 'If a file ".pdbrc" exists in the user\'s home directory or in ' + 'the\n' + 'current directory, it is read in and executed as if it had ' + 'been typed\n' + 'at the debugger prompt. This is particularly useful for ' + 'aliases. If\n' + 'both files exist, the one in the home directory is read first ' + 'and\n' + 'aliases defined there can be overridden by the local file.\n' + '\n' + 'Changed in version 3.2: ".pdbrc" can now contain commands ' + 'that\n' + 'continue debugging, such as "continue" or "next". Previously, ' + 'these\n' + 'commands had no effect.\n' + '\n' + 'h(elp) [command]\n' + '\n' + ' Without argument, print the list of available commands. ' + 'With a\n' + ' *command* as argument, print help about that command. ' + '"help pdb"\n' + ' displays the full documentation (the docstring of the ' + '"pdb"\n' + ' module). Since the *command* argument must be an ' + 'identifier, "help\n' + ' exec" must be entered to get help on the "!" command.\n' + '\n' + 'w(here)\n' + '\n' + ' Print a stack trace, with the most recent frame at the ' + 'bottom. An\n' + ' arrow indicates the current frame, which determines the ' + 'context of\n' + ' most commands.\n' + '\n' + 'd(own) [count]\n' + '\n' + ' Move the current frame *count* (default one) levels down in ' + 'the\n' + ' stack trace (to a newer frame).\n' + '\n' + 'u(p) [count]\n' + '\n' + ' Move the current frame *count* (default one) levels up in ' + 'the stack\n' + ' trace (to an older frame).\n' + '\n' + 'b(reak) [([filename:]lineno | function) [, condition]]\n' + '\n' + ' With a *lineno* argument, set a break there in the current ' + 'file.\n' + ' With a *function* argument, set a break at the first ' + 'executable\n' + ' statement within that function. The line number may be ' + 'prefixed\n' + ' with a filename and a colon, to specify a breakpoint in ' + 'another\n' + " file (probably one that hasn't been loaded yet). The file " + 'is\n' + ' searched on "sys.path". 
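A few sketched invocations of these forms (file, function, and line numbers are hypothetical): the first breaks at a line in another file, the second at the first executable statement of a function, the third adds a condition, and a bare "break" lists all breakpoints:

    (Pdb) break mymodule.py:4
    (Pdb) break mymodule.test2
    (Pdb) break 7, count > 10
    (Pdb) break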
Note that each breakpoint is ' + 'assigned a\n' + ' number to which all the other breakpoint commands refer.\n' + '\n' + ' If a second argument is present, it is an expression which ' + 'must\n' + ' evaluate to true before the breakpoint is honored.\n' + '\n' + ' Without argument, list all breaks, including for each ' + 'breakpoint,\n' + ' the number of times that breakpoint has been hit, the ' + 'current\n' + ' ignore count, and the associated condition if any.\n' + '\n' + 'tbreak [([filename:]lineno | function) [, condition]]\n' + '\n' + ' Temporary breakpoint, which is removed automatically when ' + 'it is\n' + ' first hit. The arguments are the same as for "break".\n' + '\n' + 'cl(ear) [filename:lineno | bpnumber [bpnumber ...]]\n' + '\n' + ' With a *filename:lineno* argument, clear all the ' + 'breakpoints at\n' + ' this line. With a space separated list of breakpoint ' + 'numbers, clear\n' + ' those breakpoints. Without argument, clear all breaks (but ' + 'first\n' + ' ask confirmation).\n' + '\n' + 'disable [bpnumber [bpnumber ...]]\n' + '\n' + ' Disable the breakpoints given as a space separated list of\n' + ' breakpoint numbers. Disabling a breakpoint means it cannot ' + 'cause\n' + ' the program to stop execution, but unlike clearing a ' + 'breakpoint, it\n' + ' remains in the list of breakpoints and can be ' + '(re-)enabled.\n' + '\n' + 'enable [bpnumber [bpnumber ...]]\n' + '\n' + ' Enable the breakpoints specified.\n' + '\n' + 'ignore bpnumber [count]\n' + '\n' + ' Set the ignore count for the given breakpoint number. If ' + 'count is\n' + ' omitted, the ignore count is set to 0. A breakpoint ' + 'becomes active\n' + ' when the ignore count is zero. When non-zero, the count ' + 'is\n' + ' decremented each time the breakpoint is reached and the ' + 'breakpoint\n' + ' is not disabled and any associated condition evaluates to ' + 'true.\n' + '\n' + 'condition bpnumber [condition]\n' + '\n' + ' Set a new *condition* for the breakpoint, an expression ' + 'which must\n' + ' evaluate to true before the breakpoint is honored. If ' + '*condition*\n' + ' is absent, any existing condition is removed; i.e., the ' + 'breakpoint\n' + ' is made unconditional.\n' + '\n' + 'commands [bpnumber]\n' + '\n' + ' Specify a list of commands for breakpoint number ' + '*bpnumber*. The\n' + ' commands themselves appear on the following lines. Type a ' + 'line\n' + ' containing just "end" to terminate the commands. An ' + 'example:\n' + '\n' + ' (Pdb) commands 1\n' + ' (com) p some_variable\n' + ' (com) end\n' + ' (Pdb)\n' + '\n' + ' To remove all commands from a breakpoint, type commands and ' + 'follow\n' + ' it immediately with "end"; that is, give no commands.\n' + '\n' + ' With no *bpnumber* argument, commands refers to the last ' + 'breakpoint\n' + ' set.\n' + '\n' + ' You can use breakpoint commands to start your program up ' + 'again.\n' + ' Simply use the continue command, or step, or any other ' + 'command that\n' + ' resumes execution.\n' + '\n' + ' Specifying any command resuming execution (currently ' + 'continue,\n' + ' step, next, return, jump, quit and their abbreviations) ' + 'terminates\n' + ' the command list (as if that command was immediately ' + 'followed by\n' + ' end). 
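A sketched definition combining "silent" with a resuming command; after "continue" the command list is closed automatically, exactly as if "end" had followed it:

    (Pdb) commands 2
    (com) silent
    (com) p some_variable
    (com) continue
    (Pdb)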
This is because any time you resume execution (even ' + 'with a\n' + ' simple next or step), you may encounter another ' + 'breakpoint--which\n' + ' could have its own command list, leading to ambiguities ' + 'about which\n' + ' list to execute.\n' + '\n' + " If you use the 'silent' command in the command list, the " + 'usual\n' + ' message about stopping at a breakpoint is not printed. ' + 'This may be\n' + ' desirable for breakpoints that are to print a specific ' + 'message and\n' + ' then continue. If none of the other commands print ' + 'anything, you\n' + ' see no sign that the breakpoint was reached.\n' + '\n' + 's(tep)\n' + '\n' + ' Execute the current line, stop at the first possible ' + 'occasion\n' + ' (either in a function that is called or on the next line in ' + 'the\n' + ' current function).\n' + '\n' + 'n(ext)\n' + '\n' + ' Continue execution until the next line in the current ' + 'function is\n' + ' reached or it returns. (The difference between "next" and ' + '"step"\n' + ' is that "step" stops inside a called function, while ' + '"next"\n' + ' executes called functions at (nearly) full speed, only ' + 'stopping at\n' + ' the next line in the current function.)\n' + '\n' + 'unt(il) [lineno]\n' + '\n' + ' Without argument, continue execution until the line with a ' + 'number\n' + ' greater than the current one is reached.\n' + '\n' + ' With a line number, continue execution until a line with a ' + 'number\n' + ' greater or equal to that is reached. In both cases, also ' + 'stop when\n' + ' the current frame returns.\n' + '\n' + ' Changed in version 3.2: Allow giving an explicit line ' + 'number.\n' + '\n' + 'r(eturn)\n' + '\n' + ' Continue execution until the current function returns.\n' + '\n' + 'c(ont(inue))\n' + '\n' + ' Continue execution, only stop when a breakpoint is ' + 'encountered.\n' + '\n' + 'j(ump) lineno\n' + '\n' + ' Set the next line that will be executed. Only available in ' + 'the\n' + ' bottom-most frame. This lets you jump back and execute ' + 'code again,\n' + " or jump forward to skip code that you don't want to run.\n" + '\n' + ' It should be noted that not all jumps are allowed -- for ' + 'instance\n' + ' it is not possible to jump into the middle of a "for" loop ' + 'or out\n' + ' of a "finally" clause.\n' + '\n' + 'l(ist) [first[, last]]\n' + '\n' + ' List source code for the current file. Without arguments, ' + 'list 11\n' + ' lines around the current line or continue the previous ' + 'listing.\n' + ' With "." as argument, list 11 lines around the current ' + 'line. With\n' + ' one argument, list 11 lines around at that line. With two\n' + ' arguments, list the given range; if the second argument is ' + 'less\n' + ' than the first, it is interpreted as a count.\n' + '\n' + ' The current line in the current frame is indicated by ' + '"->". 
If an\n' + ' exception is being debugged, the line where the exception ' + 'was\n' + ' originally raised or propagated is indicated by ">>", if it ' + 'differs\n' + ' from the current line.\n' + '\n' + ' New in version 3.2: The ">>" marker.\n' + '\n' + 'll | longlist\n' + '\n' + ' List all source code for the current function or frame.\n' + ' Interesting lines are marked as for "list".\n' + '\n' + ' New in version 3.2.\n' + '\n' + 'a(rgs)\n' + '\n' + ' Print the argument list of the current function.\n' + '\n' + 'p expression\n' + '\n' + ' Evaluate the *expression* in the current context and print ' + 'its\n' + ' value.\n' + '\n' + ' Note: "print()" can also be used, but is not a debugger ' + 'command\n' + ' --- this executes the Python "print()" function.\n' + '\n' + 'pp expression\n' + '\n' + ' Like the "p" command, except the value of the expression is ' + 'pretty-\n' + ' printed using the "pprint" module.\n' + '\n' + 'whatis expression\n' + '\n' + ' Print the type of the *expression*.\n' + '\n' + 'source expression\n' + '\n' + ' Try to get source code for the given object and display ' + 'it.\n' + '\n' + ' New in version 3.2.\n' + '\n' + 'display [expression]\n' + '\n' + ' Display the value of the expression if it changed, each ' + 'time\n' + ' execution stops in the current frame.\n' + '\n' + ' Without expression, list all display expressions for the ' + 'current\n' + ' frame.\n' + '\n' + ' New in version 3.2.\n' + '\n' + 'undisplay [expression]\n' + '\n' + ' Do not display the expression any more in the current ' + 'frame.\n' + ' Without expression, clear all display expressions for the ' + 'current\n' + ' frame.\n' + '\n' + ' New in version 3.2.\n' + '\n' + 'interact\n' + '\n' + ' Start an interative interpreter (using the "code" module) ' + 'whose\n' + ' global namespace contains all the (global and local) names ' + 'found in\n' + ' the current scope.\n' + '\n' + ' New in version 3.2.\n' + '\n' + 'alias [name [command]]\n' + '\n' + ' Create an alias called *name* that executes *command*. The ' + 'command\n' + ' must *not* be enclosed in quotes. Replaceable parameters ' + 'can be\n' + ' indicated by "%1", "%2", and so on, while "%*" is replaced ' + 'by all\n' + ' the parameters. If no command is given, the current alias ' + 'for\n' + ' *name* is shown. If no arguments are given, all aliases are ' + 'listed.\n' + '\n' + ' Aliases may be nested and can contain anything that can be ' + 'legally\n' + ' typed at the pdb prompt. Note that internal pdb commands ' + '*can* be\n' + ' overridden by aliases. Such a command is then hidden until ' + 'the\n' + ' alias is removed. Aliasing is recursively applied to the ' + 'first\n' + ' word of the command line; all other words in the line are ' + 'left\n' + ' alone.\n' + '\n' + ' As an example, here are two useful aliases (especially when ' + 'placed\n' + ' in the ".pdbrc" file):\n' + '\n' + ' # Print instance variables (usage "pi classInst")\n' + ' alias pi for k in %1.__dict__.keys(): ' + 'print("%1.",k,"=",%1.__dict__[k])\n' + ' # Print instance variables in self\n' + ' alias ps pi self\n' + '\n' + 'unalias name\n' + '\n' + ' Delete the specified alias.\n' + '\n' + '! statement\n' + '\n' + ' Execute the (one-line) *statement* in the context of the ' + 'current\n' + ' stack frame. The exclamation point can be omitted unless ' + 'the first\n' + ' word of the statement resembles a debugger command. 
To set ' + 'a\n' + ' global variable, you can prefix the assignment command with ' + 'a\n' + ' "global" statement on the same line, e.g.:\n' + '\n' + " (Pdb) global list_options; list_options = ['-l']\n" + ' (Pdb)\n' + '\n' + 'run [args ...]\n' + 'restart [args ...]\n' + '\n' + ' Restart the debugged Python program. If an argument is ' + 'supplied,\n' + ' it is split with "shlex" and the result is used as the new\n' + ' "sys.argv". History, breakpoints, actions and debugger ' + 'options are\n' + ' preserved. "restart" is an alias for "run".\n' + '\n' + 'q(uit)\n' + '\n' + ' Quit from the debugger. The program being executed is ' + 'aborted.\n' + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] Whether a frame is considered to originate in a certain ' + 'module\n' + ' is determined by the "__name__" in the frame globals.\n', + 'del': '\n' + 'The "del" statement\n' + '*******************\n' + '\n' + ' del_stmt ::= "del" target_list\n' + '\n' + 'Deletion is recursively defined very similar to the way assignment ' + 'is\n' + 'defined. Rather than spelling it out in full details, here are ' + 'some\n' + 'hints.\n' + '\n' + 'Deletion of a target list recursively deletes each target, from ' + 'left\n' + 'to right.\n' + '\n' + 'Deletion of a name removes the binding of that name from the local ' + 'or\n' + 'global namespace, depending on whether the name occurs in a ' + '"global"\n' + 'statement in the same code block. If the name is unbound, a\n' + '"NameError" exception will be raised.\n' + '\n' + 'Deletion of attribute references, subscriptions and slicings is ' + 'passed\n' + 'to the primary object involved; deletion of a slicing is in ' + 'general\n' + 'equivalent to assignment of an empty slice of the right type (but ' + 'even\n' + 'this is determined by the sliced object).\n' + '\n' + 'Changed in version 3.2: Previously it was illegal to delete a name\n' + 'from the local namespace if it occurs as a free variable in a ' + 'nested\n' + 'block.\n', + 'dict': '\n' + 'Dictionary displays\n' + '*******************\n' + '\n' + 'A dictionary display is a possibly empty series of key/datum ' + 'pairs\n' + 'enclosed in curly braces:\n' + '\n' + ' dict_display ::= "{" [key_datum_list | ' + 'dict_comprehension] "}"\n' + ' key_datum_list ::= key_datum ("," key_datum)* [","]\n' + ' key_datum ::= expression ":" expression\n' + ' dict_comprehension ::= expression ":" expression comp_for\n' + '\n' + 'A dictionary display yields a new dictionary object.\n' + '\n' + 'If a comma-separated sequence of key/datum pairs is given, they ' + 'are\n' + 'evaluated from left to right to define the entries of the ' + 'dictionary:\n' + 'each key object is used as a key into the dictionary to store the\n' + 'corresponding datum. This means that you can specify the same ' + 'key\n' + "multiple times in the key/datum list, and the final dictionary's " + 'value\n' + 'for that key will be the last one given.\n' + '\n' + 'A dict comprehension, in contrast to list and set comprehensions,\n' + 'needs two expressions separated with a colon followed by the ' + 'usual\n' + '"for" and "if" clauses. When the comprehension is run, the ' + 'resulting\n' + 'key and value elements are inserted in the new dictionary in the ' + 'order\n' + 'they are produced.\n' + '\n' + 'Restrictions on the types of the key values are listed earlier in\n' + 'section *The standard type hierarchy*. (To summarize, the key ' + 'type\n' + 'should be *hashable*, which excludes all mutable objects.) 
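# A minimal, illustrative sketch (kept as Python comments) of the dictionary
# displays described above: duplicate keys are allowed, with the last datum
# winning, and a dict comprehension uses the usual "for"/"if" clauses.
#
#   >>> {'a': 1, 'a': 2}                       # last datum for 'a' prevails
#   {'a': 2}
#   >>> {n: n * n for n in range(4) if n > 1}  # dict comprehension
#   {2: 4, 3: 9}                               # (key order may vary)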
' + 'Clashes\n' + 'between duplicate keys are not detected; the last datum ' + '(textually\n' + 'rightmost in the display) stored for a given key value prevails.\n', + 'dynamic-features': '\n' + 'Interaction with dynamic features\n' + '*********************************\n' + '\n' + 'Name resolution of free variables occurs at runtime, ' + 'not at compile\n' + 'time. This means that the following code will print ' + '42:\n' + '\n' + ' i = 10\n' + ' def f():\n' + ' print(i)\n' + ' i = 42\n' + ' f()\n' + '\n' + 'There are several cases where Python statements are ' + 'illegal when used\n' + 'in conjunction with nested scopes that contain free ' + 'variables.\n' + '\n' + 'If a variable is referenced in an enclosing scope, it ' + 'is illegal to\n' + 'delete the name. An error will be reported at compile ' + 'time.\n' + '\n' + 'The "eval()" and "exec()" functions do not have access ' + 'to the full\n' + 'environment for resolving names. Names may be ' + 'resolved in the local\n' + 'and global namespaces of the caller. Free variables ' + 'are not resolved\n' + 'in the nearest enclosing namespace, but in the global ' + 'namespace. [1]\n' + 'The "exec()" and "eval()" functions have optional ' + 'arguments to\n' + 'override the global and local namespace. If only one ' + 'namespace is\n' + 'specified, it is used for both.\n', + 'else': '\n' + 'The "if" statement\n' + '******************\n' + '\n' + 'The "if" statement is used for conditional execution:\n' + '\n' + ' if_stmt ::= "if" expression ":" suite\n' + ' ( "elif" expression ":" suite )*\n' + ' ["else" ":" suite]\n' + '\n' + 'It selects exactly one of the suites by evaluating the expressions ' + 'one\n' + 'by one until one is found to be true (see section *Boolean ' + 'operations*\n' + 'for the definition of true and false); then that suite is ' + 'executed\n' + '(and no other part of the "if" statement is executed or ' + 'evaluated).\n' + 'If all expressions are false, the suite of the "else" clause, if\n' + 'present, is executed.\n', + 'exceptions': '\n' + 'Exceptions\n' + '**********\n' + '\n' + 'Exceptions are a means of breaking out of the normal flow of ' + 'control\n' + 'of a code block in order to handle errors or other ' + 'exceptional\n' + 'conditions. An exception is *raised* at the point where the ' + 'error is\n' + 'detected; it may be *handled* by the surrounding code block ' + 'or by any\n' + 'code block that directly or indirectly invoked the code ' + 'block where\n' + 'the error occurred.\n' + '\n' + 'The Python interpreter raises an exception when it detects a ' + 'run-time\n' + 'error (such as division by zero). A Python program can ' + 'also\n' + 'explicitly raise an exception with the "raise" statement. ' + 'Exception\n' + 'handlers are specified with the "try" ... "except" ' + 'statement. The\n' + '"finally" clause of such a statement can be used to specify ' + 'cleanup\n' + 'code which does not handle the exception, but is executed ' + 'whether an\n' + 'exception occurred or not in the preceding code.\n' + '\n' + 'Python uses the "termination" model of error handling: an ' + 'exception\n' + 'handler can find out what happened and continue execution at ' + 'an outer\n' + 'level, but it cannot repair the cause of the error and retry ' + 'the\n' + 'failing operation (except by re-entering the offending piece ' + 'of code\n' + 'from the top).\n' + '\n' + 'When an exception is not handled at all, the interpreter ' + 'terminates\n' + 'execution of the program, or returns to its interactive main ' + 'loop. 
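# A minimal, illustrative sketch (kept as Python comments) of the "exec()"
# namespace behaviour noted above: when a single mapping is supplied, it is
# used as both the global and the local namespace.
#
#   >>> ns = {'x': 1}
#   >>> exec('y = x + 1', ns)
#   >>> ns['y']
#   2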
In\n' + 'either case, it prints a stack backtrace, except when the ' + 'exception is\n' + '"SystemExit".\n' + '\n' + 'Exceptions are identified by class instances. The "except" ' + 'clause is\n' + 'selected depending on the class of the instance: it must ' + 'reference the\n' + 'class of the instance or a base class thereof. The instance ' + 'can be\n' + 'received by the handler and can carry additional information ' + 'about the\n' + 'exceptional condition.\n' + '\n' + 'Note: Exception messages are not part of the Python API. ' + 'Their\n' + ' contents may change from one version of Python to the next ' + 'without\n' + ' warning and should not be relied on by code which will run ' + 'under\n' + ' multiple versions of the interpreter.\n' + '\n' + 'See also the description of the "try" statement in section ' + '*The try\n' + 'statement* and "raise" statement in section *The raise ' + 'statement*.\n' + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] This limitation occurs because the code that is executed ' + 'by\n' + ' these operations is not available at the time the module ' + 'is\n' + ' compiled.\n', + 'execmodel': '\n' + 'Execution model\n' + '***************\n' + '\n' + '\n' + 'Structure of a programm\n' + '=======================\n' + '\n' + 'A Python program is constructed from code blocks. A *block* ' + 'is a piece\n' + 'of Python program text that is executed as a unit. The ' + 'following are\n' + 'blocks: a module, a function body, and a class definition. ' + 'Each\n' + 'command typed interactively is a block. A script file (a ' + 'file given\n' + 'as standard input to the interpreter or specified as a ' + 'command line\n' + 'argument to the interpreter) is a code block. A script ' + 'command (a\n' + 'command specified on the interpreter command line with the ' + "'**-c**'\n" + 'option) is a code block. The string argument passed to the ' + 'built-in\n' + 'functions "eval()" and "exec()" is a code block.\n' + '\n' + 'A code block is executed in an *execution frame*. A frame ' + 'contains\n' + 'some administrative information (used for debugging) and ' + 'determines\n' + "where and how execution continues after the code block's " + 'execution has\n' + 'completed.\n' + '\n' + '\n' + 'Naming and binding\n' + '==================\n' + '\n' + '\n' + 'Binding of names\n' + '----------------\n' + '\n' + '*Names* refer to objects. Names are introduced by name ' + 'binding\n' + 'operations.\n' + '\n' + 'The following constructs bind names: formal parameters to ' + 'functions,\n' + '"import" statements, class and function definitions (these ' + 'bind the\n' + 'class or function name in the defining block), and targets ' + 'that are\n' + 'identifiers if occurring in an assignment, "for" loop header, ' + 'or after\n' + '"as" in a "with" statement or "except" clause. The "import" ' + 'statement\n' + 'of the form "from ... import *" binds all names defined in ' + 'the\n' + 'imported module, except those beginning with an underscore. ' + 'This form\n' + 'may only be used at the module level.\n' + '\n' + 'A target occurring in a "del" statement is also considered ' + 'bound for\n' + 'this purpose (though the actual semantics are to unbind the ' + 'name).\n' + '\n' + 'Each assignment or import statement occurs within a block ' + 'defined by a\n' + 'class or function definition or at the module level (the ' + 'top-level\n' + 'code block).\n' + '\n' + 'If a name is bound in a block, it is a local variable of that ' + 'block,\n' + 'unless declared as "nonlocal" or "global". 
If a name is ' + 'bound at the\n' + 'module level, it is a global variable. (The variables of the ' + 'module\n' + 'code block are local and global.) If a variable is used in a ' + 'code\n' + 'block but not defined there, it is a *free variable*.\n' + '\n' + 'Each occurrence of a name in the program text refers to the ' + '*binding*\n' + 'of that name established by the following name resolution ' + 'rules.\n' + '\n' + '\n' + 'Resolution of names\n' + '-------------------\n' + '\n' + 'A *scope* defines the visibility of a name within a block. ' + 'If a local\n' + 'variable is defined in a block, its scope includes that ' + 'block. If the\n' + 'definition occurs in a function block, the scope extends to ' + 'any blocks\n' + 'contained within the defining one, unless a contained block ' + 'introduces\n' + 'a different binding for the name.\n' + '\n' + 'When a name is used in a code block, it is resolved using the ' + 'nearest\n' + 'enclosing scope. The set of all such scopes visible to a ' + 'code block\n' + "is called the block's *environment*.\n" + '\n' + 'When a name is not found at all, a "NameError" exception is ' + 'raised. If\n' + 'the current scope is a function scope, and the name refers to ' + 'a local\n' + 'variable that has not yet been bound to a value at the point ' + 'where the\n' + 'name is used, an "UnboundLocalError" exception is raised.\n' + '"UnboundLocalError" is a subclass of "NameError".\n' + '\n' + 'If a name binding operation occurs anywhere within a code ' + 'block, all\n' + 'uses of the name within the block are treated as references ' + 'to the\n' + 'current block. This can lead to errors when a name is used ' + 'within a\n' + 'block before it is bound. This rule is subtle. Python ' + 'lacks\n' + 'declarations and allows name binding operations to occur ' + 'anywhere\n' + 'within a code block. The local variables of a code block can ' + 'be\n' + 'determined by scanning the entire text of the block for name ' + 'binding\n' + 'operations.\n' + '\n' + 'If the "global" statement occurs within a block, all uses of ' + 'the name\n' + 'specified in the statement refer to the binding of that name ' + 'in the\n' + 'top-level namespace. Names are resolved in the top-level ' + 'namespace by\n' + 'searching the global namespace, i.e. the namespace of the ' + 'module\n' + 'containing the code block, and the builtins namespace, the ' + 'namespace\n' + 'of the module "builtins". The global namespace is searched ' + 'first. If\n' + 'the name is not found there, the builtins namespace is ' + 'searched. The\n' + '"global" statement must precede all uses of the name.\n' + '\n' + 'The "global" statement has the same scope as a name binding ' + 'operation\n' + 'in the same block. If the nearest enclosing scope for a free ' + 'variable\n' + 'contains a global statement, the free variable is treated as ' + 'a global.\n' + '\n' + 'The "nonlocal" statement causes corresponding names to refer ' + 'to\n' + 'previously bound variables in the nearest enclosing function ' + 'scope.\n' + '"SyntaxError" is raised at compile time if the given name ' + 'does not\n' + 'exist in any enclosing function scope.\n' + '\n' + 'The namespace for a module is automatically created the first ' + 'time a\n' + 'module is imported. The main module for a script is always ' + 'called\n' + '"__main__".\n' + '\n' + 'Class definition blocks and arguments to "exec()" and ' + '"eval()" are\n' + 'special in the context of name resolution. 
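# A minimal, illustrative sketch (kept as Python comments) of the binding
# rule described above: because "x" is assigned somewhere in f(), every use
# of "x" inside f() refers to the local name, so the early read fails;
# declaring it "global" restores the module-level binding.
#
#   x = 10
#   def f():
#       print(x)        # UnboundLocalError: 'x' referenced before assignment
#       x = 20
#   def g():
#       global x
#       print(x)        # prints 10
#       x = 20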
A class definition ' + 'is an\n' + 'executable statement that may use and define names. These ' + 'references\n' + 'follow the normal rules for name resolution with an exception ' + 'that\n' + 'unbound local variables are looked up in the global ' + 'namespace. The\n' + 'namespace of the class definition becomes the attribute ' + 'dictionary of\n' + 'the class. The scope of names defined in a class block is ' + 'limited to\n' + 'the class block; it does not extend to the code blocks of ' + 'methods --\n' + 'this includes comprehensions and generator expressions since ' + 'they are\n' + 'implemented using a function scope. This means that the ' + 'following\n' + 'will fail:\n' + '\n' + ' class A:\n' + ' a = 42\n' + ' b = list(a + i for i in range(10))\n' + '\n' + '\n' + 'Builtins and restricted execution\n' + '---------------------------------\n' + '\n' + 'The builtins namespace associated with the execution of a ' + 'code block\n' + 'is actually found by looking up the name "__builtins__" in ' + 'its global\n' + 'namespace; this should be a dictionary or a module (in the ' + 'latter case\n' + "the module's dictionary is used). By default, when in the " + '"__main__"\n' + 'module, "__builtins__" is the built-in module "builtins"; ' + 'when in any\n' + 'other module, "__builtins__" is an alias for the dictionary ' + 'of the\n' + '"builtins" module itself. "__builtins__" can be set to a ' + 'user-created\n' + 'dictionary to create a weak form of restricted execution.\n' + '\n' + '**CPython implementation detail:** Users should not touch\n' + '"__builtins__"; it is strictly an implementation detail. ' + 'Users\n' + 'wanting to override values in the builtins namespace should ' + '"import"\n' + 'the "builtins" module and modify its attributes ' + 'appropriately.\n' + '\n' + '\n' + 'Interaction with dynamic features\n' + '---------------------------------\n' + '\n' + 'Name resolution of free variables occurs at runtime, not at ' + 'compile\n' + 'time. This means that the following code will print 42:\n' + '\n' + ' i = 10\n' + ' def f():\n' + ' print(i)\n' + ' i = 42\n' + ' f()\n' + '\n' + 'There are several cases where Python statements are illegal ' + 'when used\n' + 'in conjunction with nested scopes that contain free ' + 'variables.\n' + '\n' + 'If a variable is referenced in an enclosing scope, it is ' + 'illegal to\n' + 'delete the name. An error will be reported at compile time.\n' + '\n' + 'The "eval()" and "exec()" functions do not have access to the ' + 'full\n' + 'environment for resolving names. Names may be resolved in ' + 'the local\n' + 'and global namespaces of the caller. Free variables are not ' + 'resolved\n' + 'in the nearest enclosing namespace, but in the global ' + 'namespace. [1]\n' + 'The "exec()" and "eval()" functions have optional arguments ' + 'to\n' + 'override the global and local namespace. If only one ' + 'namespace is\n' + 'specified, it is used for both.\n' + '\n' + '\n' + 'Exceptions\n' + '==========\n' + '\n' + 'Exceptions are a means of breaking out of the normal flow of ' + 'control\n' + 'of a code block in order to handle errors or other ' + 'exceptional\n' + 'conditions. An exception is *raised* at the point where the ' + 'error is\n' + 'detected; it may be *handled* by the surrounding code block ' + 'or by any\n' + 'code block that directly or indirectly invoked the code block ' + 'where\n' + 'the error occurred.\n' + '\n' + 'The Python interpreter raises an exception when it detects a ' + 'run-time\n' + 'error (such as division by zero). 
A Python program can also\n' + 'explicitly raise an exception with the "raise" statement. ' + 'Exception\n' + 'handlers are specified with the "try" ... "except" ' + 'statement. The\n' + '"finally" clause of such a statement can be used to specify ' + 'cleanup\n' + 'code which does not handle the exception, but is executed ' + 'whether an\n' + 'exception occurred or not in the preceding code.\n' + '\n' + 'Python uses the "termination" model of error handling: an ' + 'exception\n' + 'handler can find out what happened and continue execution at ' + 'an outer\n' + 'level, but it cannot repair the cause of the error and retry ' + 'the\n' + 'failing operation (except by re-entering the offending piece ' + 'of code\n' + 'from the top).\n' + '\n' + 'When an exception is not handled at all, the interpreter ' + 'terminates\n' + 'execution of the program, or returns to its interactive main ' + 'loop. In\n' + 'either case, it prints a stack backtrace, except when the ' + 'exception is\n' + '"SystemExit".\n' + '\n' + 'Exceptions are identified by class instances. The "except" ' + 'clause is\n' + 'selected depending on the class of the instance: it must ' + 'reference the\n' + 'class of the instance or a base class thereof. The instance ' + 'can be\n' + 'received by the handler and can carry additional information ' + 'about the\n' + 'exceptional condition.\n' + '\n' + 'Note: Exception messages are not part of the Python API. ' + 'Their\n' + ' contents may change from one version of Python to the next ' + 'without\n' + ' warning and should not be relied on by code which will run ' + 'under\n' + ' multiple versions of the interpreter.\n' + '\n' + 'See also the description of the "try" statement in section ' + '*The try\n' + 'statement* and "raise" statement in section *The raise ' + 'statement*.\n' + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] This limitation occurs because the code that is executed ' + 'by\n' + ' these operations is not available at the time the module ' + 'is\n' + ' compiled.\n', + 'exprlists': '\n' + 'Expression lists\n' + '****************\n' + '\n' + ' expression_list ::= expression ( "," expression )* [","]\n' + '\n' + 'An expression list containing at least one comma yields a ' + 'tuple. The\n' + 'length of the tuple is the number of expressions in the ' + 'list. The\n' + 'expressions are evaluated from left to right.\n' + '\n' + 'The trailing comma is required only to create a single tuple ' + '(a.k.a. a\n' + '*singleton*); it is optional in all other cases. A single ' + 'expression\n' + "without a trailing comma doesn't create a tuple, but rather " + 'yields the\n' + 'value of that expression. (To create an empty tuple, use an ' + 'empty pair\n' + 'of parentheses: "()".)\n', + 'floating': '\n' + 'Floating point literals\n' + '***********************\n' + '\n' + 'Floating point literals are described by the following ' + 'lexical\n' + 'definitions:\n' + '\n' + ' floatnumber ::= pointfloat | exponentfloat\n' + ' pointfloat ::= [intpart] fraction | intpart "."\n' + ' exponentfloat ::= (intpart | pointfloat) exponent\n' + ' intpart ::= digit+\n' + ' fraction ::= "." digit+\n' + ' exponent ::= ("e" | "E") ["+" | "-"] digit+\n' + '\n' + 'Note that the integer and exponent parts are always ' + 'interpreted using\n' + 'radix 10. For example, "077e010" is legal, and denotes the ' + 'same number\n' + 'as "77e10". The allowed range of floating point literals is\n' + 'implementation-dependent. Some examples of floating point ' + 'literals:\n' + '\n' + ' 3.14 10. 
.001 1e100 3.14e-10 0e0\n' + '\n' + 'Note that numeric literals do not include a sign; a phrase ' + 'like "-1"\n' + 'is actually an expression composed of the unary operator "-" ' + 'and the\n' + 'literal "1".\n', + 'for': '\n' + 'The "for" statement\n' + '*******************\n' + '\n' + 'The "for" statement is used to iterate over the elements of a ' + 'sequence\n' + '(such as a string, tuple or list) or other iterable object:\n' + '\n' + ' for_stmt ::= "for" target_list "in" expression_list ":" suite\n' + ' ["else" ":" suite]\n' + '\n' + 'The expression list is evaluated once; it should yield an iterable\n' + 'object. An iterator is created for the result of the\n' + '"expression_list". The suite is then executed once for each item\n' + 'provided by the iterator, in the order returned by the iterator. ' + 'Each\n' + 'item in turn is assigned to the target list using the standard ' + 'rules\n' + 'for assignments (see *Assignment statements*), and then the suite ' + 'is\n' + 'executed. When the items are exhausted (which is immediately when ' + 'the\n' + 'sequence is empty or an iterator raises a "StopIteration" ' + 'exception),\n' + 'the suite in the "else" clause, if present, is executed, and the ' + 'loop\n' + 'terminates.\n' + '\n' + 'A "break" statement executed in the first suite terminates the ' + 'loop\n' + 'without executing the "else" clause\'s suite. A "continue" ' + 'statement\n' + 'executed in the first suite skips the rest of the suite and ' + 'continues\n' + 'with the next item, or with the "else" clause if there is no next\n' + 'item.\n' + '\n' + 'The for-loop makes assignments to the variables(s) in the target ' + 'list.\n' + 'This overwrites all previous assignments to those variables ' + 'including\n' + 'those made in the suite of the for-loop:\n' + '\n' + ' for i in range(10):\n' + ' print(i)\n' + ' i = 5 # this will not affect the for-loop\n' + ' # because i will be overwritten with the ' + 'next\n' + ' # index in the range\n' + '\n' + 'Names in the target list are not deleted when the loop is ' + 'finished,\n' + 'but if the sequence is empty, they will not have been assigned to ' + 'at\n' + 'all by the loop. Hint: the built-in function "range()" returns an\n' + "iterator of integers suitable to emulate the effect of Pascal's " + '"for i\n' + ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, ' + '2]".\n' + '\n' + 'Note: There is a subtlety when the sequence is being modified by ' + 'the\n' + ' loop (this can only occur for mutable sequences, i.e. lists). ' + 'An\n' + ' internal counter is used to keep track of which item is used ' + 'next,\n' + ' and this is incremented on each iteration. When this counter ' + 'has\n' + ' reached the length of the sequence the loop terminates. This ' + 'means\n' + ' that if the suite deletes the current (or a previous) item from ' + 'the\n' + ' sequence, the next item will be skipped (since it gets the index ' + 'of\n' + ' the current item which has already been treated). 
Likewise, if ' + 'the\n' + ' suite inserts an item in the sequence before the current item, ' + 'the\n' + ' current item will be treated again the next time through the ' + 'loop.\n' + ' This can lead to nasty bugs that can be avoided by making a\n' + ' temporary copy using a slice of the whole sequence, e.g.,\n' + '\n' + ' for x in a[:]:\n' + ' if x < 0: a.remove(x)\n', + 'formatstrings': '\n' + 'Format String Syntax\n' + '********************\n' + '\n' + 'The "str.format()" method and the "Formatter" class share ' + 'the same\n' + 'syntax for format strings (although in the case of ' + '"Formatter",\n' + 'subclasses can define their own format string syntax).\n' + '\n' + 'Format strings contain "replacement fields" surrounded by ' + 'curly braces\n' + '"{}". Anything that is not contained in braces is ' + 'considered literal\n' + 'text, which is copied unchanged to the output. If you ' + 'need to include\n' + 'a brace character in the literal text, it can be escaped ' + 'by doubling:\n' + '"{{" and "}}".\n' + '\n' + 'The grammar for a replacement field is as follows:\n' + '\n' + ' replacement_field ::= "{" [field_name] ["!" ' + 'conversion] [":" format_spec] "}"\n' + ' field_name ::= arg_name ("." attribute_name ' + '| "[" element_index "]")*\n' + ' arg_name ::= [identifier | integer]\n' + ' attribute_name ::= identifier\n' + ' element_index ::= integer | index_string\n' + ' index_string ::= +\n' + ' conversion ::= "r" | "s" | "a"\n' + ' format_spec ::= \n' + '\n' + 'In less formal terms, the replacement field can start ' + 'with a\n' + '*field_name* that specifies the object whose value is to ' + 'be formatted\n' + 'and inserted into the output instead of the replacement ' + 'field. The\n' + '*field_name* is optionally followed by a *conversion* ' + 'field, which is\n' + 'preceded by an exclamation point "\'!\'", and a ' + '*format_spec*, which is\n' + 'preceded by a colon "\':\'". These specify a non-default ' + 'format for the\n' + 'replacement value.\n' + '\n' + 'See also the *Format Specification Mini-Language* ' + 'section.\n' + '\n' + 'The *field_name* itself begins with an *arg_name* that is ' + 'either a\n' + "number or a keyword. If it's a number, it refers to a " + 'positional\n' + "argument, and if it's a keyword, it refers to a named " + 'keyword\n' + 'argument. If the numerical arg_names in a format string ' + 'are 0, 1, 2,\n' + '... in sequence, they can all be omitted (not just some) ' + 'and the\n' + 'numbers 0, 1, 2, ... will be automatically inserted in ' + 'that order.\n' + 'Because *arg_name* is not quote-delimited, it is not ' + 'possible to\n' + 'specify arbitrary dictionary keys (e.g., the strings ' + '"\'10\'" or\n' + '"\':-]\'") within a format string. The *arg_name* can be ' + 'followed by any\n' + 'number of index or attribute expressions. 
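# A minimal, illustrative sketch (kept as Python comments) of the "else"
# clause of the "for" statement described earlier: it runs only when the
# loop finishes without hitting "break".
#
#   for n in [3, 5, 9]:
#       if n % 2 == 0:
#           print('even number found')
#           break
#   else:
#       print('no even number found')   # printed here; the loop was not broken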
An expression ' + 'of the form\n' + '"\'.name\'" selects the named attribute using ' + '"getattr()", while an\n' + 'expression of the form "\'[index]\'" does an index lookup ' + 'using\n' + '"__getitem__()".\n' + '\n' + 'Changed in version 3.1: The positional argument ' + 'specifiers can be\n' + 'omitted, so "\'{} {}\'" is equivalent to "\'{0} {1}\'".\n' + '\n' + 'Some simple format string examples:\n' + '\n' + ' "First, thou shalt count to {0}" # References first ' + 'positional argument\n' + ' "Bring me a {}" # Implicitly ' + 'references the first positional argument\n' + ' "From {} to {}" # Same as "From {0} ' + 'to {1}"\n' + ' "My quest is {name}" # References keyword ' + "argument 'name'\n" + ' "Weight in tons {0.weight}" # \'weight\' ' + 'attribute of first positional arg\n' + ' "Units destroyed: {players[0]}" # First element of ' + "keyword argument 'players'.\n" + '\n' + 'The *conversion* field causes a type coercion before ' + 'formatting.\n' + 'Normally, the job of formatting a value is done by the ' + '"__format__()"\n' + 'method of the value itself. However, in some cases it is ' + 'desirable to\n' + 'force a type to be formatted as a string, overriding its ' + 'own\n' + 'definition of formatting. By converting the value to a ' + 'string before\n' + 'calling "__format__()", the normal formatting logic is ' + 'bypassed.\n' + '\n' + 'Three conversion flags are currently supported: "\'!s\'" ' + 'which calls\n' + '"str()" on the value, "\'!r\'" which calls "repr()" and ' + '"\'!a\'" which\n' + 'calls "ascii()".\n' + '\n' + 'Some examples:\n' + '\n' + ' "Harold\'s a clever {0!s}" # Calls str() on the ' + 'argument first\n' + ' "Bring out the holy {name!r}" # Calls repr() on the ' + 'argument first\n' + ' "More {!a}" # Calls ascii() on ' + 'the argument first\n' + '\n' + 'The *format_spec* field contains a specification of how ' + 'the value\n' + 'should be presented, including such details as field ' + 'width, alignment,\n' + 'padding, decimal precision and so on. Each value type ' + 'can define its\n' + 'own "formatting mini-language" or interpretation of the ' + '*format_spec*.\n' + '\n' + 'Most built-in types support a common formatting ' + 'mini-language, which\n' + 'is described in the next section.\n' + '\n' + 'A *format_spec* field can also include nested replacement ' + 'fields\n' + 'within it. These nested replacement fields can contain ' + 'only a field\n' + 'name; conversion flags and format specifications are not ' + 'allowed. The\n' + 'replacement fields within the format_spec are substituted ' + 'before the\n' + '*format_spec* string is interpreted. This allows the ' + 'formatting of a\n' + 'value to be dynamically specified.\n' + '\n' + 'See the *Format examples* section for some examples.\n' + '\n' + '\n' + 'Format Specification Mini-Language\n' + '==================================\n' + '\n' + '"Format specifications" are used within replacement ' + 'fields contained\n' + 'within a format string to define how individual values ' + 'are presented\n' + '(see *Format String Syntax*). They can also be passed ' + 'directly to the\n' + 'built-in "format()" function. 
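# A minimal, illustrative sketch (kept as Python comments) of a nested
# replacement field and the built-in "format()" function mentioned above;
# the width is supplied at call time, so the formatting is decided dynamically.
#
#   >>> '{0:>{width}}'.format('pi', width=6)
#   '    pi'
#   >>> format(3.14159, '.2f')
#   '3.14'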
Each formattable type may ' + 'define how\n' + 'the format specification is to be interpreted.\n' + '\n' + 'Most built-in types implement the following options for ' + 'format\n' + 'specifications, although some of the formatting options ' + 'are only\n' + 'supported by the numeric types.\n' + '\n' + 'A general convention is that an empty format string ' + '("""") produces\n' + 'the same result as if you had called "str()" on the ' + 'value. A non-empty\n' + 'format string typically modifies the result.\n' + '\n' + 'The general form of a *standard format specifier* is:\n' + '\n' + ' format_spec ::= ' + '[[fill]align][sign][#][0][width][,][.precision][type]\n' + ' fill ::= \n' + ' align ::= "<" | ">" | "=" | "^"\n' + ' sign ::= "+" | "-" | " "\n' + ' width ::= integer\n' + ' precision ::= integer\n' + ' type ::= "b" | "c" | "d" | "e" | "E" | "f" | ' + '"F" | "g" | "G" | "n" | "o" | "s" | "x" | "X" | "%"\n' + '\n' + 'If a valid *align* value is specified, it can be preceded ' + 'by a *fill*\n' + 'character that can be any character and defaults to a ' + 'space if\n' + 'omitted. Note that it is not possible to use "{" and "}" ' + 'as *fill*\n' + 'char while using the "str.format()" method; this ' + 'limitation however\n' + 'doesn\'t affect the "format()" function.\n' + '\n' + 'The meaning of the various alignment options is as ' + 'follows:\n' + '\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | Option | ' + 'Meaning ' + '|\n' + ' ' + '+===========+============================================================+\n' + ' | "\'<\'" | Forces the field to be left-aligned ' + 'within the available |\n' + ' | | space (this is the default for most ' + 'objects). |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'>\'" | Forces the field to be right-aligned ' + 'within the available |\n' + ' | | space (this is the default for ' + 'numbers). |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'=\'" | Forces the padding to be placed after ' + 'the sign (if any) |\n' + ' | | but before the digits. This is used for ' + 'printing fields |\n' + " | | in the form '+000000120'. This alignment " + 'option is only |\n' + ' | | valid for numeric ' + 'types. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'^\'" | Forces the field to be centered within ' + 'the available |\n' + ' | | ' + 'space. ' + '|\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + '\n' + 'Note that unless a minimum field width is defined, the ' + 'field width\n' + 'will always be the same size as the data to fill it, so ' + 'that the\n' + 'alignment option has no meaning in this case.\n' + '\n' + 'The *sign* option is only valid for number types, and can ' + 'be one of\n' + 'the following:\n' + '\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | Option | ' + 'Meaning ' + '|\n' + ' ' + '+===========+============================================================+\n' + ' | "\'+\'" | indicates that a sign should be used ' + 'for both positive as |\n' + ' | | well as negative ' + 'numbers. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'-\'" | indicates that a sign should be used ' + 'only for negative |\n' + ' | | numbers (this is the default ' + 'behavior). 
|\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | space | indicates that a leading space should be ' + 'used on positive |\n' + ' | | numbers, and a minus sign on negative ' + 'numbers. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + '\n' + 'The "\'#\'" option causes the "alternate form" to be used ' + 'for the\n' + 'conversion. The alternate form is defined differently ' + 'for different\n' + 'types. This option is only valid for integer, float, ' + 'complex and\n' + 'Decimal types. For integers, when binary, octal, or ' + 'hexadecimal output\n' + 'is used, this option adds the prefix respective "\'0b\'", ' + '"\'0o\'", or\n' + '"\'0x\'" to the output value. For floats, complex and ' + 'Decimal the\n' + 'alternate form causes the result of the conversion to ' + 'always contain a\n' + 'decimal-point character, even if no digits follow it. ' + 'Normally, a\n' + 'decimal-point character appears in the result of these ' + 'conversions\n' + 'only if a digit follows it. In addition, for "\'g\'" and ' + '"\'G\'"\n' + 'conversions, trailing zeros are not removed from the ' + 'result.\n' + '\n' + 'The "\',\'" option signals the use of a comma for a ' + 'thousands separator.\n' + 'For a locale aware separator, use the "\'n\'" integer ' + 'presentation type\n' + 'instead.\n' + '\n' + 'Changed in version 3.1: Added the "\',\'" option (see ' + 'also **PEP 378**).\n' + '\n' + '*width* is a decimal integer defining the minimum field ' + 'width. If not\n' + 'specified, then the field width will be determined by the ' + 'content.\n' + '\n' + 'Preceding the *width* field by a zero ("\'0\'") character ' + 'enables sign-\n' + 'aware zero-padding for numeric types. This is equivalent ' + 'to a *fill*\n' + 'character of "\'0\'" with an *alignment* type of ' + '"\'=\'".\n' + '\n' + 'The *precision* is a decimal number indicating how many ' + 'digits should\n' + 'be displayed after the decimal point for a floating point ' + 'value\n' + 'formatted with "\'f\'" and "\'F\'", or before and after ' + 'the decimal point\n' + 'for a floating point value formatted with "\'g\'" or ' + '"\'G\'". For non-\n' + 'number types the field indicates the maximum field size - ' + 'in other\n' + 'words, how many characters will be used from the field ' + 'content. The\n' + '*precision* is not allowed for integer values.\n' + '\n' + 'Finally, the *type* determines how the data should be ' + 'presented.\n' + '\n' + 'The available string presentation types are:\n' + '\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | Type | ' + 'Meaning ' + '|\n' + ' ' + '+===========+============================================================+\n' + ' | "\'s\'" | String format. This is the default ' + 'type for strings and |\n' + ' | | may be ' + 'omitted. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | None | The same as ' + '"\'s\'". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + '\n' + 'The available integer presentation types are:\n' + '\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | Type | ' + 'Meaning ' + '|\n' + ' ' + '+===========+============================================================+\n' + ' | "\'b\'" | Binary format. Outputs the number in ' + 'base 2. 
|\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'c\'" | Character. Converts the integer to the ' + 'corresponding |\n' + ' | | unicode character before ' + 'printing. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'d\'" | Decimal Integer. Outputs the number in ' + 'base 10. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'o\'" | Octal format. Outputs the number in ' + 'base 8. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'x\'" | Hex format. Outputs the number in base ' + '16, using lower- |\n' + ' | | case letters for the digits above ' + '9. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'X\'" | Hex format. Outputs the number in base ' + '16, using upper- |\n' + ' | | case letters for the digits above ' + '9. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'n\'" | Number. This is the same as "\'d\'", ' + 'except that it uses the |\n' + ' | | current locale setting to insert the ' + 'appropriate number |\n' + ' | | separator ' + 'characters. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | None | The same as ' + '"\'d\'". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + '\n' + 'In addition to the above presentation types, integers can ' + 'be formatted\n' + 'with the floating point presentation types listed below ' + '(except "\'n\'"\n' + 'and None). When doing so, "float()" is used to convert ' + 'the integer to\n' + 'a floating point number before formatting.\n' + '\n' + 'The available presentation types for floating point and ' + 'decimal values\n' + 'are:\n' + '\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | Type | ' + 'Meaning ' + '|\n' + ' ' + '+===========+============================================================+\n' + ' | "\'e\'" | Exponent notation. Prints the number ' + 'in scientific |\n' + " | | notation using the letter 'e' to " + 'indicate the exponent. |\n' + ' | | The default precision is ' + '"6". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'E\'" | Exponent notation. Same as "\'e\'" ' + 'except it uses an upper |\n' + " | | case 'E' as the separator " + 'character. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'f\'" | Fixed point. Displays the number as a ' + 'fixed-point number. |\n' + ' | | The default precision is ' + '"6". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'F\'" | Fixed point. Same as "\'f\'", but ' + 'converts "nan" to "NAN" |\n' + ' | | and "inf" to ' + '"INF". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'g\'" | General format. For a given precision ' + '"p >= 1", this |\n' + ' | | rounds the number to "p" significant ' + 'digits and then |\n' + ' | | formats the result in either fixed-point ' + 'format or in |\n' + ' | | scientific notation, depending on its ' + 'magnitude. The |\n' + ' | | precise rules are as follows: suppose ' + 'that the result |\n' + ' | | formatted with presentation type "\'e\'" ' + 'and precision "p-1" |\n' + ' | | would have exponent "exp". 
Then if "-4 ' + '<= exp < p", the |\n' + ' | | number is formatted with presentation ' + 'type "\'f\'" and |\n' + ' | | precision "p-1-exp". Otherwise, the ' + 'number is formatted |\n' + ' | | with presentation type "\'e\'" and ' + 'precision "p-1". In both |\n' + ' | | cases insignificant trailing zeros are ' + 'removed from the |\n' + ' | | significand, and the decimal point is ' + 'also removed if |\n' + ' | | there are no remaining digits following ' + 'it. Positive and |\n' + ' | | negative infinity, positive and negative ' + 'zero, and nans, |\n' + ' | | are formatted as "inf", "-inf", "0", ' + '"-0" and "nan" |\n' + ' | | respectively, regardless of the ' + 'precision. A precision of |\n' + ' | | "0" is treated as equivalent to a ' + 'precision of "1". The |\n' + ' | | default precision is ' + '"6". |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'G\'" | General format. Same as "\'g\'" except ' + 'switches to "\'E\'" if |\n' + ' | | the number gets too large. The ' + 'representations of infinity |\n' + ' | | and NaN are uppercased, ' + 'too. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'n\'" | Number. This is the same as "\'g\'", ' + 'except that it uses the |\n' + ' | | current locale setting to insert the ' + 'appropriate number |\n' + ' | | separator ' + 'characters. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | "\'%\'" | Percentage. Multiplies the number by ' + '100 and displays in |\n' + ' | | fixed ("\'f\'") format, followed by a ' + 'percent sign. |\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + ' | None | Similar to "\'g\'", except that ' + 'fixed-point notation, when |\n' + ' | | used, has at least one digit past the ' + 'decimal point. The |\n' + ' | | default precision is as high as needed ' + 'to represent the |\n' + ' | | particular value. The overall effect is ' + 'to match the |\n' + ' | | output of "str()" as altered by the ' + 'other format |\n' + ' | | ' + 'modifiers. ' + '|\n' + ' ' + '+-----------+------------------------------------------------------------+\n' + '\n' + '\n' + 'Format examples\n' + '===============\n' + '\n' + 'This section contains examples of the new format syntax ' + 'and comparison\n' + 'with the old "%"-formatting.\n' + '\n' + 'In most of the cases the syntax is similar to the old ' + '"%"-formatting,\n' + 'with the addition of the "{}" and with ":" used instead ' + 'of "%". 
For\n' + 'example, "\'%03.2f\'" can be translated to ' + '"\'{:03.2f}\'".\n' + '\n' + 'The new format syntax also supports new and different ' + 'options, shown\n' + 'in the follow examples.\n' + '\n' + 'Accessing arguments by position:\n' + '\n' + " >>> '{0}, {1}, {2}'.format('a', 'b', 'c')\n" + " 'a, b, c'\n" + " >>> '{}, {}, {}'.format('a', 'b', 'c') # 3.1+ only\n" + " 'a, b, c'\n" + " >>> '{2}, {1}, {0}'.format('a', 'b', 'c')\n" + " 'c, b, a'\n" + " >>> '{2}, {1}, {0}'.format(*'abc') # unpacking " + 'argument sequence\n' + " 'c, b, a'\n" + " >>> '{0}{1}{0}'.format('abra', 'cad') # arguments' " + 'indices can be repeated\n' + " 'abracadabra'\n" + '\n' + 'Accessing arguments by name:\n' + '\n' + " >>> 'Coordinates: {latitude}, " + "{longitude}'.format(latitude='37.24N', " + "longitude='-115.81W')\n" + " 'Coordinates: 37.24N, -115.81W'\n" + " >>> coord = {'latitude': '37.24N', 'longitude': " + "'-115.81W'}\n" + " >>> 'Coordinates: {latitude}, " + "{longitude}'.format(**coord)\n" + " 'Coordinates: 37.24N, -115.81W'\n" + '\n' + "Accessing arguments' attributes:\n" + '\n' + ' >>> c = 3-5j\n' + " >>> ('The complex number {0} is formed from the real " + "part {0.real} '\n" + " ... 'and the imaginary part {0.imag}.').format(c)\n" + " 'The complex number (3-5j) is formed from the real " + "part 3.0 and the imaginary part -5.0.'\n" + ' >>> class Point:\n' + ' ... def __init__(self, x, y):\n' + ' ... self.x, self.y = x, y\n' + ' ... def __str__(self):\n' + " ... return 'Point({self.x}, " + "{self.y})'.format(self=self)\n" + ' ...\n' + ' >>> str(Point(4, 2))\n' + " 'Point(4, 2)'\n" + '\n' + "Accessing arguments' items:\n" + '\n' + ' >>> coord = (3, 5)\n' + " >>> 'X: {0[0]}; Y: {0[1]}'.format(coord)\n" + " 'X: 3; Y: 5'\n" + '\n' + 'Replacing "%s" and "%r":\n' + '\n' + ' >>> "repr() shows quotes: {!r}; str() doesn\'t: ' + '{!s}".format(\'test1\', \'test2\')\n' + ' "repr() shows quotes: \'test1\'; str() doesn\'t: ' + 'test2"\n' + '\n' + 'Aligning the text and specifying a width:\n' + '\n' + " >>> '{:<30}'.format('left aligned')\n" + " 'left aligned '\n" + " >>> '{:>30}'.format('right aligned')\n" + " ' right aligned'\n" + " >>> '{:^30}'.format('centered')\n" + " ' centered '\n" + " >>> '{:*^30}'.format('centered') # use '*' as a fill " + 'char\n' + " '***********centered***********'\n" + '\n' + 'Replacing "%+f", "%-f", and "% f" and specifying a sign:\n' + '\n' + " >>> '{:+f}; {:+f}'.format(3.14, -3.14) # show it " + 'always\n' + " '+3.140000; -3.140000'\n" + " >>> '{: f}; {: f}'.format(3.14, -3.14) # show a space " + 'for positive numbers\n' + " ' 3.140000; -3.140000'\n" + " >>> '{:-f}; {:-f}'.format(3.14, -3.14) # show only " + "the minus -- same as '{:f}; {:f}'\n" + " '3.140000; -3.140000'\n" + '\n' + 'Replacing "%x" and "%o" and converting the value to ' + 'different bases:\n' + '\n' + ' >>> # format also supports binary numbers\n' + ' >>> "int: {0:d}; hex: {0:x}; oct: {0:o}; bin: ' + '{0:b}".format(42)\n' + " 'int: 42; hex: 2a; oct: 52; bin: 101010'\n" + ' >>> # with 0x, 0o, or 0b as prefix:\n' + ' >>> "int: {0:d}; hex: {0:#x}; oct: {0:#o}; bin: ' + '{0:#b}".format(42)\n' + " 'int: 42; hex: 0x2a; oct: 0o52; bin: 0b101010'\n" + '\n' + 'Using the comma as a thousands separator:\n' + '\n' + " >>> '{:,}'.format(1234567890)\n" + " '1,234,567,890'\n" + '\n' + 'Expressing a percentage:\n' + '\n' + ' >>> points = 19\n' + ' >>> total = 22\n' + " >>> 'Correct answers: {:.2%}'.format(points/total)\n" + " 'Correct answers: 86.36%'\n" + '\n' + 'Using type-specific formatting:\n' + '\n' + ' >>> import 
datetime\n' + ' >>> d = datetime.datetime(2010, 7, 4, 12, 15, 58)\n' + " >>> '{:%Y-%m-%d %H:%M:%S}'.format(d)\n" + " '2010-07-04 12:15:58'\n" + '\n' + 'Nesting arguments and more complex examples:\n' + '\n' + " >>> for align, text in zip('<^>', ['left', 'center', " + "'right']):\n" + " ... '{0:{fill}{align}16}'.format(text, fill=align, " + 'align=align)\n' + ' ...\n' + " 'left<<<<<<<<<<<<'\n" + " '^^^^^center^^^^^'\n" + " '>>>>>>>>>>>right'\n" + ' >>>\n' + ' >>> octets = [192, 168, 0, 1]\n' + " >>> '{:02X}{:02X}{:02X}{:02X}'.format(*octets)\n" + " 'C0A80001'\n" + ' >>> int(_, 16)\n' + ' 3232235521\n' + ' >>>\n' + ' >>> width = 5\n' + ' >>> for num in range(5,12): #doctest: ' + '+NORMALIZE_WHITESPACE\n' + " ... for base in 'dXob':\n" + " ... print('{0:{width}{base}}'.format(num, " + "base=base, width=width), end=' ')\n" + ' ... print()\n' + ' ...\n' + ' 5 5 5 101\n' + ' 6 6 6 110\n' + ' 7 7 7 111\n' + ' 8 8 10 1000\n' + ' 9 9 11 1001\n' + ' 10 A 12 1010\n' + ' 11 B 13 1011\n', + 'function': '\n' + 'Function definitions\n' + '********************\n' + '\n' + 'A function definition defines a user-defined function object ' + '(see\n' + 'section *The standard type hierarchy*):\n' + '\n' + ' funcdef ::= [decorators] "def" funcname "(" ' + '[parameter_list] ")" ["->" expression] ":" suite\n' + ' decorators ::= decorator+\n' + ' decorator ::= "@" dotted_name ["(" [parameter_list ' + '[","]] ")"] NEWLINE\n' + ' dotted_name ::= identifier ("." identifier)*\n' + ' parameter_list ::= (defparameter ",")*\n' + ' | "*" [parameter] ("," defparameter)* ' + '["," "**" parameter]\n' + ' | "**" parameter\n' + ' | defparameter [","] )\n' + ' parameter ::= identifier [":" expression]\n' + ' defparameter ::= parameter ["=" expression]\n' + ' funcname ::= identifier\n' + '\n' + 'A function definition is an executable statement. Its ' + 'execution binds\n' + 'the function name in the current local namespace to a function ' + 'object\n' + '(a wrapper around the executable code for the function). ' + 'This\n' + 'function object contains a reference to the current global ' + 'namespace\n' + 'as the global namespace to be used when the function is ' + 'called.\n' + '\n' + 'The function definition does not execute the function body; ' + 'this gets\n' + 'executed only when the function is called. [3]\n' + '\n' + 'A function definition may be wrapped by one or more ' + '*decorator*\n' + 'expressions. Decorator expressions are evaluated when the ' + 'function is\n' + 'defined, in the scope that contains the function definition. ' + 'The\n' + 'result must be a callable, which is invoked with the function ' + 'object\n' + 'as the only argument. The returned value is bound to the ' + 'function name\n' + 'instead of the function object. Multiple decorators are ' + 'applied in\n' + 'nested fashion. For example, the following code\n' + '\n' + ' @f1(arg)\n' + ' @f2\n' + ' def func(): pass\n' + '\n' + 'is equivalent to\n' + '\n' + ' def func(): pass\n' + ' func = f1(arg)(f2(func))\n' + '\n' + 'When one or more *parameters* have the form *parameter* "="\n' + '*expression*, the function is said to have "default parameter ' + 'values."\n' + 'For a parameter with a default value, the corresponding ' + '*argument* may\n' + "be omitted from a call, in which case the parameter's default " + 'value is\n' + 'substituted. 
If a parameter has a default value, all ' + 'following\n' + 'parameters up until the ""*"" must also have a default value ' + '--- this\n' + 'is a syntactic restriction that is not expressed by the ' + 'grammar.\n' + '\n' + '**Default parameter values are evaluated from left to right ' + 'when the\n' + 'function definition is executed.** This means that the ' + 'expression is\n' + 'evaluated once, when the function is defined, and that the ' + 'same "pre-\n' + 'computed" value is used for each call. This is especially ' + 'important\n' + 'to understand when a default parameter is a mutable object, ' + 'such as a\n' + 'list or a dictionary: if the function modifies the object ' + '(e.g. by\n' + 'appending an item to a list), the default value is in effect ' + 'modified.\n' + 'This is generally not what was intended. A way around this is ' + 'to use\n' + '"None" as the default, and explicitly test for it in the body ' + 'of the\n' + 'function, e.g.:\n' + '\n' + ' def whats_on_the_telly(penguin=None):\n' + ' if penguin is None:\n' + ' penguin = []\n' + ' penguin.append("property of the zoo")\n' + ' return penguin\n' + '\n' + 'Function call semantics are described in more detail in ' + 'section\n' + '*Calls*. A function call always assigns values to all ' + 'parameters\n' + 'mentioned in the parameter list, either from position ' + 'arguments, from\n' + 'keyword arguments, or from default values. If the form\n' + '""*identifier"" is present, it is initialized to a tuple ' + 'receiving any\n' + 'excess positional parameters, defaulting to the empty tuple. ' + 'If the\n' + 'form ""**identifier"" is present, it is initialized to a new\n' + 'dictionary receiving any excess keyword arguments, defaulting ' + 'to a new\n' + 'empty dictionary. Parameters after ""*"" or ""*identifier"" ' + 'are\n' + 'keyword-only parameters and may only be passed used keyword ' + 'arguments.\n' + '\n' + 'Parameters may have annotations of the form "": expression"" ' + 'following\n' + 'the parameter name. Any parameter may have an annotation even ' + 'those\n' + 'of the form "*identifier" or "**identifier". Functions may ' + 'have\n' + '"return" annotation of the form ""-> expression"" after the ' + 'parameter\n' + 'list. These annotations can be any valid Python expression ' + 'and are\n' + 'evaluated when the function definition is executed. ' + 'Annotations may\n' + 'be evaluated in a different order than they appear in the ' + 'source code.\n' + 'The presence of annotations does not change the semantics of ' + 'a\n' + 'function. The annotation values are available as values of a\n' + "dictionary keyed by the parameters' names in the " + '"__annotations__"\n' + 'attribute of the function object.\n' + '\n' + 'It is also possible to create anonymous functions (functions ' + 'not bound\n' + 'to a name), for immediate use in expressions. This uses ' + 'lambda\n' + 'expressions, described in section *Lambdas*. Note that the ' + 'lambda\n' + 'expression is merely a shorthand for a simplified function ' + 'definition;\n' + 'a function defined in a ""def"" statement can be passed around ' + 'or\n' + 'assigned to another name just like a function defined by a ' + 'lambda\n' + 'expression. The ""def"" form is actually more powerful since ' + 'it\n' + 'allows the execution of multiple statements and annotations.\n' + '\n' + "**Programmer's note:** Functions are first-class objects. A " + '""def""\n' + 'statement executed inside a function definition defines a ' + 'local\n' + 'function that can be returned or passed around. 
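# A minimal, illustrative sketch (kept as Python comments) of keyword-only
# parameters and annotations as described above; the function and parameter
# names are made up for illustration.
#
#   def scale(values: list, *, factor: float = 2.0) -> list:
#       return [v * factor for v in values]
#
#   scale([1, 2, 3], factor=10)    # OK: keyword-only argument passed by keyword
#   scale([1, 2, 3], 10)           # TypeError: too many positional arguments
#   scale.__annotations__          # {'values': list, 'factor': float, 'return': list}
#                                  # (dictionary order may vary)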
Free ' + 'variables used\n' + 'in the nested function can access the local variables of the ' + 'function\n' + 'containing the def. See section *Naming and binding* for ' + 'details.\n' + '\n' + 'See also: **PEP 3107** - Function Annotations\n' + '\n' + ' The original specification for function annotations.\n', + 'global': '\n' + 'The "global" statement\n' + '**********************\n' + '\n' + ' global_stmt ::= "global" identifier ("," identifier)*\n' + '\n' + 'The "global" statement is a declaration which holds for the ' + 'entire\n' + 'current code block. It means that the listed identifiers are to ' + 'be\n' + 'interpreted as globals. It would be impossible to assign to a ' + 'global\n' + 'variable without "global", although free variables may refer to\n' + 'globals without being declared global.\n' + '\n' + 'Names listed in a "global" statement must not be used in the ' + 'same code\n' + 'block textually preceding that "global" statement.\n' + '\n' + 'Names listed in a "global" statement must not be defined as ' + 'formal\n' + 'parameters or in a "for" loop control target, "class" ' + 'definition,\n' + 'function definition, or "import" statement.\n' + '\n' + '**CPython implementation detail:** The current implementation ' + 'does not\n' + 'enforce the two restrictions, but programs should not abuse ' + 'this\n' + 'freedom, as future implementations may enforce them or silently ' + 'change\n' + 'the meaning of the program.\n' + '\n' + '**Programmer\'s note:** the "global" is a directive to the ' + 'parser. It\n' + 'applies only to code parsed at the same time as the "global"\n' + 'statement. In particular, a "global" statement contained in a ' + 'string\n' + 'or code object supplied to the built-in "exec()" function does ' + 'not\n' + 'affect the code block *containing* the function call, and code\n' + 'contained in such a string is unaffected by "global" statements ' + 'in the\n' + 'code containing the function call. The same applies to the ' + '"eval()"\n' + 'and "compile()" functions.\n', + 'id-classes': '\n' + 'Reserved classes of identifiers\n' + '*******************************\n' + '\n' + 'Certain classes of identifiers (besides keywords) have ' + 'special\n' + 'meanings. These classes are identified by the patterns of ' + 'leading and\n' + 'trailing underscore characters:\n' + '\n' + '"_*"\n' + ' Not imported by "from module import *". The special ' + 'identifier "_"\n' + ' is used in the interactive interpreter to store the ' + 'result of the\n' + ' last evaluation; it is stored in the "builtins" module. ' + 'When not\n' + ' in interactive mode, "_" has no special meaning and is ' + 'not defined.\n' + ' See section *The import statement*.\n' + '\n' + ' Note: The name "_" is often used in conjunction with\n' + ' internationalization; refer to the documentation for ' + 'the\n' + ' "gettext" module for more information on this ' + 'convention.\n' + '\n' + '"__*__"\n' + ' System-defined names. These names are defined by the ' + 'interpreter\n' + ' and its implementation (including the standard library). ' + 'Current\n' + ' system names are discussed in the *Special method names* ' + 'section\n' + ' and elsewhere. More will likely be defined in future ' + 'versions of\n' + ' Python. *Any* use of "__*__" names, in any context, that ' + 'does not\n' + ' follow explicitly documented use, is subject to breakage ' + 'without\n' + ' warning.\n' + '\n' + '"__*"\n' + ' Class-private names. 
Names in this category, when used ' + 'within the\n' + ' context of a class definition, are re-written to use a ' + 'mangled form\n' + ' to help avoid name clashes between "private" attributes ' + 'of base and\n' + ' derived classes. See section *Identifiers (Names)*.\n', + 'identifiers': '\n' + 'Identifiers and keywords\n' + '************************\n' + '\n' + 'Identifiers (also referred to as *names*) are described by ' + 'the\n' + 'following lexical definitions.\n' + '\n' + 'The syntax of identifiers in Python is based on the Unicode ' + 'standard\n' + 'annex UAX-31, with elaboration and changes as defined ' + 'below; see also\n' + '**PEP 3131** for further details.\n' + '\n' + 'Within the ASCII range (U+0001..U+007F), the valid ' + 'characters for\n' + 'identifiers are the same as in Python 2.x: the uppercase ' + 'and lowercase\n' + 'letters "A" through "Z", the underscore "_" and, except for ' + 'the first\n' + 'character, the digits "0" through "9".\n' + '\n' + 'Python 3.0 introduces additional characters from outside ' + 'the ASCII\n' + 'range (see **PEP 3131**). For these characters, the ' + 'classification\n' + 'uses the version of the Unicode Character Database as ' + 'included in the\n' + '"unicodedata" module.\n' + '\n' + 'Identifiers are unlimited in length. Case is significant.\n' + '\n' + ' identifier ::= xid_start xid_continue*\n' + ' id_start ::= \n' + ' id_continue ::= \n' + ' xid_start ::= \n' + ' xid_continue ::= \n' + '\n' + 'The Unicode category codes mentioned above stand for:\n' + '\n' + '* *Lu* - uppercase letters\n' + '\n' + '* *Ll* - lowercase letters\n' + '\n' + '* *Lt* - titlecase letters\n' + '\n' + '* *Lm* - modifier letters\n' + '\n' + '* *Lo* - other letters\n' + '\n' + '* *Nl* - letter numbers\n' + '\n' + '* *Mn* - nonspacing marks\n' + '\n' + '* *Mc* - spacing combining marks\n' + '\n' + '* *Nd* - decimal numbers\n' + '\n' + '* *Pc* - connector punctuations\n' + '\n' + '* *Other_ID_Start* - explicit list of characters in ' + 'PropList.txt to\n' + ' support backwards compatibility\n' + '\n' + '* *Other_ID_Continue* - likewise\n' + '\n' + 'All identifiers are converted into the normal form NFKC ' + 'while parsing;\n' + 'comparison of identifiers is based on NFKC.\n' + '\n' + 'A non-normative HTML file listing all valid identifier ' + 'characters for\n' + 'Unicode 4.1 can be found at http://www.dcl.hpi.uni-\n' + 'potsdam.de/home/loewis/table-3131.html.\n' + '\n' + '\n' + 'Keywords\n' + '========\n' + '\n' + 'The following identifiers are used as reserved words, or ' + '*keywords* of\n' + 'the language, and cannot be used as ordinary identifiers. ' + 'They must\n' + 'be spelled exactly as written here:\n' + '\n' + ' False class finally is return\n' + ' None continue for lambda try\n' + ' True def from nonlocal while\n' + ' and del global not with\n' + ' as elif if or yield\n' + ' assert else import pass\n' + ' break except in raise\n' + '\n' + '\n' + 'Reserved classes of identifiers\n' + '===============================\n' + '\n' + 'Certain classes of identifiers (besides keywords) have ' + 'special\n' + 'meanings. These classes are identified by the patterns of ' + 'leading and\n' + 'trailing underscore characters:\n' + '\n' + '"_*"\n' + ' Not imported by "from module import *". The special ' + 'identifier "_"\n' + ' is used in the interactive interpreter to store the ' + 'result of the\n' + ' last evaluation; it is stored in the "builtins" module. 
' + 'When not\n' + ' in interactive mode, "_" has no special meaning and is ' + 'not defined.\n' + ' See section *The import statement*.\n' + '\n' + ' Note: The name "_" is often used in conjunction with\n' + ' internationalization; refer to the documentation for ' + 'the\n' + ' "gettext" module for more information on this ' + 'convention.\n' + '\n' + '"__*__"\n' + ' System-defined names. These names are defined by the ' + 'interpreter\n' + ' and its implementation (including the standard ' + 'library). Current\n' + ' system names are discussed in the *Special method names* ' + 'section\n' + ' and elsewhere. More will likely be defined in future ' + 'versions of\n' + ' Python. *Any* use of "__*__" names, in any context, ' + 'that does not\n' + ' follow explicitly documented use, is subject to breakage ' + 'without\n' + ' warning.\n' + '\n' + '"__*"\n' + ' Class-private names. Names in this category, when used ' + 'within the\n' + ' context of a class definition, are re-written to use a ' + 'mangled form\n' + ' to help avoid name clashes between "private" attributes ' + 'of base and\n' + ' derived classes. See section *Identifiers (Names)*.\n', + 'if': '\n' + 'The "if" statement\n' + '******************\n' + '\n' + 'The "if" statement is used for conditional execution:\n' + '\n' + ' if_stmt ::= "if" expression ":" suite\n' + ' ( "elif" expression ":" suite )*\n' + ' ["else" ":" suite]\n' + '\n' + 'It selects exactly one of the suites by evaluating the expressions ' + 'one\n' + 'by one until one is found to be true (see section *Boolean ' + 'operations*\n' + 'for the definition of true and false); then that suite is executed\n' + '(and no other part of the "if" statement is executed or evaluated).\n' + 'If all expressions are false, the suite of the "else" clause, if\n' + 'present, is executed.\n', + 'imaginary': '\n' + 'Imaginary literals\n' + '******************\n' + '\n' + 'Imaginary literals are described by the following lexical ' + 'definitions:\n' + '\n' + ' imagnumber ::= (floatnumber | intpart) ("j" | "J")\n' + '\n' + 'An imaginary literal yields a complex number with a real part ' + 'of 0.0.\n' + 'Complex numbers are represented as a pair of floating point ' + 'numbers\n' + 'and have the same restrictions on their range. To create a ' + 'complex\n' + 'number with a nonzero real part, add a floating point number ' + 'to it,\n' + 'e.g., "(3+4j)". Some examples of imaginary literals:\n' + '\n' + ' 3.14j 10.j 10j .001j 1e100j 3.14e-10j\n', + 'import': '\n' + 'The "import" statement\n' + '**********************\n' + '\n' + ' import_stmt ::= "import" module ["as" name] ( "," module ' + '["as" name] )*\n' + ' | "from" relative_module "import" identifier ' + '["as" name]\n' + ' ( "," identifier ["as" name] )*\n' + ' | "from" relative_module "import" "(" ' + 'identifier ["as" name]\n' + ' ( "," identifier ["as" name] )* [","] ")"\n' + ' | "from" module "import" "*"\n' + ' module ::= (identifier ".")* identifier\n' + ' relative_module ::= "."* module | "."+\n' + ' name ::= identifier\n' + '\n' + 'The basic import statement (no "from" clause) is executed in ' + 'two\n' + 'steps:\n' + '\n' + '1. find a module, loading and initializing it if necessary\n' + '\n' + '2. 
define a name or names in the local namespace for the scope\n' + ' where the "import" statement occurs.\n' + '\n' + 'When the statement contains multiple clauses (separated by ' + 'commas) the\n' + 'two steps are carried out separately for each clause, just as ' + 'though\n' + 'the clauses had been separated out into individiual import ' + 'statements.\n' + '\n' + 'The details of the first step, finding and loading modules are\n' + 'described in greater detail in the section on the *import ' + 'system*,\n' + 'which also describes the various types of packages and modules ' + 'that\n' + 'can be imported, as well as all the hooks that can be used to\n' + 'customize the import system. Note that failures in this step ' + 'may\n' + 'indicate either that the module could not be located, *or* that ' + 'an\n' + 'error occurred while initializing the module, which includes ' + 'execution\n' + "of the module's code.\n" + '\n' + 'If the requested module is retrieved successfully, it will be ' + 'made\n' + 'available in the local namespace in one of three ways:\n' + '\n' + '* If the module name is followed by "as", then the name ' + 'following\n' + ' "as" is bound directly to the imported module.\n' + '\n' + '* If no other name is specified, and the module being imported ' + 'is a\n' + " top level module, the module's name is bound in the local " + 'namespace\n' + ' as a reference to the imported module\n' + '\n' + '* If the module being imported is *not* a top level module, then ' + 'the\n' + ' name of the top level package that contains the module is ' + 'bound in\n' + ' the local namespace as a reference to the top level package. ' + 'The\n' + ' imported module must be accessed using its full qualified ' + 'name\n' + ' rather than directly\n' + '\n' + 'The "from" form uses a slightly more complex process:\n' + '\n' + '1. find the module specified in the "from" clause, loading and\n' + ' initializing it if necessary;\n' + '\n' + '2. for each of the identifiers specified in the "import" ' + 'clauses:\n' + '\n' + ' 1. check if the imported module has an attribute by that ' + 'name\n' + '\n' + ' 2. if not, attempt to import a submodule with that name and ' + 'then\n' + ' check the imported module again for that attribute\n' + '\n' + ' 3. if the attribute is not found, "ImportError" is raised.\n' + '\n' + ' 4. otherwise, a reference to that value is stored in the ' + 'local\n' + ' namespace, using the name in the "as" clause if it is ' + 'present,\n' + ' otherwise using the attribute name\n' + '\n' + 'Examples:\n' + '\n' + ' import foo # foo imported and bound locally\n' + ' import foo.bar.baz # foo.bar.baz imported, foo bound ' + 'locally\n' + ' import foo.bar.baz as fbb # foo.bar.baz imported and bound ' + 'as fbb\n' + ' from foo.bar import baz # foo.bar.baz imported and bound ' + 'as baz\n' + ' from foo import attr # foo imported and foo.attr bound ' + 'as attr\n' + '\n' + 'If the list of identifiers is replaced by a star ("\'*\'"), all ' + 'public\n' + 'names defined in the module are bound in the local namespace for ' + 'the\n' + 'scope where the "import" statement occurs.\n' + '\n' + 'The *public names* defined by a module are determined by ' + 'checking the\n' + 'module\'s namespace for a variable named "__all__"; if defined, ' + 'it must\n' + 'be a sequence of strings which are names defined or imported by ' + 'that\n' + 'module. The names given in "__all__" are all considered public ' + 'and\n' + 'are required to exist. 
If "__all__" is not defined, the set of ' + 'public\n' + "names includes all names found in the module's namespace which " + 'do not\n' + 'begin with an underscore character ("\'_\'"). "__all__" should ' + 'contain\n' + 'the entire public API. It is intended to avoid accidentally ' + 'exporting\n' + 'items that are not part of the API (such as library modules ' + 'which were\n' + 'imported and used within the module).\n' + '\n' + 'The wild card form of import --- "from module import *" --- is ' + 'only\n' + 'allowed at the module level. Attempting to use it in class or\n' + 'function definitions will raise a "SyntaxError".\n' + '\n' + 'When specifying what module to import you do not have to specify ' + 'the\n' + 'absolute name of the module. When a module or package is ' + 'contained\n' + 'within another package it is possible to make a relative import ' + 'within\n' + 'the same top package without having to mention the package name. ' + 'By\n' + 'using leading dots in the specified module or package after ' + '"from" you\n' + 'can specify how high to traverse up the current package ' + 'hierarchy\n' + 'without specifying exact names. One leading dot means the ' + 'current\n' + 'package where the module making the import exists. Two dots ' + 'means up\n' + 'one package level. Three dots is up two levels, etc. So if you ' + 'execute\n' + '"from . import mod" from a module in the "pkg" package then you ' + 'will\n' + 'end up importing "pkg.mod". If you execute "from ..subpkg2 ' + 'import mod"\n' + 'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". ' + 'The\n' + 'specification for relative imports is contained within **PEP ' + '328**.\n' + '\n' + '"importlib.import_module()" is provided to support applications ' + 'that\n' + 'determine dynamically the modules to be loaded.\n' + '\n' + '\n' + 'Future statements\n' + '=================\n' + '\n' + 'A *future statement* is a directive to the compiler that a ' + 'particular\n' + 'module should be compiled using syntax or semantics that will ' + 'be\n' + 'available in a specified future release of Python where the ' + 'feature\n' + 'becomes standard.\n' + '\n' + 'The future statement is intended to ease migration to future ' + 'versions\n' + 'of Python that introduce incompatible changes to the language. ' + 'It\n' + 'allows use of the new features on a per-module basis before the\n' + 'release in which the feature becomes standard.\n' + '\n' + ' future_statement ::= "from" "__future__" "import" feature ' + '["as" name]\n' + ' ("," feature ["as" name])*\n' + ' | "from" "__future__" "import" "(" ' + 'feature ["as" name]\n' + ' ("," feature ["as" name])* [","] ")"\n' + ' feature ::= identifier\n' + ' name ::= identifier\n' + '\n' + 'A future statement must appear near the top of the module. The ' + 'only\n' + 'lines that can appear before a future statement are:\n' + '\n' + '* the module docstring (if any),\n' + '\n' + '* comments,\n' + '\n' + '* blank lines, and\n' + '\n' + '* other future statements.\n' + '\n' + 'The features recognized by Python 3.0 are "absolute_import",\n' + '"division", "generators", "unicode_literals", "print_function",\n' + '"nested_scopes" and "with_statement". They are all redundant ' + 'because\n' + 'they are always enabled, and only kept for backwards ' + 'compatibility.\n' + '\n' + 'A future statement is recognized and treated specially at ' + 'compile\n' + 'time: Changes to the semantics of core constructs are often\n' + 'implemented by generating different code. 
It may even be the ' + 'case\n' + 'that a new feature introduces new incompatible syntax (such as a ' + 'new\n' + 'reserved word), in which case the compiler may need to parse ' + 'the\n' + 'module differently. Such decisions cannot be pushed off until\n' + 'runtime.\n' + '\n' + 'For any given release, the compiler knows which feature names ' + 'have\n' + 'been defined, and raises a compile-time error if a future ' + 'statement\n' + 'contains a feature not known to it.\n' + '\n' + 'The direct runtime semantics are the same as for any import ' + 'statement:\n' + 'there is a standard module "__future__", described later, and it ' + 'will\n' + 'be imported in the usual way at the time the future statement ' + 'is\n' + 'executed.\n' + '\n' + 'The interesting runtime semantics depend on the specific ' + 'feature\n' + 'enabled by the future statement.\n' + '\n' + 'Note that there is nothing special about the statement:\n' + '\n' + ' import __future__ [as name]\n' + '\n' + "That is not a future statement; it's an ordinary import " + 'statement with\n' + 'no special semantics or syntax restrictions.\n' + '\n' + 'Code compiled by calls to the built-in functions "exec()" and\n' + '"compile()" that occur in a module "M" containing a future ' + 'statement\n' + 'will, by default, use the new syntax or semantics associated ' + 'with the\n' + 'future statement. This can be controlled by optional arguments ' + 'to\n' + '"compile()" --- see the documentation of that function for ' + 'details.\n' + '\n' + 'A future statement typed at an interactive interpreter prompt ' + 'will\n' + 'take effect for the rest of the interpreter session. If an\n' + 'interpreter is started with the *-i* option, is passed a script ' + 'name\n' + 'to execute, and the script includes a future statement, it will ' + 'be in\n' + 'effect in the interactive session started after the script is\n' + 'executed.\n' + '\n' + 'See also: **PEP 236** - Back to the __future__\n' + '\n' + ' The original proposal for the __future__ mechanism.\n', + 'in': '\n' + 'Comparisons\n' + '***********\n' + '\n' + 'Unlike C, all comparison operations in Python have the same ' + 'priority,\n' + 'which is lower than that of any arithmetic, shifting or bitwise\n' + 'operation. Also unlike C, expressions like "a < b < c" have the\n' + 'interpretation that is conventional in mathematics:\n' + '\n' + ' comparison ::= or_expr ( comp_operator or_expr )*\n' + ' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n' + ' | "is" ["not"] | ["not"] "in"\n' + '\n' + 'Comparisons yield boolean values: "True" or "False".\n' + '\n' + 'Comparisons can be chained arbitrarily, e.g., "x < y <= z" is\n' + 'equivalent to "x < y and y <= z", except that "y" is evaluated only\n' + 'once (but in both cases "z" is not evaluated at all when "x < y" is\n' + 'found to be false).\n' + '\n' + 'Formally, if *a*, *b*, *c*, ..., *y*, *z* are expressions and ' + '*op1*,\n' + '*op2*, ..., *opN* are comparison operators, then "a op1 b op2 c ... ' + 'y\n' + 'opN z" is equivalent to "a op1 b and b op2 c and ... y opN z", ' + 'except\n' + 'that each expression is evaluated at most once.\n' + '\n' + 'Note that "a op1 b op2 c" doesn\'t imply any kind of comparison ' + 'between\n' + '*a* and *c*, so that, e.g., "x < y > z" is perfectly legal (though\n' + 'perhaps not pretty).\n' + '\n' + 'The operators "<", ">", "==", ">=", "<=", and "!=" compare the ' + 'values\n' + 'of two objects. The objects need not have the same type. If both ' + 'are\n' + 'numbers, they are converted to a common type. 
Otherwise, the "==" ' + 'and\n' + '"!=" operators *always* consider objects of different types to be\n' + 'unequal, while the "<", ">", ">=" and "<=" operators raise a\n' + '"TypeError" when comparing objects of different types that do not\n' + 'implement these operators for the given pair of types. You can\n' + 'control comparison behavior of objects of non-built-in types by\n' + 'defining rich comparison methods like "__gt__()", described in ' + 'section\n' + '*Basic customization*.\n' + '\n' + 'Comparison of objects of the same type depends on the type:\n' + '\n' + '* Numbers are compared arithmetically.\n' + '\n' + '* The values "float(\'NaN\')" and "Decimal(\'NaN\')" are special. ' + 'They\n' + ' are identical to themselves, "x is x" but are not equal to\n' + ' themselves, "x != x". Additionally, comparing any value to a\n' + ' not-a-number value will return "False". For example, both "3 <\n' + ' float(\'NaN\')" and "float(\'NaN\') < 3" will return "False".\n' + '\n' + '* Bytes objects are compared lexicographically using the numeric\n' + ' values of their elements.\n' + '\n' + '* Strings are compared lexicographically using the numeric\n' + ' equivalents (the result of the built-in function "ord()") of ' + 'their\n' + " characters. [3] String and bytes object can't be compared!\n" + '\n' + '* Tuples and lists are compared lexicographically using comparison\n' + ' of corresponding elements. This means that to compare equal, ' + 'each\n' + ' element must compare equal and the two sequences must be of the ' + 'same\n' + ' type and have the same length.\n' + '\n' + ' If not equal, the sequences are ordered the same as their first\n' + ' differing elements. For example, "[1,2,x] <= [1,2,y]" has the ' + 'same\n' + ' value as "x <= y". If the corresponding element does not exist, ' + 'the\n' + ' shorter sequence is ordered first (for example, "[1,2] < ' + '[1,2,3]").\n' + '\n' + '* Mappings (dictionaries) compare equal if and only if they have ' + 'the\n' + ' same "(key, value)" pairs. Order comparisons "(\'<\', \'<=\', ' + "'>=',\n" + ' \'>\')" raise "TypeError".\n' + '\n' + '* Sets and frozensets define comparison operators to mean subset ' + 'and\n' + ' superset tests. Those relations do not define total orderings ' + '(the\n' + ' two sets "{1,2}" and "{2,3}" are not equal, nor subsets of one\n' + ' another, nor supersets of one another). Accordingly, sets are ' + 'not\n' + ' appropriate arguments for functions which depend on total ' + 'ordering.\n' + ' For example, "min()", "max()", and "sorted()" produce undefined\n' + ' results given a list of sets as inputs.\n' + '\n' + '* Most other objects of built-in types compare unequal unless they\n' + ' are the same object; the choice whether one object is considered\n' + ' smaller or larger than another one is made arbitrarily but\n' + ' consistently within one execution of a program.\n' + '\n' + 'Comparison of objects of differing types depends on whether either ' + 'of\n' + 'the types provide explicit support for the comparison. Most ' + 'numeric\n' + 'types can be compared with one another. When cross-type comparison ' + 'is\n' + 'not supported, the comparison method returns "NotImplemented".\n' + '\n' + 'The operators "in" and "not in" test for membership. "x in s"\n' + 'evaluates to true if *x* is a member of *s*, and false otherwise. ' + '"x\n' + 'not in s" returns the negation of "x in s". 
All built-in sequences\n' + 'and set types support this as well as dictionary, for which "in" ' + 'tests\n' + 'whether the dictionary has a given key. For container types such as\n' + 'list, tuple, set, frozenset, dict, or collections.deque, the\n' + 'expression "x in y" is equivalent to "any(x is e or x == e for e in\n' + 'y)".\n' + '\n' + 'For the string and bytes types, "x in y" is true if and only if *x* ' + 'is\n' + 'a substring of *y*. An equivalent test is "y.find(x) != -1". ' + 'Empty\n' + 'strings are always considered to be a substring of any other ' + 'string,\n' + 'so """ in "abc"" will return "True".\n' + '\n' + 'For user-defined classes which define the "__contains__()" method, ' + '"x\n' + 'in y" is true if and only if "y.__contains__(x)" is true.\n' + '\n' + 'For user-defined classes which do not define "__contains__()" but ' + 'do\n' + 'define "__iter__()", "x in y" is true if some value "z" with "x == ' + 'z"\n' + 'is produced while iterating over "y". If an exception is raised\n' + 'during the iteration, it is as if "in" raised that exception.\n' + '\n' + 'Lastly, the old-style iteration protocol is tried: if a class ' + 'defines\n' + '"__getitem__()", "x in y" is true if and only if there is a non-\n' + 'negative integer index *i* such that "x == y[i]", and all lower\n' + 'integer indices do not raise "IndexError" exception. (If any other\n' + 'exception is raised, it is as if "in" raised that exception).\n' + '\n' + 'The operator "not in" is defined to have the inverse true value of\n' + '"in".\n' + '\n' + 'The operators "is" and "is not" test for object identity: "x is y" ' + 'is\n' + 'true if and only if *x* and *y* are the same object. "x is not y"\n' + 'yields the inverse truth value. [4]\n', + 'integers': '\n' + 'Integer literals\n' + '****************\n' + '\n' + 'Integer literals are described by the following lexical ' + 'definitions:\n' + '\n' + ' integer ::= decimalinteger | octinteger | hexinteger ' + '| bininteger\n' + ' decimalinteger ::= nonzerodigit digit* | "0"+\n' + ' nonzerodigit ::= "1"..."9"\n' + ' digit ::= "0"..."9"\n' + ' octinteger ::= "0" ("o" | "O") octdigit+\n' + ' hexinteger ::= "0" ("x" | "X") hexdigit+\n' + ' bininteger ::= "0" ("b" | "B") bindigit+\n' + ' octdigit ::= "0"..."7"\n' + ' hexdigit ::= digit | "a"..."f" | "A"..."F"\n' + ' bindigit ::= "0" | "1"\n' + '\n' + 'There is no limit for the length of integer literals apart ' + 'from what\n' + 'can be stored in available memory.\n' + '\n' + 'Note that leading zeros in a non-zero decimal number are not ' + 'allowed.\n' + 'This is for disambiguation with C-style octal literals, which ' + 'Python\n' + 'used before version 3.0.\n' + '\n' + 'Some examples of integer literals:\n' + '\n' + ' 7 2147483647 0o177 ' + '0b100110111\n' + ' 3 79228162514264337593543950336 0o377 ' + '0xdeadbeef\n', + 'lambda': '\n' + 'Lambdas\n' + '*******\n' + '\n' + ' lambda_expr ::= "lambda" [parameter_list]: expression\n' + ' lambda_expr_nocond ::= "lambda" [parameter_list]: ' + 'expression_nocond\n' + '\n' + 'Lambda expressions (sometimes called lambda forms) are used to ' + 'create\n' + 'anonymous functions. The expression "lambda arguments: ' + 'expression"\n' + 'yields a function object. 
The unnamed object behaves like a ' + 'function\n' + 'object defined with\n' + '\n' + ' def (arguments):\n' + ' return expression\n' + '\n' + 'See section *Function definitions* for the syntax of parameter ' + 'lists.\n' + 'Note that functions created with lambda expressions cannot ' + 'contain\n' + 'statements or annotations.\n', + 'lists': '\n' + 'List displays\n' + '*************\n' + '\n' + 'A list display is a possibly empty series of expressions enclosed ' + 'in\n' + 'square brackets:\n' + '\n' + ' list_display ::= "[" [expression_list | comprehension] "]"\n' + '\n' + 'A list display yields a new list object, the contents being ' + 'specified\n' + 'by either a list of expressions or a comprehension. When a ' + 'comma-\n' + 'separated list of expressions is supplied, its elements are ' + 'evaluated\n' + 'from left to right and placed into the list object in that ' + 'order.\n' + 'When a comprehension is supplied, the list is constructed from ' + 'the\n' + 'elements resulting from the comprehension.\n', + 'naming': '\n' + 'Naming and binding\n' + '******************\n' + '\n' + '\n' + 'Binding of names\n' + '================\n' + '\n' + '*Names* refer to objects. Names are introduced by name binding\n' + 'operations.\n' + '\n' + 'The following constructs bind names: formal parameters to ' + 'functions,\n' + '"import" statements, class and function definitions (these bind ' + 'the\n' + 'class or function name in the defining block), and targets that ' + 'are\n' + 'identifiers if occurring in an assignment, "for" loop header, or ' + 'after\n' + '"as" in a "with" statement or "except" clause. The "import" ' + 'statement\n' + 'of the form "from ... import *" binds all names defined in the\n' + 'imported module, except those beginning with an underscore. ' + 'This form\n' + 'may only be used at the module level.\n' + '\n' + 'A target occurring in a "del" statement is also considered bound ' + 'for\n' + 'this purpose (though the actual semantics are to unbind the ' + 'name).\n' + '\n' + 'Each assignment or import statement occurs within a block ' + 'defined by a\n' + 'class or function definition or at the module level (the ' + 'top-level\n' + 'code block).\n' + '\n' + 'If a name is bound in a block, it is a local variable of that ' + 'block,\n' + 'unless declared as "nonlocal" or "global". If a name is bound ' + 'at the\n' + 'module level, it is a global variable. (The variables of the ' + 'module\n' + 'code block are local and global.) If a variable is used in a ' + 'code\n' + 'block but not defined there, it is a *free variable*.\n' + '\n' + 'Each occurrence of a name in the program text refers to the ' + '*binding*\n' + 'of that name established by the following name resolution ' + 'rules.\n' + '\n' + '\n' + 'Resolution of names\n' + '===================\n' + '\n' + 'A *scope* defines the visibility of a name within a block. If a ' + 'local\n' + 'variable is defined in a block, its scope includes that block. ' + 'If the\n' + 'definition occurs in a function block, the scope extends to any ' + 'blocks\n' + 'contained within the defining one, unless a contained block ' + 'introduces\n' + 'a different binding for the name.\n' + '\n' + 'When a name is used in a code block, it is resolved using the ' + 'nearest\n' + 'enclosing scope. The set of all such scopes visible to a code ' + 'block\n' + "is called the block's *environment*.\n" + '\n' + 'When a name is not found at all, a "NameError" exception is ' + 'raised. 
If\n' + 'the current scope is a function scope, and the name refers to a ' + 'local\n' + 'variable that has not yet been bound to a value at the point ' + 'where the\n' + 'name is used, an "UnboundLocalError" exception is raised.\n' + '"UnboundLocalError" is a subclass of "NameError".\n' + '\n' + 'If a name binding operation occurs anywhere within a code block, ' + 'all\n' + 'uses of the name within the block are treated as references to ' + 'the\n' + 'current block. This can lead to errors when a name is used ' + 'within a\n' + 'block before it is bound. This rule is subtle. Python lacks\n' + 'declarations and allows name binding operations to occur ' + 'anywhere\n' + 'within a code block. The local variables of a code block can ' + 'be\n' + 'determined by scanning the entire text of the block for name ' + 'binding\n' + 'operations.\n' + '\n' + 'If the "global" statement occurs within a block, all uses of the ' + 'name\n' + 'specified in the statement refer to the binding of that name in ' + 'the\n' + 'top-level namespace. Names are resolved in the top-level ' + 'namespace by\n' + 'searching the global namespace, i.e. the namespace of the ' + 'module\n' + 'containing the code block, and the builtins namespace, the ' + 'namespace\n' + 'of the module "builtins". The global namespace is searched ' + 'first. If\n' + 'the name is not found there, the builtins namespace is ' + 'searched. The\n' + '"global" statement must precede all uses of the name.\n' + '\n' + 'The "global" statement has the same scope as a name binding ' + 'operation\n' + 'in the same block. If the nearest enclosing scope for a free ' + 'variable\n' + 'contains a global statement, the free variable is treated as a ' + 'global.\n' + '\n' + 'The "nonlocal" statement causes corresponding names to refer to\n' + 'previously bound variables in the nearest enclosing function ' + 'scope.\n' + '"SyntaxError" is raised at compile time if the given name does ' + 'not\n' + 'exist in any enclosing function scope.\n' + '\n' + 'The namespace for a module is automatically created the first ' + 'time a\n' + 'module is imported. The main module for a script is always ' + 'called\n' + '"__main__".\n' + '\n' + 'Class definition blocks and arguments to "exec()" and "eval()" ' + 'are\n' + 'special in the context of name resolution. A class definition is ' + 'an\n' + 'executable statement that may use and define names. These ' + 'references\n' + 'follow the normal rules for name resolution with an exception ' + 'that\n' + 'unbound local variables are looked up in the global namespace. ' + 'The\n' + 'namespace of the class definition becomes the attribute ' + 'dictionary of\n' + 'the class. The scope of names defined in a class block is ' + 'limited to\n' + 'the class block; it does not extend to the code blocks of ' + 'methods --\n' + 'this includes comprehensions and generator expressions since ' + 'they are\n' + 'implemented using a function scope. This means that the ' + 'following\n' + 'will fail:\n' + '\n' + ' class A:\n' + ' a = 42\n' + ' b = list(a + i for i in range(10))\n' + '\n' + '\n' + 'Builtins and restricted execution\n' + '=================================\n' + '\n' + 'The builtins namespace associated with the execution of a code ' + 'block\n' + 'is actually found by looking up the name "__builtins__" in its ' + 'global\n' + 'namespace; this should be a dictionary or a module (in the ' + 'latter case\n' + "the module's dictionary is used). 
By default, when in the " + '"__main__"\n' + 'module, "__builtins__" is the built-in module "builtins"; when ' + 'in any\n' + 'other module, "__builtins__" is an alias for the dictionary of ' + 'the\n' + '"builtins" module itself. "__builtins__" can be set to a ' + 'user-created\n' + 'dictionary to create a weak form of restricted execution.\n' + '\n' + '**CPython implementation detail:** Users should not touch\n' + '"__builtins__"; it is strictly an implementation detail. Users\n' + 'wanting to override values in the builtins namespace should ' + '"import"\n' + 'the "builtins" module and modify its attributes appropriately.\n' + '\n' + '\n' + 'Interaction with dynamic features\n' + '=================================\n' + '\n' + 'Name resolution of free variables occurs at runtime, not at ' + 'compile\n' + 'time. This means that the following code will print 42:\n' + '\n' + ' i = 10\n' + ' def f():\n' + ' print(i)\n' + ' i = 42\n' + ' f()\n' + '\n' + 'There are several cases where Python statements are illegal when ' + 'used\n' + 'in conjunction with nested scopes that contain free variables.\n' + '\n' + 'If a variable is referenced in an enclosing scope, it is illegal ' + 'to\n' + 'delete the name. An error will be reported at compile time.\n' + '\n' + 'The "eval()" and "exec()" functions do not have access to the ' + 'full\n' + 'environment for resolving names. Names may be resolved in the ' + 'local\n' + 'and global namespaces of the caller. Free variables are not ' + 'resolved\n' + 'in the nearest enclosing namespace, but in the global ' + 'namespace. [1]\n' + 'The "exec()" and "eval()" functions have optional arguments to\n' + 'override the global and local namespace. If only one namespace ' + 'is\n' + 'specified, it is used for both.\n', + 'nonlocal': '\n' + 'The "nonlocal" statement\n' + '************************\n' + '\n' + ' nonlocal_stmt ::= "nonlocal" identifier ("," identifier)*\n' + '\n' + 'The "nonlocal" statement causes the listed identifiers to ' + 'refer to\n' + 'previously bound variables in the nearest enclosing scope ' + 'excluding\n' + 'globals. This is important because the default behavior for ' + 'binding is\n' + 'to search the local namespace first. The statement allows\n' + 'encapsulated code to rebind variables outside of the local ' + 'scope\n' + 'besides the global (module) scope.\n' + '\n' + 'Names listed in a "nonlocal" statement, unlike those listed in ' + 'a\n' + '"global" statement, must refer to pre-existing bindings in an\n' + 'enclosing scope (the scope in which a new binding should be ' + 'created\n' + 'cannot be determined unambiguously).\n' + '\n' + 'Names listed in a "nonlocal" statement must not collide with ' + 'pre-\n' + 'existing bindings in the local scope.\n' + '\n' + 'See also: **PEP 3104** - Access to Names in Outer Scopes\n' + '\n' + ' The specification for the "nonlocal" statement.\n', + 'numbers': '\n' + 'Numeric literals\n' + '****************\n' + '\n' + 'There are three types of numeric literals: integers, floating ' + 'point\n' + 'numbers, and imaginary numbers. 
There are no complex literals\n' + '(complex numbers can be formed by adding a real number and an\n' + 'imaginary number).\n' + '\n' + 'Note that numeric literals do not include a sign; a phrase like ' + '"-1"\n' + 'is actually an expression composed of the unary operator ' + '\'"-"\' and the\n' + 'literal "1".\n', + 'numeric-types': '\n' + 'Emulating numeric types\n' + '***********************\n' + '\n' + 'The following methods can be defined to emulate numeric ' + 'objects.\n' + 'Methods corresponding to operations that are not ' + 'supported by the\n' + 'particular kind of number implemented (e.g., bitwise ' + 'operations for\n' + 'non-integral numbers) should be left undefined.\n' + '\n' + 'object.__add__(self, other)\n' + 'object.__sub__(self, other)\n' + 'object.__mul__(self, other)\n' + 'object.__matmul__(self, other)\n' + 'object.__truediv__(self, other)\n' + 'object.__floordiv__(self, other)\n' + 'object.__mod__(self, other)\n' + 'object.__divmod__(self, other)\n' + 'object.__pow__(self, other[, modulo])\n' + 'object.__lshift__(self, other)\n' + 'object.__rshift__(self, other)\n' + 'object.__and__(self, other)\n' + 'object.__xor__(self, other)\n' + 'object.__or__(self, other)\n' + '\n' + ' These methods are called to implement the binary ' + 'arithmetic\n' + ' operations ("+", "-", "*", "@", "/", "//", "%", ' + '"divmod()",\n' + ' "pow()", "**", "<<", ">>", "&", "^", "|"). For ' + 'instance, to\n' + ' evaluate the expression "x + y", where *x* is an ' + 'instance of a\n' + ' class that has an "__add__()" method, "x.__add__(y)" ' + 'is called.\n' + ' The "__divmod__()" method should be the equivalent to ' + 'using\n' + ' "__floordiv__()" and "__mod__()"; it should not be ' + 'related to\n' + ' "__truediv__()". Note that "__pow__()" should be ' + 'defined to accept\n' + ' an optional third argument if the ternary version of ' + 'the built-in\n' + ' "pow()" function is to be supported.\n' + '\n' + ' If one of those methods does not support the operation ' + 'with the\n' + ' supplied arguments, it should return ' + '"NotImplemented".\n' + '\n' + 'object.__radd__(self, other)\n' + 'object.__rsub__(self, other)\n' + 'object.__rmul__(self, other)\n' + 'object.__rmatmul__(self, other)\n' + 'object.__rtruediv__(self, other)\n' + 'object.__rfloordiv__(self, other)\n' + 'object.__rmod__(self, other)\n' + 'object.__rdivmod__(self, other)\n' + 'object.__rpow__(self, other)\n' + 'object.__rlshift__(self, other)\n' + 'object.__rrshift__(self, other)\n' + 'object.__rand__(self, other)\n' + 'object.__rxor__(self, other)\n' + 'object.__ror__(self, other)\n' + '\n' + ' These methods are called to implement the binary ' + 'arithmetic\n' + ' operations ("+", "-", "*", "@", "/", "//", "%", ' + '"divmod()",\n' + ' "pow()", "**", "<<", ">>", "&", "^", "|") with ' + 'reflected (swapped)\n' + ' operands. These functions are only called if the left ' + 'operand does\n' + ' not support the corresponding operation and the ' + 'operands are of\n' + ' different types. 
[2] For instance, to evaluate the ' + 'expression "x -\n' + ' y", where *y* is an instance of a class that has an ' + '"__rsub__()"\n' + ' method, "y.__rsub__(x)" is called if "x.__sub__(y)" ' + 'returns\n' + ' *NotImplemented*.\n' + '\n' + ' Note that ternary "pow()" will not try calling ' + '"__rpow__()" (the\n' + ' coercion rules would become too complicated).\n' + '\n' + " Note: If the right operand's type is a subclass of the " + 'left\n' + " operand's type and that subclass provides the " + 'reflected method\n' + ' for the operation, this method will be called before ' + 'the left\n' + " operand's non-reflected method. This behavior " + 'allows subclasses\n' + " to override their ancestors' operations.\n" + '\n' + 'object.__iadd__(self, other)\n' + 'object.__isub__(self, other)\n' + 'object.__imul__(self, other)\n' + 'object.__imatmul__(self, other)\n' + 'object.__itruediv__(self, other)\n' + 'object.__ifloordiv__(self, other)\n' + 'object.__imod__(self, other)\n' + 'object.__ipow__(self, other[, modulo])\n' + 'object.__ilshift__(self, other)\n' + 'object.__irshift__(self, other)\n' + 'object.__iand__(self, other)\n' + 'object.__ixor__(self, other)\n' + 'object.__ior__(self, other)\n' + '\n' + ' These methods are called to implement the augmented ' + 'arithmetic\n' + ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", ' + '"%=", "**=",\n' + ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' + 'attempt to\n' + ' do the operation in-place (modifying *self*) and ' + 'return the result\n' + ' (which could be, but does not have to be, *self*). If ' + 'a specific\n' + ' method is not defined, the augmented assignment falls ' + 'back to the\n' + ' normal methods. For instance, if *x* is an instance ' + 'of a class\n' + ' with an "__iadd__()" method, "x += y" is equivalent to ' + '"x =\n' + ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' + '"y.__radd__(x)" are\n' + ' considered, as with the evaluation of "x + y". In ' + 'certain\n' + ' situations, augmented assignment can result in ' + 'unexpected errors\n' + " (see *Why does a_tuple[i] += ['item'] raise an " + 'exception when the\n' + ' addition works?*), but this behavior is in fact part ' + 'of the data\n' + ' model.\n' + '\n' + 'object.__neg__(self)\n' + 'object.__pos__(self)\n' + 'object.__abs__(self)\n' + 'object.__invert__(self)\n' + '\n' + ' Called to implement the unary arithmetic operations ' + '("-", "+",\n' + ' "abs()" and "~").\n' + '\n' + 'object.__complex__(self)\n' + 'object.__int__(self)\n' + 'object.__float__(self)\n' + 'object.__round__(self[, n])\n' + '\n' + ' Called to implement the built-in functions ' + '"complex()", "int()",\n' + ' "float()" and "round()". Should return a value of the ' + 'appropriate\n' + ' type.\n' + '\n' + 'object.__index__(self)\n' + '\n' + ' Called to implement "operator.index()", and whenever ' + 'Python needs\n' + ' to losslessly convert the numeric object to an integer ' + 'object (such\n' + ' as in slicing, or in the built-in "bin()", "hex()" and ' + '"oct()"\n' + ' functions). Presence of this method indicates that the ' + 'numeric\n' + ' object is an integer type. Must return an integer.\n' + '\n' + ' Note: In order to have a coherent integer type class, ' + 'when\n' + ' "__index__()" is defined "__int__()" should also be ' + 'defined, and\n' + ' both should return the same value.\n', + 'objects': '\n' + 'Objects, values and types\n' + '*************************\n' + '\n' + "*Objects* are Python's abstraction for data. 
All data in a " + 'Python\n' + 'program is represented by objects or by relations between ' + 'objects. (In\n' + "a sense, and in conformance to Von Neumann's model of a " + '"stored\n' + 'program computer," code is also represented by objects.)\n' + '\n' + "Every object has an identity, a type and a value. An object's\n" + '*identity* never changes once it has been created; you may ' + 'think of it\n' + 'as the object\'s address in memory. The \'"is"\' operator ' + 'compares the\n' + 'identity of two objects; the "id()" function returns an ' + 'integer\n' + 'representing its identity.\n' + '\n' + '**CPython implementation detail:** For CPython, "id(x)" is the ' + 'memory\n' + 'address where "x" is stored.\n' + '\n' + "An object's type determines the operations that the object " + 'supports\n' + '(e.g., "does it have a length?") and also defines the possible ' + 'values\n' + 'for objects of that type. The "type()" function returns an ' + "object's\n" + 'type (which is an object itself). Like its identity, an ' + "object's\n" + '*type* is also unchangeable. [1]\n' + '\n' + 'The *value* of some objects can change. Objects whose value ' + 'can\n' + 'change are said to be *mutable*; objects whose value is ' + 'unchangeable\n' + 'once they are created are called *immutable*. (The value of an\n' + 'immutable container object that contains a reference to a ' + 'mutable\n' + "object can change when the latter's value is changed; however " + 'the\n' + 'container is still considered immutable, because the collection ' + 'of\n' + 'objects it contains cannot be changed. So, immutability is ' + 'not\n' + 'strictly the same as having an unchangeable value, it is more ' + 'subtle.)\n' + "An object's mutability is determined by its type; for " + 'instance,\n' + 'numbers, strings and tuples are immutable, while dictionaries ' + 'and\n' + 'lists are mutable.\n' + '\n' + 'Objects are never explicitly destroyed; however, when they ' + 'become\n' + 'unreachable they may be garbage-collected. An implementation ' + 'is\n' + 'allowed to postpone garbage collection or omit it altogether ' + '--- it is\n' + 'a matter of implementation quality how garbage collection is\n' + 'implemented, as long as no objects are collected that are ' + 'still\n' + 'reachable.\n' + '\n' + '**CPython implementation detail:** CPython currently uses a ' + 'reference-\n' + 'counting scheme with (optional) delayed detection of cyclically ' + 'linked\n' + 'garbage, which collects most objects as soon as they become\n' + 'unreachable, but is not guaranteed to collect garbage ' + 'containing\n' + 'circular references. See the documentation of the "gc" module ' + 'for\n' + 'information on controlling the collection of cyclic garbage. ' + 'Other\n' + 'implementations act differently and CPython may change. Do not ' + 'depend\n' + 'on immediate finalization of objects when they become ' + 'unreachable (so\n' + 'you should always close files explicitly).\n' + '\n' + "Note that the use of the implementation's tracing or debugging\n" + 'facilities may keep objects alive that would normally be ' + 'collectable.\n' + 'Also note that catching an exception with a ' + '\'"try"..."except"\'\n' + 'statement may keep objects alive.\n' + '\n' + 'Some objects contain references to "external" resources such as ' + 'open\n' + 'files or windows. 
It is understood that these resources are ' + 'freed\n' + 'when the object is garbage-collected, but since garbage ' + 'collection is\n' + 'not guaranteed to happen, such objects also provide an explicit ' + 'way to\n' + 'release the external resource, usually a "close()" method. ' + 'Programs\n' + 'are strongly recommended to explicitly close such objects. ' + 'The\n' + '\'"try"..."finally"\' statement and the \'"with"\' statement ' + 'provide\n' + 'convenient ways to do this.\n' + '\n' + 'Some objects contain references to other objects; these are ' + 'called\n' + '*containers*. Examples of containers are tuples, lists and\n' + "dictionaries. The references are part of a container's value. " + 'In\n' + 'most cases, when we talk about the value of a container, we ' + 'imply the\n' + 'values, not the identities of the contained objects; however, ' + 'when we\n' + 'talk about the mutability of a container, only the identities ' + 'of the\n' + 'immediately contained objects are implied. So, if an ' + 'immutable\n' + 'container (like a tuple) contains a reference to a mutable ' + 'object, its\n' + 'value changes if that mutable object is changed.\n' + '\n' + 'Types affect almost all aspects of object behavior. Even the\n' + 'importance of object identity is affected in some sense: for ' + 'immutable\n' + 'types, operations that compute new values may actually return ' + 'a\n' + 'reference to any existing object with the same type and value, ' + 'while\n' + 'for mutable objects this is not allowed. E.g., after "a = 1; b ' + '= 1",\n' + '"a" and "b" may or may not refer to the same object with the ' + 'value\n' + 'one, depending on the implementation, but after "c = []; d = ' + '[]", "c"\n' + 'and "d" are guaranteed to refer to two different, unique, ' + 'newly\n' + 'created empty lists. (Note that "c = d = []" assigns the same ' + 'object\n' + 'to both "c" and "d".)\n', + 'operator-summary': '\n' + 'Operator precedence\n' + '*******************\n' + '\n' + 'The following table summarizes the operator precedence ' + 'in Python, from\n' + 'lowest precedence (least binding) to highest ' + 'precedence (most\n' + 'binding). Operators in the same box have the same ' + 'precedence. Unless\n' + 'the syntax is explicitly given, operators are binary. 
' + 'Operators in\n' + 'the same box group left to right (except for ' + 'exponentiation, which\n' + 'groups from right to left).\n' + '\n' + 'Note that comparisons, membership tests, and identity ' + 'tests, all have\n' + 'the same precedence and have a left-to-right chaining ' + 'feature as\n' + 'described in the *Comparisons* section.\n' + '\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| Operator | ' + 'Description |\n' + '+=================================================+=======================================+\n' + '| "lambda" | ' + 'Lambda expression |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "if" -- "else" | ' + 'Conditional expression |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "or" | ' + 'Boolean OR |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "and" | ' + 'Boolean AND |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "not" "x" | ' + 'Boolean NOT |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "in", "not in", "is", "is not", "<", "<=", ">", | ' + 'Comparisons, including membership |\n' + '| ">=", "!=", "==" | ' + 'tests and identity tests |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "|" | ' + 'Bitwise OR |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "^" | ' + 'Bitwise XOR |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "&" | ' + 'Bitwise AND |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "<<", ">>" | ' + 'Shifts |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "+", "-" | ' + 'Addition and subtraction |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "*", "@", "/", "//", "%" | ' + 'Multiplication, matrix multiplication |\n' + '| | ' + 'division, remainder [5] |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "+x", "-x", "~x" | ' + 'Positive, negative, bitwise NOT |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "**" | ' + 'Exponentiation [6] |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "await" "x" | ' + 'Await expression |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "x[index]", "x[index:index]", | ' + 'Subscription, slicing, call, |\n' + '| "x(arguments...)", "x.attribute" | ' + 'attribute reference |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '| "(expressions...)", "[expressions...]", "{key: | ' + 'Binding or tuple display, list |\n' + '| value...}", "{expressions...}" | ' + 'display, dictionary display, set |\n' + '| | ' + 'display |\n' + '+-------------------------------------------------+---------------------------------------+\n' + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] While "abs(x%y) < abs(y)" is true mathematically, ' + 'for floats\n' + ' it may not be true 
numerically due to roundoff. ' + 'For example, and\n' + ' assuming a platform on which a Python float is an ' + 'IEEE 754 double-\n' + ' precision number, in order that "-1e-100 % 1e100" ' + 'have the same\n' + ' sign as "1e100", the computed result is "-1e-100 + ' + '1e100", which\n' + ' is numerically exactly equal to "1e100". The ' + 'function\n' + ' "math.fmod()" returns a result whose sign matches ' + 'the sign of the\n' + ' first argument instead, and so returns "-1e-100" ' + 'in this case.\n' + ' Which approach is more appropriate depends on the ' + 'application.\n' + '\n' + '[2] If x is very close to an exact integer multiple of ' + "y, it's\n" + ' possible for "x//y" to be one larger than ' + '"(x-x%y)//y" due to\n' + ' rounding. In such cases, Python returns the ' + 'latter result, in\n' + ' order to preserve that "divmod(x,y)[0] * y + x % ' + 'y" be very close\n' + ' to "x".\n' + '\n' + '[3] While comparisons between strings make sense at ' + 'the byte\n' + ' level, they may be counter-intuitive to users. ' + 'For example, the\n' + ' strings ""\\u00C7"" and ""\\u0043\\u0327"" compare ' + 'differently, even\n' + ' though they both represent the same unicode ' + 'character (LATIN\n' + ' CAPITAL LETTER C WITH CEDILLA). To compare ' + 'strings in a human\n' + ' recognizable way, compare using ' + '"unicodedata.normalize()".\n' + '\n' + '[4] Due to automatic garbage-collection, free lists, ' + 'and the\n' + ' dynamic nature of descriptors, you may notice ' + 'seemingly unusual\n' + ' behaviour in certain uses of the "is" operator, ' + 'like those\n' + ' involving comparisons between instance methods, or ' + 'constants.\n' + ' Check their documentation for more info.\n' + '\n' + '[5] The "%" operator is also used for string ' + 'formatting; the same\n' + ' precedence applies.\n' + '\n' + '[6] The power operator "**" binds less tightly than an ' + 'arithmetic\n' + ' or bitwise unary operator on its right, that is, ' + '"2**-1" is "0.5".\n', + 'pass': '\n' + 'The "pass" statement\n' + '********************\n' + '\n' + ' pass_stmt ::= "pass"\n' + '\n' + '"pass" is a null operation --- when it is executed, nothing ' + 'happens.\n' + 'It is useful as a placeholder when a statement is required\n' + 'syntactically, but no code needs to be executed, for example:\n' + '\n' + ' def f(arg): pass # a function that does nothing (yet)\n' + '\n' + ' class C: pass # a class with no methods (yet)\n', + 'power': '\n' + 'The power operator\n' + '******************\n' + '\n' + 'The power operator binds more tightly than unary operators on ' + 'its\n' + 'left; it binds less tightly than unary operators on its right. ' + 'The\n' + 'syntax is:\n' + '\n' + ' power ::= await ["**" u_expr]\n' + '\n' + 'Thus, in an unparenthesized sequence of power and unary ' + 'operators, the\n' + 'operators are evaluated from right to left (this does not ' + 'constrain\n' + 'the evaluation order for the operands): "-1**2" results in "-1".\n' + '\n' + 'The power operator has the same semantics as the built-in ' + '"pow()"\n' + 'function, when called with two arguments: it yields its left ' + 'argument\n' + 'raised to the power of its right argument. The numeric arguments ' + 'are\n' + 'first converted to a common type, and the result is of that ' + 'type.\n' + '\n' + 'For int operands, the result has the same type as the operands ' + 'unless\n' + 'the second argument is negative; in that case, all arguments are\n' + 'converted to float and a float result is delivered. 
For example,\n' + '"10**2" returns "100", but "10**-2" returns "0.01".\n' + '\n' + 'Raising "0.0" to a negative power results in a ' + '"ZeroDivisionError".\n' + 'Raising a negative number to a fractional power results in a ' + '"complex"\n' + 'number. (In earlier versions it raised a "ValueError".)\n', + 'raise': '\n' + 'The "raise" statement\n' + '*********************\n' + '\n' + ' raise_stmt ::= "raise" [expression ["from" expression]]\n' + '\n' + 'If no expressions are present, "raise" re-raises the last ' + 'exception\n' + 'that was active in the current scope. If no exception is active ' + 'in\n' + 'the current scope, a "RuntimeError" exception is raised ' + 'indicating\n' + 'that this is an error.\n' + '\n' + 'Otherwise, "raise" evaluates the first expression as the ' + 'exception\n' + 'object. It must be either a subclass or an instance of\n' + '"BaseException". If it is a class, the exception instance will ' + 'be\n' + 'obtained when needed by instantiating the class with no ' + 'arguments.\n' + '\n' + "The *type* of the exception is the exception instance's class, " + 'the\n' + '*value* is the instance itself.\n' + '\n' + 'A traceback object is normally created automatically when an ' + 'exception\n' + 'is raised and attached to it as the "__traceback__" attribute, ' + 'which\n' + 'is writable. You can create an exception and set your own ' + 'traceback in\n' + 'one step using the "with_traceback()" exception method (which ' + 'returns\n' + 'the same exception instance, with its traceback set to its ' + 'argument),\n' + 'like so:\n' + '\n' + ' raise Exception("foo occurred").with_traceback(tracebackobj)\n' + '\n' + 'The "from" clause is used for exception chaining: if given, the ' + 'second\n' + '*expression* must be another exception class or instance, which ' + 'will\n' + 'then be attached to the raised exception as the "__cause__" ' + 'attribute\n' + '(which is writable). If the raised exception is not handled, ' + 'both\n' + 'exceptions will be printed:\n' + '\n' + ' >>> try:\n' + ' ... print(1 / 0)\n' + ' ... except Exception as exc:\n' + ' ... raise RuntimeError("Something bad happened") from exc\n' + ' ...\n' + ' Traceback (most recent call last):\n' + ' File "", line 2, in \n' + ' ZeroDivisionError: int division or modulo by zero\n' + '\n' + ' The above exception was the direct cause of the following ' + 'exception:\n' + '\n' + ' Traceback (most recent call last):\n' + ' File "", line 4, in \n' + ' RuntimeError: Something bad happened\n' + '\n' + 'A similar mechanism works implicitly if an exception is raised ' + 'inside\n' + 'an exception handler or a "finally" clause: the previous ' + 'exception is\n' + 'then attached as the new exception\'s "__context__" attribute:\n' + '\n' + ' >>> try:\n' + ' ... print(1 / 0)\n' + ' ... except:\n' + ' ... 
raise RuntimeError("Something bad happened")\n' + ' ...\n' + ' Traceback (most recent call last):\n' + ' File "", line 2, in \n' + ' ZeroDivisionError: int division or modulo by zero\n' + '\n' + ' During handling of the above exception, another exception ' + 'occurred:\n' + '\n' + ' Traceback (most recent call last):\n' + ' File "", line 4, in \n' + ' RuntimeError: Something bad happened\n' + '\n' + 'Additional information on exceptions can be found in section\n' + '*Exceptions*, and information about handling exceptions is in ' + 'section\n' + '*The try statement*.\n', + 'return': '\n' + 'The "return" statement\n' + '**********************\n' + '\n' + ' return_stmt ::= "return" [expression_list]\n' + '\n' + '"return" may only occur syntactically nested in a function ' + 'definition,\n' + 'not within a nested class definition.\n' + '\n' + 'If an expression list is present, it is evaluated, else "None" ' + 'is\n' + 'substituted.\n' + '\n' + '"return" leaves the current function call with the expression ' + 'list (or\n' + '"None") as return value.\n' + '\n' + 'When "return" passes control out of a "try" statement with a ' + '"finally"\n' + 'clause, that "finally" clause is executed before really leaving ' + 'the\n' + 'function.\n' + '\n' + 'In a generator function, the "return" statement indicates that ' + 'the\n' + 'generator is done and will cause "StopIteration" to be raised. ' + 'The\n' + 'returned value (if any) is used as an argument to construct\n' + '"StopIteration" and becomes the "StopIteration.value" ' + 'attribute.\n', + 'sequence-types': '\n' + 'Emulating container types\n' + '*************************\n' + '\n' + 'The following methods can be defined to implement ' + 'container objects.\n' + 'Containers usually are sequences (such as lists or ' + 'tuples) or mappings\n' + '(like dictionaries), but can represent other containers ' + 'as well. The\n' + 'first set of methods is used either to emulate a ' + 'sequence or to\n' + 'emulate a mapping; the difference is that for a ' + 'sequence, the\n' + 'allowable keys should be the integers *k* for which "0 ' + '<= k < N" where\n' + '*N* is the length of the sequence, or slice objects, ' + 'which define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + "behaving similar to those for Python's standard " + 'dictionary objects.\n' + 'The "collections" module provides a "MutableMapping" ' + 'abstract base\n' + 'class to help create those methods from a base set of ' + '"__getitem__()",\n' + '"__setitem__()", "__delitem__()", and "keys()". Mutable ' + 'sequences\n' + 'should provide methods "append()", "count()", "index()", ' + '"extend()",\n' + '"insert()", "pop()", "remove()", "reverse()" and ' + '"sort()", like Python\n' + 'standard list objects. Finally, sequence types should ' + 'implement\n' + 'addition (meaning concatenation) and multiplication ' + '(meaning\n' + 'repetition) by defining the methods "__add__()", ' + '"__radd__()",\n' + '"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" ' + 'described\n' + 'below; they should not define other numerical ' + 'operators. It is\n' + 'recommended that both mappings and sequences implement ' + 'the\n' + '"__contains__()" method to allow efficient use of the ' + '"in" operator;\n' + 'for mappings, "in" should search the mapping\'s keys; ' + 'for sequences, it\n' + 'should search through the values. 
It is further ' + 'recommended that both\n' + 'mappings and sequences implement the "__iter__()" method ' + 'to allow\n' + 'efficient iteration through the container; for mappings, ' + '"__iter__()"\n' + 'should be the same as "keys()"; for sequences, it should ' + 'iterate\n' + 'through the values.\n' + '\n' + 'object.__len__(self)\n' + '\n' + ' Called to implement the built-in function "len()". ' + 'Should return\n' + ' the length of the object, an integer ">=" 0. Also, ' + 'an object that\n' + ' doesn\'t define a "__bool__()" method and whose ' + '"__len__()" method\n' + ' returns zero is considered to be false in a Boolean ' + 'context.\n' + '\n' + 'object.__length_hint__(self)\n' + '\n' + ' Called to implement "operator.length_hint()". Should ' + 'return an\n' + ' estimated length for the object (which may be greater ' + 'or less than\n' + ' the actual length). The length must be an integer ' + '">=" 0. This\n' + ' method is purely an optimization and is never ' + 'required for\n' + ' correctness.\n' + '\n' + ' New in version 3.4.\n' + '\n' + 'Note: Slicing is done exclusively with the following ' + 'three methods.\n' + ' A call like\n' + '\n' + ' a[1:2] = b\n' + '\n' + ' is translated to\n' + '\n' + ' a[slice(1, 2, None)] = b\n' + '\n' + ' and so forth. Missing slice items are always filled ' + 'in with "None".\n' + '\n' + 'object.__getitem__(self, key)\n' + '\n' + ' Called to implement evaluation of "self[key]". For ' + 'sequence types,\n' + ' the accepted keys should be integers and slice ' + 'objects. Note that\n' + ' the special interpretation of negative indexes (if ' + 'the class wishes\n' + ' to emulate a sequence type) is up to the ' + '"__getitem__()" method. If\n' + ' *key* is of an inappropriate type, "TypeError" may be ' + 'raised; if of\n' + ' a value outside the set of indexes for the sequence ' + '(after any\n' + ' special interpretation of negative values), ' + '"IndexError" should be\n' + ' raised. For mapping types, if *key* is missing (not ' + 'in the\n' + ' container), "KeyError" should be raised.\n' + '\n' + ' Note: "for" loops expect that an "IndexError" will be ' + 'raised for\n' + ' illegal indexes to allow proper detection of the ' + 'end of the\n' + ' sequence.\n' + '\n' + 'object.__missing__(self, key)\n' + '\n' + ' Called by "dict"."__getitem__()" to implement ' + '"self[key]" for dict\n' + ' subclasses when key is not in the dictionary.\n' + '\n' + 'object.__setitem__(self, key, value)\n' + '\n' + ' Called to implement assignment to "self[key]". Same ' + 'note as for\n' + ' "__getitem__()". This should only be implemented for ' + 'mappings if\n' + ' the objects support changes to the values for keys, ' + 'or if new keys\n' + ' can be added, or for sequences if elements can be ' + 'replaced. The\n' + ' same exceptions should be raised for improper *key* ' + 'values as for\n' + ' the "__getitem__()" method.\n' + '\n' + 'object.__delitem__(self, key)\n' + '\n' + ' Called to implement deletion of "self[key]". Same ' + 'note as for\n' + ' "__getitem__()". This should only be implemented for ' + 'mappings if\n' + ' the objects support removal of keys, or for sequences ' + 'if elements\n' + ' can be removed from the sequence. 
The same ' + 'exceptions should be\n' + ' raised for improper *key* values as for the ' + '"__getitem__()" method.\n' + '\n' + 'object.__iter__(self)\n' + '\n' + ' This method is called when an iterator is required ' + 'for a container.\n' + ' This method should return a new iterator object that ' + 'can iterate\n' + ' over all the objects in the container. For mappings, ' + 'it should\n' + ' iterate over the keys of the container.\n' + '\n' + ' Iterator objects also need to implement this method; ' + 'they are\n' + ' required to return themselves. For more information ' + 'on iterator\n' + ' objects, see *Iterator Types*.\n' + '\n' + 'object.__reversed__(self)\n' + '\n' + ' Called (if present) by the "reversed()" built-in to ' + 'implement\n' + ' reverse iteration. It should return a new iterator ' + 'object that\n' + ' iterates over all the objects in the container in ' + 'reverse order.\n' + '\n' + ' If the "__reversed__()" method is not provided, the ' + '"reversed()"\n' + ' built-in will fall back to using the sequence ' + 'protocol ("__len__()"\n' + ' and "__getitem__()"). Objects that support the ' + 'sequence protocol\n' + ' should only provide "__reversed__()" if they can ' + 'provide an\n' + ' implementation that is more efficient than the one ' + 'provided by\n' + ' "reversed()".\n' + '\n' + 'The membership test operators ("in" and "not in") are ' + 'normally\n' + 'implemented as an iteration through a sequence. ' + 'However, container\n' + 'objects can supply the following special method with a ' + 'more efficient\n' + 'implementation, which also does not require the object ' + 'be a sequence.\n' + '\n' + 'object.__contains__(self, item)\n' + '\n' + ' Called to implement membership test operators. ' + 'Should return true\n' + ' if *item* is in *self*, false otherwise. For mapping ' + 'objects, this\n' + ' should consider the keys of the mapping rather than ' + 'the values or\n' + ' the key-item pairs.\n' + '\n' + ' For objects that don\'t define "__contains__()", the ' + 'membership test\n' + ' first tries iteration via "__iter__()", then the old ' + 'sequence\n' + ' iteration protocol via "__getitem__()", see *this ' + 'section in the\n' + ' language reference*.\n', + 'shifting': '\n' + 'Shifting operations\n' + '*******************\n' + '\n' + 'The shifting operations have lower priority than the ' + 'arithmetic\n' + 'operations:\n' + '\n' + ' shift_expr ::= a_expr | shift_expr ( "<<" | ">>" ) a_expr\n' + '\n' + 'These operators accept integers as arguments. They shift the ' + 'first\n' + 'argument to the left or right by the number of bits given by ' + 'the\n' + 'second argument.\n' + '\n' + 'A right shift by *n* bits is defined as floor division by ' + '"pow(2,n)".\n' + 'A left shift by *n* bits is defined as multiplication with ' + '"pow(2,n)".\n' + '\n' + 'Note: In the current implementation, the right-hand operand ' + 'is\n' + ' required to be at most "sys.maxsize". If the right-hand ' + 'operand is\n' + ' larger than "sys.maxsize" an "OverflowError" exception is ' + 'raised.\n', + 'slicings': '\n' + 'Slicings\n' + '********\n' + '\n' + 'A slicing selects a range of items in a sequence object (e.g., ' + 'a\n' + 'string, tuple or list). Slicings may be used as expressions ' + 'or as\n' + 'targets in assignment or "del" statements. 
The syntax for a ' + 'slicing:\n' + '\n' + ' slicing ::= primary "[" slice_list "]"\n' + ' slice_list ::= slice_item ("," slice_item)* [","]\n' + ' slice_item ::= expression | proper_slice\n' + ' proper_slice ::= [lower_bound] ":" [upper_bound] [ ":" ' + '[stride] ]\n' + ' lower_bound ::= expression\n' + ' upper_bound ::= expression\n' + ' stride ::= expression\n' + '\n' + 'There is ambiguity in the formal syntax here: anything that ' + 'looks like\n' + 'an expression list also looks like a slice list, so any ' + 'subscription\n' + 'can be interpreted as a slicing. Rather than further ' + 'complicating the\n' + 'syntax, this is disambiguated by defining that in this case ' + 'the\n' + 'interpretation as a subscription takes priority over the\n' + 'interpretation as a slicing (this is the case if the slice ' + 'list\n' + 'contains no proper slice).\n' + '\n' + 'The semantics for a slicing are as follows. The primary is ' + 'indexed\n' + '(using the same "__getitem__()" method as normal subscription) ' + 'with a\n' + 'key that is constructed from the slice list, as follows. If ' + 'the slice\n' + 'list contains at least one comma, the key is a tuple ' + 'containing the\n' + 'conversion of the slice items; otherwise, the conversion of ' + 'the lone\n' + 'slice item is the key. The conversion of a slice item that is ' + 'an\n' + 'expression is that expression. The conversion of a proper ' + 'slice is a\n' + 'slice object (see section *The standard type hierarchy*) ' + 'whose\n' + '"start", "stop" and "step" attributes are the values of the\n' + 'expressions given as lower bound, upper bound and stride,\n' + 'respectively, substituting "None" for missing expressions.\n', + 'specialattrs': '\n' + 'Special Attributes\n' + '******************\n' + '\n' + 'The implementation adds a few special read-only attributes ' + 'to several\n' + 'object types, where they are relevant. Some of these are ' + 'not reported\n' + 'by the "dir()" built-in function.\n' + '\n' + 'object.__dict__\n' + '\n' + ' A dictionary or other mapping object used to store an ' + "object's\n" + ' (writable) attributes.\n' + '\n' + 'instance.__class__\n' + '\n' + ' The class to which a class instance belongs.\n' + '\n' + 'class.__bases__\n' + '\n' + ' The tuple of base classes of a class object.\n' + '\n' + 'class.__name__\n' + '\n' + ' The name of the class or type.\n' + '\n' + 'class.__qualname__\n' + '\n' + ' The *qualified name* of the class or type.\n' + '\n' + ' New in version 3.3.\n' + '\n' + 'class.__mro__\n' + '\n' + ' This attribute is a tuple of classes that are ' + 'considered when\n' + ' looking for base classes during method resolution.\n' + '\n' + 'class.mro()\n' + '\n' + ' This method can be overridden by a metaclass to ' + 'customize the\n' + ' method resolution order for its instances. It is ' + 'called at class\n' + ' instantiation, and its result is stored in "__mro__".\n' + '\n' + 'class.__subclasses__()\n' + '\n' + ' Each class keeps a list of weak references to its ' + 'immediate\n' + ' subclasses. This method returns a list of all those ' + 'references\n' + ' still alive. 
Example:\n' + '\n' + ' >>> int.__subclasses__()\n' + " []\n" + '\n' + '-[ Footnotes ]-\n' + '\n' + '[1] Additional information on these special methods may be ' + 'found\n' + ' in the Python Reference Manual (*Basic ' + 'customization*).\n' + '\n' + '[2] As a consequence, the list "[1, 2]" is considered ' + 'equal to\n' + ' "[1.0, 2.0]", and similarly for tuples.\n' + '\n' + "[3] They must have since the parser can't tell the type of " + 'the\n' + ' operands.\n' + '\n' + '[4] Cased characters are those with general category ' + 'property\n' + ' being one of "Lu" (Letter, uppercase), "Ll" (Letter, ' + 'lowercase),\n' + ' or "Lt" (Letter, titlecase).\n' + '\n' + '[5] To format only a tuple you should therefore provide a\n' + ' singleton tuple whose only element is the tuple to be ' + 'formatted.\n', + 'specialnames': '\n' + 'Special method names\n' + '********************\n' + '\n' + 'A class can implement certain operations that are invoked ' + 'by special\n' + 'syntax (such as arithmetic operations or subscripting and ' + 'slicing) by\n' + "defining methods with special names. This is Python's " + 'approach to\n' + '*operator overloading*, allowing classes to define their ' + 'own behavior\n' + 'with respect to language operators. For instance, if a ' + 'class defines\n' + 'a method named "__getitem__()", and "x" is an instance of ' + 'this class,\n' + 'then "x[i]" is roughly equivalent to ' + '"type(x).__getitem__(x, i)".\n' + 'Except where mentioned, attempts to execute an operation ' + 'raise an\n' + 'exception when no appropriate method is defined ' + '(typically\n' + '"AttributeError" or "TypeError").\n' + '\n' + 'When implementing a class that emulates any built-in type, ' + 'it is\n' + 'important that the emulation only be implemented to the ' + 'degree that it\n' + 'makes sense for the object being modelled. For example, ' + 'some\n' + 'sequences may work well with retrieval of individual ' + 'elements, but\n' + 'extracting a slice may not make sense. (One example of ' + 'this is the\n' + '"NodeList" interface in the W3C\'s Document Object ' + 'Model.)\n' + '\n' + '\n' + 'Basic customization\n' + '===================\n' + '\n' + 'object.__new__(cls[, ...])\n' + '\n' + ' Called to create a new instance of class *cls*. ' + '"__new__()" is a\n' + ' static method (special-cased so you need not declare it ' + 'as such)\n' + ' that takes the class of which an instance was requested ' + 'as its\n' + ' first argument. The remaining arguments are those ' + 'passed to the\n' + ' object constructor expression (the call to the class). 
' + 'The return\n' + ' value of "__new__()" should be the new object instance ' + '(usually an\n' + ' instance of *cls*).\n' + '\n' + ' Typical implementations create a new instance of the ' + 'class by\n' + ' invoking the superclass\'s "__new__()" method using\n' + ' "super(currentclass, cls).__new__(cls[, ...])" with ' + 'appropriate\n' + ' arguments and then modifying the newly-created instance ' + 'as\n' + ' necessary before returning it.\n' + '\n' + ' If "__new__()" returns an instance of *cls*, then the ' + 'new\n' + ' instance\'s "__init__()" method will be invoked like\n' + ' "__init__(self[, ...])", where *self* is the new ' + 'instance and the\n' + ' remaining arguments are the same as were passed to ' + '"__new__()".\n' + '\n' + ' If "__new__()" does not return an instance of *cls*, ' + 'then the new\n' + ' instance\'s "__init__()" method will not be invoked.\n' + '\n' + ' "__new__()" is intended mainly to allow subclasses of ' + 'immutable\n' + ' types (like int, str, or tuple) to customize instance ' + 'creation. It\n' + ' is also commonly overridden in custom metaclasses in ' + 'order to\n' + ' customize class creation.\n' + '\n' + 'object.__init__(self[, ...])\n' + '\n' + ' Called after the instance has been created (by ' + '"__new__()"), but\n' + ' before it is returned to the caller. The arguments are ' + 'those\n' + ' passed to the class constructor expression. If a base ' + 'class has an\n' + ' "__init__()" method, the derived class\'s "__init__()" ' + 'method, if\n' + ' any, must explicitly call it to ensure proper ' + 'initialization of the\n' + ' base class part of the instance; for example:\n' + ' "BaseClass.__init__(self, [args...])".\n' + '\n' + ' Because "__new__()" and "__init__()" work together in ' + 'constructing\n' + ' objects ("__new__()" to create it, and "__init__()" to ' + 'customise\n' + ' it), no non-"None" value may be returned by ' + '"__init__()"; doing so\n' + ' will cause a "TypeError" to be raised at runtime.\n' + '\n' + 'object.__del__(self)\n' + '\n' + ' Called when the instance is about to be destroyed. ' + 'This is also\n' + ' called a destructor. If a base class has a "__del__()" ' + 'method, the\n' + ' derived class\'s "__del__()" method, if any, must ' + 'explicitly call it\n' + ' to ensure proper deletion of the base class part of the ' + 'instance.\n' + ' Note that it is possible (though not recommended!) for ' + 'the\n' + ' "__del__()" method to postpone destruction of the ' + 'instance by\n' + ' creating a new reference to it. It may then be called ' + 'at a later\n' + ' time when this new reference is deleted. It is not ' + 'guaranteed that\n' + ' "__del__()" methods are called for objects that still ' + 'exist when\n' + ' the interpreter exits.\n' + '\n' + ' Note: "del x" doesn\'t directly call "x.__del__()" --- ' + 'the former\n' + ' decrements the reference count for "x" by one, and ' + 'the latter is\n' + ' only called when "x"\'s reference count reaches ' + 'zero. 
Some common\n' + ' situations that may prevent the reference count of an ' + 'object from\n' + ' going to zero include: circular references between ' + 'objects (e.g.,\n' + ' a doubly-linked list or a tree data structure with ' + 'parent and\n' + ' child pointers); a reference to the object on the ' + 'stack frame of\n' + ' a function that caught an exception (the traceback ' + 'stored in\n' + ' "sys.exc_info()[2]" keeps the stack frame alive); or ' + 'a reference\n' + ' to the object on the stack frame that raised an ' + 'unhandled\n' + ' exception in interactive mode (the traceback stored ' + 'in\n' + ' "sys.last_traceback" keeps the stack frame alive). ' + 'The first\n' + ' situation can only be remedied by explicitly breaking ' + 'the cycles;\n' + ' the second can be resolved by freeing the reference ' + 'to the\n' + ' traceback object when it is no longer useful, and the ' + 'third can\n' + ' be resolved by storing "None" in ' + '"sys.last_traceback". Circular\n' + ' references which are garbage are detected and cleaned ' + 'up when the\n' + " cyclic garbage collector is enabled (it's on by " + 'default). Refer\n' + ' to the documentation for the "gc" module for more ' + 'information\n' + ' about this topic.\n' + '\n' + ' Warning: Due to the precarious circumstances under ' + 'which\n' + ' "__del__()" methods are invoked, exceptions that ' + 'occur during\n' + ' their execution are ignored, and a warning is printed ' + 'to\n' + ' "sys.stderr" instead. Also, when "__del__()" is ' + 'invoked in\n' + ' response to a module being deleted (e.g., when ' + 'execution of the\n' + ' program is done), other globals referenced by the ' + '"__del__()"\n' + ' method may already have been deleted or in the ' + 'process of being\n' + ' torn down (e.g. the import machinery shutting down). ' + 'For this\n' + ' reason, "__del__()" methods should do the absolute ' + 'minimum needed\n' + ' to maintain external invariants. Starting with ' + 'version 1.5,\n' + ' Python guarantees that globals whose name begins with ' + 'a single\n' + ' underscore are deleted from their module before other ' + 'globals are\n' + ' deleted; if no other references to such globals ' + 'exist, this may\n' + ' help in assuring that imported modules are still ' + 'available at the\n' + ' time when the "__del__()" method is called.\n' + '\n' + 'object.__repr__(self)\n' + '\n' + ' Called by the "repr()" built-in function to compute the ' + '"official"\n' + ' string representation of an object. If at all ' + 'possible, this\n' + ' should look like a valid Python expression that could ' + 'be used to\n' + ' recreate an object with the same value (given an ' + 'appropriate\n' + ' environment). If this is not possible, a string of the ' + 'form\n' + ' "<...some useful description...>" should be returned. ' + 'The return\n' + ' value must be a string object. If a class defines ' + '"__repr__()" but\n' + ' not "__str__()", then "__repr__()" is also used when an ' + '"informal"\n' + ' string representation of instances of that class is ' + 'required.\n' + '\n' + ' This is typically used for debugging, so it is ' + 'important that the\n' + ' representation is information-rich and unambiguous.\n' + '\n' + 'object.__str__(self)\n' + '\n' + ' Called by "str(object)" and the built-in functions ' + '"format()" and\n' + ' "print()" to compute the "informal" or nicely printable ' + 'string\n' + ' representation of an object. 
The return value must be ' + 'a *string*\n' + ' object.\n' + '\n' + ' This method differs from "object.__repr__()" in that ' + 'there is no\n' + ' expectation that "__str__()" return a valid Python ' + 'expression: a\n' + ' more convenient or concise representation can be used.\n' + '\n' + ' The default implementation defined by the built-in type ' + '"object"\n' + ' calls "object.__repr__()".\n' + '\n' + 'object.__bytes__(self)\n' + '\n' + ' Called by "bytes()" to compute a byte-string ' + 'representation of an\n' + ' object. This should return a "bytes" object.\n' + '\n' + 'object.__format__(self, format_spec)\n' + '\n' + ' Called by the "format()" built-in function (and by ' + 'extension, the\n' + ' "str.format()" method of class "str") to produce a ' + '"formatted"\n' + ' string representation of an object. The "format_spec" ' + 'argument is a\n' + ' string that contains a description of the formatting ' + 'options\n' + ' desired. The interpretation of the "format_spec" ' + 'argument is up to\n' + ' the type implementing "__format__()", however most ' + 'classes will\n' + ' either delegate formatting to one of the built-in ' + 'types, or use a\n' + ' similar formatting option syntax.\n' + '\n' + ' See *Format Specification Mini-Language* for a ' + 'description of the\n' + ' standard formatting syntax.\n' + '\n' + ' The return value must be a string object.\n' + '\n' + ' Changed in version 3.4: The __format__ method of ' + '"object" itself\n' + ' raises a "TypeError" if passed any non-empty string.\n' + '\n' + 'object.__lt__(self, other)\n' + 'object.__le__(self, other)\n' + 'object.__eq__(self, other)\n' + 'object.__ne__(self, other)\n' + 'object.__gt__(self, other)\n' + 'object.__ge__(self, other)\n' + '\n' + ' These are the so-called "rich comparison" methods. The\n' + ' correspondence between operator symbols and method ' + 'names is as\n' + ' follows: "xy" calls\n' + ' "x.__gt__(y)", and "x>=y" calls "x.__ge__(y)".\n' + '\n' + ' A rich comparison method may return the singleton ' + '"NotImplemented"\n' + ' if it does not implement the operation for a given pair ' + 'of\n' + ' arguments. By convention, "False" and "True" are ' + 'returned for a\n' + ' successful comparison. However, these methods can ' + 'return any value,\n' + ' so if the comparison operator is used in a Boolean ' + 'context (e.g.,\n' + ' in the condition of an "if" statement), Python will ' + 'call "bool()"\n' + ' on the value to determine if the result is true or ' + 'false.\n' + '\n' + ' By default, "__ne__()" delegates to "__eq__()" and ' + 'inverts the\n' + ' result unless it is "NotImplemented". There are no ' + 'other implied\n' + ' relationships among the comparison operators, for ' + 'example, the\n' + ' truth of "(x.__hash__".\n' + '\n' + ' If a class that does not override "__eq__()" wishes to ' + 'suppress\n' + ' hash support, it should include "__hash__ = None" in ' + 'the class\n' + ' definition. 
A class which defines its own "__hash__()" ' + 'that\n' + ' explicitly raises a "TypeError" would be incorrectly ' + 'identified as\n' + ' hashable by an "isinstance(obj, collections.Hashable)" ' + 'call.\n' + '\n' + ' Note: By default, the "__hash__()" values of str, bytes ' + 'and\n' + ' datetime objects are "salted" with an unpredictable ' + 'random value.\n' + ' Although they remain constant within an individual ' + 'Python\n' + ' process, they are not predictable between repeated ' + 'invocations of\n' + ' Python.This is intended to provide protection against ' + 'a denial-\n' + ' of-service caused by carefully-chosen inputs that ' + 'exploit the\n' + ' worst case performance of a dict insertion, O(n^2) ' + 'complexity.\n' + ' See ' + 'http://www.ocert.org/advisories/ocert-2011-003.html for\n' + ' details.Changing hash values affects the iteration ' + 'order of\n' + ' dicts, sets and other mappings. Python has never ' + 'made guarantees\n' + ' about this ordering (and it typically varies between ' + '32-bit and\n' + ' 64-bit builds).See also "PYTHONHASHSEED".\n' + '\n' + ' Changed in version 3.3: Hash randomization is enabled ' + 'by default.\n' + '\n' + 'object.__bool__(self)\n' + '\n' + ' Called to implement truth value testing and the ' + 'built-in operation\n' + ' "bool()"; should return "False" or "True". When this ' + 'method is not\n' + ' defined, "__len__()" is called, if it is defined, and ' + 'the object is\n' + ' considered true if its result is nonzero. If a class ' + 'defines\n' + ' neither "__len__()" nor "__bool__()", all its instances ' + 'are\n' + ' considered true.\n' + '\n' + '\n' + 'Customizing attribute access\n' + '============================\n' + '\n' + 'The following methods can be defined to customize the ' + 'meaning of\n' + 'attribute access (use of, assignment to, or deletion of ' + '"x.name") for\n' + 'class instances.\n' + '\n' + 'object.__getattr__(self, name)\n' + '\n' + ' Called when an attribute lookup has not found the ' + 'attribute in the\n' + ' usual places (i.e. it is not an instance attribute nor ' + 'is it found\n' + ' in the class tree for "self"). "name" is the attribute ' + 'name. This\n' + ' method should return the (computed) attribute value or ' + 'raise an\n' + ' "AttributeError" exception.\n' + '\n' + ' Note that if the attribute is found through the normal ' + 'mechanism,\n' + ' "__getattr__()" is not called. (This is an intentional ' + 'asymmetry\n' + ' between "__getattr__()" and "__setattr__()".) This is ' + 'done both for\n' + ' efficiency reasons and because otherwise ' + '"__getattr__()" would have\n' + ' no way to access other attributes of the instance. ' + 'Note that at\n' + ' least for instance variables, you can fake total ' + 'control by not\n' + ' inserting any values in the instance attribute ' + 'dictionary (but\n' + ' instead inserting them in another object). See the\n' + ' "__getattribute__()" method below for a way to actually ' + 'get total\n' + ' control over attribute access.\n' + '\n' + 'object.__getattribute__(self, name)\n' + '\n' + ' Called unconditionally to implement attribute accesses ' + 'for\n' + ' instances of the class. If the class also defines ' + '"__getattr__()",\n' + ' the latter will not be called unless ' + '"__getattribute__()" either\n' + ' calls it explicitly or raises an "AttributeError". This ' + 'method\n' + ' should return the (computed) attribute value or raise ' + 'an\n' + ' "AttributeError" exception. 
In order to avoid infinite ' + 'recursion in\n' + ' this method, its implementation should always call the ' + 'base class\n' + ' method with the same name to access any attributes it ' + 'needs, for\n' + ' example, "object.__getattribute__(self, name)".\n' + '\n' + ' Note: This method may still be bypassed when looking up ' + 'special\n' + ' methods as the result of implicit invocation via ' + 'language syntax\n' + ' or built-in functions. See *Special method lookup*.\n' + '\n' + 'object.__setattr__(self, name, value)\n' + '\n' + ' Called when an attribute assignment is attempted. This ' + 'is called\n' + ' instead of the normal mechanism (i.e. store the value ' + 'in the\n' + ' instance dictionary). *name* is the attribute name, ' + '*value* is the\n' + ' value to be assigned to it.\n' + '\n' + ' If "__setattr__()" wants to assign to an instance ' + 'attribute, it\n' + ' should call the base class method with the same name, ' + 'for example,\n' + ' "object.__setattr__(self, name, value)".\n' + '\n' + 'object.__delattr__(self, name)\n' + '\n' + ' Like "__setattr__()" but for attribute deletion instead ' + 'of\n' + ' assignment. This should only be implemented if "del ' + 'obj.name" is\n' + ' meaningful for the object.\n' + '\n' + 'object.__dir__(self)\n' + '\n' + ' Called when "dir()" is called on the object. A sequence ' + 'must be\n' + ' returned. "dir()" converts the returned sequence to a ' + 'list and\n' + ' sorts it.\n' + '\n' + '\n' + 'Implementing Descriptors\n' + '------------------------\n' + '\n' + 'The following methods only apply when an instance of the ' + 'class\n' + 'containing the method (a so-called *descriptor* class) ' + 'appears in an\n' + '*owner* class (the descriptor must be in either the ' + "owner's class\n" + 'dictionary or in the class dictionary for one of its ' + 'parents). In the\n' + 'examples below, "the attribute" refers to the attribute ' + 'whose name is\n' + 'the key of the property in the owner class\' "__dict__".\n' + '\n' + 'object.__get__(self, instance, owner)\n' + '\n' + ' Called to get the attribute of the owner class (class ' + 'attribute\n' + ' access) or of an instance of that class (instance ' + 'attribute\n' + ' access). *owner* is always the owner class, while ' + '*instance* is the\n' + ' instance that the attribute was accessed through, or ' + '"None" when\n' + ' the attribute is accessed through the *owner*. This ' + 'method should\n' + ' return the (computed) attribute value or raise an ' + '"AttributeError"\n' + ' exception.\n' + '\n' + 'object.__set__(self, instance, value)\n' + '\n' + ' Called to set the attribute on an instance *instance* ' + 'of the owner\n' + ' class to a new value, *value*.\n' + '\n' + 'object.__delete__(self, instance)\n' + '\n' + ' Called to delete the attribute on an instance ' + '*instance* of the\n' + ' owner class.\n' + '\n' + 'The attribute "__objclass__" is interpreted by the ' + '"inspect" module as\n' + 'specifying the class where this object was defined ' + '(setting this\n' + 'appropriately can assist in runtime introspection of ' + 'dynamic class\n' + 'attributes). 
For callables, it may indicate that an ' + 'instance of the\n' + 'given type (or a subclass) is expected or required as the ' + 'first\n' + 'positional argument (for example, CPython sets this ' + 'attribute for\n' + 'unbound methods that are implemented in C).\n' + '\n' + '\n' + 'Invoking Descriptors\n' + '--------------------\n' + '\n' + 'In general, a descriptor is an object attribute with ' + '"binding\n' + 'behavior", one whose attribute access has been overridden ' + 'by methods\n' + 'in the descriptor protocol: "__get__()", "__set__()", ' + 'and\n' + '"__delete__()". If any of those methods are defined for an ' + 'object, it\n' + 'is said to be a descriptor.\n' + '\n' + 'The default behavior for attribute access is to get, set, ' + 'or delete\n' + "the attribute from an object's dictionary. For instance, " + '"a.x" has a\n' + 'lookup chain starting with "a.__dict__[\'x\']", then\n' + '"type(a).__dict__[\'x\']", and continuing through the base ' + 'classes of\n' + '"type(a)" excluding metaclasses.\n' + '\n' + 'However, if the looked-up value is an object defining one ' + 'of the\n' + 'descriptor methods, then Python may override the default ' + 'behavior and\n' + 'invoke the descriptor method instead. Where this occurs ' + 'in the\n' + 'precedence chain depends on which descriptor methods were ' + 'defined and\n' + 'how they were called.\n' + '\n' + 'The starting point for descriptor invocation is a binding, ' + '"a.x". How\n' + 'the arguments are assembled depends on "a":\n' + '\n' + 'Direct Call\n' + ' The simplest and least common call is when user code ' + 'directly\n' + ' invokes a descriptor method: "x.__get__(a)".\n' + '\n' + 'Instance Binding\n' + ' If binding to an object instance, "a.x" is transformed ' + 'into the\n' + ' call: "type(a).__dict__[\'x\'].__get__(a, type(a))".\n' + '\n' + 'Class Binding\n' + ' If binding to a class, "A.x" is transformed into the ' + 'call:\n' + ' "A.__dict__[\'x\'].__get__(None, A)".\n' + '\n' + 'Super Binding\n' + ' If "a" is an instance of "super", then the binding ' + '"super(B,\n' + ' obj).m()" searches "obj.__class__.__mro__" for the base ' + 'class "A"\n' + ' immediately preceding "B" and then invokes the ' + 'descriptor with the\n' + ' call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n' + '\n' + 'For instance bindings, the precedence of descriptor ' + 'invocation depends\n' + 'on the which descriptor methods are defined. A descriptor ' + 'can define\n' + 'any combination of "__get__()", "__set__()" and ' + '"__delete__()". If it\n' + 'does not define "__get__()", then accessing the attribute ' + 'will return\n' + 'the descriptor object itself unless there is a value in ' + "the object's\n" + 'instance dictionary. If the descriptor defines ' + '"__set__()" and/or\n' + '"__delete__()", it is a data descriptor; if it defines ' + 'neither, it is\n' + 'a non-data descriptor. Normally, data descriptors define ' + 'both\n' + '"__get__()" and "__set__()", while non-data descriptors ' + 'have just the\n' + '"__get__()" method. Data descriptors with "__set__()" and ' + '"__get__()"\n' + 'defined always override a redefinition in an instance ' + 'dictionary. In\n' + 'contrast, non-data descriptors can be overridden by ' + 'instances.\n' + '\n' + 'Python methods (including "staticmethod()" and ' + '"classmethod()") are\n' + 'implemented as non-data descriptors. Accordingly, ' + 'instances can\n' + 'redefine and override methods. 
This allows individual ' + 'instances to\n' + 'acquire behaviors that differ from other instances of the ' + 'same class.\n' + '\n' + 'The "property()" function is implemented as a data ' + 'descriptor.\n' + 'Accordingly, instances cannot override the behavior of a ' + 'property.\n' + '\n' + '\n' + '__slots__\n' + '---------\n' + '\n' + 'By default, instances of classes have a dictionary for ' + 'attribute\n' + 'storage. This wastes space for objects having very few ' + 'instance\n' + 'variables. The space consumption can become acute when ' + 'creating large\n' + 'numbers of instances.\n' + '\n' + 'The default can be overridden by defining *__slots__* in a ' + 'class\n' + 'definition. The *__slots__* declaration takes a sequence ' + 'of instance\n' + 'variables and reserves just enough space in each instance ' + 'to hold a\n' + 'value for each variable. Space is saved because ' + '*__dict__* is not\n' + 'created for each instance.\n' + '\n' + 'object.__slots__\n' + '\n' + ' This class variable can be assigned a string, iterable, ' + 'or sequence\n' + ' of strings with variable names used by instances. ' + '*__slots__*\n' + ' reserves space for the declared variables and prevents ' + 'the\n' + ' automatic creation of *__dict__* and *__weakref__* for ' + 'each\n' + ' instance.\n' + '\n' + '\n' + 'Notes on using *__slots__*\n' + '~~~~~~~~~~~~~~~~~~~~~~~~~~\n' + '\n' + '* When inheriting from a class without *__slots__*, the ' + '*__dict__*\n' + ' attribute of that class will always be accessible, so a ' + '*__slots__*\n' + ' definition in the subclass is meaningless.\n' + '\n' + '* Without a *__dict__* variable, instances cannot be ' + 'assigned new\n' + ' variables not listed in the *__slots__* definition. ' + 'Attempts to\n' + ' assign to an unlisted variable name raises ' + '"AttributeError". If\n' + ' dynamic assignment of new variables is desired, then ' + 'add\n' + ' "\'__dict__\'" to the sequence of strings in the ' + '*__slots__*\n' + ' declaration.\n' + '\n' + '* Without a *__weakref__* variable for each instance, ' + 'classes\n' + ' defining *__slots__* do not support weak references to ' + 'its\n' + ' instances. If weak reference support is needed, then ' + 'add\n' + ' "\'__weakref__\'" to the sequence of strings in the ' + '*__slots__*\n' + ' declaration.\n' + '\n' + '* *__slots__* are implemented at the class level by ' + 'creating\n' + ' descriptors (*Implementing Descriptors*) for each ' + 'variable name. As\n' + ' a result, class attributes cannot be used to set default ' + 'values for\n' + ' instance variables defined by *__slots__*; otherwise, ' + 'the class\n' + ' attribute would overwrite the descriptor assignment.\n' + '\n' + '* The action of a *__slots__* declaration is limited to ' + 'the class\n' + ' where it is defined. As a result, subclasses will have ' + 'a *__dict__*\n' + ' unless they also define *__slots__* (which must only ' + 'contain names\n' + ' of any *additional* slots).\n' + '\n' + '* If a class defines a slot also defined in a base class, ' + 'the\n' + ' instance variable defined by the base class slot is ' + 'inaccessible\n' + ' (except by retrieving its descriptor directly from the ' + 'base class).\n' + ' This renders the meaning of the program undefined. In ' + 'the future, a\n' + ' check may be added to prevent this.\n' + '\n' + '* Nonempty *__slots__* does not work for classes derived ' + 'from\n' + ' "variable-length" built-in types such as "int", "bytes" ' + 'and "tuple".\n' + '\n' + '* Any non-string iterable may be assigned to *__slots__*. 
' + 'Mappings\n' + ' may also be used; however, in the future, special ' + 'meaning may be\n' + ' assigned to the values corresponding to each key.\n' + '\n' + '* *__class__* assignment works only if both classes have ' + 'the same\n' + ' *__slots__*.\n' + '\n' + '\n' + 'Customizing class creation\n' + '==========================\n' + '\n' + 'By default, classes are constructed using "type()". The ' + 'class body is\n' + 'executed in a new namespace and the class name is bound ' + 'locally to the\n' + 'result of "type(name, bases, namespace)".\n' + '\n' + 'The class creation process can be customised by passing ' + 'the\n' + '"metaclass" keyword argument in the class definition line, ' + 'or by\n' + 'inheriting from an existing class that included such an ' + 'argument. In\n' + 'the following example, both "MyClass" and "MySubclass" are ' + 'instances\n' + 'of "Meta":\n' + '\n' + ' class Meta(type):\n' + ' pass\n' + '\n' + ' class MyClass(metaclass=Meta):\n' + ' pass\n' + '\n' + ' class MySubclass(MyClass):\n' + ' pass\n' + '\n' + 'Any other keyword arguments that are specified in the ' + 'class definition\n' + 'are passed through to all metaclass operations described ' + 'below.\n' + '\n' + 'When a class definition is executed, the following steps ' + 'occur:\n' + '\n' + '* the appropriate metaclass is determined\n' + '\n' + '* the class namespace is prepared\n' + '\n' + '* the class body is executed\n' + '\n' + '* the class object is created\n' + '\n' + '\n' + 'Determining the appropriate metaclass\n' + '-------------------------------------\n' + '\n' + 'The appropriate metaclass for a class definition is ' + 'determined as\n' + 'follows:\n' + '\n' + '* if no bases and no explicit metaclass are given, then ' + '"type()" is\n' + ' used\n' + '\n' + '* if an explicit metaclass is given and it is *not* an ' + 'instance of\n' + ' "type()", then it is used directly as the metaclass\n' + '\n' + '* if an instance of "type()" is given as the explicit ' + 'metaclass, or\n' + ' bases are defined, then the most derived metaclass is ' + 'used\n' + '\n' + 'The most derived metaclass is selected from the explicitly ' + 'specified\n' + 'metaclass (if any) and the metaclasses (i.e. "type(cls)") ' + 'of all\n' + 'specified base classes. The most derived metaclass is one ' + 'which is a\n' + 'subtype of *all* of these candidate metaclasses. If none ' + 'of the\n' + 'candidate metaclasses meets that criterion, then the class ' + 'definition\n' + 'will fail with "TypeError".\n' + '\n' + '\n' + 'Preparing the class namespace\n' + '-----------------------------\n' + '\n' + 'Once the appropriate metaclass has been identified, then ' + 'the class\n' + 'namespace is prepared. If the metaclass has a ' + '"__prepare__" attribute,\n' + 'it is called as "namespace = metaclass.__prepare__(name, ' + 'bases,\n' + '**kwds)" (where the additional keyword arguments, if any, ' + 'come from\n' + 'the class definition).\n' + '\n' + 'If the metaclass has no "__prepare__" attribute, then the ' + 'class\n' + 'namespace is initialised as an empty "dict()" instance.\n' + '\n' + 'See also: **PEP 3115** - Metaclasses in Python 3000\n' + '\n' + ' Introduced the "__prepare__" namespace hook\n' + '\n' + '\n' + 'Executing the class body\n' + '------------------------\n' + '\n' + 'The class body is executed (approximately) as "exec(body, ' + 'globals(),\n' + 'namespace)". 
The key difference from a normal call to ' + '"exec()" is that\n' + 'lexical scoping allows the class body (including any ' + 'methods) to\n' + 'reference names from the current and outer scopes when the ' + 'class\n' + 'definition occurs inside a function.\n' + '\n' + 'However, even when the class definition occurs inside the ' + 'function,\n' + 'methods defined inside the class still cannot see names ' + 'defined at the\n' + 'class scope. Class variables must be accessed through the ' + 'first\n' + 'parameter of instance or class methods, and cannot be ' + 'accessed at all\n' + 'from static methods.\n' + '\n' + '\n' + 'Creating the class object\n' + '-------------------------\n' + '\n' + 'Once the class namespace has been populated by executing ' + 'the class\n' + 'body, the class object is created by calling ' + '"metaclass(name, bases,\n' + 'namespace, **kwds)" (the additional keywords passed here ' + 'are the same\n' + 'as those passed to "__prepare__").\n' + '\n' + 'This class object is the one that will be referenced by ' + 'the zero-\n' + 'argument form of "super()". "__class__" is an implicit ' + 'closure\n' + 'reference created by the compiler if any methods in a ' + 'class body refer\n' + 'to either "__class__" or "super". This allows the zero ' + 'argument form\n' + 'of "super()" to correctly identify the class being defined ' + 'based on\n' + 'lexical scoping, while the class or instance that was used ' + 'to make the\n' + 'current call is identified based on the first argument ' + 'passed to the\n' + 'method.\n' + '\n' + 'After the class object is created, it is passed to the ' + 'class\n' + 'decorators included in the class definition (if any) and ' + 'the resulting\n' + 'object is bound in the local namespace as the defined ' + 'class.\n' + '\n' + 'See also: **PEP 3135** - New super\n' + '\n' + ' Describes the implicit "__class__" closure reference\n' + '\n' + '\n' + 'Metaclass example\n' + '-----------------\n' + '\n' + 'The potential uses for metaclasses are boundless. Some ' + 'ideas that have\n' + 'been explored include logging, interface checking, ' + 'automatic\n' + 'delegation, automatic property creation, proxies, ' + 'frameworks, and\n' + 'automatic resource locking/synchronization.\n' + '\n' + 'Here is an example of a metaclass that uses an\n' + '"collections.OrderedDict" to remember the order that class ' + 'variables\n' + 'are defined:\n' + '\n' + ' class OrderedClass(type):\n' + '\n' + ' @classmethod\n' + ' def __prepare__(metacls, name, bases, **kwds):\n' + ' return collections.OrderedDict()\n' + '\n' + ' def __new__(cls, name, bases, namespace, **kwds):\n' + ' result = type.__new__(cls, name, bases, ' + 'dict(namespace))\n' + ' result.members = tuple(namespace)\n' + ' return result\n' + '\n' + ' class A(metaclass=OrderedClass):\n' + ' def one(self): pass\n' + ' def two(self): pass\n' + ' def three(self): pass\n' + ' def four(self): pass\n' + '\n' + ' >>> A.members\n' + " ('__module__', 'one', 'two', 'three', 'four')\n" + '\n' + 'When the class definition for *A* gets executed, the ' + 'process begins\n' + 'with calling the metaclass\'s "__prepare__()" method which ' + 'returns an\n' + 'empty "collections.OrderedDict". That mapping records the ' + 'methods and\n' + 'attributes of *A* as they are defined within the body of ' + 'the class\n' + 'statement. Once those definitions are executed, the ' + 'ordered dictionary\n' + 'is fully populated and the metaclass\'s "__new__()" method ' + 'gets\n' + 'invoked. 
That method builds the new type and it saves the ' + 'ordered\n' + 'dictionary keys in an attribute called "members".\n' + '\n' + '\n' + 'Customizing instance and subclass checks\n' + '========================================\n' + '\n' + 'The following methods are used to override the default ' + 'behavior of the\n' + '"isinstance()" and "issubclass()" built-in functions.\n' + '\n' + 'In particular, the metaclass "abc.ABCMeta" implements ' + 'these methods in\n' + 'order to allow the addition of Abstract Base Classes ' + '(ABCs) as\n' + '"virtual base classes" to any class or type (including ' + 'built-in\n' + 'types), including other ABCs.\n' + '\n' + 'class.__instancecheck__(self, instance)\n' + '\n' + ' Return true if *instance* should be considered a ' + '(direct or\n' + ' indirect) instance of *class*. If defined, called to ' + 'implement\n' + ' "isinstance(instance, class)".\n' + '\n' + 'class.__subclasscheck__(self, subclass)\n' + '\n' + ' Return true if *subclass* should be considered a ' + '(direct or\n' + ' indirect) subclass of *class*. If defined, called to ' + 'implement\n' + ' "issubclass(subclass, class)".\n' + '\n' + 'Note that these methods are looked up on the type ' + '(metaclass) of a\n' + 'class. They cannot be defined as class methods in the ' + 'actual class.\n' + 'This is consistent with the lookup of special methods that ' + 'are called\n' + 'on instances, only in this case the instance is itself a ' + 'class.\n' + '\n' + 'See also: **PEP 3119** - Introducing Abstract Base ' + 'Classes\n' + '\n' + ' Includes the specification for customizing ' + '"isinstance()" and\n' + ' "issubclass()" behavior through "__instancecheck__()" ' + 'and\n' + ' "__subclasscheck__()", with motivation for this ' + 'functionality in\n' + ' the context of adding Abstract Base Classes (see the ' + '"abc"\n' + ' module) to the language.\n' + '\n' + '\n' + 'Emulating callable objects\n' + '==========================\n' + '\n' + 'object.__call__(self[, args...])\n' + '\n' + ' Called when the instance is "called" as a function; if ' + 'this method\n' + ' is defined, "x(arg1, arg2, ...)" is a shorthand for\n' + ' "x.__call__(arg1, arg2, ...)".\n' + '\n' + '\n' + 'Emulating container types\n' + '=========================\n' + '\n' + 'The following methods can be defined to implement ' + 'container objects.\n' + 'Containers usually are sequences (such as lists or tuples) ' + 'or mappings\n' + '(like dictionaries), but can represent other containers as ' + 'well. The\n' + 'first set of methods is used either to emulate a sequence ' + 'or to\n' + 'emulate a mapping; the difference is that for a sequence, ' + 'the\n' + 'allowable keys should be the integers *k* for which "0 <= ' + 'k < N" where\n' + '*N* is the length of the sequence, or slice objects, which ' + 'define a\n' + 'range of items. It is also recommended that mappings ' + 'provide the\n' + 'methods "keys()", "values()", "items()", "get()", ' + '"clear()",\n' + '"setdefault()", "pop()", "popitem()", "copy()", and ' + '"update()"\n' + "behaving similar to those for Python's standard dictionary " + 'objects.\n' + 'The "collections" module provides a "MutableMapping" ' + 'abstract base\n' + 'class to help create those methods from a base set of ' + '"__getitem__()",\n' + '"__setitem__()", "__delitem__()", and "keys()". Mutable ' + 'sequences\n' + 'should provide methods "append()", "count()", "index()", ' + '"extend()",\n' + '"insert()", "pop()", "remove()", "reverse()" and "sort()", ' + 'like Python\n' + 'standard list objects. 
Finally, sequence types should ' + 'implement\n' + 'addition (meaning concatenation) and multiplication ' + '(meaning\n' + 'repetition) by defining the methods "__add__()", ' + '"__radd__()",\n' + '"__iadd__()", "__mul__()", "__rmul__()" and "__imul__()" ' + 'described\n' + 'below; they should not define other numerical operators. ' + 'It is\n' + 'recommended that both mappings and sequences implement ' + 'the\n' + '"__contains__()" method to allow efficient use of the "in" ' + 'operator;\n' + 'for mappings, "in" should search the mapping\'s keys; for ' + 'sequences, it\n' + 'should search through the values. It is further ' + 'recommended that both\n' + 'mappings and sequences implement the "__iter__()" method ' + 'to allow\n' + 'efficient iteration through the container; for mappings, ' + '"__iter__()"\n' + 'should be the same as "keys()"; for sequences, it should ' + 'iterate\n' + 'through the values.\n' + '\n' + 'object.__len__(self)\n' + '\n' + ' Called to implement the built-in function "len()". ' + 'Should return\n' + ' the length of the object, an integer ">=" 0. Also, an ' + 'object that\n' + ' doesn\'t define a "__bool__()" method and whose ' + '"__len__()" method\n' + ' returns zero is considered to be false in a Boolean ' + 'context.\n' + '\n' + 'object.__length_hint__(self)\n' + '\n' + ' Called to implement "operator.length_hint()". Should ' + 'return an\n' + ' estimated length for the object (which may be greater ' + 'or less than\n' + ' the actual length). The length must be an integer ">=" ' + '0. This\n' + ' method is purely an optimization and is never required ' + 'for\n' + ' correctness.\n' + '\n' + ' New in version 3.4.\n' + '\n' + 'Note: Slicing is done exclusively with the following three ' + 'methods.\n' + ' A call like\n' + '\n' + ' a[1:2] = b\n' + '\n' + ' is translated to\n' + '\n' + ' a[slice(1, 2, None)] = b\n' + '\n' + ' and so forth. Missing slice items are always filled in ' + 'with "None".\n' + '\n' + 'object.__getitem__(self, key)\n' + '\n' + ' Called to implement evaluation of "self[key]". For ' + 'sequence types,\n' + ' the accepted keys should be integers and slice ' + 'objects. Note that\n' + ' the special interpretation of negative indexes (if the ' + 'class wishes\n' + ' to emulate a sequence type) is up to the ' + '"__getitem__()" method. If\n' + ' *key* is of an inappropriate type, "TypeError" may be ' + 'raised; if of\n' + ' a value outside the set of indexes for the sequence ' + '(after any\n' + ' special interpretation of negative values), ' + '"IndexError" should be\n' + ' raised. For mapping types, if *key* is missing (not in ' + 'the\n' + ' container), "KeyError" should be raised.\n' + '\n' + ' Note: "for" loops expect that an "IndexError" will be ' + 'raised for\n' + ' illegal indexes to allow proper detection of the end ' + 'of the\n' + ' sequence.\n' + '\n' + 'object.__missing__(self, key)\n' + '\n' + ' Called by "dict"."__getitem__()" to implement ' + '"self[key]" for dict\n' + ' subclasses when key is not in the dictionary.\n' + '\n' + 'object.__setitem__(self, key, value)\n' + '\n' + ' Called to implement assignment to "self[key]". Same ' + 'note as for\n' + ' "__getitem__()". This should only be implemented for ' + 'mappings if\n' + ' the objects support changes to the values for keys, or ' + 'if new keys\n' + ' can be added, or for sequences if elements can be ' + 'replaced. 
The\n' + ' same exceptions should be raised for improper *key* ' + 'values as for\n' + ' the "__getitem__()" method.\n' + '\n' + 'object.__delitem__(self, key)\n' + '\n' + ' Called to implement deletion of "self[key]". Same note ' + 'as for\n' + ' "__getitem__()". This should only be implemented for ' + 'mappings if\n' + ' the objects support removal of keys, or for sequences ' + 'if elements\n' + ' can be removed from the sequence. The same exceptions ' + 'should be\n' + ' raised for improper *key* values as for the ' + '"__getitem__()" method.\n' + '\n' + 'object.__iter__(self)\n' + '\n' + ' This method is called when an iterator is required for ' + 'a container.\n' + ' This method should return a new iterator object that ' + 'can iterate\n' + ' over all the objects in the container. For mappings, ' + 'it should\n' + ' iterate over the keys of the container.\n' + '\n' + ' Iterator objects also need to implement this method; ' + 'they are\n' + ' required to return themselves. For more information on ' + 'iterator\n' + ' objects, see *Iterator Types*.\n' + '\n' + 'object.__reversed__(self)\n' + '\n' + ' Called (if present) by the "reversed()" built-in to ' + 'implement\n' + ' reverse iteration. It should return a new iterator ' + 'object that\n' + ' iterates over all the objects in the container in ' + 'reverse order.\n' + '\n' + ' If the "__reversed__()" method is not provided, the ' + '"reversed()"\n' + ' built-in will fall back to using the sequence protocol ' + '("__len__()"\n' + ' and "__getitem__()"). Objects that support the ' + 'sequence protocol\n' + ' should only provide "__reversed__()" if they can ' + 'provide an\n' + ' implementation that is more efficient than the one ' + 'provided by\n' + ' "reversed()".\n' + '\n' + 'The membership test operators ("in" and "not in") are ' + 'normally\n' + 'implemented as an iteration through a sequence. However, ' + 'container\n' + 'objects can supply the following special method with a ' + 'more efficient\n' + 'implementation, which also does not require the object be ' + 'a sequence.\n' + '\n' + 'object.__contains__(self, item)\n' + '\n' + ' Called to implement membership test operators. Should ' + 'return true\n' + ' if *item* is in *self*, false otherwise. 
For mapping ' + 'objects, this\n' + ' should consider the keys of the mapping rather than the ' + 'values or\n' + ' the key-item pairs.\n' + '\n' + ' For objects that don\'t define "__contains__()", the ' + 'membership test\n' + ' first tries iteration via "__iter__()", then the old ' + 'sequence\n' + ' iteration protocol via "__getitem__()", see *this ' + 'section in the\n' + ' language reference*.\n' + '\n' + '\n' + 'Emulating numeric types\n' + '=======================\n' + '\n' + 'The following methods can be defined to emulate numeric ' + 'objects.\n' + 'Methods corresponding to operations that are not supported ' + 'by the\n' + 'particular kind of number implemented (e.g., bitwise ' + 'operations for\n' + 'non-integral numbers) should be left undefined.\n' + '\n' + 'object.__add__(self, other)\n' + 'object.__sub__(self, other)\n' + 'object.__mul__(self, other)\n' + 'object.__matmul__(self, other)\n' + 'object.__truediv__(self, other)\n' + 'object.__floordiv__(self, other)\n' + 'object.__mod__(self, other)\n' + 'object.__divmod__(self, other)\n' + 'object.__pow__(self, other[, modulo])\n' + 'object.__lshift__(self, other)\n' + 'object.__rshift__(self, other)\n' + 'object.__and__(self, other)\n' + 'object.__xor__(self, other)\n' + 'object.__or__(self, other)\n' + '\n' + ' These methods are called to implement the binary ' + 'arithmetic\n' + ' operations ("+", "-", "*", "@", "/", "//", "%", ' + '"divmod()",\n' + ' "pow()", "**", "<<", ">>", "&", "^", "|"). For ' + 'instance, to\n' + ' evaluate the expression "x + y", where *x* is an ' + 'instance of a\n' + ' class that has an "__add__()" method, "x.__add__(y)" is ' + 'called.\n' + ' The "__divmod__()" method should be the equivalent to ' + 'using\n' + ' "__floordiv__()" and "__mod__()"; it should not be ' + 'related to\n' + ' "__truediv__()". Note that "__pow__()" should be ' + 'defined to accept\n' + ' an optional third argument if the ternary version of ' + 'the built-in\n' + ' "pow()" function is to be supported.\n' + '\n' + ' If one of those methods does not support the operation ' + 'with the\n' + ' supplied arguments, it should return "NotImplemented".\n' + '\n' + 'object.__radd__(self, other)\n' + 'object.__rsub__(self, other)\n' + 'object.__rmul__(self, other)\n' + 'object.__rmatmul__(self, other)\n' + 'object.__rtruediv__(self, other)\n' + 'object.__rfloordiv__(self, other)\n' + 'object.__rmod__(self, other)\n' + 'object.__rdivmod__(self, other)\n' + 'object.__rpow__(self, other)\n' + 'object.__rlshift__(self, other)\n' + 'object.__rrshift__(self, other)\n' + 'object.__rand__(self, other)\n' + 'object.__rxor__(self, other)\n' + 'object.__ror__(self, other)\n' + '\n' + ' These methods are called to implement the binary ' + 'arithmetic\n' + ' operations ("+", "-", "*", "@", "/", "//", "%", ' + '"divmod()",\n' + ' "pow()", "**", "<<", ">>", "&", "^", "|") with ' + 'reflected (swapped)\n' + ' operands. These functions are only called if the left ' + 'operand does\n' + ' not support the corresponding operation and the ' + 'operands are of\n' + ' different types. 
[2] For instance, to evaluate the ' + 'expression "x -\n' + ' y", where *y* is an instance of a class that has an ' + '"__rsub__()"\n' + ' method, "y.__rsub__(x)" is called if "x.__sub__(y)" ' + 'returns\n' + ' *NotImplemented*.\n' + '\n' + ' Note that ternary "pow()" will not try calling ' + '"__rpow__()" (the\n' + ' coercion rules would become too complicated).\n' + '\n' + " Note: If the right operand's type is a subclass of the " + 'left\n' + " operand's type and that subclass provides the " + 'reflected method\n' + ' for the operation, this method will be called before ' + 'the left\n' + " operand's non-reflected method. This behavior allows " + 'subclasses\n' + " to override their ancestors' operations.\n" + '\n' + 'object.__iadd__(self, other)\n' + 'object.__isub__(self, other)\n' + 'object.__imul__(self, other)\n' + 'object.__imatmul__(self, other)\n' + 'object.__itruediv__(self, other)\n' + 'object.__ifloordiv__(self, other)\n' + 'object.__imod__(self, other)\n' + 'object.__ipow__(self, other[, modulo])\n' + 'object.__ilshift__(self, other)\n' + 'object.__irshift__(self, other)\n' + 'object.__iand__(self, other)\n' + 'object.__ixor__(self, other)\n' + 'object.__ior__(self, other)\n' + '\n' + ' These methods are called to implement the augmented ' + 'arithmetic\n' + ' assignments ("+=", "-=", "*=", "@=", "/=", "//=", "%=", ' + '"**=",\n' + ' "<<=", ">>=", "&=", "^=", "|="). These methods should ' + 'attempt to\n' + ' do the operation in-place (modifying *self*) and return ' + 'the result\n' + ' (which could be, but does not have to be, *self*). If ' + 'a specific\n' + ' method is not defined, the augmented assignment falls ' + 'back to the\n' + ' normal methods. For instance, if *x* is an instance of ' + 'a class\n' + ' with an "__iadd__()" method, "x += y" is equivalent to ' + '"x =\n' + ' x.__iadd__(y)" . Otherwise, "x.__add__(y)" and ' + '"y.__radd__(x)" are\n' + ' considered, as with the evaluation of "x + y". In ' + 'certain\n' + ' situations, augmented assignment can result in ' + 'unexpected errors\n' + " (see *Why does a_tuple[i] += ['item'] raise an " + 'exception when the\n' + ' addition works?*), but this behavior is in fact part of ' + 'the data\n' + ' model.\n' + '\n' + 'object.__neg__(self)\n' + 'object.__pos__(self)\n' + 'object.__abs__(self)\n' + 'object.__invert__(self)\n' + '\n' + ' Called to implement the unary arithmetic operations ' + '("-", "+",\n' + ' "abs()" and "~").\n' + '\n' + 'object.__complex__(self)\n' + 'object.__int__(self)\n' + 'object.__float__(self)\n' + 'object.__round__(self[, n])\n' + '\n' + ' Called to implement the built-in functions "complex()", ' + '"int()",\n' + ' "float()" and "round()". Should return a value of the ' + 'appropriate\n' + ' type.\n' + '\n' + 'object.__index__(self)\n' + '\n' + ' Called to implement "operator.index()", and whenever ' + 'Python needs\n' + ' to losslessly convert the numeric object to an integer ' + 'object (such\n' + ' as in slicing, or in the built-in "bin()", "hex()" and ' + '"oct()"\n' + ' functions). Presence of this method indicates that the ' + 'numeric\n' + ' object is an integer type. 
Must return an integer.\n' + '\n' + ' Note: In order to have a coherent integer type class, ' + 'when\n' + ' "__index__()" is defined "__int__()" should also be ' + 'defined, and\n' + ' both should return the same value.\n' + '\n' + '\n' + 'With Statement Context Managers\n' + '===============================\n' + '\n' + 'A *context manager* is an object that defines the runtime ' + 'context to\n' + 'be established when executing a "with" statement. The ' + 'context manager\n' + 'handles the entry into, and the exit from, the desired ' + 'runtime context\n' + 'for the execution of the block of code. Context managers ' + 'are normally\n' + 'invoked using the "with" statement (described in section ' + '*The with\n' + 'statement*), but can also be used by directly invoking ' + 'their methods.\n' + '\n' + 'Typical uses of context managers include saving and ' + 'restoring various\n' + 'kinds of global state, locking and unlocking resources, ' + 'closing opened\n' + 'files, etc.\n' + '\n' + 'For more information on context managers, see *Context ' + 'Manager Types*.\n' + '\n' + 'object.__enter__(self)\n' + '\n' + ' Enter the runtime context related to this object. The ' + '"with"\n' + " statement will bind this method's return value to the " + 'target(s)\n' + ' specified in the "as" clause of the statement, if any.\n' + '\n' + 'object.__exit__(self, exc_type, exc_value, traceback)\n' + '\n' + ' Exit the runtime context related to this object. The ' + 'parameters\n' + ' describe the exception that caused the context to be ' + 'exited. If the\n' + ' context was exited without an exception, all three ' + 'arguments will\n' + ' be "None".\n' + '\n' + ' If an exception is supplied, and the method wishes to ' + 'suppress the\n' + ' exception (i.e., prevent it from being propagated), it ' + 'should\n' + ' return a true value. Otherwise, the exception will be ' + 'processed\n' + ' normally upon exit from this method.\n' + '\n' + ' Note that "__exit__()" methods should not reraise the ' + 'passed-in\n' + " exception; this is the caller's responsibility.\n" + '\n' + 'See also: **PEP 0343** - The "with" statement\n' + '\n' + ' The specification, background, and examples for the ' + 'Python "with"\n' + ' statement.\n' + '\n' + '\n' + 'Special method lookup\n' + '=====================\n' + '\n' + 'For custom classes, implicit invocations of special ' + 'methods are only\n' + "guaranteed to work correctly if defined on an object's " + 'type, not in\n' + "the object's instance dictionary. That behaviour is the " + 'reason why\n' + 'the following code raises an exception:\n' + '\n' + ' >>> class C:\n' + ' ... pass\n' + ' ...\n' + ' >>> c = C()\n' + ' >>> c.__len__ = lambda: 5\n' + ' >>> len(c)\n' + ' Traceback (most recent call last):\n' + ' File "", line 1, in \n' + " TypeError: object of type 'C' has no len()\n" + '\n' + 'The rationale behind this behaviour lies with a number of ' + 'special\n' + 'methods such as "__hash__()" and "__repr__()" that are ' + 'implemented by\n' + 'all objects, including type objects. 
If the implicit ' + 'lookup of these\n' + 'methods used the conventional lookup process, they would ' + 'fail when\n' + 'invoked on the type object itself:\n' + '\n' + ' >>> 1 .__hash__() == hash(1)\n' + ' True\n' + ' >>> int.__hash__() == hash(int)\n' + ' Traceback (most recent call last):\n' + ' File "", line 1, in \n' + " TypeError: descriptor '__hash__' of 'int' object needs " + 'an argument\n' + '\n' + 'Incorrectly attempting to invoke an unbound method of a ' + 'class in this\n' + "way is sometimes referred to as 'metaclass confusion', and " + 'is avoided\n' + 'by bypassing the instance when looking up special ' + 'methods:\n' + '\n' + ' >>> type(1).__hash__(1) == hash(1)\n' + ' True\n' + ' >>> type(int).__hash__(int) == hash(int)\n' + ' True\n' + '\n' + 'In addition to bypassing any instance attributes in the ' + 'interest of\n' + 'correctness, implicit special method lookup generally also ' + 'bypasses\n' + 'the "__getattribute__()" method even of the object\'s ' + 'metaclass:\n' + '\n' + ' >>> class Meta(type):\n' + ' ... def __getattribute__(*args):\n' + ' ... print("Metaclass getattribute invoked")\n' + ' ... return type.__getattribute__(*args)\n' + ' ...\n' + ' >>> class C(object, metaclass=Meta):\n' + ' ... def __len__(self):\n' + ' ... return 10\n' + ' ... def __getattribute__(*args):\n' + ' ... print("Class getattribute invoked")\n' + ' ... return object.__getattribute__(*args)\n' + ' ...\n' + ' >>> c = C()\n' + ' >>> c.__len__() # Explicit lookup via ' + 'instance\n' + ' Class getattribute invoked\n' + ' 10\n' + ' >>> type(c).__len__(c) # Explicit lookup via ' + 'type\n' + ' Metaclass getattribute invoked\n' + ' 10\n' + ' >>> len(c) # Implicit lookup\n' + ' 10\n' + '\n' + 'Bypassing the "__getattribute__()" machinery in this ' + 'fashion provides\n' + 'significant scope for speed optimisations within the ' + 'interpreter, at\n' + 'the cost of some flexibility in the handling of special ' + 'methods (the\n' + 'special method *must* be set on the class object itself in ' + 'order to be\n' + 'consistently invoked by the interpreter).\n', + 'string-methods': '\n' + 'String Methods\n' + '**************\n' + '\n' + 'Strings implement all of the *common* sequence ' + 'operations, along with\n' + 'the additional methods described below.\n' + '\n' + 'Strings also support two styles of string formatting, ' + 'one providing a\n' + 'large degree of flexibility and customization (see ' + '"str.format()",\n' + '*Format String Syntax* and *String Formatting*) and the ' + 'other based on\n' + 'C "printf" style formatting that handles a narrower ' + 'range of types and\n' + 'is slightly harder to use correctly, but is often faster ' + 'for the cases\n' + 'it can handle (*printf-style String Formatting*).\n' + '\n' + 'The *Text Processing Services* section of the standard ' + 'library covers\n' + 'a number of other modules that provide various text ' + 'related utilities\n' + '(including regular expression support in the "re" ' + 'module).\n' + '\n' + 'str.capitalize()\n' + '\n' + ' Return a copy of the string with its first character ' + 'capitalized\n' + ' and the rest lowercased.\n' + '\n' + 'str.casefold()\n' + '\n' + ' Return a casefolded copy of the string. Casefolded ' + 'strings may be\n' + ' used for caseless matching.\n' + '\n' + ' Casefolding is similar to lowercasing but more ' + 'aggressive because\n' + ' it is intended to remove all case distinctions in a ' + 'string. 
For\n' + ' example, the German lowercase letter "\'ß\'" is ' + 'equivalent to ""ss"".\n' + ' Since it is already lowercase, "lower()" would do ' + 'nothing to "\'ß\'";\n' + ' "casefold()" converts it to ""ss"".\n' + '\n' + ' The casefolding algorithm is described in section ' + '3.13 of the\n' + ' Unicode Standard.\n' + '\n' + ' New in version 3.3.\n' + '\n' + 'str.center(width[, fillchar])\n' + '\n' + ' Return centered in a string of length *width*. ' + 'Padding is done\n' + ' using the specified *fillchar* (default is an ASCII ' + 'space). The\n' + ' original string is returned if *width* is less than ' + 'or equal to\n' + ' "len(s)".\n' + '\n' + 'str.count(sub[, start[, end]])\n' + '\n' + ' Return the number of non-overlapping occurrences of ' + 'substring *sub*\n' + ' in the range [*start*, *end*]. Optional arguments ' + '*start* and\n' + ' *end* are interpreted as in slice notation.\n' + '\n' + 'str.encode(encoding="utf-8", errors="strict")\n' + '\n' + ' Return an encoded version of the string as a bytes ' + 'object. Default\n' + ' encoding is "\'utf-8\'". *errors* may be given to set ' + 'a different\n' + ' error handling scheme. The default for *errors* is ' + '"\'strict\'",\n' + ' meaning that encoding errors raise a "UnicodeError". ' + 'Other possible\n' + ' values are "\'ignore\'", "\'replace\'", ' + '"\'xmlcharrefreplace\'",\n' + ' "\'backslashreplace\'" and any other name registered ' + 'via\n' + ' "codecs.register_error()", see section *Error ' + 'Handlers*. For a list\n' + ' of possible encodings, see section *Standard ' + 'Encodings*.\n' + '\n' + ' Changed in version 3.1: Support for keyword arguments ' + 'added.\n' + '\n' + 'str.endswith(suffix[, start[, end]])\n' + '\n' + ' Return "True" if the string ends with the specified ' + '*suffix*,\n' + ' otherwise return "False". *suffix* can also be a ' + 'tuple of suffixes\n' + ' to look for. With optional *start*, test beginning ' + 'at that\n' + ' position. With optional *end*, stop comparing at ' + 'that position.\n' + '\n' + 'str.expandtabs(tabsize=8)\n' + '\n' + ' Return a copy of the string where all tab characters ' + 'are replaced\n' + ' by one or more spaces, depending on the current ' + 'column and the\n' + ' given tab size. Tab positions occur every *tabsize* ' + 'characters\n' + ' (default is 8, giving tab positions at columns 0, 8, ' + '16 and so on).\n' + ' To expand the string, the current column is set to ' + 'zero and the\n' + ' string is examined character by character. If the ' + 'character is a\n' + ' tab ("\\t"), one or more space characters are ' + 'inserted in the result\n' + ' until the current column is equal to the next tab ' + 'position. (The\n' + ' tab character itself is not copied.) If the ' + 'character is a newline\n' + ' ("\\n") or return ("\\r"), it is copied and the ' + 'current column is\n' + ' reset to zero. Any other character is copied ' + 'unchanged and the\n' + ' current column is incremented by one regardless of ' + 'how the\n' + ' character is represented when printed.\n' + '\n' + " >>> '01\\t012\\t0123\\t01234'.expandtabs()\n" + " '01 012 0123 01234'\n" + " >>> '01\\t012\\t0123\\t01234'.expandtabs(4)\n" + " '01 012 0123 01234'\n" + '\n' + 'str.find(sub[, start[, end]])\n' + '\n' + ' Return the lowest index in the string where substring ' + '*sub* is\n' + ' found, such that *sub* is contained in the slice ' + '"s[start:end]".\n' + ' Optional arguments *start* and *end* are interpreted ' + 'as in slice\n' + ' notation. 
Return "-1" if *sub* is not found.\n' + '\n' + ' Note: The "find()" method should be used only if you ' + 'need to know\n' + ' the position of *sub*. To check if *sub* is a ' + 'substring or not,\n' + ' use the "in" operator:\n' + '\n' + " >>> 'Py' in 'Python'\n" + ' True\n' + '\n' + 'str.format(*args, **kwargs)\n' + '\n' + ' Perform a string formatting operation. The string on ' + 'which this\n' + ' method is called can contain literal text or ' + 'replacement fields\n' + ' delimited by braces "{}". Each replacement field ' + 'contains either\n' + ' the numeric index of a positional argument, or the ' + 'name of a\n' + ' keyword argument. Returns a copy of the string where ' + 'each\n' + ' replacement field is replaced with the string value ' + 'of the\n' + ' corresponding argument.\n' + '\n' + ' >>> "The sum of 1 + 2 is {0}".format(1+2)\n' + " 'The sum of 1 + 2 is 3'\n" + '\n' + ' See *Format String Syntax* for a description of the ' + 'various\n' + ' formatting options that can be specified in format ' + 'strings.\n' + '\n' + 'str.format_map(mapping)\n' + '\n' + ' Similar to "str.format(**mapping)", except that ' + '"mapping" is used\n' + ' directly and not copied to a "dict". This is useful ' + 'if for example\n' + ' "mapping" is a dict subclass:\n' + '\n' + ' >>> class Default(dict):\n' + ' ... def __missing__(self, key):\n' + ' ... return key\n' + ' ...\n' + " >>> '{name} was born in " + "{country}'.format_map(Default(name='Guido'))\n" + " 'Guido was born in country'\n" + '\n' + ' New in version 3.2.\n' + '\n' + 'str.index(sub[, start[, end]])\n' + '\n' + ' Like "find()", but raise "ValueError" when the ' + 'substring is not\n' + ' found.\n' + '\n' + 'str.isalnum()\n' + '\n' + ' Return true if all characters in the string are ' + 'alphanumeric and\n' + ' there is at least one character, false otherwise. A ' + 'character "c"\n' + ' is alphanumeric if one of the following returns ' + '"True":\n' + ' "c.isalpha()", "c.isdecimal()", "c.isdigit()", or ' + '"c.isnumeric()".\n' + '\n' + 'str.isalpha()\n' + '\n' + ' Return true if all characters in the string are ' + 'alphabetic and\n' + ' there is at least one character, false otherwise. ' + 'Alphabetic\n' + ' characters are those characters defined in the ' + 'Unicode character\n' + ' database as "Letter", i.e., those with general ' + 'category property\n' + ' being one of "Lm", "Lt", "Lu", "Ll", or "Lo". Note ' + 'that this is\n' + ' different from the "Alphabetic" property defined in ' + 'the Unicode\n' + ' Standard.\n' + '\n' + 'str.isdecimal()\n' + '\n' + ' Return true if all characters in the string are ' + 'decimal characters\n' + ' and there is at least one character, false otherwise. ' + 'Decimal\n' + ' characters are those from general category "Nd". This ' + 'category\n' + ' includes digit characters, and all characters that ' + 'can be used to\n' + ' form decimal-radix numbers, e.g. U+0660, ARABIC-INDIC ' + 'DIGIT ZERO.\n' + '\n' + 'str.isdigit()\n' + '\n' + ' Return true if all characters in the string are ' + 'digits and there is\n' + ' at least one character, false otherwise. Digits ' + 'include decimal\n' + ' characters and digits that need special handling, ' + 'such as the\n' + ' compatibility superscript digits. 
Formally, a digit ' + 'is a character\n' + ' that has the property value Numeric_Type=Digit or\n' + ' Numeric_Type=Decimal.\n' + '\n' + 'str.isidentifier()\n' + '\n' + ' Return true if the string is a valid identifier ' + 'according to the\n' + ' language definition, section *Identifiers and ' + 'keywords*.\n' + '\n' + ' Use "keyword.iskeyword()" to test for reserved ' + 'identifiers such as\n' + ' "def" and "class".\n' + '\n' + 'str.islower()\n' + '\n' + ' Return true if all cased characters [4] in the string ' + 'are lowercase\n' + ' and there is at least one cased character, false ' + 'otherwise.\n' + '\n' + 'str.isnumeric()\n' + '\n' + ' Return true if all characters in the string are ' + 'numeric characters,\n' + ' and there is at least one character, false otherwise. ' + 'Numeric\n' + ' characters include digit characters, and all ' + 'characters that have\n' + ' the Unicode numeric value property, e.g. U+2155, ' + 'VULGAR FRACTION\n' + ' ONE FIFTH. Formally, numeric characters are those ' + 'with the\n' + ' property value Numeric_Type=Digit, ' + 'Numeric_Type=Decimal or\n' + ' Numeric_Type=Numeric.\n' + '\n' + 'str.isprintable()\n' + '\n' + ' Return true if all characters in the string are ' + 'printable or the\n' + ' string is empty, false otherwise. Nonprintable ' + 'characters are\n' + ' those characters defined in the Unicode character ' + 'database as\n' + ' "Other" or "Separator", excepting the ASCII space ' + '(0x20) which is\n' + ' considered printable. (Note that printable ' + 'characters in this\n' + ' context are those which should not be escaped when ' + '"repr()" is\n' + ' invoked on a string. It has no bearing on the ' + 'handling of strings\n' + ' written to "sys.stdout" or "sys.stderr".)\n' + '\n' + 'str.isspace()\n' + '\n' + ' Return true if there are only whitespace characters ' + 'in the string\n' + ' and there is at least one character, false ' + 'otherwise. Whitespace\n' + ' characters are those characters defined in the ' + 'Unicode character\n' + ' database as "Other" or "Separator" and those with ' + 'bidirectional\n' + ' property being one of "WS", "B", or "S".\n' + '\n' + 'str.istitle()\n' + '\n' + ' Return true if the string is a titlecased string and ' + 'there is at\n' + ' least one character, for example uppercase characters ' + 'may only\n' + ' follow uncased characters and lowercase characters ' + 'only cased ones.\n' + ' Return false otherwise.\n' + '\n' + 'str.isupper()\n' + '\n' + ' Return true if all cased characters [4] in the string ' + 'are uppercase\n' + ' and there is at least one cased character, false ' + 'otherwise.\n' + '\n' + 'str.join(iterable)\n' + '\n' + ' Return a string which is the concatenation of the ' + 'strings in the\n' + ' *iterable* *iterable*. A "TypeError" will be raised ' + 'if there are\n' + ' any non-string values in *iterable*, including ' + '"bytes" objects.\n' + ' The separator between elements is the string ' + 'providing this method.\n' + '\n' + 'str.ljust(width[, fillchar])\n' + '\n' + ' Return the string left justified in a string of ' + 'length *width*.\n' + ' Padding is done using the specified *fillchar* ' + '(default is an ASCII\n' + ' space). 
The original string is returned if *width* is ' + 'less than or\n' + ' equal to "len(s)".\n' + '\n' + 'str.lower()\n' + '\n' + ' Return a copy of the string with all the cased ' + 'characters [4]\n' + ' converted to lowercase.\n' + '\n' + ' The lowercasing algorithm used is described in ' + 'section 3.13 of the\n' + ' Unicode Standard.\n' + '\n' + 'str.lstrip([chars])\n' + '\n' + ' Return a copy of the string with leading characters ' + 'removed. The\n' + ' *chars* argument is a string specifying the set of ' + 'characters to be\n' + ' removed. If omitted or "None", the *chars* argument ' + 'defaults to\n' + ' removing whitespace. The *chars* argument is not a ' + 'prefix; rather,\n' + ' all combinations of its values are stripped:\n' + '\n' + " >>> ' spacious '.lstrip()\n" + " 'spacious '\n" + " >>> 'www.example.com'.lstrip('cmowz.')\n" + " 'example.com'\n" + '\n' + 'static str.maketrans(x[, y[, z]])\n' + '\n' + ' This static method returns a translation table usable ' + 'for\n' + ' "str.translate()".\n' + '\n' + ' If there is only one argument, it must be a ' + 'dictionary mapping\n' + ' Unicode ordinals (integers) or characters (strings of ' + 'length 1) to\n' + ' Unicode ordinals, strings (of arbitrary lengths) or ' + 'None.\n' + ' Character keys will then be converted to ordinals.\n' + '\n' + ' If there are two arguments, they must be strings of ' + 'equal length,\n' + ' and in the resulting dictionary, each character in x ' + 'will be mapped\n' + ' to the character at the same position in y. If there ' + 'is a third\n' + ' argument, it must be a string, whose characters will ' + 'be mapped to\n' + ' None in the result.\n' + '\n' + 'str.partition(sep)\n' + '\n' + ' Split the string at the first occurrence of *sep*, ' + 'and return a\n' + ' 3-tuple containing the part before the separator, the ' + 'separator\n' + ' itself, and the part after the separator. If the ' + 'separator is not\n' + ' found, return a 3-tuple containing the string itself, ' + 'followed by\n' + ' two empty strings.\n' + '\n' + 'str.replace(old, new[, count])\n' + '\n' + ' Return a copy of the string with all occurrences of ' + 'substring *old*\n' + ' replaced by *new*. If the optional argument *count* ' + 'is given, only\n' + ' the first *count* occurrences are replaced.\n' + '\n' + 'str.rfind(sub[, start[, end]])\n' + '\n' + ' Return the highest index in the string where ' + 'substring *sub* is\n' + ' found, such that *sub* is contained within ' + '"s[start:end]".\n' + ' Optional arguments *start* and *end* are interpreted ' + 'as in slice\n' + ' notation. Return "-1" on failure.\n' + '\n' + 'str.rindex(sub[, start[, end]])\n' + '\n' + ' Like "rfind()" but raises "ValueError" when the ' + 'substring *sub* is\n' + ' not found.\n' + '\n' + 'str.rjust(width[, fillchar])\n' + '\n' + ' Return the string right justified in a string of ' + 'length *width*.\n' + ' Padding is done using the specified *fillchar* ' + '(default is an ASCII\n' + ' space). The original string is returned if *width* is ' + 'less than or\n' + ' equal to "len(s)".\n' + '\n' + 'str.rpartition(sep)\n' + '\n' + ' Split the string at the last occurrence of *sep*, and ' + 'return a\n' + ' 3-tuple containing the part before the separator, the ' + 'separator\n' + ' itself, and the part after the separator. 
If the ' + 'separator is not\n' + ' found, return a 3-tuple containing two empty strings, ' + 'followed by\n' + ' the string itself.\n' + '\n' + 'str.rsplit(sep=None, maxsplit=-1)\n' + '\n' + ' Return a list of the words in the string, using *sep* ' + 'as the\n' + ' delimiter string. If *maxsplit* is given, at most ' + '*maxsplit* splits\n' + ' are done, the *rightmost* ones. If *sep* is not ' + 'specified or\n' + ' "None", any whitespace string is a separator. Except ' + 'for splitting\n' + ' from the right, "rsplit()" behaves like "split()" ' + 'which is\n' + ' described in detail below.\n' + '\n' + 'str.rstrip([chars])\n' + '\n' + ' Return a copy of the string with trailing characters ' + 'removed. The\n' + ' *chars* argument is a string specifying the set of ' + 'characters to be\n' + ' removed. If omitted or "None", the *chars* argument ' + 'defaults to\n' + ' removing whitespace. The *chars* argument is not a ' + 'suffix; rather,\n' + ' all combinations of its values are stripped:\n' + '\n' + " >>> ' spacious '.rstrip()\n" + " ' spacious'\n" + " >>> 'mississippi'.rstrip('ipz')\n" + " 'mississ'\n" + '\n' + 'str.split(sep=None, maxsplit=-1)\n' + '\n' + ' Return a list of the words in the string, using *sep* ' + 'as the\n' + ' delimiter string. If *maxsplit* is given, at most ' + '*maxsplit*\n' + ' splits are done (thus, the list will have at most ' + '"maxsplit+1"\n' + ' elements). If *maxsplit* is not specified or "-1", ' + 'then there is\n' + ' no limit on the number of splits (all possible splits ' + 'are made).\n' + '\n' + ' If *sep* is given, consecutive delimiters are not ' + 'grouped together\n' + ' and are deemed to delimit empty strings (for ' + 'example,\n' + ' "\'1,,2\'.split(\',\')" returns "[\'1\', \'\', ' + '\'2\']"). The *sep* argument\n' + ' may consist of multiple characters (for example,\n' + ' "\'1<>2<>3\'.split(\'<>\')" returns "[\'1\', \'2\', ' + '\'3\']"). Splitting an\n' + ' empty string with a specified separator returns ' + '"[\'\']".\n' + '\n' + ' For example:\n' + '\n' + " >>> '1,2,3'.split(',')\n" + " ['1', '2', '3']\n" + " >>> '1,2,3'.split(',', maxsplit=1)\n" + " ['1', '2,3']\n" + " >>> '1,2,,3,'.split(',')\n" + " ['1', '2', '', '3', '']\n" + '\n' + ' If *sep* is not specified or is "None", a different ' + 'splitting\n' + ' algorithm is applied: runs of consecutive whitespace ' + 'are regarded\n' + ' as a single separator, and the result will contain no ' + 'empty strings\n' + ' at the start or end if the string has leading or ' + 'trailing\n' + ' whitespace. Consequently, splitting an empty string ' + 'or a string\n' + ' consisting of just whitespace with a "None" separator ' + 'returns "[]".\n' + '\n' + ' For example:\n' + '\n' + " >>> '1 2 3'.split()\n" + " ['1', '2', '3']\n" + " >>> '1 2 3'.split(maxsplit=1)\n" + " ['1', '2 3']\n" + " >>> ' 1 2 3 '.split()\n" + " ['1', '2', '3']\n" + '\n' + 'str.splitlines([keepends])\n' + '\n' + ' Return a list of the lines in the string, breaking at ' + 'line\n' + ' boundaries. Line breaks are not included in the ' + 'resulting list\n' + ' unless *keepends* is given and true.\n' + '\n' + ' This method splits on the following line boundaries. 
' + 'In\n' + ' particular, the boundaries are a superset of ' + '*universal newlines*.\n' + '\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | Representation | ' + 'Description |\n' + ' ' + '+=========================+===============================+\n' + ' | "\\n" | Line ' + 'Feed |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\r" | Carriage ' + 'Return |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\r\\n" | Carriage Return + Line ' + 'Feed |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\v" or "\\x0b" | Line ' + 'Tabulation |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\f" or "\\x0c" | Form ' + 'Feed |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\x1c" | File ' + 'Separator |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\x1d" | Group ' + 'Separator |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\x1e" | Record ' + 'Separator |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\x85" | Next Line (C1 Control ' + 'Code) |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\u2028" | Line ' + 'Separator |\n' + ' ' + '+-------------------------+-------------------------------+\n' + ' | "\\u2029" | Paragraph ' + 'Separator |\n' + ' ' + '+-------------------------+-------------------------------+\n' + '\n' + ' Changed in version 3.2: "\\v" and "\\f" added to list ' + 'of line\n' + ' boundaries.\n' + '\n' + ' For example:\n' + '\n' + " >>> 'ab c\\n\\nde fg\\rkl\\r\\n'.splitlines()\n" + " ['ab c', '', 'de fg', 'kl']\n" + " >>> 'ab c\\n\\nde " + "fg\\rkl\\r\\n'.splitlines(keepends=True)\n" + " ['ab c\\n', '\\n', 'de fg\\r', 'kl\\r\\n']\n" + '\n' + ' Unlike "split()" when a delimiter string *sep* is ' + 'given, this\n' + ' method returns an empty list for the empty string, ' + 'and a terminal\n' + ' line break does not result in an extra line:\n' + '\n' + ' >>> "".splitlines()\n' + ' []\n' + ' >>> "One line\\n".splitlines()\n' + " ['One line']\n" + '\n' + ' For comparison, "split(\'\\n\')" gives:\n' + '\n' + " >>> ''.split('\\n')\n" + " ['']\n" + " >>> 'Two lines\\n'.split('\\n')\n" + " ['Two lines', '']\n" + '\n' + 'str.startswith(prefix[, start[, end]])\n' + '\n' + ' Return "True" if string starts with the *prefix*, ' + 'otherwise return\n' + ' "False". *prefix* can also be a tuple of prefixes to ' + 'look for.\n' + ' With optional *start*, test string beginning at that ' + 'position.\n' + ' With optional *end*, stop comparing string at that ' + 'position.\n' + '\n' + 'str.strip([chars])\n' + '\n' + ' Return a copy of the string with the leading and ' + 'trailing\n' + ' characters removed. The *chars* argument is a string ' + 'specifying the\n' + ' set of characters to be removed. If omitted or ' + '"None", the *chars*\n' + ' argument defaults to removing whitespace. The *chars* ' + 'argument is\n' + ' not a prefix or suffix; rather, all combinations of ' + 'its values are\n' + ' stripped:\n' + '\n' + " >>> ' spacious '.strip()\n" + " 'spacious'\n" + " >>> 'www.example.com'.strip('cmowz.')\n" + " 'example'\n" + '\n' + ' The outermost leading and trailing *chars* argument ' + 'values are\n' + ' stripped from the string. 
Characters are removed from ' + 'the leading\n' + ' end until reaching a string character that is not ' + 'contained in the\n' + ' set of characters in *chars*. A similar action takes ' + 'place on the\n' + ' trailing end. For example:\n' + '\n' + " >>> comment_string = '#....... Section 3.2.1 Issue " + "#32 .......'\n" + " >>> comment_string.strip('.#! ')\n" + " 'Section 3.2.1 Issue #32'\n" + '\n' + 'str.swapcase()\n' + '\n' + ' Return a copy of the string with uppercase characters ' + 'converted to\n' + ' lowercase and vice versa. Note that it is not ' + 'necessarily true that\n' + ' "s.swapcase().swapcase() == s".\n' + '\n' + 'str.title()\n' + '\n' + ' Return a titlecased version of the string where words ' + 'start with an\n' + ' uppercase character and the remaining characters are ' + 'lowercase.\n' + '\n' + ' For example:\n' + '\n' + " >>> 'Hello world'.title()\n" + " 'Hello World'\n" + '\n' + ' The algorithm uses a simple language-independent ' + 'definition of a\n' + ' word as groups of consecutive letters. The ' + 'definition works in\n' + ' many contexts but it means that apostrophes in ' + 'contractions and\n' + ' possessives form word boundaries, which may not be ' + 'the desired\n' + ' result:\n' + '\n' + ' >>> "they\'re bill\'s friends from the ' + 'UK".title()\n' + ' "They\'Re Bill\'S Friends From The Uk"\n' + '\n' + ' A workaround for apostrophes can be constructed using ' + 'regular\n' + ' expressions:\n' + '\n' + ' >>> import re\n' + ' >>> def titlecase(s):\n' + ' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n' + ' ... lambda mo: ' + 'mo.group(0)[0].upper() +\n' + ' ... ' + 'mo.group(0)[1:].lower(),\n' + ' ... s)\n' + ' ...\n' + ' >>> titlecase("they\'re bill\'s friends.")\n' + ' "They\'re Bill\'s Friends."\n' + '\n' + 'str.translate(table)\n' + '\n' + ' Return a copy of the string in which each character ' + 'has been mapped\n' + ' through the given translation table. The table must ' + 'be an object\n' + ' that implements indexing via "__getitem__()", ' + 'typically a *mapping*\n' + ' or *sequence*. When indexed by a Unicode ordinal (an ' + 'integer), the\n' + ' table object can do any of the following: return a ' + 'Unicode ordinal\n' + ' or a string, to map the character to one or more ' + 'other characters;\n' + ' return "None", to delete the character from the ' + 'return string; or\n' + ' raise a "LookupError" exception, to map the character ' + 'to itself.\n' + '\n' + ' You can use "str.maketrans()" to create a translation ' + 'map from\n' + ' character-to-character mappings in different ' + 'formats.\n' + '\n' + ' See also the "codecs" module for a more flexible ' + 'approach to custom\n' + ' character mappings.\n' + '\n' + 'str.upper()\n' + '\n' + ' Return a copy of the string with all the cased ' + 'characters [4]\n' + ' converted to uppercase. Note that ' + '"str.upper().isupper()" might be\n' + ' "False" if "s" contains uncased characters or if the ' + 'Unicode\n' + ' category of the resulting character(s) is not "Lu" ' + '(Letter,\n' + ' uppercase), but e.g. "Lt" (Letter, titlecase).\n' + '\n' + ' The uppercasing algorithm used is described in ' + 'section 3.13 of the\n' + ' Unicode Standard.\n' + '\n' + 'str.zfill(width)\n' + '\n' + ' Return a copy of the string left filled with ASCII ' + '"\'0\'" digits to\n' + ' make a string of length *width*. A leading sign ' + 'prefix\n' + ' ("\'+\'"/"\'-\'") is handled by inserting the padding ' + '*after* the sign\n' + ' character rather than before. 
The original string is ' + 'returned if\n' + ' *width* is less than or equal to "len(s)".\n' + '\n' + ' For example:\n' + '\n' + ' >>> "42".zfill(5)\n' + " '00042'\n" + ' >>> "-42".zfill(5)\n' + " '-0042'\n", + 'strings': '\n' + 'String and Bytes literals\n' + '*************************\n' + '\n' + 'String literals are described by the following lexical ' + 'definitions:\n' + '\n' + ' stringliteral ::= [stringprefix](shortstring | ' + 'longstring)\n' + ' stringprefix ::= "r" | "u" | "R" | "U"\n' + ' shortstring ::= "\'" shortstringitem* "\'" | \'"\' ' + 'shortstringitem* \'"\'\n' + ' longstring ::= "\'\'\'" longstringitem* "\'\'\'" | ' + '\'"""\' longstringitem* \'"""\'\n' + ' shortstringitem ::= shortstringchar | stringescapeseq\n' + ' longstringitem ::= longstringchar | stringescapeseq\n' + ' shortstringchar ::= \n' + ' longstringchar ::= \n' + ' stringescapeseq ::= "\\" \n' + '\n' + ' bytesliteral ::= bytesprefix(shortbytes | longbytes)\n' + ' bytesprefix ::= "b" | "B" | "br" | "Br" | "bR" | "BR" | ' + '"rb" | "rB" | "Rb" | "RB"\n' + ' shortbytes ::= "\'" shortbytesitem* "\'" | \'"\' ' + 'shortbytesitem* \'"\'\n' + ' longbytes ::= "\'\'\'" longbytesitem* "\'\'\'" | ' + '\'"""\' longbytesitem* \'"""\'\n' + ' shortbytesitem ::= shortbyteschar | bytesescapeseq\n' + ' longbytesitem ::= longbyteschar | bytesescapeseq\n' + ' shortbyteschar ::= \n' + ' longbyteschar ::= \n' + ' bytesescapeseq ::= "\\" \n' + '\n' + 'One syntactic restriction not indicated by these productions is ' + 'that\n' + 'whitespace is not allowed between the "stringprefix" or ' + '"bytesprefix"\n' + 'and the rest of the literal. The source character set is ' + 'defined by\n' + 'the encoding declaration; it is UTF-8 if no encoding ' + 'declaration is\n' + 'given in the source file; see section *Encoding declarations*.\n' + '\n' + 'In plain English: Both types of literals can be enclosed in ' + 'matching\n' + 'single quotes ("\'") or double quotes ("""). They can also be ' + 'enclosed\n' + 'in matching groups of three single or double quotes (these are\n' + 'generally referred to as *triple-quoted strings*). The ' + 'backslash\n' + '("\\") character is used to escape characters that otherwise ' + 'have a\n' + 'special meaning, such as newline, backslash itself, or the ' + 'quote\n' + 'character.\n' + '\n' + 'Bytes literals are always prefixed with "\'b\'" or "\'B\'"; ' + 'they produce\n' + 'an instance of the "bytes" type instead of the "str" type. ' + 'They may\n' + 'only contain ASCII characters; bytes with a numeric value of ' + '128 or\n' + 'greater must be expressed with escapes.\n' + '\n' + 'As of Python 3.3 it is possible again to prefix string literals ' + 'with a\n' + '"u" prefix to simplify maintenance of dual 2.x and 3.x ' + 'codebases.\n' + '\n' + 'Both string and bytes literals may optionally be prefixed with ' + 'a\n' + 'letter "\'r\'" or "\'R\'"; such strings are called *raw ' + 'strings* and treat\n' + 'backslashes as literal characters. As a result, in string ' + 'literals,\n' + '"\'\\U\'" and "\'\\u\'" escapes in raw strings are not treated ' + 'specially.\n' + "Given that Python 2.x's raw unicode literals behave differently " + 'than\n' + 'Python 3.x\'s the "\'ur\'" syntax is not supported.\n' + '\n' + 'New in version 3.3: The "\'rb\'" prefix of raw bytes literals ' + 'has been\n' + 'added as a synonym of "\'br\'".\n' + '\n' + 'New in version 3.3: Support for the unicode legacy literal\n' + '("u\'value\'") was reintroduced to simplify the maintenance of ' + 'dual\n' + 'Python 2.x and 3.x codebases. 
See **PEP 414** for more ' + 'information.\n' + '\n' + 'In triple-quoted literals, unescaped newlines and quotes are ' + 'allowed\n' + '(and are retained), except that three unescaped quotes in a ' + 'row\n' + 'terminate the literal. (A "quote" is the character used to ' + 'open the\n' + 'literal, i.e. either "\'" or """.)\n' + '\n' + 'Unless an "\'r\'" or "\'R\'" prefix is present, escape ' + 'sequences in string\n' + 'and bytes literals are interpreted according to rules similar ' + 'to those\n' + 'used by Standard C. The recognized escape sequences are:\n' + '\n' + '+-------------------+-----------------------------------+---------+\n' + '| Escape Sequence | Meaning | ' + 'Notes |\n' + '+===================+===================================+=========+\n' + '| "\\newline" | Backslash and newline ignored ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\\\" | Backslash ("\\") ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\\'" | Single quote ("\'") ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\"" | Double quote (""") ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\a" | ASCII Bell (BEL) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\b" | ASCII Backspace (BS) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\f" | ASCII Formfeed (FF) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\n" | ASCII Linefeed (LF) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\r" | ASCII Carriage Return (CR) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\t" | ASCII Horizontal Tab (TAB) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\v" | ASCII Vertical Tab (VT) ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\ooo" | Character with octal value *ooo* | ' + '(1,3) |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\xhh" | Character with hex value *hh* | ' + '(2,3) |\n' + '+-------------------+-----------------------------------+---------+\n' + '\n' + 'Escape sequences only recognized in string literals are:\n' + '\n' + '+-------------------+-----------------------------------+---------+\n' + '| Escape Sequence | Meaning | ' + 'Notes |\n' + '+===================+===================================+=========+\n' + '| "\\N{name}" | Character named *name* in the | ' + '(4) |\n' + '| | Unicode database ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\uxxxx" | Character with 16-bit hex value | ' + '(5) |\n' + '| | *xxxx* ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '| "\\Uxxxxxxxx" | Character with 32-bit hex value | ' + '(6) |\n' + '| | *xxxxxxxx* ' + '| |\n' + '+-------------------+-----------------------------------+---------+\n' + '\n' + 'Notes:\n' + '\n' + '1. As in Standard C, up to three octal digits are accepted.\n' + '\n' + '2. Unlike in Standard C, exactly two hex digits are required.\n' + '\n' + '3. In a bytes literal, hexadecimal and octal escapes denote ' + 'the\n' + ' byte with the given value. 
In a string literal, these ' + 'escapes\n' + ' denote a Unicode character with the given value.\n' + '\n' + '4. Changed in version 3.3: Support for name aliases [1] has ' + 'been\n' + ' added.\n' + '\n' + '5. Individual code units which form parts of a surrogate pair ' + 'can\n' + ' be encoded using this escape sequence. Exactly four hex ' + 'digits are\n' + ' required.\n' + '\n' + '6. Any Unicode character can be encoded this way. Exactly ' + 'eight\n' + ' hex digits are required.\n' + '\n' + 'Unlike Standard C, all unrecognized escape sequences are left ' + 'in the\n' + 'string unchanged, i.e., *the backslash is left in the result*. ' + '(This\n' + 'behavior is useful when debugging: if an escape sequence is ' + 'mistyped,\n' + 'the resulting output is more easily recognized as broken.) It ' + 'is also\n' + 'important to note that the escape sequences only recognized in ' + 'string\n' + 'literals fall into the category of unrecognized escapes for ' + 'bytes\n' + 'literals.\n' + '\n' + 'Even in a raw literal, quotes can be escaped with a backslash, ' + 'but the\n' + 'backslash remains in the result; for example, "r"\\""" is a ' + 'valid\n' + 'string literal consisting of two characters: a backslash and a ' + 'double\n' + 'quote; "r"\\"" is not a valid string literal (even a raw string ' + 'cannot\n' + 'end in an odd number of backslashes). Specifically, *a raw ' + 'literal\n' + 'cannot end in a single backslash* (since the backslash would ' + 'escape\n' + 'the following quote character). Note also that a single ' + 'backslash\n' + 'followed by a newline is interpreted as those two characters as ' + 'part\n' + 'of the literal, *not* as a line continuation.\n', + 'subscriptions': '\n' + 'Subscriptions\n' + '*************\n' + '\n' + 'A subscription selects an item of a sequence (string, ' + 'tuple or list)\n' + 'or mapping (dictionary) object:\n' + '\n' + ' subscription ::= primary "[" expression_list "]"\n' + '\n' + 'The primary must evaluate to an object that supports ' + 'subscription\n' + '(lists or dictionaries for example). User-defined ' + 'objects can support\n' + 'subscription by defining a "__getitem__()" method.\n' + '\n' + 'For built-in objects, there are two types of objects that ' + 'support\n' + 'subscription:\n' + '\n' + 'If the primary is a mapping, the expression list must ' + 'evaluate to an\n' + 'object whose value is one of the keys of the mapping, and ' + 'the\n' + 'subscription selects the value in the mapping that ' + 'corresponds to that\n' + 'key. (The expression list is a tuple except if it has ' + 'exactly one\n' + 'item.)\n' + '\n' + 'If the primary is a sequence, the expression (list) must ' + 'evaluate to\n' + 'an integer or a slice (as discussed in the following ' + 'section).\n' + '\n' + 'The formal syntax makes no special provision for negative ' + 'indices in\n' + 'sequences; however, built-in sequences all provide a ' + '"__getitem__()"\n' + 'method that interprets negative indices by adding the ' + 'length of the\n' + 'sequence to the index (so that "x[-1]" selects the last ' + 'item of "x").\n' + 'The resulting value must be a nonnegative integer less ' + 'than the number\n' + 'of items in the sequence, and the subscription selects ' + 'the item whose\n' + 'index is that value (counting from zero). Since the ' + 'support for\n' + "negative indices and slicing occurs in the object's " + '"__getitem__()"\n' + 'method, subclasses overriding this method will need to ' + 'explicitly add\n' + 'that support.\n' + '\n' + "A string's items are characters. 
A character is not a " + 'separate data\n' + 'type but a string of exactly one character.\n', + 'truth': '\n' + 'Truth Value Testing\n' + '*******************\n' + '\n' + 'Any object can be tested for truth value, for use in an "if" or\n' + '"while" condition or as operand of the Boolean operations below. ' + 'The\n' + 'following values are considered false:\n' + '\n' + '* "None"\n' + '\n' + '* "False"\n' + '\n' + '* zero of any numeric type, for example, "0", "0.0", "0j".\n' + '\n' + '* any empty sequence, for example, "\'\'", "()", "[]".\n' + '\n' + '* any empty mapping, for example, "{}".\n' + '\n' + '* instances of user-defined classes, if the class defines a\n' + ' "__bool__()" or "__len__()" method, when that method returns ' + 'the\n' + ' integer zero or "bool" value "False". [1]\n' + '\n' + 'All other values are considered true --- so objects of many types ' + 'are\n' + 'always true.\n' + '\n' + 'Operations and built-in functions that have a Boolean result ' + 'always\n' + 'return "0" or "False" for false and "1" or "True" for true, ' + 'unless\n' + 'otherwise stated. (Important exception: the Boolean operations ' + '"or"\n' + 'and "and" always return one of their operands.)\n', + 'try': '\n' + 'The "try" statement\n' + '*******************\n' + '\n' + 'The "try" statement specifies exception handlers and/or cleanup ' + 'code\n' + 'for a group of statements:\n' + '\n' + ' try_stmt ::= try1_stmt | try2_stmt\n' + ' try1_stmt ::= "try" ":" suite\n' + ' ("except" [expression ["as" identifier]] ":" ' + 'suite)+\n' + ' ["else" ":" suite]\n' + ' ["finally" ":" suite]\n' + ' try2_stmt ::= "try" ":" suite\n' + ' "finally" ":" suite\n' + '\n' + 'The "except" clause(s) specify one or more exception handlers. When ' + 'no\n' + 'exception occurs in the "try" clause, no exception handler is\n' + 'executed. When an exception occurs in the "try" suite, a search for ' + 'an\n' + 'exception handler is started. This search inspects the except ' + 'clauses\n' + 'in turn until one is found that matches the exception. An ' + 'expression-\n' + 'less except clause, if present, must be last; it matches any\n' + 'exception. For an except clause with an expression, that ' + 'expression\n' + 'is evaluated, and the clause matches the exception if the ' + 'resulting\n' + 'object is "compatible" with the exception. An object is ' + 'compatible\n' + 'with an exception if it is the class or a base class of the ' + 'exception\n' + 'object or a tuple containing an item compatible with the ' + 'exception.\n' + '\n' + 'If no except clause matches the exception, the search for an ' + 'exception\n' + 'handler continues in the surrounding code and on the invocation ' + 'stack.\n' + '[1]\n' + '\n' + 'If the evaluation of an expression in the header of an except ' + 'clause\n' + 'raises an exception, the original search for a handler is canceled ' + 'and\n' + 'a search starts for the new exception in the surrounding code and ' + 'on\n' + 'the call stack (it is treated as if the entire "try" statement ' + 'raised\n' + 'the exception).\n' + '\n' + 'When a matching except clause is found, the exception is assigned ' + 'to\n' + 'the target specified after the "as" keyword in that except clause, ' + 'if\n' + "present, and the except clause's suite is executed. All except\n" + 'clauses must have an executable block. 
When the end of this block ' + 'is\n' + 'reached, execution continues normally after the entire try ' + 'statement.\n' + '(This means that if two nested handlers exist for the same ' + 'exception,\n' + 'and the exception occurs in the try clause of the inner handler, ' + 'the\n' + 'outer handler will not handle the exception.)\n' + '\n' + 'When an exception has been assigned using "as target", it is ' + 'cleared\n' + 'at the end of the except clause. This is as if\n' + '\n' + ' except E as N:\n' + ' foo\n' + '\n' + 'was translated to\n' + '\n' + ' except E as N:\n' + ' try:\n' + ' foo\n' + ' finally:\n' + ' del N\n' + '\n' + 'This means the exception must be assigned to a different name to ' + 'be\n' + 'able to refer to it after the except clause. Exceptions are ' + 'cleared\n' + 'because with the traceback attached to them, they form a reference\n' + 'cycle with the stack frame, keeping all locals in that frame alive\n' + 'until the next garbage collection occurs.\n' + '\n' + "Before an except clause's suite is executed, details about the\n" + 'exception are stored in the "sys" module and can be accessed via\n' + '"sys.exc_info()". "sys.exc_info()" returns a 3-tuple consisting of ' + 'the\n' + 'exception class, the exception instance and a traceback object ' + '(see\n' + 'section *The standard type hierarchy*) identifying the point in ' + 'the\n' + 'program where the exception occurred. "sys.exc_info()" values are\n' + 'restored to their previous values (before the call) when returning\n' + 'from a function that handled an exception.\n' + '\n' + 'The optional "else" clause is executed if and when control flows ' + 'off\n' + 'the end of the "try" clause. [2] Exceptions in the "else" clause ' + 'are\n' + 'not handled by the preceding "except" clauses.\n' + '\n' + 'If "finally" is present, it specifies a \'cleanup\' handler. The ' + '"try"\n' + 'clause is executed, including any "except" and "else" clauses. If ' + 'an\n' + 'exception occurs in any of the clauses and is not handled, the\n' + 'exception is temporarily saved. The "finally" clause is executed. ' + 'If\n' + 'there is a saved exception it is re-raised at the end of the ' + '"finally"\n' + 'clause. If the "finally" clause raises another exception, the ' + 'saved\n' + 'exception is set as the context of the new exception. If the ' + '"finally"\n' + 'clause executes a "return" or "break" statement, the saved ' + 'exception\n' + 'is discarded:\n' + '\n' + ' >>> def f():\n' + ' ... try:\n' + ' ... 1/0\n' + ' ... finally:\n' + ' ... return 42\n' + ' ...\n' + ' >>> f()\n' + ' 42\n' + '\n' + 'The exception information is not available to the program during\n' + 'execution of the "finally" clause.\n' + '\n' + 'When a "return", "break" or "continue" statement is executed in ' + 'the\n' + '"try" suite of a "try"..."finally" statement, the "finally" clause ' + 'is\n' + 'also executed \'on the way out.\' A "continue" statement is illegal ' + 'in\n' + 'the "finally" clause. (The reason is a problem with the current\n' + 'implementation --- this restriction may be lifted in the future).\n' + '\n' + 'The return value of a function is determined by the last "return"\n' + 'statement executed. Since the "finally" clause always executes, a\n' + '"return" statement executed in the "finally" clause will always be ' + 'the\n' + 'last one executed:\n' + '\n' + ' >>> def foo():\n' + ' ... try:\n' + " ... return 'try'\n" + ' ... finally:\n' + " ... 
return 'finally'\n" + ' ...\n' + ' >>> foo()\n' + " 'finally'\n" + '\n' + 'Additional information on exceptions can be found in section\n' + '*Exceptions*, and information on using the "raise" statement to\n' + 'generate exceptions may be found in section *The raise statement*.\n', + 'types': '\n' + 'The standard type hierarchy\n' + '***************************\n' + '\n' + 'Below is a list of the types that are built into Python. ' + 'Extension\n' + 'modules (written in C, Java, or other languages, depending on ' + 'the\n' + 'implementation) can define additional types. Future versions of\n' + 'Python may add types to the type hierarchy (e.g., rational ' + 'numbers,\n' + 'efficiently stored arrays of integers, etc.), although such ' + 'additions\n' + 'will often be provided via the standard library instead.\n' + '\n' + 'Some of the type descriptions below contain a paragraph listing\n' + "'special attributes.' These are attributes that provide access " + 'to the\n' + 'implementation and are not intended for general use. Their ' + 'definition\n' + 'may change in the future.\n' + '\n' + 'None\n' + ' This type has a single value. There is a single object with ' + 'this\n' + ' value. This object is accessed through the built-in name ' + '"None". It\n' + ' is used to signify the absence of a value in many situations, ' + 'e.g.,\n' + " it is returned from functions that don't explicitly return\n" + ' anything. Its truth value is false.\n' + '\n' + 'NotImplemented\n' + ' This type has a single value. There is a single object with ' + 'this\n' + ' value. This object is accessed through the built-in name\n' + ' "NotImplemented". Numeric methods and rich comparison methods\n' + ' should return this value if they do not implement the ' + 'operation for\n' + ' the operands provided. (The interpreter will then try the\n' + ' reflected operation, or some other fallback, depending on the\n' + ' operator.) Its truth value is true.\n' + '\n' + ' See *Implementing the arithmetic operations* for more ' + 'details.\n' + '\n' + 'Ellipsis\n' + ' This type has a single value. There is a single object with ' + 'this\n' + ' value. This object is accessed through the literal "..." or ' + 'the\n' + ' built-in name "Ellipsis". Its truth value is true.\n' + '\n' + '"numbers.Number"\n' + ' These are created by numeric literals and returned as results ' + 'by\n' + ' arithmetic operators and arithmetic built-in functions. ' + 'Numeric\n' + ' objects are immutable; once created their value never ' + 'changes.\n' + ' Python numbers are of course strongly related to mathematical\n' + ' numbers, but subject to the limitations of numerical ' + 'representation\n' + ' in computers.\n' + '\n' + ' Python distinguishes between integers, floating point numbers, ' + 'and\n' + ' complex numbers:\n' + '\n' + ' "numbers.Integral"\n' + ' These represent elements from the mathematical set of ' + 'integers\n' + ' (positive and negative).\n' + '\n' + ' There are two types of integers:\n' + '\n' + ' Integers ("int")\n' + '\n' + ' These represent numbers in an unlimited range, subject ' + 'to\n' + ' available (virtual) memory only. For the purpose of ' + 'shift\n' + ' and mask operations, a binary representation is assumed, ' + 'and\n' + " negative numbers are represented in a variant of 2's\n" + ' complement which gives the illusion of an infinite ' + 'string of\n' + ' sign bits extending to the left.\n' + '\n' + ' Booleans ("bool")\n' + ' These represent the truth values False and True. 
The ' + 'two\n' + ' objects representing the values "False" and "True" are ' + 'the\n' + ' only Boolean objects. The Boolean type is a subtype of ' + 'the\n' + ' integer type, and Boolean values behave like the values ' + '0 and\n' + ' 1, respectively, in almost all contexts, the exception ' + 'being\n' + ' that when converted to a string, the strings ""False"" ' + 'or\n' + ' ""True"" are returned, respectively.\n' + '\n' + ' The rules for integer representation are intended to give ' + 'the\n' + ' most meaningful interpretation of shift and mask ' + 'operations\n' + ' involving negative integers.\n' + '\n' + ' "numbers.Real" ("float")\n' + ' These represent machine-level double precision floating ' + 'point\n' + ' numbers. You are at the mercy of the underlying machine\n' + ' architecture (and C or Java implementation) for the ' + 'accepted\n' + ' range and handling of overflow. Python does not support ' + 'single-\n' + ' precision floating point numbers; the savings in processor ' + 'and\n' + ' memory usage that are usually the reason for using these ' + 'are\n' + ' dwarfed by the overhead of using objects in Python, so ' + 'there is\n' + ' no reason to complicate the language with two kinds of ' + 'floating\n' + ' point numbers.\n' + '\n' + ' "numbers.Complex" ("complex")\n' + ' These represent complex numbers as a pair of machine-level\n' + ' double precision floating point numbers. The same caveats ' + 'apply\n' + ' as for floating point numbers. The real and imaginary parts ' + 'of a\n' + ' complex number "z" can be retrieved through the read-only\n' + ' attributes "z.real" and "z.imag".\n' + '\n' + 'Sequences\n' + ' These represent finite ordered sets indexed by non-negative\n' + ' numbers. The built-in function "len()" returns the number of ' + 'items\n' + ' of a sequence. When the length of a sequence is *n*, the index ' + 'set\n' + ' contains the numbers 0, 1, ..., *n*-1. Item *i* of sequence ' + '*a* is\n' + ' selected by "a[i]".\n' + '\n' + ' Sequences also support slicing: "a[i:j]" selects all items ' + 'with\n' + ' index *k* such that *i* "<=" *k* "<" *j*. When used as an\n' + ' expression, a slice is a sequence of the same type. This ' + 'implies\n' + ' that the index set is renumbered so that it starts at 0.\n' + '\n' + ' Some sequences also support "extended slicing" with a third ' + '"step"\n' + ' parameter: "a[i:j:k]" selects all items of *a* with index *x* ' + 'where\n' + ' "x = i + n*k", *n* ">=" "0" and *i* "<=" *x* "<" *j*.\n' + '\n' + ' Sequences are distinguished according to their mutability:\n' + '\n' + ' Immutable sequences\n' + ' An object of an immutable sequence type cannot change once ' + 'it is\n' + ' created. (If the object contains references to other ' + 'objects,\n' + ' these other objects may be mutable and may be changed; ' + 'however,\n' + ' the collection of objects directly referenced by an ' + 'immutable\n' + ' object cannot change.)\n' + '\n' + ' The following types are immutable sequences:\n' + '\n' + ' Strings\n' + ' A string is a sequence of values that represent Unicode ' + 'code\n' + ' points. All the code points in the range "U+0000 - ' + 'U+10FFFF"\n' + " can be represented in a string. Python doesn't have a " + '"char"\n' + ' type; instead, every code point in the string is ' + 'represented\n' + ' as a string object with length "1". 
The built-in ' + 'function\n' + ' "ord()" converts a code point from its string form to ' + 'an\n' + ' integer in the range "0 - 10FFFF"; "chr()" converts an\n' + ' integer in the range "0 - 10FFFF" to the corresponding ' + 'length\n' + ' "1" string object. "str.encode()" can be used to convert ' + 'a\n' + ' "str" to "bytes" using the given text encoding, and\n' + ' "bytes.decode()" can be used to achieve the opposite.\n' + '\n' + ' Tuples\n' + ' The items of a tuple are arbitrary Python objects. ' + 'Tuples of\n' + ' two or more items are formed by comma-separated lists ' + 'of\n' + " expressions. A tuple of one item (a 'singleton') can " + 'be\n' + ' formed by affixing a comma to an expression (an ' + 'expression by\n' + ' itself does not create a tuple, since parentheses must ' + 'be\n' + ' usable for grouping of expressions). An empty tuple can ' + 'be\n' + ' formed by an empty pair of parentheses.\n' + '\n' + ' Bytes\n' + ' A bytes object is an immutable array. The items are ' + '8-bit\n' + ' bytes, represented by integers in the range 0 <= x < ' + '256.\n' + ' Bytes literals (like "b\'abc\'") and the built-in ' + 'function\n' + ' "bytes()" can be used to construct bytes objects. ' + 'Also,\n' + ' bytes objects can be decoded to strings via the ' + '"decode()"\n' + ' method.\n' + '\n' + ' Mutable sequences\n' + ' Mutable sequences can be changed after they are created. ' + 'The\n' + ' subscription and slicing notations can be used as the ' + 'target of\n' + ' assignment and "del" (delete) statements.\n' + '\n' + ' There are currently two intrinsic mutable sequence types:\n' + '\n' + ' Lists\n' + ' The items of a list are arbitrary Python objects. Lists ' + 'are\n' + ' formed by placing a comma-separated list of expressions ' + 'in\n' + ' square brackets. (Note that there are no special cases ' + 'needed\n' + ' to form lists of length 0 or 1.)\n' + '\n' + ' Byte Arrays\n' + ' A bytearray object is a mutable array. They are created ' + 'by\n' + ' the built-in "bytearray()" constructor. Aside from ' + 'being\n' + ' mutable (and hence unhashable), byte arrays otherwise ' + 'provide\n' + ' the same interface and functionality as immutable bytes\n' + ' objects.\n' + '\n' + ' The extension module "array" provides an additional example ' + 'of a\n' + ' mutable sequence type, as does the "collections" module.\n' + '\n' + 'Set types\n' + ' These represent unordered, finite sets of unique, immutable\n' + ' objects. As such, they cannot be indexed by any subscript. ' + 'However,\n' + ' they can be iterated over, and the built-in function "len()"\n' + ' returns the number of items in a set. Common uses for sets are ' + 'fast\n' + ' membership testing, removing duplicates from a sequence, and\n' + ' computing mathematical operations such as intersection, ' + 'union,\n' + ' difference, and symmetric difference.\n' + '\n' + ' For set elements, the same immutability rules apply as for\n' + ' dictionary keys. Note that numeric types obey the normal rules ' + 'for\n' + ' numeric comparison: if two numbers compare equal (e.g., "1" ' + 'and\n' + ' "1.0"), only one of them can be contained in a set.\n' + '\n' + ' There are currently two intrinsic set types:\n' + '\n' + ' Sets\n' + ' These represent a mutable set. They are created by the ' + 'built-in\n' + ' "set()" constructor and can be modified afterwards by ' + 'several\n' + ' methods, such as "add()".\n' + '\n' + ' Frozen sets\n' + ' These represent an immutable set. They are created by the\n' + ' built-in "frozenset()" constructor. 
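For illustration, a minimal doctest-style sketch of the set types just described (the names and values are arbitrary):

   >>> s = set('aabbc')            # duplicates are removed
   >>> sorted(s)
   ['a', 'b', 'c']
   >>> s.add('d')                  # sets are mutable
   >>> f = frozenset(s)            # frozensets are immutable and hashable
   >>> d = {f: 'letters'}          # so they can be used as dictionary keys
   >>> f in {f, frozenset('xy')}   # ...or as elements of another set
   True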
As a frozenset is ' + 'immutable\n' + ' and *hashable*, it can be used again as an element of ' + 'another\n' + ' set, or as a dictionary key.\n' + '\n' + 'Mappings\n' + ' These represent finite sets of objects indexed by arbitrary ' + 'index\n' + ' sets. The subscript notation "a[k]" selects the item indexed ' + 'by "k"\n' + ' from the mapping "a"; this can be used in expressions and as ' + 'the\n' + ' target of assignments or "del" statements. The built-in ' + 'function\n' + ' "len()" returns the number of items in a mapping.\n' + '\n' + ' There is currently a single intrinsic mapping type:\n' + '\n' + ' Dictionaries\n' + ' These represent finite sets of objects indexed by nearly\n' + ' arbitrary values. The only types of values not acceptable ' + 'as\n' + ' keys are values containing lists or dictionaries or other\n' + ' mutable types that are compared by value rather than by ' + 'object\n' + ' identity, the reason being that the efficient ' + 'implementation of\n' + " dictionaries requires a key's hash value to remain " + 'constant.\n' + ' Numeric types used for keys obey the normal rules for ' + 'numeric\n' + ' comparison: if two numbers compare equal (e.g., "1" and ' + '"1.0")\n' + ' then they can be used interchangeably to index the same\n' + ' dictionary entry.\n' + '\n' + ' Dictionaries are mutable; they can be created by the ' + '"{...}"\n' + ' notation (see section *Dictionary displays*).\n' + '\n' + ' The extension modules "dbm.ndbm" and "dbm.gnu" provide\n' + ' additional examples of mapping types, as does the ' + '"collections"\n' + ' module.\n' + '\n' + 'Callable types\n' + ' These are the types to which the function call operation (see\n' + ' section *Calls*) can be applied:\n' + '\n' + ' User-defined functions\n' + ' A user-defined function object is created by a function\n' + ' definition (see section *Function definitions*). It should ' + 'be\n' + ' called with an argument list containing the same number of ' + 'items\n' + " as the function's formal parameter list.\n" + '\n' + ' Special attributes:\n' + '\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | Attribute | ' + 'Meaning | |\n' + ' ' + '+===========================+=================================+=============+\n' + ' | "__doc__" | The function\'s ' + 'documentation | Writable |\n' + ' | | string, or "None" ' + 'if | |\n' + ' | | unavailable; not inherited ' + 'by | |\n' + ' | | ' + 'subclasses | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__name__" | The function\'s ' + 'name | Writable |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__qualname__" | The function\'s *qualified ' + 'name* | Writable |\n' + ' | | New in version ' + '3.3. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__module__" | The name of the module ' + 'the | Writable |\n' + ' | | function was defined in, ' + 'or | |\n' + ' | | "None" if ' + 'unavailable. 
| |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__defaults__" | A tuple containing ' + 'default | Writable |\n' + ' | | argument values for ' + 'those | |\n' + ' | | arguments that have ' + 'defaults, | |\n' + ' | | or "None" if no arguments ' + 'have | |\n' + ' | | a default ' + 'value | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__code__" | The code object ' + 'representing | Writable |\n' + ' | | the compiled function ' + 'body. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__globals__" | A reference to the ' + 'dictionary | Read-only |\n' + ' | | that holds the ' + "function's | |\n" + ' | | global variables --- the ' + 'global | |\n' + ' | | namespace of the module ' + 'in | |\n' + ' | | which the function was ' + 'defined. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__dict__" | The namespace ' + 'supporting | Writable |\n' + ' | | arbitrary function ' + 'attributes. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__closure__" | "None" or a tuple of cells ' + 'that | Read-only |\n' + ' | | contain bindings for ' + 'the | |\n' + " | | function's free " + 'variables. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__annotations__" | A dict containing ' + 'annotations | Writable |\n' + ' | | of parameters. The keys of ' + 'the | |\n' + ' | | dict are the parameter ' + 'names, | |\n' + ' | | and "\'return\'" for the ' + 'return | |\n' + ' | | annotation, if ' + 'provided. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + ' | "__kwdefaults__" | A dict containing defaults ' + 'for | Writable |\n' + ' | | keyword-only ' + 'parameters. | |\n' + ' ' + '+---------------------------+---------------------------------+-------------+\n' + '\n' + ' Most of the attributes labelled "Writable" check the type ' + 'of the\n' + ' assigned value.\n' + '\n' + ' Function objects also support getting and setting ' + 'arbitrary\n' + ' attributes, which can be used, for example, to attach ' + 'metadata\n' + ' to functions. Regular attribute dot-notation is used to ' + 'get and\n' + ' set such attributes. *Note that the current implementation ' + 'only\n' + ' supports function attributes on user-defined functions. 
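To make the table above concrete, a small illustrative sketch (the function name, its parameters, and the "unit" attribute are invented for this example):

   >>> def area(w, h=1, *, scale=1.0) -> float:
   ...     return w * h * scale
   ...
   >>> area.__defaults__          # defaults for positional parameters
   (1,)
   >>> area.__kwdefaults__        # defaults for keyword-only parameters
   {'scale': 1.0}
   >>> sorted(area.__annotations__)
   ['return']
   >>> area.unit = 'm**2'         # arbitrary function attribute (metadata)
   >>> area.unit
   'm**2'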
' + 'Function\n' + ' attributes on built-in functions may be supported in the\n' + ' future.*\n' + '\n' + " Additional information about a function's definition can " + 'be\n' + ' retrieved from its code object; see the description of ' + 'internal\n' + ' types below.\n' + '\n' + ' Instance methods\n' + ' An instance method object combines a class, a class ' + 'instance and\n' + ' any callable object (normally a user-defined function).\n' + '\n' + ' Special read-only attributes: "__self__" is the class ' + 'instance\n' + ' object, "__func__" is the function object; "__doc__" is ' + 'the\n' + ' method\'s documentation (same as "__func__.__doc__"); ' + '"__name__"\n' + ' is the method name (same as "__func__.__name__"); ' + '"__module__"\n' + ' is the name of the module the method was defined in, or ' + '"None"\n' + ' if unavailable.\n' + '\n' + ' Methods also support accessing (but not setting) the ' + 'arbitrary\n' + ' function attributes on the underlying function object.\n' + '\n' + ' User-defined method objects may be created when getting an\n' + ' attribute of a class (perhaps via an instance of that ' + 'class), if\n' + ' that attribute is a user-defined function object or a ' + 'class\n' + ' method object.\n' + '\n' + ' When an instance method object is created by retrieving a ' + 'user-\n' + ' defined function object from a class via one of its ' + 'instances,\n' + ' its "__self__" attribute is the instance, and the method ' + 'object\n' + ' is said to be bound. The new method\'s "__func__" ' + 'attribute is\n' + ' the original function object.\n' + '\n' + ' When a user-defined method object is created by retrieving\n' + ' another method object from a class or instance, the ' + 'behaviour is\n' + ' the same as for a function object, except that the ' + '"__func__"\n' + ' attribute of the new instance is not the original method ' + 'object\n' + ' but its "__func__" attribute.\n' + '\n' + ' When an instance method object is created by retrieving a ' + 'class\n' + ' method object from a class or instance, its "__self__" ' + 'attribute\n' + ' is the class itself, and its "__func__" attribute is the\n' + ' function object underlying the class method.\n' + '\n' + ' When an instance method object is called, the underlying\n' + ' function ("__func__") is called, inserting the class ' + 'instance\n' + ' ("__self__") in front of the argument list. For instance, ' + 'when\n' + ' "C" is a class which contains a definition for a function ' + '"f()",\n' + ' and "x" is an instance of "C", calling "x.f(1)" is ' + 'equivalent to\n' + ' calling "C.f(x, 1)".\n' + '\n' + ' When an instance method object is derived from a class ' + 'method\n' + ' object, the "class instance" stored in "__self__" will ' + 'actually\n' + ' be the class itself, so that calling either "x.f(1)" or ' + '"C.f(1)"\n' + ' is equivalent to calling "f(C,1)" where "f" is the ' + 'underlying\n' + ' function.\n' + '\n' + ' Note that the transformation from function object to ' + 'instance\n' + ' method object happens each time the attribute is retrieved ' + 'from\n' + ' the instance. In some cases, a fruitful optimization is ' + 'to\n' + ' assign the attribute to a local variable and call that ' + 'local\n' + ' variable. Also notice that this transformation only happens ' + 'for\n' + ' user-defined functions; other callable objects (and all ' + 'non-\n' + ' callable objects) are retrieved without transformation. 
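A minimal sketch of the binding behaviour described above (the class and method names are arbitrary):

   >>> class C:
   ...     def f(self, x):
   ...         return x + 1
   ...
   >>> c = C()
   >>> c.f.__self__ is c          # the instance is stored on the bound method
   True
   >>> c.f.__func__ is C.f        # the underlying function object
   True
   >>> c.f(1) == C.f(c, 1)        # calling x.f(1) is equivalent to C.f(x, 1)
   True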
It ' + 'is\n' + ' also important to note that user-defined functions which ' + 'are\n' + ' attributes of a class instance are not converted to bound\n' + ' methods; this *only* happens when the function is an ' + 'attribute\n' + ' of the class.\n' + '\n' + ' Generator functions\n' + ' A function or method which uses the "yield" statement (see\n' + ' section *The yield statement*) is called a *generator ' + 'function*.\n' + ' Such a function, when called, always returns an iterator ' + 'object\n' + ' which can be used to execute the body of the function: ' + 'calling\n' + ' the iterator\'s "iterator.__next__()" method will cause ' + 'the\n' + ' function to execute until it provides a value using the ' + '"yield"\n' + ' statement. When the function executes a "return" statement ' + 'or\n' + ' falls off the end, a "StopIteration" exception is raised ' + 'and the\n' + ' iterator will have reached the end of the set of values to ' + 'be\n' + ' returned.\n' + '\n' + ' Coroutine functions\n' + ' A function or method which is defined using "async def" is\n' + ' called a *coroutine function*. Such a function, when ' + 'called,\n' + ' returns a *coroutine* object. It may contain "await"\n' + ' expressions, as well as "async with" and "async for" ' + 'statements.\n' + ' See also the *Coroutine Objects* section.\n' + '\n' + ' Built-in functions\n' + ' A built-in function object is a wrapper around a C ' + 'function.\n' + ' Examples of built-in functions are "len()" and ' + '"math.sin()"\n' + ' ("math" is a standard built-in module). The number and type ' + 'of\n' + ' the arguments are determined by the C function. Special ' + 'read-\n' + ' only attributes: "__doc__" is the function\'s ' + 'documentation\n' + ' string, or "None" if unavailable; "__name__" is the ' + "function's\n" + ' name; "__self__" is set to "None" (but see the next item);\n' + ' "__module__" is the name of the module the function was ' + 'defined\n' + ' in or "None" if unavailable.\n' + '\n' + ' Built-in methods\n' + ' This is really a different disguise of a built-in function, ' + 'this\n' + ' time containing an object passed to the C function as an\n' + ' implicit extra argument. An example of a built-in method ' + 'is\n' + ' "alist.append()", assuming *alist* is a list object. In ' + 'this\n' + ' case, the special read-only attribute "__self__" is set to ' + 'the\n' + ' object denoted by *alist*.\n' + '\n' + ' Classes\n' + ' Classes are callable. These objects normally act as ' + 'factories\n' + ' for new instances of themselves, but variations are ' + 'possible for\n' + ' class types that override "__new__()". The arguments of ' + 'the\n' + ' call are passed to "__new__()" and, in the typical case, ' + 'to\n' + ' "__init__()" to initialize the new instance.\n' + '\n' + ' Class Instances\n' + ' Instances of arbitrary classes can be made callable by ' + 'defining\n' + ' a "__call__()" method in their class.\n' + '\n' + 'Modules\n' + ' Modules are a basic organizational unit of Python code, and ' + 'are\n' + ' created by the *import system* as invoked either by the ' + '"import"\n' + ' statement (see "import"), or by calling functions such as\n' + ' "importlib.import_module()" and built-in "__import__()". A ' + 'module\n' + ' object has a namespace implemented by a dictionary object ' + '(this is\n' + ' the dictionary referenced by the "__globals__" attribute of\n' + ' functions defined in the module). 
Attribute references are\n' + ' translated to lookups in this dictionary, e.g., "m.x" is ' + 'equivalent\n' + ' to "m.__dict__["x"]". A module object does not contain the ' + 'code\n' + " object used to initialize the module (since it isn't needed " + 'once\n' + ' the initialization is done).\n' + '\n' + " Attribute assignment updates the module's namespace " + 'dictionary,\n' + ' e.g., "m.x = 1" is equivalent to "m.__dict__["x"] = 1".\n' + '\n' + ' Special read-only attribute: "__dict__" is the module\'s ' + 'namespace\n' + ' as a dictionary object.\n' + '\n' + ' **CPython implementation detail:** Because of the way CPython\n' + ' clears module dictionaries, the module dictionary will be ' + 'cleared\n' + ' when the module falls out of scope even if the dictionary ' + 'still has\n' + ' live references. To avoid this, copy the dictionary or keep ' + 'the\n' + ' module around while using its dictionary directly.\n' + '\n' + ' Predefined (writable) attributes: "__name__" is the module\'s ' + 'name;\n' + ' "__doc__" is the module\'s documentation string, or "None" if\n' + ' unavailable; "__file__" is the pathname of the file from which ' + 'the\n' + ' module was loaded, if it was loaded from a file. The ' + '"__file__"\n' + ' attribute may be missing for certain types of modules, such as ' + 'C\n' + ' modules that are statically linked into the interpreter; for\n' + ' extension modules loaded dynamically from a shared library, it ' + 'is\n' + ' the pathname of the shared library file.\n' + '\n' + 'Custom classes\n' + ' Custom class types are typically created by class definitions ' + '(see\n' + ' section *Class definitions*). A class has a namespace ' + 'implemented\n' + ' by a dictionary object. Class attribute references are ' + 'translated\n' + ' to lookups in this dictionary, e.g., "C.x" is translated to\n' + ' "C.__dict__["x"]" (although there are a number of hooks which ' + 'allow\n' + ' for other means of locating attributes). When the attribute ' + 'name is\n' + ' not found there, the attribute search continues in the base\n' + ' classes. This search of the base classes uses the C3 method\n' + ' resolution order which behaves correctly even in the presence ' + 'of\n' + " 'diamond' inheritance structures where there are multiple\n" + ' inheritance paths leading back to a common ancestor. ' + 'Additional\n' + ' details on the C3 MRO used by Python can be found in the\n' + ' documentation accompanying the 2.3 release at\n' + ' https://www.python.org/download/releases/2.3/mro/.\n' + '\n' + ' When a class attribute reference (for class "C", say) would ' + 'yield a\n' + ' class method object, it is transformed into an instance ' + 'method\n' + ' object whose "__self__" attributes is "C". When it would ' + 'yield a\n' + ' static method object, it is transformed into the object ' + 'wrapped by\n' + ' the static method object. 
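As a rough illustration of the transformation just described (the class and method names are invented):

   >>> class C:
   ...     @classmethod
   ...     def cm(cls):
   ...         return cls
   ...     @staticmethod
   ...     def sm():
   ...         return 'plain'
   ...
   >>> type(C.__dict__['cm'])                 # what is actually stored in __dict__
   <class 'classmethod'>
   >>> C.cm.__self__ is C                     # retrieval binds the class itself
   True
   >>> type(C.__dict__['sm'])
   <class 'staticmethod'>
   >>> C.sm is C.__dict__['sm'].__func__      # retrieval yields the wrapped function
   True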
See section *Implementing ' + 'Descriptors*\n' + ' for another way in which attributes retrieved from a class ' + 'may\n' + ' differ from those actually contained in its "__dict__".\n' + '\n' + " Class attribute assignments update the class's dictionary, " + 'never\n' + ' the dictionary of a base class.\n' + '\n' + ' A class object can be called (see above) to yield a class ' + 'instance\n' + ' (see below).\n' + '\n' + ' Special attributes: "__name__" is the class name; "__module__" ' + 'is\n' + ' the module name in which the class was defined; "__dict__" is ' + 'the\n' + ' dictionary containing the class\'s namespace; "__bases__" is a ' + 'tuple\n' + ' (possibly empty or a singleton) containing the base classes, ' + 'in the\n' + ' order of their occurrence in the base class list; "__doc__" is ' + 'the\n' + " class's documentation string, or None if undefined.\n" + '\n' + 'Class instances\n' + ' A class instance is created by calling a class object (see ' + 'above).\n' + ' A class instance has a namespace implemented as a dictionary ' + 'which\n' + ' is the first place in which attribute references are ' + 'searched.\n' + " When an attribute is not found there, and the instance's class " + 'has\n' + ' an attribute by that name, the search continues with the ' + 'class\n' + ' attributes. If a class attribute is found that is a ' + 'user-defined\n' + ' function object, it is transformed into an instance method ' + 'object\n' + ' whose "__self__" attribute is the instance. Static method ' + 'and\n' + ' class method objects are also transformed; see above under\n' + ' "Classes". See section *Implementing Descriptors* for another ' + 'way\n' + ' in which attributes of a class retrieved via its instances ' + 'may\n' + " differ from the objects actually stored in the class's " + '"__dict__".\n' + " If no class attribute is found, and the object's class has a\n" + ' "__getattr__()" method, that is called to satisfy the lookup.\n' + '\n' + " Attribute assignments and deletions update the instance's\n" + " dictionary, never a class's dictionary. If the class has a\n" + ' "__setattr__()" or "__delattr__()" method, this is called ' + 'instead\n' + ' of updating the instance dictionary directly.\n' + '\n' + ' Class instances can pretend to be numbers, sequences, or ' + 'mappings\n' + ' if they have methods with certain special names. See section\n' + ' *Special method names*.\n' + '\n' + ' Special attributes: "__dict__" is the attribute dictionary;\n' + ' "__class__" is the instance\'s class.\n' + '\n' + 'I/O objects (also known as file objects)\n' + ' A *file object* represents an open file. Various shortcuts ' + 'are\n' + ' available to create file objects: the "open()" built-in ' + 'function,\n' + ' and also "os.popen()", "os.fdopen()", and the "makefile()" ' + 'method\n' + ' of socket objects (and perhaps by other functions or methods\n' + ' provided by extension modules).\n' + '\n' + ' The objects "sys.stdin", "sys.stdout" and "sys.stderr" are\n' + ' initialized to file objects corresponding to the ' + "interpreter's\n" + ' standard input, output and error streams; they are all open in ' + 'text\n' + ' mode and therefore follow the interface defined by the\n' + ' "io.TextIOBase" abstract class.\n' + '\n' + 'Internal types\n' + ' A few types used internally by the interpreter are exposed to ' + 'the\n' + ' user. 
Their definitions may change with future versions of ' + 'the\n' + ' interpreter, but they are mentioned here for completeness.\n' + '\n' + ' Code objects\n' + ' Code objects represent *byte-compiled* executable Python ' + 'code,\n' + ' or *bytecode*. The difference between a code object and a\n' + ' function object is that the function object contains an ' + 'explicit\n' + " reference to the function's globals (the module in which it " + 'was\n' + ' defined), while a code object contains no context; also ' + 'the\n' + ' default argument values are stored in the function object, ' + 'not\n' + ' in the code object (because they represent values ' + 'calculated at\n' + ' run-time). Unlike function objects, code objects are ' + 'immutable\n' + ' and contain no references (directly or indirectly) to ' + 'mutable\n' + ' objects.\n' + '\n' + ' Special read-only attributes: "co_name" gives the function ' + 'name;\n' + ' "co_argcount" is the number of positional arguments ' + '(including\n' + ' arguments with default values); "co_nlocals" is the number ' + 'of\n' + ' local variables used by the function (including ' + 'arguments);\n' + ' "co_varnames" is a tuple containing the names of the local\n' + ' variables (starting with the argument names); "co_cellvars" ' + 'is a\n' + ' tuple containing the names of local variables that are\n' + ' referenced by nested functions; "co_freevars" is a tuple\n' + ' containing the names of free variables; "co_code" is a ' + 'string\n' + ' representing the sequence of bytecode instructions; ' + '"co_consts"\n' + ' is a tuple containing the literals used by the bytecode;\n' + ' "co_names" is a tuple containing the names used by the ' + 'bytecode;\n' + ' "co_filename" is the filename from which the code was ' + 'compiled;\n' + ' "co_firstlineno" is the first line number of the function;\n' + ' "co_lnotab" is a string encoding the mapping from bytecode\n' + ' offsets to line numbers (for details see the source code of ' + 'the\n' + ' interpreter); "co_stacksize" is the required stack size\n' + ' (including local variables); "co_flags" is an integer ' + 'encoding a\n' + ' number of flags for the interpreter.\n' + '\n' + ' The following flag bits are defined for "co_flags": bit ' + '"0x04"\n' + ' is set if the function uses the "*arguments" syntax to ' + 'accept an\n' + ' arbitrary number of positional arguments; bit "0x08" is set ' + 'if\n' + ' the function uses the "**keywords" syntax to accept ' + 'arbitrary\n' + ' keyword arguments; bit "0x20" is set if the function is a\n' + ' generator.\n' + '\n' + ' Future feature declarations ("from __future__ import ' + 'division")\n' + ' also use bits in "co_flags" to indicate whether a code ' + 'object\n' + ' was compiled with a particular feature enabled: bit ' + '"0x2000" is\n' + ' set if the function was compiled with future division ' + 'enabled;\n' + ' bits "0x10" and "0x1000" were used in earlier versions of\n' + ' Python.\n' + '\n' + ' Other bits in "co_flags" are reserved for internal use.\n' + '\n' + ' If a code object represents a function, the first item in\n' + ' "co_consts" is the documentation string of the function, ' + 'or\n' + ' "None" if undefined.\n' + '\n' + ' Frame objects\n' + ' Frame objects represent execution frames. 
They may occur ' + 'in\n' + ' traceback objects (see below).\n' + '\n' + ' Special read-only attributes: "f_back" is to the previous ' + 'stack\n' + ' frame (towards the caller), or "None" if this is the ' + 'bottom\n' + ' stack frame; "f_code" is the code object being executed in ' + 'this\n' + ' frame; "f_locals" is the dictionary used to look up local\n' + ' variables; "f_globals" is used for global variables;\n' + ' "f_builtins" is used for built-in (intrinsic) names; ' + '"f_lasti"\n' + ' gives the precise instruction (this is an index into the\n' + ' bytecode string of the code object).\n' + '\n' + ' Special writable attributes: "f_trace", if not "None", is ' + 'a\n' + ' function called at the start of each source code line (this ' + 'is\n' + ' used by the debugger); "f_lineno" is the current line ' + 'number of\n' + ' the frame --- writing to this from within a trace function ' + 'jumps\n' + ' to the given line (only for the bottom-most frame). A ' + 'debugger\n' + ' can implement a Jump command (aka Set Next Statement) by ' + 'writing\n' + ' to f_lineno.\n' + '\n' + ' Frame objects support one method:\n' + '\n' + ' frame.clear()\n' + '\n' + ' This method clears all references to local variables ' + 'held by\n' + ' the frame. Also, if the frame belonged to a generator, ' + 'the\n' + ' generator is finalized. This helps break reference ' + 'cycles\n' + ' involving frame objects (for example when catching an\n' + ' exception and storing its traceback for later use).\n' + '\n' + ' "RuntimeError" is raised if the frame is currently ' + 'executing.\n' + '\n' + ' New in version 3.4.\n' + '\n' + ' Traceback objects\n' + ' Traceback objects represent a stack trace of an exception. ' + 'A\n' + ' traceback object is created when an exception occurs. When ' + 'the\n' + ' search for an exception handler unwinds the execution ' + 'stack, at\n' + ' each unwound level a traceback object is inserted in front ' + 'of\n' + ' the current traceback. When an exception handler is ' + 'entered,\n' + ' the stack trace is made available to the program. (See ' + 'section\n' + ' *The try statement*.) It is accessible as the third item of ' + 'the\n' + ' tuple returned by "sys.exc_info()". When the program ' + 'contains no\n' + ' suitable handler, the stack trace is written (nicely ' + 'formatted)\n' + ' to the standard error stream; if the interpreter is ' + 'interactive,\n' + ' it is also made available to the user as ' + '"sys.last_traceback".\n' + '\n' + ' Special read-only attributes: "tb_next" is the next level ' + 'in the\n' + ' stack trace (towards the frame where the exception ' + 'occurred), or\n' + ' "None" if there is no next level; "tb_frame" points to the\n' + ' execution frame of the current level; "tb_lineno" gives the ' + 'line\n' + ' number where the exception occurred; "tb_lasti" indicates ' + 'the\n' + ' precise instruction. The line number and last instruction ' + 'in\n' + ' the traceback may differ from the line number of its frame\n' + ' object if the exception occurred in a "try" statement with ' + 'no\n' + ' matching except clause or with a finally clause.\n' + '\n' + ' Slice objects\n' + ' Slice objects are used to represent slices for ' + '"__getitem__()"\n' + ' methods. They are also created by the built-in "slice()"\n' + ' function.\n' + '\n' + ' Special read-only attributes: "start" is the lower bound; ' + '"stop"\n' + ' is the upper bound; "step" is the step value; each is ' + '"None" if\n' + ' omitted. 
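For example, a tiny sketch of these attributes (the bounds are arbitrary):

   >>> s = slice(2, 10)
   >>> s.start, s.stop, s.step    # step was omitted, so it is None
   (2, 10, None)
   >>> 'abcdefghij'[s]            # equivalent to 'abcdefghij'[2:10]
   'cdefghij'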
These attributes can have any type.\n' + '\n' + ' Slice objects support one method:\n' + '\n' + ' slice.indices(self, length)\n' + '\n' + ' This method takes a single integer argument *length* ' + 'and\n' + ' computes information about the slice that the slice ' + 'object\n' + ' would describe if applied to a sequence of *length* ' + 'items.\n' + ' It returns a tuple of three integers; respectively these ' + 'are\n' + ' the *start* and *stop* indices and the *step* or stride\n' + ' length of the slice. Missing or out-of-bounds indices ' + 'are\n' + ' handled in a manner consistent with regular slices.\n' + '\n' + ' Static method objects\n' + ' Static method objects provide a way of defeating the\n' + ' transformation of function objects to method objects ' + 'described\n' + ' above. A static method object is a wrapper around any ' + 'other\n' + ' object, usually a user-defined method object. When a ' + 'static\n' + ' method object is retrieved from a class or a class ' + 'instance, the\n' + ' object actually returned is the wrapped object, which is ' + 'not\n' + ' subject to any further transformation. Static method ' + 'objects are\n' + ' not themselves callable, although the objects they wrap ' + 'usually\n' + ' are. Static method objects are created by the built-in\n' + ' "staticmethod()" constructor.\n' + '\n' + ' Class method objects\n' + ' A class method object, like a static method object, is a ' + 'wrapper\n' + ' around another object that alters the way in which that ' + 'object\n' + ' is retrieved from classes and class instances. The ' + 'behaviour of\n' + ' class method objects upon such retrieval is described ' + 'above,\n' + ' under "User-defined methods". Class method objects are ' + 'created\n' + ' by the built-in "classmethod()" constructor.\n', + 'typesfunctions': '\n' + 'Functions\n' + '*********\n' + '\n' + 'Function objects are created by function definitions. ' + 'The only\n' + 'operation on a function object is to call it: ' + '"func(argument-list)".\n' + '\n' + 'There are really two flavors of function objects: ' + 'built-in functions\n' + 'and user-defined functions. Both support the same ' + 'operation (to call\n' + 'the function), but the implementation is different, ' + 'hence the\n' + 'different object types.\n' + '\n' + 'See *Function definitions* for more information.\n', + 'typesmapping': '\n' + 'Mapping Types --- "dict"\n' + '************************\n' + '\n' + 'A *mapping* object maps *hashable* values to arbitrary ' + 'objects.\n' + 'Mappings are mutable objects. There is currently only one ' + 'standard\n' + 'mapping type, the *dictionary*. (For other containers see ' + 'the built-\n' + 'in "list", "set", and "tuple" classes, and the ' + '"collections" module.)\n' + '\n' + "A dictionary's keys are *almost* arbitrary values. Values " + 'that are\n' + 'not *hashable*, that is, values containing lists, ' + 'dictionaries or\n' + 'other mutable types (that are compared by value rather ' + 'than by object\n' + 'identity) may not be used as keys. Numeric types used for ' + 'keys obey\n' + 'the normal rules for numeric comparison: if two numbers ' + 'compare equal\n' + '(such as "1" and "1.0") then they can be used ' + 'interchangeably to index\n' + 'the same dictionary entry. 
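A minimal example of this equivalence:

   >>> d = {1: 'one'}
   >>> d[1.0]                     # 1 == 1.0, so both index the same entry
   'one'
   >>> d[1.0] = 'uno'
   >>> d
   {1: 'uno'}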
(Note however, that since ' + 'computers store\n' + 'floating-point numbers as approximations it is usually ' + 'unwise to use\n' + 'them as dictionary keys.)\n' + '\n' + 'Dictionaries can be created by placing a comma-separated ' + 'list of "key:\n' + 'value" pairs within braces, for example: "{\'jack\': 4098, ' + "'sjoerd':\n" + '4127}" or "{4098: \'jack\', 4127: \'sjoerd\'}", or by the ' + '"dict"\n' + 'constructor.\n' + '\n' + 'class class dict(**kwarg)\n' + 'class class dict(mapping, **kwarg)\n' + 'class class dict(iterable, **kwarg)\n' + '\n' + ' Return a new dictionary initialized from an optional ' + 'positional\n' + ' argument and a possibly empty set of keyword ' + 'arguments.\n' + '\n' + ' If no positional argument is given, an empty dictionary ' + 'is created.\n' + ' If a positional argument is given and it is a mapping ' + 'object, a\n' + ' dictionary is created with the same key-value pairs as ' + 'the mapping\n' + ' object. Otherwise, the positional argument must be an ' + '*iterable*\n' + ' object. Each item in the iterable must itself be an ' + 'iterable with\n' + ' exactly two objects. The first object of each item ' + 'becomes a key\n' + ' in the new dictionary, and the second object the ' + 'corresponding\n' + ' value. If a key occurs more than once, the last value ' + 'for that key\n' + ' becomes the corresponding value in the new dictionary.\n' + '\n' + ' If keyword arguments are given, the keyword arguments ' + 'and their\n' + ' values are added to the dictionary created from the ' + 'positional\n' + ' argument. If a key being added is already present, the ' + 'value from\n' + ' the keyword argument replaces the value from the ' + 'positional\n' + ' argument.\n' + '\n' + ' To illustrate, the following examples all return a ' + 'dictionary equal\n' + ' to "{"one": 1, "two": 2, "three": 3}":\n' + '\n' + ' >>> a = dict(one=1, two=2, three=3)\n' + " >>> b = {'one': 1, 'two': 2, 'three': 3}\n" + " >>> c = dict(zip(['one', 'two', 'three'], [1, 2, " + '3]))\n' + " >>> d = dict([('two', 2), ('one', 1), ('three', " + '3)])\n' + " >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n" + ' >>> a == b == c == d == e\n' + ' True\n' + '\n' + ' Providing keyword arguments as in the first example ' + 'only works for\n' + ' keys that are valid Python identifiers. Otherwise, any ' + 'valid keys\n' + ' can be used.\n' + '\n' + ' These are the operations that dictionaries support (and ' + 'therefore,\n' + ' custom mapping types should support too):\n' + '\n' + ' len(d)\n' + '\n' + ' Return the number of items in the dictionary *d*.\n' + '\n' + ' d[key]\n' + '\n' + ' Return the item of *d* with key *key*. Raises a ' + '"KeyError" if\n' + ' *key* is not in the map.\n' + '\n' + ' If a subclass of dict defines a method ' + '"__missing__()" and *key*\n' + ' is not present, the "d[key]" operation calls that ' + 'method with\n' + ' the key *key* as argument. The "d[key]" operation ' + 'then returns\n' + ' or raises whatever is returned or raised by the\n' + ' "__missing__(key)" call. No other operations or ' + 'methods invoke\n' + ' "__missing__()". If "__missing__()" is not defined, ' + '"KeyError"\n' + ' is raised. "__missing__()" must be a method; it ' + 'cannot be an\n' + ' instance variable:\n' + '\n' + ' >>> class Counter(dict):\n' + ' ... def __missing__(self, key):\n' + ' ... 
return 0\n' + ' >>> c = Counter()\n' + " >>> c['red']\n" + ' 0\n' + " >>> c['red'] += 1\n" + " >>> c['red']\n" + ' 1\n' + '\n' + ' The example above shows part of the implementation ' + 'of\n' + ' "collections.Counter". A different "__missing__" ' + 'method is used\n' + ' by "collections.defaultdict".\n' + '\n' + ' d[key] = value\n' + '\n' + ' Set "d[key]" to *value*.\n' + '\n' + ' del d[key]\n' + '\n' + ' Remove "d[key]" from *d*. Raises a "KeyError" if ' + '*key* is not\n' + ' in the map.\n' + '\n' + ' key in d\n' + '\n' + ' Return "True" if *d* has a key *key*, else "False".\n' + '\n' + ' key not in d\n' + '\n' + ' Equivalent to "not key in d".\n' + '\n' + ' iter(d)\n' + '\n' + ' Return an iterator over the keys of the dictionary. ' + 'This is a\n' + ' shortcut for "iter(d.keys())".\n' + '\n' + ' clear()\n' + '\n' + ' Remove all items from the dictionary.\n' + '\n' + ' copy()\n' + '\n' + ' Return a shallow copy of the dictionary.\n' + '\n' + ' classmethod fromkeys(seq[, value])\n' + '\n' + ' Create a new dictionary with keys from *seq* and ' + 'values set to\n' + ' *value*.\n' + '\n' + ' "fromkeys()" is a class method that returns a new ' + 'dictionary.\n' + ' *value* defaults to "None".\n' + '\n' + ' get(key[, default])\n' + '\n' + ' Return the value for *key* if *key* is in the ' + 'dictionary, else\n' + ' *default*. If *default* is not given, it defaults to ' + '"None", so\n' + ' that this method never raises a "KeyError".\n' + '\n' + ' items()\n' + '\n' + ' Return a new view of the dictionary\'s items ("(key, ' + 'value)"\n' + ' pairs). See the *documentation of view objects*.\n' + '\n' + ' keys()\n' + '\n' + " Return a new view of the dictionary's keys. See " + 'the\n' + ' *documentation of view objects*.\n' + '\n' + ' pop(key[, default])\n' + '\n' + ' If *key* is in the dictionary, remove it and return ' + 'its value,\n' + ' else return *default*. If *default* is not given ' + 'and *key* is\n' + ' not in the dictionary, a "KeyError" is raised.\n' + '\n' + ' popitem()\n' + '\n' + ' Remove and return an arbitrary "(key, value)" pair ' + 'from the\n' + ' dictionary.\n' + '\n' + ' "popitem()" is useful to destructively iterate over ' + 'a\n' + ' dictionary, as often used in set algorithms. If the ' + 'dictionary\n' + ' is empty, calling "popitem()" raises a "KeyError".\n' + '\n' + ' setdefault(key[, default])\n' + '\n' + ' If *key* is in the dictionary, return its value. If ' + 'not, insert\n' + ' *key* with a value of *default* and return ' + '*default*. *default*\n' + ' defaults to "None".\n' + '\n' + ' update([other])\n' + '\n' + ' Update the dictionary with the key/value pairs from ' + '*other*,\n' + ' overwriting existing keys. Return "None".\n' + '\n' + ' "update()" accepts either another dictionary object ' + 'or an\n' + ' iterable of key/value pairs (as tuples or other ' + 'iterables of\n' + ' length two). If keyword arguments are specified, ' + 'the dictionary\n' + ' is then updated with those key/value pairs: ' + '"d.update(red=1,\n' + ' blue=2)".\n' + '\n' + ' values()\n' + '\n' + " Return a new view of the dictionary's values. See " + 'the\n' + ' *documentation of view objects*.\n' + '\n' + ' Dictionaries compare equal if and only if they have the ' + 'same "(key,\n' + ' value)" pairs. 
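A short illustrative sketch of a few of the methods above (the keys and values are arbitrary):

   >>> d = dict.fromkeys(['a', 'b'], 0)
   >>> d.get('c', 42)             # missing key, so the default is returned
   42
   >>> d.setdefault('c', 7)       # inserts 'c' and returns the new value
   7
   >>> d.update(b=2, c=3)         # keyword form of update()
   >>> d.pop('a')
   0
   >>> d == {'b': 2, 'c': 3}      # equal because the (key, value) pairs match
   True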
Order comparisons (\'<\', \'<=\', ' + "'>=', '>') raise\n" + ' "TypeError".\n' + '\n' + 'See also: "types.MappingProxyType" can be used to create a ' + 'read-only\n' + ' view of a "dict".\n' + '\n' + '\n' + 'Dictionary view objects\n' + '=======================\n' + '\n' + 'The objects returned by "dict.keys()", "dict.values()" ' + 'and\n' + '"dict.items()" are *view objects*. They provide a dynamic ' + 'view on the\n' + "dictionary's entries, which means that when the dictionary " + 'changes,\n' + 'the view reflects these changes.\n' + '\n' + 'Dictionary views can be iterated over to yield their ' + 'respective data,\n' + 'and support membership tests:\n' + '\n' + 'len(dictview)\n' + '\n' + ' Return the number of entries in the dictionary.\n' + '\n' + 'iter(dictview)\n' + '\n' + ' Return an iterator over the keys, values or items ' + '(represented as\n' + ' tuples of "(key, value)") in the dictionary.\n' + '\n' + ' Keys and values are iterated over in an arbitrary order ' + 'which is\n' + ' non-random, varies across Python implementations, and ' + 'depends on\n' + " the dictionary's history of insertions and deletions. " + 'If keys,\n' + ' values and items views are iterated over with no ' + 'intervening\n' + ' modifications to the dictionary, the order of items ' + 'will directly\n' + ' correspond. This allows the creation of "(value, key)" ' + 'pairs using\n' + ' "zip()": "pairs = zip(d.values(), d.keys())". Another ' + 'way to\n' + ' create the same list is "pairs = [(v, k) for (k, v) in ' + 'd.items()]".\n' + '\n' + ' Iterating views while adding or deleting entries in the ' + 'dictionary\n' + ' may raise a "RuntimeError" or fail to iterate over all ' + 'entries.\n' + '\n' + 'x in dictview\n' + '\n' + ' Return "True" if *x* is in the underlying dictionary\'s ' + 'keys, values\n' + ' or items (in the latter case, *x* should be a "(key, ' + 'value)"\n' + ' tuple).\n' + '\n' + 'Keys views are set-like since their entries are unique and ' + 'hashable.\n' + 'If all values are hashable, so that "(key, value)" pairs ' + 'are unique\n' + 'and hashable, then the items view is also set-like. ' + '(Values views are\n' + 'not treated as set-like since the entries are generally ' + 'not unique.)\n' + 'For set-like views, all of the operations defined for the ' + 'abstract\n' + 'base class "collections.abc.Set" are available (for ' + 'example, "==",\n' + '"<", or "^").\n' + '\n' + 'An example of dictionary view usage:\n' + '\n' + " >>> dishes = {'eggs': 2, 'sausage': 1, 'bacon': 1, " + "'spam': 500}\n" + ' >>> keys = dishes.keys()\n' + ' >>> values = dishes.values()\n' + '\n' + ' >>> # iteration\n' + ' >>> n = 0\n' + ' >>> for val in values:\n' + ' ... n += val\n' + ' >>> print(n)\n' + ' 504\n' + '\n' + ' >>> # keys and values are iterated over in the same ' + 'order\n' + ' >>> list(keys)\n' + " ['eggs', 'bacon', 'sausage', 'spam']\n" + ' >>> list(values)\n' + ' [2, 1, 1, 500]\n' + '\n' + ' >>> # view objects are dynamic and reflect dict ' + 'changes\n' + " >>> del dishes['eggs']\n" + " >>> del dishes['sausage']\n" + ' >>> list(keys)\n' + " ['spam', 'bacon']\n" + '\n' + ' >>> # set operations\n' + " >>> keys & {'eggs', 'bacon', 'salad'}\n" + " {'bacon'}\n" + " >>> keys ^ {'sausage', 'juice'}\n" + " {'juice', 'sausage', 'bacon', 'spam'}\n", + 'typesmethods': '\n' + 'Methods\n' + '*******\n' + '\n' + 'Methods are functions that are called using the attribute ' + 'notation.\n' + 'There are two flavors: built-in methods (such as ' + '"append()" on lists)\n' + 'and class instance methods. 
Built-in methods are ' + 'described with the\n' + 'types that support them.\n' + '\n' + 'If you access a method (a function defined in a class ' + 'namespace)\n' + 'through an instance, you get a special object: a *bound ' + 'method* (also\n' + 'called *instance method*) object. When called, it will add ' + 'the "self"\n' + 'argument to the argument list. Bound methods have two ' + 'special read-\n' + 'only attributes: "m.__self__" is the object on which the ' + 'method\n' + 'operates, and "m.__func__" is the function implementing ' + 'the method.\n' + 'Calling "m(arg-1, arg-2, ..., arg-n)" is completely ' + 'equivalent to\n' + 'calling "m.__func__(m.__self__, arg-1, arg-2, ..., ' + 'arg-n)".\n' + '\n' + 'Like function objects, bound method objects support ' + 'getting arbitrary\n' + 'attributes. However, since method attributes are actually ' + 'stored on\n' + 'the underlying function object ("meth.__func__"), setting ' + 'method\n' + 'attributes on bound methods is disallowed. Attempting to ' + 'set an\n' + 'attribute on a method results in an "AttributeError" being ' + 'raised. In\n' + 'order to set a method attribute, you need to explicitly ' + 'set it on the\n' + 'underlying function object:\n' + '\n' + ' >>> class C:\n' + ' ... def method(self):\n' + ' ... pass\n' + ' ...\n' + ' >>> c = C()\n' + " >>> c.method.whoami = 'my name is method' # can't set " + 'on the method\n' + ' Traceback (most recent call last):\n' + ' File "", line 1, in \n' + " AttributeError: 'method' object has no attribute " + "'whoami'\n" + " >>> c.method.__func__.whoami = 'my name is method'\n" + ' >>> c.method.whoami\n' + " 'my name is method'\n" + '\n' + 'See *The standard type hierarchy* for more information.\n', + 'typesmodules': '\n' + 'Modules\n' + '*******\n' + '\n' + 'The only special operation on a module is attribute ' + 'access: "m.name",\n' + 'where *m* is a module and *name* accesses a name defined ' + "in *m*'s\n" + 'symbol table. Module attributes can be assigned to. (Note ' + 'that the\n' + '"import" statement is not, strictly speaking, an operation ' + 'on a module\n' + 'object; "import foo" does not require a module object ' + 'named *foo* to\n' + 'exist, rather it requires an (external) *definition* for a ' + 'module\n' + 'named *foo* somewhere.)\n' + '\n' + 'A special attribute of every module is "__dict__". This is ' + 'the\n' + "dictionary containing the module's symbol table. Modifying " + 'this\n' + "dictionary will actually change the module's symbol table, " + 'but direct\n' + 'assignment to the "__dict__" attribute is not possible ' + '(you can write\n' + '"m.__dict__[\'a\'] = 1", which defines "m.a" to be "1", ' + "but you can't\n" + 'write "m.__dict__ = {}"). Modifying "__dict__" directly ' + 'is not\n' + 'recommended.\n' + '\n' + 'Modules built into the interpreter are written like this: ' + '"". If loaded from a file, they are ' + 'written as\n' + '"".\n', + 'typesseq': '\n' + 'Sequence Types --- "list", "tuple", "range"\n' + '*******************************************\n' + '\n' + 'There are three basic sequence types: lists, tuples, and ' + 'range\n' + 'objects. Additional sequence types tailored for processing of ' + '*binary\n' + 'data* and *text strings* are described in dedicated sections.\n' + '\n' + '\n' + 'Common Sequence Operations\n' + '==========================\n' + '\n' + 'The operations in the following table are supported by most ' + 'sequence\n' + 'types, both mutable and immutable. 
The ' + '"collections.abc.Sequence" ABC\n' + 'is provided to make it easier to correctly implement these ' + 'operations\n' + 'on custom sequence types.\n' + '\n' + 'This table lists the sequence operations sorted in ascending ' + 'priority.\n' + 'In the table, *s* and *t* are sequences of the same type, *n*, ' + '*i*,\n' + '*j* and *k* are integers and *x* is an arbitrary object that ' + 'meets any\n' + 'type and value restrictions imposed by *s*.\n' + '\n' + 'The "in" and "not in" operations have the same priorities as ' + 'the\n' + 'comparison operations. The "+" (concatenation) and "*" ' + '(repetition)\n' + 'operations have the same priority as the corresponding ' + 'numeric\n' + 'operations.\n' + '\n' + '+----------------------------+----------------------------------+------------+\n' + '| Operation | ' + 'Result | Notes |\n' + '+============================+==================================+============+\n' + '| "x in s" | "True" if an item of *s* ' + 'is | (1) |\n' + '| | equal to *x*, else ' + '"False" | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "x not in s" | "False" if an item of *s* ' + 'is | (1) |\n' + '| | equal to *x*, else ' + '"True" | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s + t" | the concatenation of *s* and ' + '*t* | (6)(7) |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s * n" or "n * s" | *n* shallow copies of ' + '*s* | (2)(7) |\n' + '| | ' + 'concatenated | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s[i]" | *i*th item of *s*, origin ' + '0 | (3) |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s[i:j]" | slice of *s* from *i* to ' + '*j* | (3)(4) |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s[i:j:k]" | slice of *s* from *i* to ' + '*j* | (3)(5) |\n' + '| | with step ' + '*k* | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "len(s)" | length of ' + '*s* | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "min(s)" | smallest item of ' + '*s* | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "max(s)" | largest item of ' + '*s* | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s.index(x[, i[, j]])" | index of the first occurrence ' + 'of | (8) |\n' + '| | *x* in *s* (at or after ' + 'index | |\n' + '| | *i* and before index ' + '*j*) | |\n' + '+----------------------------+----------------------------------+------------+\n' + '| "s.count(x)" | total number of occurrences ' + 'of | |\n' + '| | *x* in ' + '*s* | |\n' + '+----------------------------+----------------------------------+------------+\n' + '\n' + 'Sequences of the same type also support comparisons. In ' + 'particular,\n' + 'tuples and lists are compared lexicographically by comparing\n' + 'corresponding elements. This means that to compare equal, ' + 'every\n' + 'element must compare equal and the two sequences must be of ' + 'the same\n' + 'type and have the same length. (For full details see ' + '*Comparisons* in\n' + 'the language reference.)\n' + '\n' + 'Notes:\n' + '\n' + '1. 
While the "in" and "not in" operations are used only for ' + 'simple\n' + ' containment testing in the general case, some specialised ' + 'sequences\n' + ' (such as "str", "bytes" and "bytearray") also use them for\n' + ' subsequence testing:\n' + '\n' + ' >>> "gg" in "eggs"\n' + ' True\n' + '\n' + '2. Values of *n* less than "0" are treated as "0" (which ' + 'yields an\n' + ' empty sequence of the same type as *s*). Note also that ' + 'the copies\n' + ' are shallow; nested structures are not copied. This often ' + 'haunts\n' + ' new Python programmers; consider:\n' + '\n' + ' >>> lists = [[]] * 3\n' + ' >>> lists\n' + ' [[], [], []]\n' + ' >>> lists[0].append(3)\n' + ' >>> lists\n' + ' [[3], [3], [3]]\n' + '\n' + ' What has happened is that "[[]]" is a one-element list ' + 'containing\n' + ' an empty list, so all three elements of "[[]] * 3" are ' + '(pointers\n' + ' to) this single empty list. Modifying any of the elements ' + 'of\n' + ' "lists" modifies this single list. You can create a list ' + 'of\n' + ' different lists this way:\n' + '\n' + ' >>> lists = [[] for i in range(3)]\n' + ' >>> lists[0].append(3)\n' + ' >>> lists[1].append(5)\n' + ' >>> lists[2].append(7)\n' + ' >>> lists\n' + ' [[3], [5], [7]]\n' + '\n' + '3. If *i* or *j* is negative, the index is relative to the end ' + 'of\n' + ' the string: "len(s) + i" or "len(s) + j" is substituted. ' + 'But note\n' + ' that "-0" is still "0".\n' + '\n' + '4. The slice of *s* from *i* to *j* is defined as the sequence ' + 'of\n' + ' items with index *k* such that "i <= k < j". If *i* or *j* ' + 'is\n' + ' greater than "len(s)", use "len(s)". If *i* is omitted or ' + '"None",\n' + ' use "0". If *j* is omitted or "None", use "len(s)". If ' + '*i* is\n' + ' greater than or equal to *j*, the slice is empty.\n' + '\n' + '5. The slice of *s* from *i* to *j* with step *k* is defined ' + 'as the\n' + ' sequence of items with index "x = i + n*k" such that "0 <= ' + 'n <\n' + ' (j-i)/k". In other words, the indices are "i", "i+k", ' + '"i+2*k",\n' + ' "i+3*k" and so on, stopping when *j* is reached (but never\n' + ' including *j*). If *i* or *j* is greater than "len(s)", ' + 'use\n' + ' "len(s)". If *i* or *j* are omitted or "None", they become ' + '"end"\n' + ' values (which end depends on the sign of *k*). Note, *k* ' + 'cannot be\n' + ' zero. If *k* is "None", it is treated like "1".\n' + '\n' + '6. Concatenating immutable sequences always results in a new\n' + ' object. This means that building up a sequence by repeated\n' + ' concatenation will have a quadratic runtime cost in the ' + 'total\n' + ' sequence length. To get a linear runtime cost, you must ' + 'switch to\n' + ' one of the alternatives below:\n' + '\n' + ' * if concatenating "str" objects, you can build a list and ' + 'use\n' + ' "str.join()" at the end or else write to a "io.StringIO" ' + 'instance\n' + ' and retrieve its value when complete\n' + '\n' + ' * if concatenating "bytes" objects, you can similarly use\n' + ' "bytes.join()" or "io.BytesIO", or you can do in-place\n' + ' concatenation with a "bytearray" object. "bytearray" ' + 'objects are\n' + ' mutable and have an efficient overallocation mechanism\n' + '\n' + ' * if concatenating "tuple" objects, extend a "list" ' + 'instead\n' + '\n' + ' * for other types, investigate the relevant class ' + 'documentation\n' + '\n' + '7. Some sequence types (such as "range") only support item\n' + " sequences that follow specific patterns, and hence don't " + 'support\n' + ' sequence concatenation or repetition.\n' + '\n' + '8. 
"index" raises "ValueError" when *x* is not found in *s*. ' + 'When\n' + ' supported, the additional arguments to the index method ' + 'allow\n' + ' efficient searching of subsections of the sequence. Passing ' + 'the\n' + ' extra arguments is roughly equivalent to using ' + '"s[i:j].index(x)",\n' + ' only without copying any data and with the returned index ' + 'being\n' + ' relative to the start of the sequence rather than the start ' + 'of the\n' + ' slice.\n' + '\n' + '\n' + 'Immutable Sequence Types\n' + '========================\n' + '\n' + 'The only operation that immutable sequence types generally ' + 'implement\n' + 'that is not also implemented by mutable sequence types is ' + 'support for\n' + 'the "hash()" built-in.\n' + '\n' + 'This support allows immutable sequences, such as "tuple" ' + 'instances, to\n' + 'be used as "dict" keys and stored in "set" and "frozenset" ' + 'instances.\n' + '\n' + 'Attempting to hash an immutable sequence that contains ' + 'unhashable\n' + 'values will result in "TypeError".\n' + '\n' + '\n' + 'Mutable Sequence Types\n' + '======================\n' + '\n' + 'The operations in the following table are defined on mutable ' + 'sequence\n' + 'types. The "collections.abc.MutableSequence" ABC is provided ' + 'to make\n' + 'it easier to correctly implement these operations on custom ' + 'sequence\n' + 'types.\n' + '\n' + 'In the table *s* is an instance of a mutable sequence type, ' + '*t* is any\n' + 'iterable object and *x* is an arbitrary object that meets any ' + 'type and\n' + 'value restrictions imposed by *s* (for example, "bytearray" ' + 'only\n' + 'accepts integers that meet the value restriction "0 <= x <= ' + '255").\n' + '\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| Operation | ' + 'Result | Notes |\n' + '+================================+==================================+=======================+\n' + '| "s[i] = x" | item *i* of *s* is replaced ' + 'by | |\n' + '| | ' + '*x* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s[i:j] = t" | slice of *s* from *i* to ' + '*j* is | |\n' + '| | replaced by the contents of ' + 'the | |\n' + '| | iterable ' + '*t* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "del s[i:j]" | same as "s[i:j] = ' + '[]" | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s[i:j:k] = t" | the elements of "s[i:j:k]" ' + 'are | (1) |\n' + '| | replaced by those of ' + '*t* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "del s[i:j:k]" | removes the elements ' + 'of | |\n' + '| | "s[i:j:k]" from the ' + 'list | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.append(x)" | appends *x* to the end of ' + 'the | |\n' + '| | sequence (same ' + 'as | |\n' + '| | "s[len(s):len(s)] = ' + '[x]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.clear()" | removes all items from "s" ' + '(same | (5) |\n' + '| | as "del ' + 's[:]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.copy()" | creates a shallow copy of ' + '"s" | (5) |\n' + '| | (same as ' + '"s[:]") | |\n' + 
'+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.extend(t)" | extends *s* with the ' + 'contents of | |\n' + '| | *t* (same as ' + '"s[len(s):len(s)] = | |\n' + '| | ' + 't") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.insert(i, x)" | inserts *x* into *s* at ' + 'the | |\n' + '| | index given by *i* (same ' + 'as | |\n' + '| | "s[i:i] = ' + '[x]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.pop([i])" | retrieves the item at *i* ' + 'and | (2) |\n' + '| | also removes it from ' + '*s* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.remove(x)" | remove the first item from ' + '*s* | (3) |\n' + '| | where "s[i] == ' + 'x" | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.reverse()" | reverses the items of *s* ' + 'in | (4) |\n' + '| | ' + 'place | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '\n' + 'Notes:\n' + '\n' + '1. *t* must have the same length as the slice it is ' + 'replacing.\n' + '\n' + '2. The optional argument *i* defaults to "-1", so that by ' + 'default\n' + ' the last item is removed and returned.\n' + '\n' + '3. "remove" raises "ValueError" when *x* is not found in *s*.\n' + '\n' + '4. The "reverse()" method modifies the sequence in place for\n' + ' economy of space when reversing a large sequence. To ' + 'remind users\n' + ' that it operates by side effect, it does not return the ' + 'reversed\n' + ' sequence.\n' + '\n' + '5. "clear()" and "copy()" are included for consistency with ' + 'the\n' + " interfaces of mutable containers that don't support " + 'slicing\n' + ' operations (such as "dict" and "set")\n' + '\n' + ' New in version 3.3: "clear()" and "copy()" methods.\n' + '\n' + '\n' + 'Lists\n' + '=====\n' + '\n' + 'Lists are mutable sequences, typically used to store ' + 'collections of\n' + 'homogeneous items (where the precise degree of similarity will ' + 'vary by\n' + 'application).\n' + '\n' + 'class class list([iterable])\n' + '\n' + ' Lists may be constructed in several ways:\n' + '\n' + ' * Using a pair of square brackets to denote the empty list: ' + '"[]"\n' + '\n' + ' * Using square brackets, separating items with commas: ' + '"[a]",\n' + ' "[a, b, c]"\n' + '\n' + ' * Using a list comprehension: "[x for x in iterable]"\n' + '\n' + ' * Using the type constructor: "list()" or "list(iterable)"\n' + '\n' + ' The constructor builds a list whose items are the same and ' + 'in the\n' + " same order as *iterable*'s items. *iterable* may be either " + 'a\n' + ' sequence, a container that supports iteration, or an ' + 'iterator\n' + ' object. If *iterable* is already a list, a copy is made ' + 'and\n' + ' returned, similar to "iterable[:]". For example, ' + '"list(\'abc\')"\n' + ' returns "[\'a\', \'b\', \'c\']" and "list( (1, 2, 3) )" ' + 'returns "[1, 2,\n' + ' 3]". If no argument is given, the constructor creates a new ' + 'empty\n' + ' list, "[]".\n' + '\n' + ' Many other operations also produce lists, including the ' + '"sorted()"\n' + ' built-in.\n' + '\n' + ' Lists implement all of the *common* and *mutable* sequence\n' + ' operations. 
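For instance, a rough sketch exercising a few of the common and mutable operations from the tables above:

   >>> s = list('spam')
   >>> 'a' in s, s.index('a'), s.count('s')   # common sequence operations
   (True, 2, 1)
   >>> s[1:3]
   ['p', 'a']
   >>> s[1:3] = ['l', 'o']                    # mutable: slice assignment
   >>> s.append('!')
   >>> s
   ['s', 'l', 'o', 'm', '!']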
Lists also provide the following additional ' + 'method:\n' + '\n' + ' sort(*, key=None, reverse=None)\n' + '\n' + ' This method sorts the list in place, using only "<" ' + 'comparisons\n' + ' between items. Exceptions are not suppressed - if any ' + 'comparison\n' + ' operations fail, the entire sort operation will fail ' + '(and the\n' + ' list will likely be left in a partially modified ' + 'state).\n' + '\n' + ' "sort()" accepts two arguments that can only be passed ' + 'by\n' + ' keyword (*keyword-only arguments*):\n' + '\n' + ' *key* specifies a function of one argument that is used ' + 'to\n' + ' extract a comparison key from each list element (for ' + 'example,\n' + ' "key=str.lower"). The key corresponding to each item in ' + 'the list\n' + ' is calculated once and then used for the entire sorting ' + 'process.\n' + ' The default value of "None" means that list items are ' + 'sorted\n' + ' directly without calculating a separate key value.\n' + '\n' + ' The "functools.cmp_to_key()" utility is available to ' + 'convert a\n' + ' 2.x style *cmp* function to a *key* function.\n' + '\n' + ' *reverse* is a boolean value. If set to "True", then ' + 'the list\n' + ' elements are sorted as if each comparison were ' + 'reversed.\n' + '\n' + ' This method modifies the sequence in place for economy ' + 'of space\n' + ' when sorting a large sequence. To remind users that it ' + 'operates\n' + ' by side effect, it does not return the sorted sequence ' + '(use\n' + ' "sorted()" to explicitly request a new sorted list ' + 'instance).\n' + '\n' + ' The "sort()" method is guaranteed to be stable. A sort ' + 'is\n' + ' stable if it guarantees not to change the relative order ' + 'of\n' + ' elements that compare equal --- this is helpful for ' + 'sorting in\n' + ' multiple passes (for example, sort by department, then ' + 'by salary\n' + ' grade).\n' + '\n' + ' **CPython implementation detail:** While a list is being ' + 'sorted,\n' + ' the effect of attempting to mutate, or even inspect, the ' + 'list is\n' + ' undefined. The C implementation of Python makes the ' + 'list appear\n' + ' empty for the duration, and raises "ValueError" if it ' + 'can detect\n' + ' that the list has been mutated during a sort.\n' + '\n' + '\n' + 'Tuples\n' + '======\n' + '\n' + 'Tuples are immutable sequences, typically used to store ' + 'collections of\n' + 'heterogeneous data (such as the 2-tuples produced by the ' + '"enumerate()"\n' + 'built-in). Tuples are also used for cases where an immutable ' + 'sequence\n' + 'of homogeneous data is needed (such as allowing storage in a ' + '"set" or\n' + '"dict" instance).\n' + '\n' + 'class class tuple([iterable])\n' + '\n' + ' Tuples may be constructed in a number of ways:\n' + '\n' + ' * Using a pair of parentheses to denote the empty tuple: ' + '"()"\n' + '\n' + ' * Using a trailing comma for a singleton tuple: "a," or ' + '"(a,)"\n' + '\n' + ' * Separating items with commas: "a, b, c" or "(a, b, c)"\n' + '\n' + ' * Using the "tuple()" built-in: "tuple()" or ' + '"tuple(iterable)"\n' + '\n' + ' The constructor builds a tuple whose items are the same and ' + 'in the\n' + " same order as *iterable*'s items. *iterable* may be either " + 'a\n' + ' sequence, a container that supports iteration, or an ' + 'iterator\n' + ' object. If *iterable* is already a tuple, it is returned\n' + ' unchanged. For example, "tuple(\'abc\')" returns "(\'a\', ' + '\'b\', \'c\')"\n' + ' and "tuple( [1, 2, 3] )" returns "(1, 2, 3)". 
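An illustrative sort() session matching the keyword-only arguments described above; the sample data is arbitrary:

    >>> words = ['banana', 'Apple', 'cherry']
    >>> words.sort(key=str.lower)            # the key is computed once per item
    >>> words
    ['Apple', 'banana', 'cherry']
    >>> words.sort(key=len, reverse=True)    # stable: equal-length items keep their relative order
    >>> words
    ['banana', 'cherry', 'Apple']
    >>> sorted(words)                        # sorted() returns a new list instead of mutating
    ['Apple', 'banana', 'cherry']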
If no ' + 'argument is\n' + ' given, the constructor creates a new empty tuple, "()".\n' + '\n' + ' Note that it is actually the comma which makes a tuple, not ' + 'the\n' + ' parentheses. The parentheses are optional, except in the ' + 'empty\n' + ' tuple case, or when they are needed to avoid syntactic ' + 'ambiguity.\n' + ' For example, "f(a, b, c)" is a function call with three ' + 'arguments,\n' + ' while "f((a, b, c))" is a function call with a 3-tuple as ' + 'the sole\n' + ' argument.\n' + '\n' + ' Tuples implement all of the *common* sequence operations.\n' + '\n' + 'For heterogeneous collections of data where access by name is ' + 'clearer\n' + 'than access by index, "collections.namedtuple()" may be a ' + 'more\n' + 'appropriate choice than a simple tuple object.\n' + '\n' + '\n' + 'Ranges\n' + '======\n' + '\n' + 'The "range" type represents an immutable sequence of numbers ' + 'and is\n' + 'commonly used for looping a specific number of times in "for" ' + 'loops.\n' + '\n' + 'class class range(stop)\n' + 'class class range(start, stop[, step])\n' + '\n' + ' The arguments to the range constructor must be integers ' + '(either\n' + ' built-in "int" or any object that implements the ' + '"__index__"\n' + ' special method). If the *step* argument is omitted, it ' + 'defaults to\n' + ' "1". If the *start* argument is omitted, it defaults to ' + '"0". If\n' + ' *step* is zero, "ValueError" is raised.\n' + '\n' + ' For a positive *step*, the contents of a range "r" are ' + 'determined\n' + ' by the formula "r[i] = start + step*i" where "i >= 0" and ' + '"r[i] <\n' + ' stop".\n' + '\n' + ' For a negative *step*, the contents of the range are still\n' + ' determined by the formula "r[i] = start + step*i", but the\n' + ' constraints are "i >= 0" and "r[i] > stop".\n' + '\n' + ' A range object will be empty if "r[0]" does not meet the ' + 'value\n' + ' constraint. 
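A brief sketch of the comma-makes-the-tuple rule and the collections.namedtuple() alternative mentioned above; the names are arbitrary:

    >>> t = 1, 2, 3                  # the commas make the tuple; parentheses are optional
    >>> t
    (1, 2, 3)
    >>> (1) == 1                     # parentheses alone do not: this is just the int 1
    True
    >>> singleton = (1,)             # a one-item tuple needs the trailing comma
    >>> from collections import namedtuple
    >>> Point = namedtuple('Point', ['x', 'y'])
    >>> p = Point(2, 3)
    >>> p.x, p[0]                    # access by name or by index
    (2, 2)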
Ranges do support negative indices, but these ' + 'are\n' + ' interpreted as indexing from the end of the sequence ' + 'determined by\n' + ' the positive indices.\n' + '\n' + ' Ranges containing absolute values larger than "sys.maxsize" ' + 'are\n' + ' permitted but some features (such as "len()") may raise\n' + ' "OverflowError".\n' + '\n' + ' Range examples:\n' + '\n' + ' >>> list(range(10))\n' + ' [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n' + ' >>> list(range(1, 11))\n' + ' [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n' + ' >>> list(range(0, 30, 5))\n' + ' [0, 5, 10, 15, 20, 25]\n' + ' >>> list(range(0, 10, 3))\n' + ' [0, 3, 6, 9]\n' + ' >>> list(range(0, -10, -1))\n' + ' [0, -1, -2, -3, -4, -5, -6, -7, -8, -9]\n' + ' >>> list(range(0))\n' + ' []\n' + ' >>> list(range(1, 0))\n' + ' []\n' + '\n' + ' Ranges implement all of the *common* sequence operations ' + 'except\n' + ' concatenation and repetition (due to the fact that range ' + 'objects\n' + ' can only represent sequences that follow a strict pattern ' + 'and\n' + ' repetition and concatenation will usually violate that ' + 'pattern).\n' + '\n' + 'The advantage of the "range" type over a regular "list" or ' + '"tuple" is\n' + 'that a "range" object will always take the same (small) amount ' + 'of\n' + 'memory, no matter the size of the range it represents (as it ' + 'only\n' + 'stores the "start", "stop" and "step" values, calculating ' + 'individual\n' + 'items and subranges as needed).\n' + '\n' + 'Range objects implement the "collections.abc.Sequence" ABC, ' + 'and\n' + 'provide features such as containment tests, element index ' + 'lookup,\n' + 'slicing and support for negative indices (see *Sequence Types ' + '---\n' + 'list, tuple, range*):\n' + '\n' + '>>> r = range(0, 20, 2)\n' + '>>> r\n' + 'range(0, 20, 2)\n' + '>>> 11 in r\n' + 'False\n' + '>>> 10 in r\n' + 'True\n' + '>>> r.index(10)\n' + '5\n' + '>>> r[5]\n' + '10\n' + '>>> r[:5]\n' + 'range(0, 10, 2)\n' + '>>> r[-1]\n' + '18\n' + '\n' + 'Testing range objects for equality with "==" and "!=" compares ' + 'them as\n' + 'sequences. That is, two range objects are considered equal if ' + 'they\n' + 'represent the same sequence of values. (Note that two range ' + 'objects\n' + 'that compare equal might have different "start", "stop" and ' + '"step"\n' + 'attributes, for example "range(0) == range(2, 1, 3)" or ' + '"range(0, 3,\n' + '2) == range(0, 4, 2)".)\n' + '\n' + 'Changed in version 3.2: Implement the Sequence ABC. Support ' + 'slicing\n' + 'and negative indices. Test "int" objects for membership in ' + 'constant\n' + 'time instead of iterating through all items.\n' + '\n' + "Changed in version 3.3: Define '==' and '!=' to compare range " + 'objects\n' + 'based on the sequence of values they define (instead of ' + 'comparing\n' + 'based on object identity).\n' + '\n' + 'New in version 3.3: The "start", "stop" and "step" ' + 'attributes.\n', + 'typesseq-mutable': '\n' + 'Mutable Sequence Types\n' + '**********************\n' + '\n' + 'The operations in the following table are defined on ' + 'mutable sequence\n' + 'types. 
The "collections.abc.MutableSequence" ABC is ' + 'provided to make\n' + 'it easier to correctly implement these operations on ' + 'custom sequence\n' + 'types.\n' + '\n' + 'In the table *s* is an instance of a mutable sequence ' + 'type, *t* is any\n' + 'iterable object and *x* is an arbitrary object that ' + 'meets any type and\n' + 'value restrictions imposed by *s* (for example, ' + '"bytearray" only\n' + 'accepts integers that meet the value restriction "0 <= ' + 'x <= 255").\n' + '\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| Operation | ' + 'Result | ' + 'Notes |\n' + '+================================+==================================+=======================+\n' + '| "s[i] = x" | item *i* of *s* is ' + 'replaced by | |\n' + '| | ' + '*x* ' + '| |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s[i:j] = t" | slice of *s* from ' + '*i* to *j* is | |\n' + '| | replaced by the ' + 'contents of the | |\n' + '| | iterable ' + '*t* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "del s[i:j]" | same as "s[i:j] = ' + '[]" | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s[i:j:k] = t" | the elements of ' + '"s[i:j:k]" are | (1) |\n' + '| | replaced by those ' + 'of *t* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "del s[i:j:k]" | removes the ' + 'elements of | |\n' + '| | "s[i:j:k]" from the ' + 'list | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.append(x)" | appends *x* to the ' + 'end of the | |\n' + '| | sequence (same ' + 'as | |\n' + '| | "s[len(s):len(s)] = ' + '[x]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.clear()" | removes all items ' + 'from "s" (same | (5) |\n' + '| | as "del ' + 's[:]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.copy()" | creates a shallow ' + 'copy of "s" | (5) |\n' + '| | (same as ' + '"s[:]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.extend(t)" | extends *s* with ' + 'the contents of | |\n' + '| | *t* (same as ' + '"s[len(s):len(s)] = | |\n' + '| | ' + 't") ' + '| |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.insert(i, x)" | inserts *x* into ' + '*s* at the | |\n' + '| | index given by *i* ' + '(same as | |\n' + '| | "s[i:i] = ' + '[x]") | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.pop([i])" | retrieves the item ' + 'at *i* and | (2) |\n' + '| | also removes it ' + 'from *s* | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.remove(x)" | remove the first ' + 'item from *s* | (3) |\n' + '| | where "s[i] == ' + 'x" | |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '| "s.reverse()" | reverses the items ' + 'of *s* in | (4) |\n' + '| | ' + 'place ' + '| |\n' + '+--------------------------------+----------------------------------+-----------------------+\n' + '\n' + 'Notes:\n' + '\n' + '1. 
*t* must have the same length as the slice it is ' + 'replacing.\n' + '\n' + '2. The optional argument *i* defaults to "-1", so that ' + 'by default\n' + ' the last item is removed and returned.\n' + '\n' + '3. "remove" raises "ValueError" when *x* is not found ' + 'in *s*.\n' + '\n' + '4. The "reverse()" method modifies the sequence in ' + 'place for\n' + ' economy of space when reversing a large sequence. ' + 'To remind users\n' + ' that it operates by side effect, it does not return ' + 'the reversed\n' + ' sequence.\n' + '\n' + '5. "clear()" and "copy()" are included for consistency ' + 'with the\n' + " interfaces of mutable containers that don't support " + 'slicing\n' + ' operations (such as "dict" and "set")\n' + '\n' + ' New in version 3.3: "clear()" and "copy()" ' + 'methods.\n', + 'unary': '\n' + 'Unary arithmetic and bitwise operations\n' + '***************************************\n' + '\n' + 'All unary arithmetic and bitwise operations have the same ' + 'priority:\n' + '\n' + ' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n' + '\n' + 'The unary "-" (minus) operator yields the negation of its ' + 'numeric\n' + 'argument.\n' + '\n' + 'The unary "+" (plus) operator yields its numeric argument ' + 'unchanged.\n' + '\n' + 'The unary "~" (invert) operator yields the bitwise inversion of ' + 'its\n' + 'integer argument. The bitwise inversion of "x" is defined as\n' + '"-(x+1)". It only applies to integral numbers.\n' + '\n' + 'In all three cases, if the argument does not have the proper ' + 'type, a\n' + '"TypeError" exception is raised.\n', + 'while': '\n' + 'The "while" statement\n' + '*********************\n' + '\n' + 'The "while" statement is used for repeated execution as long as ' + 'an\n' + 'expression is true:\n' + '\n' + ' while_stmt ::= "while" expression ":" suite\n' + ' ["else" ":" suite]\n' + '\n' + 'This repeatedly tests the expression and, if it is true, executes ' + 'the\n' + 'first suite; if the expression is false (which may be the first ' + 'time\n' + 'it is tested) the suite of the "else" clause, if present, is ' + 'executed\n' + 'and the loop terminates.\n' + '\n' + 'A "break" statement executed in the first suite terminates the ' + 'loop\n' + 'without executing the "else" clause\'s suite. A "continue" ' + 'statement\n' + 'executed in the first suite skips the rest of the suite and goes ' + 'back\n' + 'to testing the expression.\n', + 'with': '\n' + 'The "with" statement\n' + '********************\n' + '\n' + 'The "with" statement is used to wrap the execution of a block ' + 'with\n' + 'methods defined by a context manager (see section *With Statement\n' + 'Context Managers*). This allows common ' + '"try"..."except"..."finally"\n' + 'usage patterns to be encapsulated for convenient reuse.\n' + '\n' + ' with_stmt ::= "with" with_item ("," with_item)* ":" suite\n' + ' with_item ::= expression ["as" target]\n' + '\n' + 'The execution of the "with" statement with one "item" proceeds as\n' + 'follows:\n' + '\n' + '1. The context expression (the expression given in the ' + '"with_item")\n' + ' is evaluated to obtain a context manager.\n' + '\n' + '2. The context manager\'s "__exit__()" is loaded for later use.\n' + '\n' + '3. The context manager\'s "__enter__()" method is invoked.\n' + '\n' + '4. 
If a target was included in the "with" statement, the return\n' + ' value from "__enter__()" is assigned to it.\n' + '\n' + ' Note: The "with" statement guarantees that if the ' + '"__enter__()"\n' + ' method returns without an error, then "__exit__()" will ' + 'always be\n' + ' called. Thus, if an error occurs during the assignment to ' + 'the\n' + ' target list, it will be treated the same as an error ' + 'occurring\n' + ' within the suite would be. See step 6 below.\n' + '\n' + '5. The suite is executed.\n' + '\n' + '6. The context manager\'s "__exit__()" method is invoked. If an\n' + ' exception caused the suite to be exited, its type, value, and\n' + ' traceback are passed as arguments to "__exit__()". Otherwise, ' + 'three\n' + ' "None" arguments are supplied.\n' + '\n' + ' If the suite was exited due to an exception, and the return ' + 'value\n' + ' from the "__exit__()" method was false, the exception is ' + 'reraised.\n' + ' If the return value was true, the exception is suppressed, and\n' + ' execution continues with the statement following the "with"\n' + ' statement.\n' + '\n' + ' If the suite was exited for any reason other than an exception, ' + 'the\n' + ' return value from "__exit__()" is ignored, and execution ' + 'proceeds\n' + ' at the normal location for the kind of exit that was taken.\n' + '\n' + 'With more than one item, the context managers are processed as if\n' + 'multiple "with" statements were nested:\n' + '\n' + ' with A() as a, B() as b:\n' + ' suite\n' + '\n' + 'is equivalent to\n' + '\n' + ' with A() as a:\n' + ' with B() as b:\n' + ' suite\n' + '\n' + 'Changed in version 3.1: Support for multiple context expressions.\n' + '\n' + 'See also: **PEP 0343** - The "with" statement\n' + '\n' + ' The specification, background, and examples for the Python ' + '"with"\n' + ' statement.\n', + 'yield': '\n' + 'The "yield" statement\n' + '*********************\n' + '\n' + ' yield_stmt ::= yield_expression\n' + '\n' + 'A "yield" statement is semantically equivalent to a *yield\n' + 'expression*. The yield statement can be used to omit the ' + 'parentheses\n' + 'that would otherwise be required in the equivalent yield ' + 'expression\n' + 'statement. For example, the yield statements\n' + '\n' + ' yield \n' + ' yield from \n' + '\n' + 'are equivalent to the yield expression statements\n' + '\n' + ' (yield )\n' + ' (yield from )\n' + '\n' + 'Yield expressions and statements are only used when defining a\n' + '*generator* function, and are only used in the body of the ' + 'generator\n' + 'function. 
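A minimal context-manager sketch of the "__enter__()"/"__exit__()" steps walked through above; the class is a hypothetical example:

    >>> class Suppress:
    ...     def __enter__(self):
    ...         return self                     # step 4: bound to any "as" target
    ...     def __exit__(self, exc_type, exc_value, traceback):
    ...         return exc_type is ValueError   # step 6: a true return value suppresses the exception
    ...
    >>> with Suppress():
    ...     raise ValueError('swallowed')
    ...
    >>> 'execution continues after the with block'
    'execution continues after the with block'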
Using yield in a function definition is sufficient to ' + 'cause\n' + 'that definition to create a generator function instead of a ' + 'normal\n' + 'function.\n' + '\n' + 'For full details of "yield" semantics, refer to the *Yield\n' + 'expressions* section.\n'} diff --git a/Darwin/lib/python3.4/queue.py b/Darwin/lib/python3.5/queue.py similarity index 98% rename from Darwin/lib/python3.4/queue.py rename to Darwin/lib/python3.5/queue.py index 3cee36b..572425e 100644 --- a/Darwin/lib/python3.4/queue.py +++ b/Darwin/lib/python3.5/queue.py @@ -6,10 +6,7 @@ except ImportError: import dummy_threading as threading from collections import deque from heapq import heappush, heappop -try: - from time import monotonic as time -except ImportError: - from time import time +from time import monotonic as time __all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue'] diff --git a/Darwin/lib/python3.4/quopri.py b/Darwin/lib/python3.5/quopri.py similarity index 94% rename from Darwin/lib/python3.4/quopri.py rename to Darwin/lib/python3.5/quopri.py index e5bd010..cbd979a 100755 --- a/Darwin/lib/python3.4/quopri.py +++ b/Darwin/lib/python3.5/quopri.py @@ -44,13 +44,11 @@ def quote(c): def encode(input, output, quotetabs, header=False): """Read 'input', apply quoted-printable encoding, and write to 'output'. - 'input' and 'output' are files with readline() and write() methods. - The 'quotetabs' flag indicates whether embedded tabs and spaces should be - quoted. Note that line-ending tabs and spaces are always encoded, as per - RFC 1521. - The 'header' flag indicates whether we are encoding spaces as _ as per - RFC 1522. - """ + 'input' and 'output' are binary file objects. The 'quotetabs' flag + indicates whether embedded tabs and spaces should be quoted. Note that + line-ending tabs and spaces are always encoded, as per RFC 1521. + The 'header' flag indicates whether we are encoding spaces as _ as per RFC + 1522.""" if b2a_qp is not None: data = input.read() @@ -118,7 +116,7 @@ def encodestring(s, quotetabs=False, header=False): def decode(input, output, header=False): """Read 'input', apply quoted-printable decoding, and write to 'output'. - 'input' and 'output' are files with readline() and write() methods. + 'input' and 'output' are binary file objects. 
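The docstring change above stresses that quopri.encode()/decode() take binary file objects; a round-trip sketch using in-memory io.BytesIO buffers (the payload is arbitrary):

    >>> import io, quopri
    >>> payload = b'caf\xc3\xa9 = tab\tend\n'
    >>> encoded = io.BytesIO()
    >>> quopri.encode(io.BytesIO(payload), encoded, quotetabs=False)
    >>> decoded = io.BytesIO()
    >>> quopri.decode(io.BytesIO(encoded.getvalue()), decoded)
    >>> decoded.getvalue() == payload
    True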
If 'header' is true, decode underscore as space (per RFC 1522).""" if a2b_qp is not None: @@ -147,7 +145,7 @@ def decode(input, output, header=False): new = new + c; i = i+1 elif i+1 == n and not partial: partial = 1; break - elif i+1 < n and line[i+1] == ESCAPE: + elif i+1 < n and line[i+1:i+2] == ESCAPE: new = new + ESCAPE; i = i+2 elif i+2 < n and ishex(line[i+1:i+2]) and ishex(line[i+2:i+3]): new = new + bytes((unhex(line[i+1:i+3]),)); i = i+3 diff --git a/Darwin/lib/python3.4/random.py b/Darwin/lib/python3.5/random.py similarity index 99% rename from Darwin/lib/python3.4/random.py rename to Darwin/lib/python3.5/random.py index 174e755..1f5be45 100644 --- a/Darwin/lib/python3.4/random.py +++ b/Darwin/lib/python3.5/random.py @@ -355,7 +355,10 @@ class Random(_random.Random): """ u = self.random() - c = 0.5 if mode is None else (mode - low) / (high - low) + try: + c = 0.5 if mode is None else (mode - low) / (high - low) + except ZeroDivisionError: + return low if u > c: u = 1.0 - u c = 1.0 - c @@ -684,7 +687,7 @@ def _test_generator(n, func, args): print(round(t1-t0, 3), 'sec,', end=' ') avg = total/n stddev = _sqrt(sqsum/n - avg*avg) - print('avg %g, stddev %g, min %g, max %g' % \ + print('avg %g, stddev %g, min %g, max %g\n' % \ (avg, stddev, smallest, largest)) diff --git a/Darwin/lib/python3.4/re.py b/Darwin/lib/python3.5/re.py similarity index 91% rename from Darwin/lib/python3.4/re.py rename to Darwin/lib/python3.5/re.py index 2e4d87c..dde8901 100644 --- a/Darwin/lib/python3.4/re.py +++ b/Darwin/lib/python3.5/re.py @@ -122,12 +122,19 @@ This module also defines an exception 'error'. import sys import sre_compile import sre_parse +try: + import _locale +except ImportError: + _locale = None # public symbols -__all__ = [ "match", "fullmatch", "search", "sub", "subn", "split", "findall", - "compile", "purge", "template", "escape", "A", "I", "L", "M", "S", "X", - "U", "ASCII", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE", - "UNICODE", "error" ] +__all__ = [ + "match", "fullmatch", "search", "sub", "subn", "split", + "findall", "finditer", "compile", "purge", "template", "escape", + "error", "A", "I", "L", "M", "S", "X", "U", + "ASCII", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE", + "UNICODE", +] __version__ = "2.2.1" @@ -205,14 +212,12 @@ def findall(pattern, string, flags=0): Empty matches are included in the result.""" return _compile(pattern, flags).findall(string) -if sys.hexversion >= 0x02020000: - __all__.append("finditer") - def finditer(pattern, string, flags=0): - """Return an iterator over all non-overlapping matches in the - string. For each match, the iterator returns a match object. +def finditer(pattern, string, flags=0): + """Return an iterator over all non-overlapping matches in the + string. For each match, the iterator returns a match object. - Empty matches are included in the result.""" - return _compile(pattern, flags).finditer(string) + Empty matches are included in the result.""" + return _compile(pattern, flags).finditer(string) def compile(pattern, flags=0): "Compile a regular expression pattern, returning a pattern object." 
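The random.triangular() hunk above guards the mode normalisation against a zero-width range; a sketch of the resulting behaviour (values are arbitrary):

    >>> import random
    >>> 2 <= random.triangular(2, 8, 5) <= 8   # usual case: low, high, mode
    True
    >>> # high == low with an explicit mode used to raise ZeroDivisionError;
    >>> # the patched code catches it and returns low
    >>> random.triangular(5, 5, 5)
    5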
@@ -272,24 +277,30 @@ _pattern_type = type(sre_compile.compile("", 0)) _MAXCACHE = 512 def _compile(pattern, flags): # internal: compile pattern - bypass_cache = flags & DEBUG - if not bypass_cache: - try: - return _cache[type(pattern), pattern, flags] - except KeyError: - pass + try: + p, loc = _cache[type(pattern), pattern, flags] + if loc is None or loc == _locale.setlocale(_locale.LC_CTYPE): + return p + except KeyError: + pass if isinstance(pattern, _pattern_type): if flags: raise ValueError( - "Cannot process flags argument with a compiled pattern") + "cannot process flags argument with a compiled pattern") return pattern if not sre_compile.isstring(pattern): raise TypeError("first argument must be string or compiled pattern") p = sre_compile.compile(pattern, flags) - if not bypass_cache: + if not (flags & DEBUG): if len(_cache) >= _MAXCACHE: _cache.clear() - _cache[type(pattern), pattern, flags] = p + if p.flags & LOCALE: + if not _locale: + return p + loc = _locale.setlocale(_locale.LC_CTYPE) + else: + loc = None + _cache[type(pattern), pattern, flags] = p, loc return p def _compile_repl(repl, pattern): @@ -340,10 +351,11 @@ class Scanner: s = sre_parse.Pattern() s.flags = flags for phrase, action in lexicon: + gid = s.opengroup() p.append(sre_parse.SubPattern(s, [ - (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))), + (SUBPATTERN, (gid, sre_parse.parse(phrase, flags))), ])) - s.groups = len(p)+1 + s.closegroup(gid, p[-1]) p = sre_parse.SubPattern(s, [(BRANCH, (None, p))]) self.scanner = sre_compile.compile(p) def scan(self, string): @@ -351,7 +363,7 @@ class Scanner: append = result.append match = self.scanner.scanner(string).match i = 0 - while 1: + while True: m = match() if not m: break diff --git a/Darwin/lib/python3.4/reprlib.py b/Darwin/lib/python3.5/reprlib.py similarity index 93% rename from Darwin/lib/python3.4/reprlib.py rename to Darwin/lib/python3.5/reprlib.py index f803360..ecbd2cc 100644 --- a/Darwin/lib/python3.4/reprlib.py +++ b/Darwin/lib/python3.5/reprlib.py @@ -83,16 +83,22 @@ class Repr: return self._repr_iterable(x, level, '[', ']', self.maxlist) def repr_array(self, x, level): + if not x: + return "array('%s')" % x.typecode header = "array('%s', [" % x.typecode return self._repr_iterable(x, level, header, '])', self.maxarray) def repr_set(self, x, level): + if not x: + return 'set()' x = _possibly_sorted(x) - return self._repr_iterable(x, level, 'set([', '])', self.maxset) + return self._repr_iterable(x, level, '{', '}', self.maxset) def repr_frozenset(self, x, level): + if not x: + return 'frozenset()' x = _possibly_sorted(x) - return self._repr_iterable(x, level, 'frozenset([', '])', + return self._repr_iterable(x, level, 'frozenset({', '})', self.maxfrozenset) def repr_deque(self, x, level): @@ -136,7 +142,7 @@ class Repr: # Bugs in x.__repr__() can cause arbitrary # exceptions -- then make up something except Exception: - return '<%s instance at %x>' % (x.__class__.__name__, id(x)) + return '<%s instance at %#x>' % (x.__class__.__name__, id(x)) if len(s) > self.maxother: i = max(0, (self.maxother-3)//2) j = max(0, self.maxother-3-i) diff --git a/Darwin/lib/python3.4/rlcompleter.py b/Darwin/lib/python3.5/rlcompleter.py similarity index 97% rename from Darwin/lib/python3.4/rlcompleter.py rename to Darwin/lib/python3.5/rlcompleter.py index 94f9341..d517c0e 100644 --- a/Darwin/lib/python3.4/rlcompleter.py +++ b/Darwin/lib/python3.5/rlcompleter.py @@ -73,6 +73,12 @@ class Completer: if self.use_main_ns: self.namespace = __main__.__dict__ + if not 
text.strip(): + if state == 0: + return '\t' + else: + return None + if state == 0: if "." in text: self.matches = self.attr_matches(text) diff --git a/Darwin/lib/python3.4/runpy.py b/Darwin/lib/python3.5/runpy.py similarity index 99% rename from Darwin/lib/python3.4/runpy.py rename to Darwin/lib/python3.5/runpy.py index 0bb57d7..1c5729d 100644 --- a/Darwin/lib/python3.4/runpy.py +++ b/Darwin/lib/python3.5/runpy.py @@ -58,7 +58,7 @@ class _ModifiedArgv0(object): self.value = self._sentinel sys.argv[0] = self._saved_value -# TODO: Replace these helpers with importlib._bootstrap._SpecMethods +# TODO: Replace these helpers with importlib._bootstrap_external functions. def _run_code(code, run_globals, init_globals=None, mod_name=None, mod_spec=None, pkg_name=None, script_name=None): diff --git a/Darwin/lib/python3.4/sched.py b/Darwin/lib/python3.5/sched.py similarity index 97% rename from Darwin/lib/python3.4/sched.py rename to Darwin/lib/python3.5/sched.py index 2e6b00a..b47648d 100644 --- a/Darwin/lib/python3.4/sched.py +++ b/Darwin/lib/python3.5/sched.py @@ -35,16 +35,12 @@ try: import threading except ImportError: import dummy_threading as threading -try: - from time import monotonic as _time -except ImportError: - from time import time as _time +from time import monotonic as _time __all__ = ["scheduler"] class Event(namedtuple('Event', 'time, priority, action, argument, kwargs')): def __eq__(s, o): return (s.time, s.priority) == (o.time, o.priority) - def __ne__(s, o): return (s.time, s.priority) != (o.time, o.priority) def __lt__(s, o): return (s.time, s.priority) < (o.time, o.priority) def __le__(s, o): return (s.time, s.priority) <= (o.time, o.priority) def __gt__(s, o): return (s.time, s.priority) > (o.time, o.priority) diff --git a/Darwin/lib/python3.4/selectors.py b/Darwin/lib/python3.5/selectors.py similarity index 87% rename from Darwin/lib/python3.4/selectors.py rename to Darwin/lib/python3.5/selectors.py index 9be9225..6d569c3 100644 --- a/Darwin/lib/python3.4/selectors.py +++ b/Darwin/lib/python3.5/selectors.py @@ -174,6 +174,8 @@ class BaseSelector(metaclass=ABCMeta): SelectorKey for this file object """ mapping = self.get_map() + if mapping is None: + raise RuntimeError('Selector is closed') try: return mapping[fileobj] except KeyError: @@ -256,6 +258,7 @@ class _BaseSelectorImpl(BaseSelector): def close(self): self._fd_to_key.clear() + self._map = None def get_map(self): return self._map @@ -418,7 +421,12 @@ if hasattr(select, 'epoll'): # epoll_wait() has a resolution of 1 millisecond, round away # from zero to wait *at least* timeout seconds. timeout = math.ceil(timeout * 1e3) * 1e-3 - max_ev = len(self._fd_to_key) + + # epoll_wait() expects `maxevents` to be greater than zero; + # we want to make sure that `select()` can be called when no + # FD is registered. 
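For orientation, a minimal event-loop sketch against the selectors API being patched here; the socket pair is just sample I/O, and on Darwin DefaultSelector typically resolves to KqueueSelector (see the preference order further down):

    import selectors
    import socket

    sel = selectors.DefaultSelector()
    a, b = socket.socketpair()
    sel.register(a, selectors.EVENT_READ, data='peer-a')
    b.send(b'ping')
    for key, events in sel.select(timeout=1):   # list of (SelectorKey, events) pairs
        print(key.data, key.fileobj.recv(4))    # -> peer-a b'ping'
    sel.unregister(a)
    sel.close()                                 # per the hunk above, get_map() returns None afterwards
    a.close()
    b.close()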
+ max_ev = max(len(self._fd_to_key), 1) + ready = [] try: fd_event_list = self._epoll.poll(timeout, max_ev) @@ -441,6 +449,64 @@ if hasattr(select, 'epoll'): super().close() +if hasattr(select, 'devpoll'): + + class DevpollSelector(_BaseSelectorImpl): + """Solaris /dev/poll selector.""" + + def __init__(self): + super().__init__() + self._devpoll = select.devpoll() + + def fileno(self): + return self._devpoll.fileno() + + def register(self, fileobj, events, data=None): + key = super().register(fileobj, events, data) + poll_events = 0 + if events & EVENT_READ: + poll_events |= select.POLLIN + if events & EVENT_WRITE: + poll_events |= select.POLLOUT + self._devpoll.register(key.fd, poll_events) + return key + + def unregister(self, fileobj): + key = super().unregister(fileobj) + self._devpoll.unregister(key.fd) + return key + + def select(self, timeout=None): + if timeout is None: + timeout = None + elif timeout <= 0: + timeout = 0 + else: + # devpoll() has a resolution of 1 millisecond, round away from + # zero to wait *at least* timeout seconds. + timeout = math.ceil(timeout * 1e3) + ready = [] + try: + fd_event_list = self._devpoll.poll(timeout) + except InterruptedError: + return ready + for fd, event in fd_event_list: + events = 0 + if event & ~select.POLLIN: + events |= EVENT_WRITE + if event & ~select.POLLOUT: + events |= EVENT_READ + + key = self._key_from_fd(fd) + if key: + ready.append((key, events & key.events)) + return ready + + def close(self): + self._devpoll.close() + super().close() + + if hasattr(select, 'kqueue'): class KqueueSelector(_BaseSelectorImpl): @@ -513,12 +579,15 @@ if hasattr(select, 'kqueue'): super().close() -# Choose the best implementation: roughly, epoll|kqueue > poll > select. +# Choose the best implementation, roughly: +# epoll|kqueue|devpoll > poll > select. # select() also can't accept a FD > FD_SETSIZE (usually around 1024) if 'KqueueSelector' in globals(): DefaultSelector = KqueueSelector elif 'EpollSelector' in globals(): DefaultSelector = EpollSelector +elif 'DevpollSelector' in globals(): + DefaultSelector = DevpollSelector elif 'PollSelector' in globals(): DefaultSelector = PollSelector else: diff --git a/Darwin/lib/python3.4/shelve.py b/Darwin/lib/python3.5/shelve.py similarity index 94% rename from Darwin/lib/python3.4/shelve.py rename to Darwin/lib/python3.5/shelve.py index cef580e..581baf1 100644 --- a/Darwin/lib/python3.4/shelve.py +++ b/Darwin/lib/python3.5/shelve.py @@ -138,17 +138,21 @@ class Shelf(collections.MutableMapping): self.close() def close(self): - self.sync() + if self.dict is None: + return try: - self.dict.close() - except AttributeError: - pass - # Catch errors that may happen when close is called from __del__ - # because CPython is in interpreter shutdown. - try: - self.dict = _ClosedDict() - except (NameError, TypeError): - self.dict = None + self.sync() + try: + self.dict.close() + except AttributeError: + pass + finally: + # Catch errors that may happen when close is called from __del__ + # because CPython is in interpreter shutdown. 
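A generic usage sketch of the shelve module whose close() path is reworked above (the filename is arbitrary); the new guard simply returns early when the underlying dict has already been cleared to None:

    import shelve

    # Shelf supports the context-manager protocol; __exit__ calls close()
    with shelve.open('/tmp/example_shelf') as db:
        db['answer'] = 42          # values are pickled on assignment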
+ try: + self.dict = _ClosedDict() + except: + self.dict = None def __del__(self): if not hasattr(self, 'writeback'): diff --git a/Darwin/lib/python3.4/shlex.py b/Darwin/lib/python3.5/shlex.py similarity index 97% rename from Darwin/lib/python3.4/shlex.py rename to Darwin/lib/python3.5/shlex.py index 69f3b45..f083918 100644 --- a/Darwin/lib/python3.4/shlex.py +++ b/Darwin/lib/python3.5/shlex.py @@ -49,9 +49,6 @@ class shlex: self.token = '' self.filestack = deque() self.source = None - if self.debug: - print('shlex: reading from %s, line %d' \ - % (self.instream, self.lineno)) def push_token(self, tok): "Push a token onto the stack popped by the get_token method" @@ -290,15 +287,17 @@ def quote(s): return "'" + s.replace("'", "'\"'\"'") + "'" -if __name__ == '__main__': - if len(sys.argv) == 1: - lexer = shlex() - else: - file = sys.argv[1] - lexer = shlex(open(file), file) +def _print_tokens(lexer): while 1: tt = lexer.get_token() - if tt: - print("Token: " + repr(tt)) - else: + if not tt: break + print("Token: " + repr(tt)) + +if __name__ == '__main__': + if len(sys.argv) == 1: + _print_tokens(shlex()) + else: + fn = sys.argv[1] + with open(fn) as f: + _print_tokens(shlex(f, fn)) diff --git a/Darwin/lib/python3.4/shutil.py b/Darwin/lib/python3.5/shutil.py similarity index 93% rename from Darwin/lib/python3.4/shutil.py rename to Darwin/lib/python3.5/shutil.py index 0cd6ec4..a5da587 100644 --- a/Darwin/lib/python3.4/shutil.py +++ b/Darwin/lib/python3.5/shutil.py @@ -7,7 +7,6 @@ XXX The functions here don't copy the resource fork or other metadata on Mac. import os import sys import stat -from os.path import abspath import fnmatch import collections import errno @@ -20,6 +19,13 @@ try: except ImportError: _BZ2_SUPPORTED = False +try: + import lzma + del lzma + _LZMA_SUPPORTED = True +except ImportError: + _LZMA_SUPPORTED = False + try: from pwd import getpwnam except ImportError: @@ -36,7 +42,8 @@ __all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", "register_archive_format", "unregister_archive_format", "get_unpack_formats", "register_unpack_format", "unregister_unpack_format", "unpack_archive", - "ignore_patterns", "chown", "which"] + "ignore_patterns", "chown", "which", "get_terminal_size", + "SameFileError"] # disk_usage is added later, if available on the platform class Error(OSError): @@ -320,7 +327,11 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, if not os.path.exists(linkto) and ignore_dangling_symlinks: continue # otherwise let the copy occurs. copy2 will raise an error - copy_function(srcname, dstname) + if os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, + copy_function) + else: + copy_function(srcname, dstname) elif os.path.isdir(srcname): copytree(srcname, dstname, symlinks, ignore, copy_function) else: @@ -336,7 +347,7 @@ def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, copystat(src, dst) except OSError as why: # Copying file access times may fail on Windows - if why.winerror is None: + if getattr(why, 'winerror', None) is None: errors.append((src, dst, str(why))) if errors: raise Error(errors) @@ -486,7 +497,7 @@ def _basename(path): sep = os.path.sep + (os.path.altsep or '') return os.path.basename(path.rstrip(sep)) -def move(src, dst): +def move(src, dst, copy_function=copy2): """Recursively move a file or directory to another location. This is similar to the Unix "mv" command. Return the file or directory's destination. 
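The _print_tokens() helper introduced above is just a plain token loop; the same pattern as a usage sketch (the input string is arbitrary):

    import shlex

    lexer = shlex.shlex('alpha "beta gamma" 42')
    while True:
        tok = lexer.get_token()
        if not tok:                  # get_token() returns the eof marker ('') when exhausted
            break
        print("Token: " + repr(tok))
    # Token: 'alpha' / Token: '"beta gamma"' / Token: '42'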
@@ -503,6 +514,11 @@ def move(src, dst): recreated under the new name if os.rename() fails because of cross filesystem renames. + The optional `copy_function` argument is a callable that will be used + to copy the source or it will be delegated to `copytree`. + By default, copy2() is used, but any function that supports the same + signature (like copy()) can be used. + A lot more could be done here... A look at a mv.c shows a lot of the issues this implementation glosses over. @@ -527,17 +543,19 @@ def move(src, dst): os.unlink(src) elif os.path.isdir(src): if _destinsrc(src, dst): - raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) - copytree(src, real_dst, symlinks=True) + raise Error("Cannot move a directory '%s' into itself" + " '%s'." % (src, dst)) + copytree(src, real_dst, copy_function=copy_function, + symlinks=True) rmtree(src) else: - copy2(src, real_dst) + copy_function(src, real_dst) os.unlink(src) return real_dst def _destinsrc(src, dst): - src = abspath(src) - dst = abspath(dst) + src = os.path.abspath(src) + dst = os.path.abspath(dst) if not src.endswith(os.path.sep): src += os.path.sep if not dst.endswith(os.path.sep): @@ -573,14 +591,14 @@ def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, """Create a (possibly compressed) tar file from all the files under 'base_dir'. - 'compress' must be "gzip" (the default), "bzip2", or None. + 'compress' must be "gzip" (the default), "bzip2", "xz", or None. 'owner' and 'group' can be used to define an owner and a group for the archive that is being built. If not provided, the current owner and group will be used. The output tar file will be named 'base_name' + ".tar", possibly plus - the appropriate compression extension (".gz", or ".bz2"). + the appropriate compression extension (".gz", ".bz2", or ".xz"). Returns the output filename. """ @@ -591,6 +609,10 @@ def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, tar_compression['bzip2'] = 'bz2' compress_ext['bzip2'] = '.bz2' + if _LZMA_SUPPORTED: + tar_compression['xz'] = 'xz' + compress_ext['xz'] = '.xz' + # flags for compression program, each element of list will be an argument if compress is not None and compress not in compress_ext: raise ValueError("bad value for 'compress', or compression format not " @@ -599,7 +621,7 @@ def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, archive_name = base_name + '.tar' + compress_ext.get(compress, '') archive_dir = os.path.dirname(archive_name) - if not os.path.exists(archive_dir): + if archive_dir and not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: @@ -630,23 +652,6 @@ def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, return archive_name -def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): - # XXX see if we want to keep an external call here - if verbose: - zipoptions = "-r" - else: - zipoptions = "-rq" - from distutils.errors import DistutilsExecError - from distutils.spawn import spawn - try: - spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) - except DistutilsExecError: - # XXX really should distinguish between "couldn't find - # external 'zip' command" and "zip failed". 
- raise ExecError("unable to create zip file '%s': " - "could neither import the 'zipfile' module nor " - "find a standalone zip utility") % zip_filename - def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): """Create a zip file from all the files under 'base_dir'. @@ -656,39 +661,31 @@ def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): available, raises ExecError. Returns the name of the output zip file. """ + import zipfile + zip_filename = base_name + ".zip" archive_dir = os.path.dirname(base_name) - if not os.path.exists(archive_dir): + if archive_dir and not os.path.exists(archive_dir): if logger is not None: logger.info("creating %s", archive_dir) if not dry_run: os.makedirs(archive_dir) - # If zipfile module is not available, try spawning an external 'zip' - # command. - try: - import zipfile - except ImportError: - zipfile = None + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) - if zipfile is None: - _call_external_zip(base_dir, zip_filename, verbose, dry_run) - else: - if logger is not None: - logger.info("creating '%s' and adding '%s' to it", - zip_filename, base_dir) - - if not dry_run: - with zipfile.ZipFile(zip_filename, "w", - compression=zipfile.ZIP_DEFLATED) as zf: - for dirpath, dirnames, filenames in os.walk(base_dir): - for name in filenames: - path = os.path.normpath(os.path.join(dirpath, name)) - if os.path.isfile(path): - zf.write(path, path) - if logger is not None: - logger.info("adding '%s'", path) + if not dry_run: + with zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) as zf: + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zf.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) return zip_filename @@ -702,6 +699,10 @@ if _BZ2_SUPPORTED: _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file") +if _LZMA_SUPPORTED: + _ARCHIVE_FORMATS['xztar'] = (_make_tarball, [('compress', 'xz')], + "xz'ed tar-file") + def get_archive_formats(): """Returns a list of supported formats for archiving and unarchiving. 
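With the lzma probe above, an 'xztar' format is registered for both archiving and unpacking when the module is importable; an illustrative call with hypothetical paths:

    import shutil

    # ('xztar', "xz'ed tar-file") appears here when Python was built with liblzma
    print(shutil.get_archive_formats())
    archive = shutil.make_archive('/tmp/site-backup', 'xztar', root_dir='/tmp/site')
    print(archive)                                    # -> /tmp/site-backup.tar.xz
    shutil.unpack_archive(archive, '/tmp/site-restored')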
@@ -890,7 +891,7 @@ def _unpack_zipfile(filename, extract_dir): zip.close() def _unpack_tarfile(filename, extract_dir): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir` """ try: tarobj = tarfile.open(filename) @@ -909,9 +910,13 @@ _UNPACK_FORMATS = { } if _BZ2_SUPPORTED: - _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + _UNPACK_FORMATS['bztar'] = (['.tar.bz2', '.tbz2'], _unpack_tarfile, [], "bzip2'ed tar-file") +if _LZMA_SUPPORTED: + _UNPACK_FORMATS['xztar'] = (['.tar.xz', '.txz'], _unpack_tarfile, [], + "xz'ed tar-file") + def _find_unpack_format(filename): for name, info in _UNPACK_FORMATS.items(): for extension in info[0]: diff --git a/Darwin/lib/python3.5/signal.py b/Darwin/lib/python3.5/signal.py new file mode 100644 index 0000000..371d712 --- /dev/null +++ b/Darwin/lib/python3.5/signal.py @@ -0,0 +1,79 @@ +import _signal +from _signal import * +from functools import wraps as _wraps +from enum import IntEnum as _IntEnum + +_globals = globals() + +_IntEnum._convert( + 'Signals', __name__, + lambda name: + name.isupper() + and (name.startswith('SIG') and not name.startswith('SIG_')) + or name.startswith('CTRL_')) + +_IntEnum._convert( + 'Handlers', __name__, + lambda name: name in ('SIG_DFL', 'SIG_IGN')) + +if 'pthread_sigmask' in _globals: + _IntEnum._convert( + 'Sigmasks', __name__, + lambda name: name in ('SIG_BLOCK', 'SIG_UNBLOCK', 'SIG_SETMASK')) + + +def _int_to_enum(value, enum_klass): + """Convert a numeric value to an IntEnum member. + If it's not a known member, return the numeric value itself. + """ + try: + return enum_klass(value) + except ValueError: + return value + + +def _enum_to_int(value): + """Convert an IntEnum member to a numeric value. + If it's not a IntEnum member return the value itself. + """ + try: + return int(value) + except (ValueError, TypeError): + return value + + +@_wraps(_signal.signal) +def signal(signalnum, handler): + handler = _signal.signal(_enum_to_int(signalnum), _enum_to_int(handler)) + return _int_to_enum(handler, Handlers) + + +@_wraps(_signal.getsignal) +def getsignal(signalnum): + handler = _signal.getsignal(signalnum) + return _int_to_enum(handler, Handlers) + + +if 'pthread_sigmask' in _globals: + @_wraps(_signal.pthread_sigmask) + def pthread_sigmask(how, mask): + sigs_set = _signal.pthread_sigmask(how, mask) + return set(_int_to_enum(x, Signals) for x in sigs_set) + pthread_sigmask.__doc__ = _signal.pthread_sigmask.__doc__ + + +if 'sigpending' in _globals: + @_wraps(_signal.sigpending) + def sigpending(): + sigs = _signal.sigpending() + return set(_int_to_enum(x, Signals) for x in sigs) + + +if 'sigwait' in _globals: + @_wraps(_signal.sigwait) + def sigwait(sigset): + retsig = _signal.sigwait(sigset) + return _int_to_enum(retsig, Signals) + sigwait.__doc__ = _signal.sigwait + +del _globals, _wraps diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/AES.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/AES.py new file mode 100644 index 0000000..14f68d8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/AES.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- +# +# Cipher/AES.py : AES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
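The new signal.py shim above maps raw integers to IntEnum members; a small sketch of the visible effect (the handler is an arbitrary example, SIGALRM assumes a Unix platform):

    import signal

    def handler(signum, frame):
        print('caught', signal.Signals(signum).name)   # e.g. 'SIGALRM'

    old = signal.signal(signal.SIGALRM, handler)   # previous disposition, e.g. Handlers.SIG_DFL
    print(signal.getsignal(signal.SIGALRM))        # the Python-level handler we just installed
    signal.signal(signal.SIGALRM, old)             # enum members convert back to ints transparently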
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""AES symmetric cipher + +AES `(Advanced Encryption Standard)`__ is a symmetric block cipher standardized +by NIST_ . It has a fixed data block size of 16 bytes. +Its keys can be 128, 192, or 256 bits long. + +AES is very fast and secure, and it is the de facto standard for symmetric +encryption. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import AES + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(AES.block_size) + >>> cipher = AES.new(key, AES.MODE_CFB, iv) + >>> msg = iv + cipher.encrypt(b'Attack at dawn') + +.. __: http://en.wikipedia.org/wiki/Advanced_Encryption_Standard +.. _NIST: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _AES + +class AESCipher (blockalgo.BlockAlgo): + """AES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize an AES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _AES, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new AES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 16 (*AES-128*), 24 (*AES-192*), or 32 (*AES-256*) bytes long. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `AESCipher` object + """ + return AESCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. 
+MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 16 +#: Size of a key (in bytes) +key_size = ( 16, 24, 32 ) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC2.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC2.py new file mode 100644 index 0000000..b5234e6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC2.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC2.py : ARC2.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""RC2 symmetric cipher + +RC2_ (Rivest's Cipher version 2) is a symmetric block cipher designed +by Ron Rivest in 1987. The cipher started as a proprietary design, +that was reverse engineered and anonymously posted on Usenet in 1996. +For this reason, the algorithm was first called *Alleged* RC2 (ARC2), +since the company that owned RC2 (RSA Data Inc.) did not confirm whether +the details leaked into public domain were really correct. + +The company eventually published its full specification in RFC2268_. + +RC2 has a fixed data block size of 8 bytes. Length of its keys can vary from +8 to 128 bits. One particular property of RC2 is that the actual +cryptographic strength of the key (*effective key length*) can be reduced +via a parameter. + +Even though RC2 is not cryptographically broken, it has not been analyzed as +thoroughly as AES, which is also faster than RC2. + +New designs should not use RC2. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import ARC2 + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(ARC2.block_size) + >>> cipher = ARC2.new(key, ARC2.MODE_CFB, iv) + >>> msg = iv + cipher.encrypt(b'Attack at dawn') + +.. _RC2: http://en.wikipedia.org/wiki/RC2 +.. _RFC2268: http://tools.ietf.org/html/rfc2268 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _ARC2 + +class RC2Cipher (blockalgo.BlockAlgo): + """RC2 cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC2 cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _ARC2, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new RC2 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. 
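Rounding out the AES docstring example above with the decryption side; the key mirrors the docstring and the IV travels at the front of the message:

    >>> from Crypto.Cipher import AES
    >>> from Crypto import Random
    >>>
    >>> key = b'Sixteen byte key'
    >>> iv = Random.new().read(AES.block_size)
    >>> msg = iv + AES.new(key, AES.MODE_CFB, iv).encrypt(b'Attack at dawn')
    >>>
    >>> # receiving side: recover the IV from the front of the message
    >>> cipher = AES.new(key, AES.MODE_CFB, msg[:AES.block_size])
    >>> cipher.decrypt(msg[AES.block_size:])
    b'Attack at dawn'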
+ Its length can vary from 1 to 128 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + effective_keylen : integer + Maximum cryptographic strength of the key, in bits. + It can vary from 0 to 1024. The default value is 1024. + + :Return: an `RC2Cipher` object + """ + return RC2Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(1,16+1) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC4.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC4.py new file mode 100644 index 0000000..d83f75b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/ARC4.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC4.py : ARC4 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""ARC4 symmetric cipher + +ARC4_ (Alleged RC4) is an implementation of RC4 (Rivest's Cipher version 4), +a symmetric stream cipher designed by Ron Rivest in 1987. + +The cipher started as a proprietary design, that was reverse engineered and +anonymously posted on Usenet in 1994. The company that owns RC4 (RSA Data +Inc.) never confirmed the correctness of the leaked algorithm. 
+ +Unlike RC2, the company has never published the full specification of RC4, +of whom it still holds the trademark. + +ARC4 keys can vary in length from 40 to 2048 bits. + +One problem of ARC4 is that it does not take a nonce or an IV. If it is required +to encrypt multiple messages with the same long-term key, a distinct +independent nonce must be created for each message, and a short-term key must +be derived from the combination of the long-term key and the nonce. +Due to the weak key scheduling algorithm of RC2, the combination must be carried +out with a complex function (e.g. a cryptographic hash) and not by simply +concatenating key and nonce. + +New designs should not use ARC4. A good alternative is AES +(`Crypto.Cipher.AES`) in any of the modes that turn it into a stream cipher (OFB, CFB, or CTR). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import ARC4 + >>> from Crypto.Hash import SHA + >>> from Crypto import Random + >>> + >>> key = b'Very long and confidential key' + >>> nonce = Random.new().read(16) + >>> tempkey = SHA.new(key+nonce).digest() + >>> cipher = ARC4.new(tempkey) + >>> msg = nonce + cipher.encrypt(b'Open the pod bay doors, HAL') + +.. _ARC4: http://en.wikipedia.org/wiki/RC4 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import _ARC4 + +class ARC4Cipher: + """ARC4 cipher object""" + + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC4 cipher object + + See also `new()` at the module level.""" + + self._cipher = _ARC4.new(key, *args, **kwargs) + self.block_size = self._cipher.block_size + self.key_size = self._cipher.key_size + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. It can be of any size. + :Return: the encrypted data (byte string, as long as the + plaintext). + """ + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. It can be of any size. + :Return: the decrypted data (byte string, as long as the + ciphertext). + """ + return self._cipher.decrypt(ciphertext) + +def new(key, *args, **kwargs): + """Create a new ARC4 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It can have any length, with a minimum of 40 bytes. + Its cryptograpic strength is always capped to 2048 bits (256 bytes). + + :Return: an `ARC4Cipher` object + """ + return ARC4Cipher(key, *args, **kwargs) + +#: Size of a data block (in bytes) +block_size = 1 +#: Size of a key (in bytes) +key_size = range(1,256+1) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/Blowfish.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/Blowfish.py new file mode 100644 index 0000000..8c81d96 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/Blowfish.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Blowfish.py : Blowfish +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
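For symmetry with the ARC4 docstring above, a sketch of the receiving side re-deriving the per-message key from the transmitted nonce; the same caveat applies, new designs should prefer AES:

    >>> from Crypto.Cipher import ARC4
    >>> from Crypto.Hash import SHA
    >>> from Crypto import Random
    >>>
    >>> key = b'Very long and confidential key'
    >>> nonce = Random.new().read(16)
    >>> tempkey = SHA.new(key + nonce).digest()
    >>> msg = nonce + ARC4.new(tempkey).encrypt(b'Open the pod bay doors, HAL')
    >>>
    >>> # receiver: same long-term key plus the nonce taken from the message
    >>> tempkey2 = SHA.new(key + msg[:16]).digest()
    >>> ARC4.new(tempkey2).decrypt(msg[16:])
    b'Open the pod bay doors, HAL'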
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Blowfish symmetric cipher + +Blowfish_ is a symmetric block cipher designed by Bruce Schneier. + +It has a fixed data block size of 8 bytes and its keys can vary in length +from 32 to 448 bits (4 to 56 bytes). + +Blowfish is deemed secure and it is fast. However, its keys should be chosen +to be big enough to withstand a brute force attack (e.g. at least 16 bytes). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import Blowfish + >>> from Crypto import Random + >>> from struct import pack + >>> + >>> bs = Blowfish.block_size + >>> key = b'An arbitrarily long key' + >>> iv = Random.new().read(bs) + >>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv) + >>> plaintext = b'docendo discimus ' + >>> plen = bs - divmod(len(plaintext),bs)[1] + >>> padding = [plen]*plen + >>> padding = pack('b'*plen, *padding) + >>> msg = iv + cipher.encrypt(plaintext + padding) + +.. _Blowfish: http://www.schneier.com/blowfish.html + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _Blowfish + +class BlowfishCipher (blockalgo.BlockAlgo): + """Blowfish cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a Blowfish cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _Blowfish, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new Blowfish cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length can vary from 4 to 56 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: a `BlowfishCipher` object + """ + return BlowfishCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. 
+MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(4,56+1) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/CAST.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/CAST.py new file mode 100644 index 0000000..89543b2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/CAST.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# +# Cipher/CAST.py : CAST +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""CAST-128 symmetric cipher + +CAST-128_ (or CAST5) is a symmetric block cipher specified in RFC2144_. + +It has a fixed data block size of 8 bytes. Its key can vary in length +from 40 to 128 bits. + +CAST is deemed to be cryptographically secure, but its usage is not widespread. +Keys of sufficient length should be used to prevent brute force attacks +(128 bits are recommended). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import CAST + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(CAST.block_size) + >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, iv) + >>> plaintext = b'sona si latine loqueris ' + >>> msg = cipher.encrypt(plaintext) + >>> + ... + >>> eiv = msg[:CAST.block_size+2] + >>> ciphertext = msg[CAST.block_size+2:] + >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, eiv) + >>> print cipher.decrypt(ciphertext) + +.. _CAST-128: http://en.wikipedia.org/wiki/CAST-128 +.. _RFC2144: http://tools.ietf.org/html/rfc2144 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _CAST + +class CAST128Cipher(blockalgo.BlockAlgo): + """CAST-128 cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a CAST-128 cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _CAST, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new CAST-128 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length may vary from 5 to 16 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. 
+ + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `CAST128Cipher` object + """ + return CAST128Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(5,16+1) diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES.py new file mode 100644 index 0000000..2fae42f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES.py : DES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""DES symmetric cipher + +DES `(Data Encryption Standard)`__ is a symmetric block cipher standardized +by NIST_ . It has a fixed data block size of 8 bytes. +Its keys are 64 bits long, even though 8 bits were used for integrity (now they +are ignored) and do not contribute to securty. + +DES is cryptographically secure, but its key length is too short by nowadays +standards and it could be brute forced with some effort. + +DES should not be used for new designs. Use `AES`. 
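A minimal CBC sketch with the DES module itself may help here, since the doctest that follows exercises the same PEP 272 interface through DES3; the key and plaintext below are placeholders, and the input is already a multiple of the 8-byte block:

    >>> from Crypto.Cipher import DES
    >>> from Crypto import Random
    >>>
    >>> key = b'8bytekey'                      # exactly 8 bytes; the parity bits are ignored
    >>> iv = Random.new().read(DES.block_size)
    >>> cipher = DES.new(key, DES.MODE_CBC, iv)
    >>> msg = iv + cipher.encrypt(b'sona si latine loqueris ')   # 24 bytes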
+ +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import DES3 + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(DES3.block_size) + >>> cipher = DES3.new(key, DES3.MODE_OFB, iv) + >>> plaintext = b'sona si latine loqueris ' + >>> msg = iv + cipher.encrypt(plaintext) + +.. __: http://en.wikipedia.org/wiki/Data_Encryption_Standard +.. _NIST: http://csrc.nist.gov/publications/fips/fips46-3/fips46-3.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _DES + +class DESCipher(blockalgo.BlockAlgo): + """DES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a DES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _DES, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new DES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 8 byte long. The parity bits will be ignored. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `DESCipher` object + """ + return DESCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = 8 diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES3.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES3.py new file mode 100644 index 0000000..7fedac8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/DES3.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES3.py : DES3 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Triple DES symmetric cipher + +`Triple DES`__ (or TDES or TDEA or 3DES) is a symmetric block cipher standardized by NIST_. +It has a fixed data block size of 8 bytes. Its keys are 128 (*Option 1*) or 192 +bits (*Option 2*) long. +However, 1 out of 8 bits is used for redundancy and do not contribute to +security. The effective key length is respectively 112 or 168 bits. + +TDES consists of the concatenation of 3 simple `DES` ciphers. + +The plaintext is first DES encrypted with *K1*, then decrypted with *K2*, +and finally encrypted again with *K3*. The ciphertext is decrypted in the reverse manner. + +The 192 bit key is a bundle of three 64 bit independent subkeys: *K1*, *K2*, and *K3*. + +The 128 bit key is split into *K1* and *K2*, whereas *K1=K3*. + +It is important that all subkeys are different, otherwise TDES would degrade to +single `DES`. + +TDES is cryptographically secure, even though it is neither as secure nor as fast +as `AES`. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import DES + >>> from Crypto import Random + >>> from Crypto.Util import Counter + >>> + >>> key = b'-8B key-' + >>> nonce = Random.new().read(DES.block_size/2) + >>> ctr = Counter.new(DES.block_size*8/2, prefix=nonce) + >>> cipher = DES.new(key, DES.MODE_CTR, counter=ctr) + >>> plaintext = b'We are no longer the knights who say ni!' + >>> msg = nonce + cipher.encrypt(plaintext) + +.. __: http://en.wikipedia.org/wiki/Triple_DES +.. _NIST: http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _DES3 + +class DES3Cipher(blockalgo.BlockAlgo): + """TDES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a TDES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _DES3, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new TDES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 16 or 24 bytes long. The parity bits will be ignored. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. 
+ For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Attention: it is important that all 8 byte subkeys are different, + otherwise TDES would degrade to single `DES`. + :Return: an `DES3Cipher` object + """ + return DES3Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = ( 16, 24 ) diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/PKCS1_OAEP.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/PKCS1_OAEP.py new file mode 100644 index 0000000..2738ce3 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/PKCS1_OAEP.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA encryption protocol according to PKCS#1 OAEP + +See RFC3447__ or the `original RSA Labs specification`__ . + +This scheme is more properly called ``RSAES-OAEP``. + +As an example, a sender may encrypt a message in this way: + + >>> from Crypto.Cipher import PKCS1_OAEP + >>> from Crypto.PublicKey import RSA + >>> + >>> message = 'To be encrypted' + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> cipher = PKCS1_OAEP.new(key) + >>> ciphertext = cipher.encrypt(message) + +At the receiver side, decryption can be done using the private part of +the RSA key: + + >>> key = RSA.importKey(open('privkey.der').read()) + >>> cipher = PKCS1_OAP.new(key) + >>> message = cipher.decrypt(ciphertext) + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. 
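A compact encrypt/decrypt round trip, written against the `PKCS1_OAEP` module imported at the top of the example above; the DER key files are the same placeholders used there, and byte strings are passed because the cipher operates on bytes:

    >>> from Crypto.Cipher import PKCS1_OAEP
    >>> from Crypto.PublicKey import RSA
    >>>
    >>> pub = RSA.importKey(open('pubkey.der', 'rb').read())
    >>> priv = RSA.importKey(open('privkey.der', 'rb').read())
    >>> ct = PKCS1_OAEP.new(pub).encrypt(b'To be encrypted')
    >>> PKCS1_OAEP.new(priv).decrypt(ct)
    b'To be encrypted'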
+""" + + + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS1OAEP_Cipher' ] + +import Crypto.Signature.PKCS1_PSS +import Crypto.Hash.SHA + +from Crypto.Util.py3compat import * +import Crypto.Util.number +from Crypto.Util.number import ceil_div +from Crypto.Util.strxor import strxor + +class PKCS1OAEP_Cipher: + """This cipher can perform PKCS#1 v1.5 OAEP encryption or decryption.""" + + def __init__(self, key, hashAlgo, mgfunc, label): + """Initialize this PKCS#1 OAEP cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + hashAlgo : hash object + The hash function to use. This can be a module under `Crypto.Hash` + or an existing hash object created from any of such modules. If not specified, + `Crypto.Hash.SHA` (that is, SHA-1) is used. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 is used (a safe choice). + label : string + A label to apply to this particular encryption. If not specified, + an empty string is used. Specifying a label does not improve + security. + + :attention: Modify the mask generation function only if you know what you are doing. + Sender and receiver must use the same one. + """ + self._key = key + + if hashAlgo: + self._hashObj = hashAlgo + else: + self._hashObj = Crypto.Hash.SHA + + if mgfunc: + self._mgf = mgfunc + else: + self._mgf = lambda x,y: Crypto.Signature.PKCS1_PSS.MGF1(x,y,self._hashObj) + + self._label = label + + def can_encrypt(self): + """Return True/1 if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True/1 if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 OAEP encryption of a message. + + This function is named ``RSAES-OAEP-ENCRYPT``, and is specified in + section 7.1.1 of RFC3447. + + :Parameters: + message : string + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) + minus 2, minus twice the hash output size. + + :Return: A string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. 
+ """ + # TODO: Verify the key is RSA + + randFunc = self._key._randfunc + + # See 7.1.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + mLen = len(message) + + # Step 1b + ps_len = k-mLen-2*hLen-2 + if ps_len<0: + raise ValueError("Plaintext is too long.") + # Step 2a + lHash = self._hashObj.new(self._label).digest() + # Step 2b + ps = bchr(0x00)*ps_len + # Step 2c + db = lHash + ps + bchr(0x01) + message + # Step 2d + ros = randFunc(hLen) + # Step 2e + dbMask = self._mgf(ros, k-hLen-1) + # Step 2f + maskedDB = strxor(db, dbMask) + # Step 2g + seedMask = self._mgf(maskedDB, hLen) + # Step 2h + maskedSeed = strxor(ros, seedMask) + # Step 2i + em = bchr(0x00) + maskedSeed + maskedDB + # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) + m = self._key.encrypt(em, 0)[0] + # Complete step 3c (I2OSP) + c = bchr(0x00)*(k-len(m)) + m + return c + + def decrypt(self, ct): + """Decrypt a PKCS#1 OAEP ciphertext. + + This function is named ``RSAES-OAEP-DECRYPT``, and is specified in + section 7.1.2 of RFC3447. + + :Parameters: + ct : string + The ciphertext that contains the message to recover. + + :Return: A string, the original message. + :Raise ValueError: + If the ciphertext length is incorrect, or if the decryption does not + succeed. + :Raise TypeError: + If the RSA key has no private half. + """ + # TODO: Verify the key is RSA + + # See 7.1.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + + # Step 1b and 1c + if len(ct) != k or k>> from Crypto.Cipher import PKCS1_v1_5 + >>> from Crypto.PublicKey import RSA + >>> from Crypto.Hash import SHA + >>> + >>> message = 'To be encrypted' + >>> h = SHA.new(message) + >>> + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> cipher = PKCS1_v1_5.new(key) + >>> ciphertext = cipher.encrypt(message+h.digest()) + +At the receiver side, decryption can be done using the private part of +the RSA key: + + >>> From Crypto.Hash import SHA + >>> from Crypto import Random + >>> + >>> key = RSA.importKey(open('privkey.der').read()) + >>> + >>> dsize = SHA.digest_size + >>> sentinel = Random.new().read(15+dsize) # Let's assume that average data length is 15 + >>> + >>> cipher = PKCS1_v1_5.new(key) + >>> message = cipher.decrypt(ciphertext, sentinel) + >>> + >>> digest = SHA.new(message[:-dsize]).digest() + >>> if digest==message[-dsize:]: # Note how we DO NOT look for the sentinel + >>> print "Encryption was correct." + >>> else: + >>> print "Encryption was not correct." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. +""" + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS115_Cipher' ] + +from Crypto.Util.number import ceil_div +from Crypto.Util.py3compat import * +import Crypto.Util.number + +class PKCS115_Cipher: + """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption.""" + + def __init__(self, key): + """Initialize this PKCS#1 v1.5 cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. 
+ """ + self._key = key + + def can_encrypt(self): + """Return True if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 v1.5 encryption of a message. + + This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and is specified in + section 7.2.1 of RFC3447. + For a complete example see `Crypto.Cipher.PKCS1_v1_5`. + + :Parameters: + message : byte string + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) minus 11. + + :Return: A byte string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. + + """ + # TODO: Verify the key is RSA + + randFunc = self._key._randfunc + + # See 7.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + mLen = len(message) + + # Step 1 + if mLen > k-11: + raise ValueError("Plaintext is too long.") + # Step 2a + class nonZeroRandByte: + def __init__(self, rf): self.rf=rf + def __call__(self, c): + while bord(c)==0x00: c=self.rf(1)[0] + return c + ps = tobytes(list(map(nonZeroRandByte(randFunc), randFunc(k-mLen-3)))) + # Step 2b + em = b('\x00\x02') + ps + bchr(0x00) + message + # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) + m = self._key.encrypt(em, 0)[0] + # Complete step 3c (I2OSP) + c = bchr(0x00)*(k-len(m)) + m + return c + + def decrypt(self, ct, sentinel): + """Decrypt a PKCS#1 v1.5 ciphertext. + + This function is named ``RSAES-PKCS1-V1_5-DECRYPT``, and is specified in + section 7.2.2 of RFC3447. + For a complete example see `Crypto.Cipher.PKCS1_v1_5`. + + :Parameters: + ct : byte string + The ciphertext that contains the message to recover. + sentinel : any type + The object to return to indicate that an error was detected during decryption. + + :Return: A byte string. It is either the original message or the ``sentinel`` (in case of an error). + :Raise ValueError: + If the ciphertext length is incorrect + :Raise TypeError: + If the RSA key has no private half. + + :attention: + You should **never** let the party who submitted the ciphertext know that + this function returned the ``sentinel`` value. + Armed with such knowledge (for a fair amount of carefully crafted but invalid ciphertexts), + an attacker is able to recontruct the plaintext of any other encryption that were carried out + with the same RSA public key (see `Bleichenbacher's`__ attack). + + In general, it should not be possible for the other party to distinguish + whether processing at the server side failed because the value returned + was a ``sentinel`` as opposed to a random, invalid message. + + In fact, the second option is not that unlikely: encryption done according to PKCS#1 v1.5 + embeds no good integrity check. There is roughly one chance + in 2^16 for a random ciphertext to be returned as a valid message + (although random looking). + + It is therefore advisabled to: + + 1. Select as ``sentinel`` a value that resembles a plausable random, invalid message. + 2. Not report back an error as soon as you detect a ``sentinel`` value. + Put differently, you should not explicitly check if the returned value is the ``sentinel`` or not. + 3. 
Cover all possible errors with a single, generic error indicator. + 4. Embed into the definition of ``message`` (at the protocol level) a digest (e.g. ``SHA-1``). + It is recommended for it to be the rightmost part ``message``. + 5. Where possible, monitor the number of errors due to ciphertexts originating from the same party, + and slow down the rate of the requests from such party (or even blacklist it altogether). + + **If you are designing a new protocol, consider using the more robust PKCS#1 OAEP.** + + .. __: http://www.bell-labs.com/user/bleichen/papers/pkcs.ps + + """ + + # TODO: Verify the key is RSA + + # See 7.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + if len(ct) != k: + raise ValueError("Ciphertext with incorrect length.") + # Step 2a (O2SIP), 2b (RSADP), and part of 2c (I2OSP) + m = self._key.decrypt(ct) + # Complete step 2c (I2OSP) + em = bchr(0x00)*(k-len(m)) + m + # Step 3 + sep = em.find(bchr(0x00),2) + if not em.startswith(b('\x00\x02')) or sep<10: + return sentinel + # Step 4 + return em[sep+1:] + +def new(key): + """Return a cipher object `PKCS115_Cipher` that can be used to perform PKCS#1 v1.5 encryption or decryption. + + :Parameters: + key : RSA key object + The key to use to encrypt or decrypt the message. This is a `Crypto.PublicKey.RSA` object. + Decryption is only possible if *key* is a private RSA key. + + """ + return PKCS115_Cipher(key) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/XOR.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/XOR.py new file mode 100644 index 0000000..46b8464 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/XOR.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# +# Cipher/XOR.py : XOR +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""XOR toy cipher + +XOR is one the simplest stream ciphers. Encryption and decryption are +performed by XOR-ing data with a keystream made by contatenating +the key. + +Do not use it for real applications! + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import _XOR + +class XORCipher: + """XOR cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a XOR cipher object + + See also `new()` at the module level.""" + self._cipher = _XOR.new(key, *args, **kwargs) + self.block_size = self._cipher.block_size + self.key_size = self._cipher.key_size + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. 
It can be of any size. + :Return: the encrypted data (byte string, as long as the + plaintext). + """ + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. It can be of any size. + :Return: the decrypted data (byte string, as long as the + ciphertext). + """ + return self._cipher.decrypt(ciphertext) + +def new(key, *args, **kwargs): + """Create a new XOR cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length may vary from 1 to 32 bytes. + + :Return: an `XORCipher` object + """ + return XORCipher(key, *args, **kwargs) + +#: Size of a data block (in bytes) +block_size = 1 +#: Size of a key (in bytes) +key_size = range(1,32+1) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_AES.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_AES.cpython-35m-darwin.so new file mode 100755 index 0000000..0ef8648 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_AES.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC2.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC2.cpython-35m-darwin.so new file mode 100755 index 0000000..92aca58 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC2.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC4.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC4.cpython-35m-darwin.so new file mode 100755 index 0000000..8fc5f47 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_ARC4.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_Blowfish.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_Blowfish.cpython-35m-darwin.so new file mode 100755 index 0000000..95e97b7 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_Blowfish.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_CAST.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_CAST.cpython-35m-darwin.so new file mode 100755 index 0000000..79d749a Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_CAST.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES.cpython-35m-darwin.so new file mode 100755 index 0000000..a47036e Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES3.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES3.cpython-35m-darwin.so new file mode 100755 index 0000000..a86e7a0 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_DES3.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_XOR.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_XOR.cpython-35m-darwin.so new file mode 100755 index 0000000..12a7772 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/_XOR.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/__init__.py 
b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/__init__.py new file mode 100644 index 0000000..7afed2d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Symmetric- and asymmetric-key encryption algorithms. + +Encryption algorithms transform plaintext in some way that +is dependent on a key or key pair, producing ciphertext. + +Symmetric algorithms +-------------------- + +Encryption can easily be reversed, if (and, hopefully, only if) +one knows the same key. +In other words, sender and receiver share the same key. + +The symmetric encryption modules here all support the interface described in PEP +272, "API for Block Encryption Algorithms". + +If you don't know which algorithm to choose, use AES because it's +standard and has undergone a fair bit of examination. + +======================== ======= ======================== +Module name Type Description +======================== ======= ======================== +`Crypto.Cipher.AES` Block Advanced Encryption Standard +`Crypto.Cipher.ARC2` Block Alleged RC2 +`Crypto.Cipher.ARC4` Stream Alleged RC4 +`Crypto.Cipher.Blowfish` Block Blowfish +`Crypto.Cipher.CAST` Block CAST +`Crypto.Cipher.DES` Block The Data Encryption Standard. + Very commonly used in the past, + but today its 56-bit keys are too small. +`Crypto.Cipher.DES3` Block Triple DES. +`Crypto.Cipher.XOR` Stream The simple XOR cipher. +======================== ======= ======================== + + +Asymmetric algorithms +--------------------- + +For asymmetric algorithms, the key to be used for decryption is totally +different and cannot be derived in a feasible way from the key used +for encryption. Put differently, sender and receiver each own one half +of a key pair. The encryption key is often called ``public`` whereas +the decryption key is called ``private``. 
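A minimal sketch of that split, assuming a freshly generated RSA key pair and the PKCS#1 v1.5 module listed below; the sentinel is an arbitrary placeholder that `decrypt()` returns instead of raising when the padding check fails:

    >>> from Crypto.PublicKey import RSA
    >>> from Crypto.Cipher import PKCS1_v1_5
    >>> from Crypto import Random
    >>>
    >>> priv = RSA.generate(2048)           # the receiver keeps the private half
    >>> pub = priv.publickey()              # the sender only ever needs the public half
    >>> ct = PKCS1_v1_5.new(pub).encrypt(b'secret')
    >>> sentinel = Random.new().read(16)
    >>> PKCS1_v1_5.new(priv).decrypt(ct, sentinel)
    b'secret'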
+ +========================== ======================= +Module name Description +========================== ======================= +`Crypto.Cipher.PKCS1_v1_5` PKCS#1 v1.5 encryption, based on RSA key pairs +`Crypto.Cipher.PKCS1_OAEP` PKCS#1 OAEP encryption, based on RSA key pairs +========================== ======================= + +:undocumented: __revision__, __package__, _AES, _ARC2, _ARC4, _Blowfish + _CAST, _DES, _DES3, _XOR +""" + +__all__ = ['AES', 'ARC2', 'ARC4', + 'Blowfish', 'CAST', 'DES', 'DES3', + 'XOR', + 'PKCS1_v1_5', 'PKCS1_OAEP' + ] + +__revision__ = "$Id$" + + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Cipher/blockalgo.py b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/blockalgo.py new file mode 100644 index 0000000..dd183dc --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Cipher/blockalgo.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +# +# Cipher/blockalgo.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Module with definitions common to all block ciphers.""" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +#: *Electronic Code Book (ECB)*. +#: This is the simplest encryption mode. Each of the plaintext blocks +#: is directly encrypted into a ciphertext block, independently of +#: any other block. This mode exposes frequency of symbols +#: in your plaintext. Other modes (e.g. *CBC*) should be used instead. +#: +#: See `NIST SP800-38A`_ , Section 6.1 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_ECB = 1 + +#: *Cipher-Block Chaining (CBC)*. Each of the ciphertext blocks depends +#: on the current and all previous plaintext blocks. An Initialization Vector +#: (*IV*) is required. +#: +#: The *IV* is a data block to be transmitted to the receiver. +#: The *IV* can be made public, but it must be authenticated by the receiver and +#: it should be picked randomly. +#: +#: See `NIST SP800-38A`_ , Section 6.2 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CBC = 2 + +#: *Cipher FeedBack (CFB)*. This mode is similar to CBC, but it transforms +#: the underlying block cipher into a stream cipher. Plaintext and ciphertext +#: are processed in *segments* of **s** bits. The mode is therefore sometimes +#: labelled **s**-bit CFB. An Initialization Vector (*IV*) is required. +#: +#: When encrypting, each ciphertext segment contributes to the encryption of +#: the next plaintext segment. 
+#: +#: This *IV* is a data block to be transmitted to the receiver. +#: The *IV* can be made public, but it should be picked randomly. +#: Reusing the same *IV* for encryptions done with the same key lead to +#: catastrophic cryptographic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.3 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CFB = 3 + +#: This mode should not be used. +MODE_PGP = 4 + +#: *Output FeedBack (OFB)*. This mode is very similar to CBC, but it +#: transforms the underlying block cipher into a stream cipher. +#: The keystream is the iterated block encryption of an Initialization Vector (*IV*). +#: +#: The *IV* is a data block to be transmitted to the receiver. +#: The *IV* can be made public, but it should be picked randomly. +#: +#: Reusing the same *IV* for encryptions done with the same key lead to +#: catastrophic cryptograhic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.4 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_OFB = 5 + +#: *CounTeR (CTR)*. This mode is very similar to ECB, in that +#: encryption of one block is done independently of all other blocks. +#: Unlike ECB, the block *position* contributes to the encryption and no +#: information leaks about symbol frequency. +#: +#: Each message block is associated to a *counter* which must be unique +#: across all messages that get encrypted with the same key (not just within +#: the same message). The counter is as big as the block size. +#: +#: Counters can be generated in several ways. The most straightword one is +#: to choose an *initial counter block* (which can be made public, similarly +#: to the *IV* for the other modes) and increment its lowest **m** bits by +#: one (modulo *2^m*) for each block. In most cases, **m** is chosen to be half +#: the block size. +#: +#: Reusing the same *initial counter block* for encryptions done with the same +#: key lead to catastrophic cryptograhic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.5 (for the mode) and Appendix B (for how +#: to manage the *initial counter block*). +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CTR = 6 + +#: OpenPGP. This mode is a variant of CFB, and it is only used in PGP and OpenPGP_ applications. +#: An Initialization Vector (*IV*) is required. +#: +#: Unlike CFB, the IV is not transmitted to the receiver. Instead, the *encrypted* IV is. +#: The IV is a random data block. Two of its bytes are duplicated to act as a checksum +#: for the correctness of the key. The encrypted IV is therefore 2 bytes longer than +#: the clean IV. +#: +#: .. _OpenPGP: http://tools.ietf.org/html/rfc4880 +MODE_OPENPGP = 7 + +def _getParameter(name, index, args, kwargs, default=None): + """Find a parameter in tuple and dictionary arguments a function receives""" + param = kwargs.get(name) + if len(args)>index: + if param: + raise ValueError("Parameter '%s' is specified twice" % name) + param = args[index] + return param or default + +class BlockAlgo: + """Class modelling an abstract block cipher.""" + + def __init__(self, factory, key, *args, **kwargs): + self.mode = _getParameter('mode', 0, args, kwargs, default=MODE_ECB) + self.block_size = factory.block_size + + if self.mode != MODE_OPENPGP: + self._cipher = factory.new(key, *args, **kwargs) + self.IV = self._cipher.IV + else: + # OPENPGP mode. For details, see 13.9 in RCC4880. 
+ # + # A few members are specifically created for this mode: + # - _encrypted_iv, set in this constructor + # - _done_first_block, set to True after the first encryption + # - _done_last_block, set to True after a partial block is processed + + self._done_first_block = False + self._done_last_block = False + self.IV = _getParameter('iv', 1, args, kwargs) + if not self.IV: + raise ValueError("MODE_OPENPGP requires an IV") + + # Instantiate a temporary cipher to process the IV + IV_cipher = factory.new(key, MODE_CFB, + b('\x00')*self.block_size, # IV for CFB + segment_size=self.block_size*8) + + # The cipher will be used for... + if len(self.IV) == self.block_size: + # ... encryption + self._encrypted_IV = IV_cipher.encrypt( + self.IV + self.IV[-2:] + # Plaintext + b('\x00')*(self.block_size-2) # Padding + )[:self.block_size+2] + elif len(self.IV) == self.block_size+2: + # ... decryption + self._encrypted_IV = self.IV + self.IV = IV_cipher.decrypt(self.IV + # Ciphertext + b('\x00')*(self.block_size-2) # Padding + )[:self.block_size+2] + if self.IV[-2:] != self.IV[-4:-2]: + raise ValueError("Failed integrity check for OPENPGP IV") + self.IV = self.IV[:-2] + else: + raise ValueError("Length of IV must be %d or %d bytes for MODE_OPENPGP" + % (self.block_size, self.block_size+2)) + + # Instantiate the cipher for the real PGP data + self._cipher = factory.new(key, MODE_CFB, + self._encrypted_IV[-self.block_size:], + segment_size=self.block_size*8) + + def encrypt(self, plaintext): + """Encrypt data with the key and the parameters set at initialization. + + The cipher object is stateful; encryption of a long block + of data can be broken up in two or more calls to `encrypt()`. + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is always equivalent to: + + >>> c.encrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not perform any padding. + + - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *plaintext* length + (in bytes) must be a multiple of *block_size*. + + - For `MODE_CFB`, *plaintext* length (in bytes) must be a multiple + of *segment_size*/8. + + - For `MODE_CTR`, *plaintext* can be of any length. + + - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, + unless it is the last chunk of the message. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. + :Return: + the encrypted data, as a byte string. It is as long as + *plaintext* with one exception: when encrypting the first message + chunk with `MODE_OPENPGP`, the encypted IV is prepended to the + returned ciphertext. + """ + + if self.mode == MODE_OPENPGP: + padding_length = (self.block_size - len(plaintext) % self.block_size) % self.block_size + if padding_length>0: + # CFB mode requires ciphertext to have length multiple of block size, + # but PGP mode allows the last block to be shorter + if self._done_last_block: + raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", + self.block_size) + self._done_last_block = True + padded = plaintext + b('\x00')*padding_length + res = self._cipher.encrypt(padded)[:len(plaintext)] + else: + res = self._cipher.encrypt(plaintext) + if not self._done_first_block: + res = self._encrypted_IV + res + self._done_first_block = True + return res + + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt data with the key and the parameters set at initialization. 
+ + The cipher object is stateful; decryption of a long block + of data can be broken up in two or more calls to `decrypt()`. + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is always equivalent to: + + >>> c.decrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not perform any padding. + + - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *ciphertext* length + (in bytes) must be a multiple of *block_size*. + + - For `MODE_CFB`, *ciphertext* length (in bytes) must be a multiple + of *segment_size*/8. + + - For `MODE_CTR`, *ciphertext* can be of any length. + + - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, + unless it is the last chunk of the message. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. + :Return: the decrypted data (byte string, as long as *ciphertext*). + """ + if self.mode == MODE_OPENPGP: + padding_length = (self.block_size - len(ciphertext) % self.block_size) % self.block_size + if padding_length>0: + # CFB mode requires ciphertext to have length multiple of block size, + # but PGP mode allows the last block to be shorter + if self._done_last_block: + raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", + self.block_size) + self._done_last_block = True + padded = ciphertext + b('\x00')*padding_length + res = self._cipher.decrypt(padded)[:len(ciphertext)] + else: + res = self._cipher.decrypt(ciphertext) + return res + + return self._cipher.decrypt(ciphertext) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/HMAC.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/HMAC.py new file mode 100644 index 0000000..324f534 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/HMAC.py @@ -0,0 +1,212 @@ +# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. +# +# =================================================================== +# Portions Copyright (c) 2001, 2002, 2003 Python Software Foundation; +# All Rights Reserved +# +# This file contains code from the Python 2.2 hmac.py module (the +# "Original Code"), with modifications made after it was incorporated +# into PyCrypto (the "Modifications"). +# +# To the best of our knowledge, the Python Software Foundation is the +# copyright holder of the Original Code, and has licensed it under the +# Python 2.2 license. See the file LEGAL/copy/LICENSE.python-2.2 for +# details. +# +# The Modifications to this file are dedicated to the public domain. +# To the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. No rights are +# reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + + +"""HMAC (Hash-based Message Authentication Code) algorithm + +HMAC is a MAC defined in RFC2104_ and FIPS-198_ and constructed using +a cryptograpic hash algorithm. +It is usually named *HMAC-X*, where *X* is the hash algorithm; for +instance *HMAC-SHA1* or *HMAC-MD5*. + +The strength of an HMAC depends on: + + - the strength of the hash algorithm + - the length and entropy of the secret key + +An example of possible usage is the following: + + >>> from Crypto.Hash import HMAC + >>> + >>> secret = b'Swordfish' + >>> h = HMAC.new(secret) + >>> h.update(b'Hello') + >>> print h.hexdigest() + +.. _RFC2104: http://www.ietf.org/rfc/rfc2104.txt +.. _FIPS-198: http://csrc.nist.gov/publications/fips/fips198/fips-198a.pdf +""" + +# This is just a copy of the Python 2.2 HMAC module, modified to work when +# used on versions of Python before 2.2. + +__revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'HMAC' ] + +from Crypto.Util.strxor import strxor_c +from Crypto.Util.py3compat import * + +#: The size of the authentication tag produced by the MAC. +#: It matches the digest size on the underlying +#: hashing module used. +digest_size = None + +class HMAC: + """Class that implements HMAC""" + + #: The size of the authentication tag produced by the MAC. + #: It matches the digest size on the underlying + #: hashing module used. + digest_size = None + + def __init__(self, key, msg = None, digestmod = None): + """Create a new HMAC object. + + :Parameters: + key : byte string + secret key for the MAC object. + It must be long enough to match the expected security level of the + MAC. However, there is no benefit in using keys longer than the + `digest_size` of the underlying hash algorithm. + msg : byte string + The very first chunk of the message to authenticate. + It is equivalent to an early call to `update()`. Optional. + :Parameter digestmod: + The hash algorithm the HMAC is based on. + Default is `Crypto.Hash.MD5`. + :Type digestmod: + A hash module or object instantiated from `Crypto.Hash` + """ + if digestmod is None: + from . import MD5 + digestmod = MD5 + + self.digestmod = digestmod + self.outer = digestmod.new() + self.inner = digestmod.new() + try: + self.digest_size = digestmod.digest_size + except AttributeError: + self.digest_size = len(self.outer.digest()) + + try: + # The block size is 128 bytes for SHA384 and SHA512 and 64 bytes + # for the others hash function + blocksize = digestmod.block_size + except AttributeError: + blocksize = 64 + + ipad = 0x36 + opad = 0x5C + + if len(key) > blocksize: + key = digestmod.new(key).digest() + + key = key + bchr(0) * (blocksize - len(key)) + self.outer.update(strxor_c(key, opad)) + self.inner.update(strxor_c(key, ipad)) + if (msg): + self.update(msg) + + def update(self, msg): + """Continue authentication of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + msg : byte string + The next chunk of the message being authenticated + """ + + self.inner.update(msg) + + def copy(self): + """Return a copy ("clone") of the MAC object. + + The copy will have the same internal state as the original MAC + object. + This can be used to efficiently compute the MAC of strings that + share a common initial substring. 
+ + :Returns: An `HMAC` object + """ + other = HMAC(b("")) + other.digestmod = self.digestmod + other.inner = self.inner.copy() + other.outer = self.outer.copy() + return other + + def digest(self): + """Return the **binary** (non-printable) MAC of the message that has + been authenticated so far. + + This method does not change the state of the MAC object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. + """ + h = self.outer.copy() + h.update(self.inner.digest()) + return h.digest() + + def hexdigest(self): + """Return the **printable** MAC of the message that has been + authenticated so far. + + This method does not change the state of the MAC object. + + :Return: A string of 2* `digest_size` bytes. It contains only + hexadecimal ASCII digits. + """ + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + +def new(key, msg = None, digestmod = None): + """Create a new HMAC object. + + :Parameters: + key : byte string + key for the MAC object. + It must be long enough to match the expected security level of the + MAC. However, there is no benefit in using keys longer than the + `digest_size` of the underlying hash algorithm. + msg : byte string + The very first chunk of the message to authenticate. + It is equivalent to an early call to `HMAC.update()`. + Optional. + :Parameter digestmod: + The hash to use to implement the HMAC. Default is `Crypto.Hash.MD5`. + :Type digestmod: + A hash module or instantiated object from `Crypto.Hash` + :Returns: An `HMAC` object + """ + return HMAC(key, msg, digestmod) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD2.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD2.py new file mode 100644 index 0000000..dac959e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD2.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD2 cryptographic hash algorithm. + +MD2 is specified in RFC1319_ and it produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD2 + >>> + >>> h = MD2.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD2 stand for Message Digest version 2, and it was invented by Rivest in 1989. + +This algorithm is both slow and insecure. Do not use it for new designs. + +.. 
_RFC1319: http://tools.ietf.org/html/rfc1319 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD2Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._MD2 as _MD2 +hashFactory = _MD2 + +class MD2Hash(HashAlgo): + """Class that implements an MD2 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md2 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 2 + #: } + #: + #: This value uniquely identifies the MD2 algorithm. + oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02') + + digest_size = 16 + block_size = 16 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD2Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD2Hash.update()`. + Optional. + + :Return: An `MD2Hash` object + """ + return MD2Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD2Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD2Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD4.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD4.py new file mode 100644 index 0000000..e28a201 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD4.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD4 cryptographic hash algorithm. + +MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD4 + >>> + >>> h = MD4.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. + +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1320: http://tools.ietf.org/html/rfc1320 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD4Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._MD4 as _MD4 +hashFactory = _MD4 + +class MD4Hash(HashAlgo): + """Class that implements an MD4 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md2 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 4 + #: } + #: + #: This value uniquely identifies the MD4 algorithm. 
+ oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04') + + digest_size = 16 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD4Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD4Hash.update()`. + Optional. + + :Return: A `MD4Hash` object + """ + return MD4Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD4Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD4Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD5.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD5.py new file mode 100644 index 0000000..9a14754 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/MD5.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD5 cryptographic hash algorithm. + +MD5 is specified in RFC1321_ and produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD5 + >>> + >>> h = MD5.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD5 stand for Message Digest version 5, and it was invented by Rivest in 1991. + +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1321: http://tools.ietf.org/html/rfc1321 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD5Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + # The md5 module is deprecated in Python 2.6, so use hashlib when possible. + import hashlib + hashFactory = hashlib.md5 + +except ImportError: + from . import md5 + hashFactory = md5 + +class MD5Hash(HashAlgo): + """Class that implements an MD5 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md5 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 5 + #: } + #: + #: This value uniquely identifies the MD5 algorithm. + oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05') + + digest_size = 16 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD5Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD5Hash.update()`. + Optional. 
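A brief sketch of this module-level factory, assuming hashlib is importable (the usual case on Python 3.5), so that MD5 here wraps hashlib.md5:

import hashlib
from Crypto.Hash import MD5

h = MD5.new()                    # module-level factory
h.update(b'Hello')
one_shot = MD5.new(b'Hello')     # passing data is just an early update()

assert h.hexdigest() == one_shot.hexdigest() == hashlib.md5(b'Hello').hexdigest()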
+ + :Return: A `MD5Hash` object + """ + return MD5Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD5Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD5Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/RIPEMD.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/RIPEMD.py new file mode 100644 index 0000000..33099cb --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/RIPEMD.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RIPEMD-160 cryptographic hash algorithm. + +RIPEMD-160_ produces the 160 bit digest of a message. + + >>> from Crypto.Hash import RIPEMD + >>> + >>> h = RIPEMD.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +RIPEMD-160 stands for RACE Integrity Primitives Evaluation Message Digest +with a 160 bit digest. It was invented by Dobbertin, Bosselaers, and Preneel. + +This algorithm is considered secure, although it has not been scrutinized as +extensively as SHA-1. Moreover, it provides an informal security level of just +80bits. + +.. _RIPEMD-160: http://homes.esat.kuleuven.be/~bosselae/ripemd160.html +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'RIPEMD160Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._RIPEMD160 as _RIPEMD160 +hashFactory = _RIPEMD160 + +class RIPEMD160Hash(HashAlgo): + """Class that implements a RIPMD-160 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-ripemd160 OBJECT IDENTIFIER ::= { + #: iso(1) identified-organization(3) teletrust(36) + #: algorithm(3) hashAlgorithm(2) ripemd160(1) + #: } + #: + #: This value uniquely identifies the RIPMD-160 algorithm. + oid = b("\x06\x05\x2b\x24\x03\x02\x01") + + digest_size = 20 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return RIPEMD160Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `RIPEMD160Hash.update()`. + Optional. + + :Return: A `RIPEMD160Hash` object + """ + return RIPEMD160Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = RIPEMD160Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. 
+block_size = RIPEMD160Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA.py new file mode 100644 index 0000000..8d20c32 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-1 cryptographic hash algorithm. + +SHA-1_ produces the 160 bit digest of a message. + + >>> from Crypto.Hash import SHA + >>> + >>> h = SHA.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +This algorithm is not considered secure. Do not use it for new designs. + +.. _SHA-1: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA1Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + # The sha module is deprecated in Python 2.6, so use hashlib when possible. + import hashlib + hashFactory = hashlib.sha1 + +except ImportError: + from . import sha + hashFactory = sha + +class SHA1Hash(HashAlgo): + """Class that implements a SHA-1 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha1 OBJECT IDENTIFIER ::= { + #: iso(1) identified-organization(3) oiw(14) secsig(3) + #: algorithms(2) 26 + #: } + #: + #: This value uniquely identifies the SHA-1 algorithm. + oid = b('\x06\x05\x2b\x0e\x03\x02\x1a') + + digest_size = 20 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA1Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA1Hash.update()`. + Optional. + + :Return: A `SHA1Hash` object + """ + return SHA1Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA1Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA1Hash.block_size + + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA224.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA224.py new file mode 100644 index 0000000..959b56d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA224.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
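Because update() can be called repeatedly, the SHA module above can hash data far larger than memory by feeding it in chunks. A minimal sketch; the file path and chunk size are illustrative:

from Crypto.Hash import SHA

h = SHA.new()
with open('/etc/hosts', 'rb') as f:                  # any file; illustrative path
    for chunk in iter(lambda: f.read(8192), b''):
        h.update(chunk)                              # repeated update() == one big update()
print(h.hexdigest())                                 # 40 hex characters (digest_size is 20 bytes)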
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-224 cryptographic hash algorithm. + +SHA-224 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 224 bit digest of a message. + + >>> from Crypto.Hash import SHA224 + >>> + >>> h = SHA224.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA224Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha224 + +except ImportError: + from Crypto.Hash import _SHA224 + hashFactory = _SHA224 + +class SHA224Hash(HashAlgo): + """Class that implements a SHA-224 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha224 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) + #: nistalgorithm(4) hashalgs(2) 4 + #: } + #: + #: This value uniquely identifies the SHA-224 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') + + digest_size = 28 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA224Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA224Hash.update()`. + Optional. + + :Return: A `SHA224Hash` object + """ + return SHA224Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA224Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA224Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA256.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA256.py new file mode 100644 index 0000000..b0a99b3 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA256.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
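The oid attribute documented above is what signature schemes such as PKCS#1 v1.5 wrap, together with the digest, into a DER DigestInfo structure. A rough sketch of that encoding, assuming short-form DER lengths (valid for these digest sizes); digest_info is an illustrative helper, not part of the package:

from Crypto.Hash import SHA224

def digest_info(h):
    # DigestInfo ::= SEQUENCE { SEQUENCE { OID, NULL }, OCTET STRING digest }
    algo = b'\x30' + bytes([len(h.oid) + 2]) + h.oid + b'\x05\x00'
    octets = b'\x04' + bytes([h.digest_size]) + h.digest()
    body = algo + octets
    return b'\x30' + bytes([len(body)]) + body

print(digest_info(SHA224.new(b'Hello')).hex())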
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-256 cryptographic hash algorithm. + +SHA-256 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 256 bit digest of a message. + + >>> from Crypto.Hash import SHA256 + >>> + >>> h = SHA256.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA256Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha256 + +except ImportError: + from Crypto.Hash import _SHA256 + hashFactory = _SHA256 + +class SHA256Hash(HashAlgo): + """Class that implements a SHA-256 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha256 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) + #: gov(101) csor(3) nistalgorithm(4) hashalgs(2) 1 + #: } + #: + #: This value uniquely identifies the SHA-256 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01') + + digest_size = 32 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA256Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA256Hash.update()`. + Optional. + + :Return: A `SHA256Hash` object + """ + return SHA256Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA256Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA256Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA384.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA384.py new file mode 100644 index 0000000..3490b02 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA384.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
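The digest() and hexdigest() methods return the same value in two encodings; a quick sketch using the SHA256 module above:

from binascii import hexlify
from Crypto.Hash import SHA256

h = SHA256.new(b'Hello')
raw = h.digest()           # 32 raw bytes (digest_size)
hexed = h.hexdigest()      # 64 hexadecimal characters

assert hexlify(raw).decode() == hexed
assert len(raw) == SHA256.digest_size == 32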
+# =================================================================== + +"""SHA-384 cryptographic hash algorithm. + +SHA-384 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 384 bit digest of a message. + + >>> from Crypto.Hash import SHA384 + >>> + >>> h = SHA384.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA384Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha384 + +except ImportError: + from Crypto.Hash import _SHA384 + hashFactory = _SHA384 + +class SHA384Hash(HashAlgo): + """Class that implements a SHA-384 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha384 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) + #: nistalgorithm(4) hashalgs(2) 2 + #: } + #: + #: This value uniquely identifies the SHA-384 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') + + digest_size = 48 + block_size = 128 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA384Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA384Hash.update()`. + Optional. + + :Return: A `SHA384Hash` object + """ + return SHA384Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA384Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA384Hash.block_size + + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA512.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA512.py new file mode 100644 index 0000000..d57548d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/SHA512.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-512 cryptographic hash algorithm. + +SHA-512 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 512 bit digest of a message. + + >>> from Crypto.Hash import SHA512 + >>> + >>> h = SHA512.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. 
_SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA512Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha512 + +except ImportError: + from Crypto.Hash import _SHA512 + hashFactory = _SHA512 + +class SHA512Hash(HashAlgo): + """Class that implements a SHA-512 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha512 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) + #: country(16) us(840) organization(1) gov(101) csor(3) nistalgorithm(4) hashalgs(2) 3 + #: } + #: + #: This value uniquely identifies the SHA-512 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03') + + digest_size = 64 + block_size = 128 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA512Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA512Hash.update()`. + Optional. + + :Return: A `SHA512Hash` object + """ + return SHA512Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA512Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA512Hash.block_size + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD2.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD2.cpython-35m-darwin.so new file mode 100755 index 0000000..3a3a563 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD2.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD4.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD4.cpython-35m-darwin.so new file mode 100755 index 0000000..09e6db3 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_MD4.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_RIPEMD160.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_RIPEMD160.cpython-35m-darwin.so new file mode 100755 index 0000000..ddef53b Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_RIPEMD160.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA224.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA224.cpython-35m-darwin.so new file mode 100755 index 0000000..9b7327f Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA224.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA256.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA256.cpython-35m-darwin.so new file mode 100755 index 0000000..d86478f Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA256.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA384.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA384.cpython-35m-darwin.so new file mode 100755 index 0000000..ff8f54c Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA384.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA512.cpython-35m-darwin.so 
b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA512.cpython-35m-darwin.so new file mode 100755 index 0000000..61b9e7e Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Hash/_SHA512.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/__init__.py new file mode 100644 index 0000000..4582c66 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Hashing algorithms + +Hash functions take arbitrary binary strings as input, and produce a random-like output +of fixed size that is dependent on the input; it should be practically infeasible +to derive the original input data given only the hash function's +output. In other words, the hash function is *one-way*. + +It should also not be practically feasible to find a second piece of data +(a *second pre-image*) whose hash is the same as the original message +(*weak collision resistance*). + +Finally, it should not be feasible to find two arbitrary messages with the +same hash (*strong collision resistance*). + +The output of the hash function is called the *digest* of the input message. +In general, the security of a hash function is related to the length of the +digest. If the digest is *n* bits long, its security level is roughly comparable +to the the one offered by an *n/2* bit encryption algorithm. + +Hash functions can be used simply as a integrity check, or, in +association with a public-key algorithm, can be used to implement +digital signatures. + +The hashing modules here all support the interface described in `PEP +247`_ , "API for Cryptographic Hash Functions". + +.. _`PEP 247` : http://www.python.org/dev/peps/pep-0247/ + +:undocumented: _MD2, _MD4, _RIPEMD160, _SHA224, _SHA256, _SHA384, _SHA512 +""" + +__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD', 'SHA', + 'SHA224', 'SHA256', 'SHA384', 'SHA512'] +__revision__ = "$Id$" + + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Hash/hashalgo.py b/Darwin/lib/python3.5/site-packages/Crypto/Hash/hashalgo.py new file mode 100644 index 0000000..b38b3a6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Hash/hashalgo.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
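Since every module listed above follows the PEP 247 interface, calling code can be written once and parameterized by the hash module. A small sketch; fingerprint is an illustrative helper, not part of the package:

from Crypto.Hash import MD5, SHA, SHA256

def fingerprint(data, hash_module):
    # Works with any module exposing the PEP 247 new()/update()/hexdigest() interface.
    h = hash_module.new()
    h.update(data)
    return h.hexdigest()

for mod in (MD5, SHA, SHA256):
    print(mod.__name__, mod.digest_size, fingerprint(b'Hello', mod))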
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from binascii import hexlify + +class HashAlgo: + """A generic class for an abstract cryptographic hash algorithm. + + :undocumented: block_size + """ + + #: The size of the resulting hash in bytes. + digest_size = None + #: The internal block size of the hash algorithm in bytes. + block_size = None + + def __init__(self, hashFactory, data=None): + """Initialize the hash object. + + :Parameters: + hashFactory : callable + An object that will generate the actual hash implementation. + *hashFactory* must have a *new()* method, or must be directly + callable. + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `update()`. + """ + if hasattr(hashFactory, 'new'): + self._hash = hashFactory.new() + else: + self._hash = hashFactory() + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + data : byte string + The next chunk of the message being hashed. + """ + return self._hash.update(data) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + This method does not change the state of the hash object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. + """ + return self._hash.digest() + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + This method does not change the state of the hash object. + + :Return: A string of 2* `digest_size` characters. It contains only + hexadecimal ASCII digits. + """ + return self._hash.hexdigest() + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :Return: A hash object of the same type + """ + return self._hash.copy() + + def new(self, data=None): + """Return a fresh instance of the hash object. + + Unlike the `copy` method, the internal state of the object is empty. + + :Parameters: + data : byte string + The next chunk of the message being hashed. 
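The hashFactory argument accepted by HashAlgo.__init__ above may be either a module exposing new() or a bare callable such as hashlib.sha256. A sketch of that duck-typed dispatch; make_hash is an illustrative stand-in, not part of the package:

import hashlib
from Crypto.Hash import SHA256

def make_hash(hash_factory, data=None):
    # Same convention as HashAlgo.__init__: prefer new(), fall back to calling the factory.
    h = hash_factory.new() if hasattr(hash_factory, 'new') else hash_factory()
    if data:
        h.update(data)
    return h

assert make_hash(SHA256, b'abc').hexdigest() == make_hash(hashlib.sha256, b'abc').hexdigest()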
+ + :Return: A hash object of the same type + """ + pass + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Protocol/AllOrNothing.py b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/AllOrNothing.py new file mode 100644 index 0000000..dd20536 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/AllOrNothing.py @@ -0,0 +1,320 @@ +# +# AllOrNothing.py : all-or-nothing package transformations +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""This file implements all-or-nothing package transformations. + +An all-or-nothing package transformation is one in which some text is +transformed into message blocks, such that all blocks must be obtained before +the reverse transformation can be applied. Thus, if any blocks are corrupted +or lost, the original message cannot be reproduced. + +An all-or-nothing package transformation is not encryption, although a block +cipher algorithm is used. The encryption key is randomly generated and is +extractable from the message blocks. + +This class implements the All-Or-Nothing package transformation algorithm +described in: + +Ronald L. Rivest. "All-Or-Nothing Encryption and The Package Transform" +http://theory.lcs.mit.edu/~rivest/fusion.pdf + +""" + +__revision__ = "$Id$" + +import operator +import sys +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.Util.py3compat import * +from functools import reduce + +def isInt(x): + test = 0 + try: + test += x + except TypeError: + return 0 + return 1 + +class AllOrNothing: + """Class implementing the All-or-Nothing package transform. + + Methods for subclassing: + + _inventkey(key_size): + Returns a randomly generated key. Subclasses can use this to + implement better random key generating algorithms. The default + algorithm is probably not very cryptographically secure. + + """ + + def __init__(self, ciphermodule, mode=None, IV=None): + """AllOrNothing(ciphermodule, mode=None, IV=None) + + ciphermodule is a module implementing the cipher algorithm to + use. It must provide the PEP272 interface. + + Note that the encryption key is randomly generated + automatically when needed. Optional arguments mode and IV are + passed directly through to the ciphermodule.new() method; they + are the feedback mode and initialization vector to use. All + three arguments must be the same for the object used to create + the digest, and to undigest'ify the message blocks. 
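A minimal round-trip sketch of the package transform described above, assuming Crypto.Cipher.AES as the ciphermodule (the same default the self-test at the bottom of this file uses); the message is an arbitrary example:

from Crypto.Cipher import AES
from Crypto.Protocol.AllOrNothing import AllOrNothing

msg = b'The quick brown fox jumps over the lazy dog'

blocks = AllOrNothing(AES).digest(msg)     # each block is AES.block_size (16) bytes long
print(len(blocks), 'message blocks')

# A fresh object can undigest, but only if every block is present and in order.
assert AllOrNothing(AES).undigest(blocks) == msg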
+ """ + + self.__ciphermodule = ciphermodule + self.__mode = mode + self.__IV = IV + self.__key_size = ciphermodule.key_size + if not isInt(self.__key_size) or self.__key_size==0: + self.__key_size = 16 + + __K0digit = bchr(0x69) + + def digest(self, text): + """digest(text:string) : [string] + + Perform the All-or-Nothing package transform on the given + string. Output is a list of message blocks describing the + transformed text, where each block is a string of bit length equal + to the ciphermodule's block_size. + """ + + # generate a random session key and K0, the key used to encrypt the + # hash blocks. Rivest calls this a fixed, publically-known encryption + # key, but says nothing about the security implications of this key or + # how to choose it. + key = self._inventkey(self.__key_size) + K0 = self.__K0digit * self.__key_size + + # we need two cipher objects here, one that is used to encrypt the + # message blocks and one that is used to encrypt the hashes. The + # former uses the randomly generated key, while the latter uses the + # well-known key. + mcipher = self.__newcipher(key) + hcipher = self.__newcipher(K0) + + # Pad the text so that its length is a multiple of the cipher's + # block_size. Pad with trailing spaces, which will be eliminated in + # the undigest() step. + block_size = self.__ciphermodule.block_size + padbytes = block_size - (len(text) % block_size) + text = text + b(' ') * padbytes + + # Run through the algorithm: + # s: number of message blocks (size of text / block_size) + # input sequence: m1, m2, ... ms + # random key K' (`key' in the code) + # Compute output sequence: m'1, m'2, ... m's' for s' = s + 1 + # Let m'i = mi ^ E(K', i) for i = 1, 2, 3, ..., s + # Let m's' = K' ^ h1 ^ h2 ^ ... hs + # where hi = E(K0, m'i ^ i) for i = 1, 2, ... s + # + # The one complication I add is that the last message block is hard + # coded to the number of padbytes added, so that these can be stripped + # during the undigest() step + s = divmod(len(text), block_size)[0] + blocks = [] + hashes = [] + for i in range(1, s+1): + start = (i-1) * block_size + end = start + block_size + mi = text[start:end] + assert len(mi) == block_size + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mticki = bytes_to_long(mi) ^ bytes_to_long(cipherblock) + blocks.append(mticki) + # calculate the hash block for this block + hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) + hashes.append(bytes_to_long(hi)) + + # Add the padbytes length as a message block + i = i + 1 + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mticki = padbytes ^ bytes_to_long(cipherblock) + blocks.append(mticki) + + # calculate this block's hash + hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) + hashes.append(bytes_to_long(hi)) + + # Now calculate the last message block of the sequence 1..s'. This + # will contain the random session key XOR'd with all the hash blocks, + # so that for undigest(), once all the hash blocks are calculated, the + # session key can be trivially extracted. Calculating all the hash + # blocks requires that all the message blocks be received, thus the + # All-or-Nothing algorithm succeeds. + mtick_stick = bytes_to_long(key) ^ reduce(operator.xor, hashes) + blocks.append(mtick_stick) + + # we convert the blocks to strings since in Python, byte sequences are + # always represented as strings. This is more consistent with the + # model that encryption and hash algorithms always operate on strings. 
+ return [long_to_bytes(i,self.__ciphermodule.block_size) for i in blocks] + + + def undigest(self, blocks): + """undigest(blocks : [string]) : string + + Perform the reverse package transformation on a list of message + blocks. Note that the ciphermodule used for both transformations + must be the same. blocks is a list of strings of bit length + equal to the ciphermodule's block_size. + """ + + # better have at least 2 blocks, for the padbytes package and the hash + # block accumulator + if len(blocks) < 2: + raise ValueError("List must be at least length 2.") + + # blocks is a list of strings. We need to deal with them as long + # integers + blocks = list(map(bytes_to_long, blocks)) + + # Calculate the well-known key, to which the hash blocks are + # encrypted, and create the hash cipher. + K0 = self.__K0digit * self.__key_size + hcipher = self.__newcipher(K0) + block_size = self.__ciphermodule.block_size + + # Since we have all the blocks (or this method would have been called + # prematurely), we can calculate all the hash blocks. + hashes = [] + for i in range(1, len(blocks)): + mticki = blocks[i-1] ^ i + hi = hcipher.encrypt(long_to_bytes(mticki, block_size)) + hashes.append(bytes_to_long(hi)) + + # now we can calculate K' (key). remember the last block contains + # m's' which we don't include here + key = blocks[-1] ^ reduce(operator.xor, hashes) + + # and now we can create the cipher object + mcipher = self.__newcipher(long_to_bytes(key, self.__key_size)) + + # And we can now decode the original message blocks + parts = [] + for i in range(1, len(blocks)): + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mi = blocks[i-1] ^ bytes_to_long(cipherblock) + parts.append(mi) + + # The last message block contains the number of pad bytes appended to + # the original text string, such that its length was an even multiple + # of the cipher's block_size. This number should be small enough that + # the conversion from long integer to integer should never overflow + padbytes = int(parts[-1]) + text = b('').join(map(long_to_bytes, parts[:-1])) + return text[:-padbytes] + + def _inventkey(self, key_size): + # Return key_size random bytes + from Crypto import Random + return Random.new().read(key_size) + + def __newcipher(self, key): + if self.__mode is None and self.__IV is None: + return self.__ciphermodule.new(key) + elif self.__IV is None: + return self.__ciphermodule.new(key, self.__mode) + else: + return self.__ciphermodule.new(key, self.__mode, self.__IV) + + + +if __name__ == '__main__': + import sys + import getopt + import base64 + + usagemsg = '''\ +Test module usage: %(program)s [-c cipher] [-l] [-h] + +Where: + --cipher module + -c module + Cipher module to use. 
Default: %(ciphermodule)s + + --aslong + -l + Print the encoded message blocks as long integers instead of base64 + encoded strings + + --help + -h + Print this help message +''' + + ciphermodule = 'AES' + aslong = 0 + + def usage(code, msg=None): + if msg: + print(msg) + print(usagemsg % {'program': sys.argv[0], + 'ciphermodule': ciphermodule}) + sys.exit(code) + + try: + opts, args = getopt.getopt(sys.argv[1:], + 'c:l', ['cipher=', 'aslong']) + except getopt.error as msg: + usage(1, msg) + + if args: + usage(1, 'Too many arguments') + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-c', '--cipher'): + ciphermodule = arg + elif opt in ('-l', '--aslong'): + aslong = 1 + + # ugly hack to force __import__ to give us the end-path module + module = __import__('Crypto.Cipher.'+ciphermodule, None, None, ['new']) + + x = AllOrNothing(module) + print('Original text:\n==========') + print(__doc__) + print('==========') + msgblocks = x.digest(b(__doc__)) + print('message blocks:') + for i, blk in zip(list(range(len(msgblocks))), msgblocks): + # base64 adds a trailing newline + print(' %3d' % i, end=' ') + if aslong: + print(bytes_to_long(blk)) + else: + print(base64.encodestring(blk)[:-1]) + # + # get a new undigest-only object so there's no leakage + y = AllOrNothing(module) + text = y.undigest(msgblocks) + if text == b(__doc__): + print('They match!') + else: + print('They differ!') diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Protocol/Chaffing.py b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/Chaffing.py new file mode 100644 index 0000000..bbfcbda --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/Chaffing.py @@ -0,0 +1,245 @@ +# +# Chaffing.py : chaffing & winnowing support +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Barry A. Warsaw, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# +"""This file implements the chaffing algorithm. + +Winnowing and chaffing is a technique for enhancing privacy without requiring +strong encryption. In short, the technique takes a set of authenticated +message blocks (the wheat) and adds a number of chaff blocks which have +randomly chosen data and MAC fields. This means that to an adversary, the +chaff blocks look as valid as the wheat blocks, and so the authentication +would have to be performed on every block. By tailoring the number of chaff +blocks added to the message, the sender can make breaking the message +computationally infeasible. There are many other interesting properties of +the winnow/chaff technique. 
+ +For example, say Alice is sending a message to Bob. She packetizes the +message and performs an all-or-nothing transformation on the packets. Then +she authenticates each packet with a message authentication code (MAC). The +MAC is a hash of the data packet, and there is a secret key which she must +share with Bob (key distribution is an exercise left to the reader). She then +adds a serial number to each packet, and sends the packets to Bob. + +Bob receives the packets, and using the shared secret authentication key, +authenticates the MACs for each packet. Those packets that have bad MACs are +simply discarded. The remainder are sorted by serial number, and passed +through the reverse all-or-nothing transform. The transform means that an +eavesdropper (say Eve) must acquire all the packets before any of the data can +be read. If even one packet is missing, the data is useless. + +There's one twist: by adding chaff packets, Alice and Bob can make Eve's job +much harder, since Eve now has to break the shared secret key, or try every +combination of wheat and chaff packet to read any of the message. The cool +thing is that Bob doesn't need to add any additional code; the chaff packets +are already filtered out because their MACs don't match (in all likelihood -- +since the data and MACs for the chaff packets are randomly chosen it is +possible, but very unlikely that a chaff MAC will match the chaff data). And +Alice need not even be the party adding the chaff! She could be completely +unaware that a third party, say Charles, is adding chaff packets to her +messages as they are transmitted. + +For more information on winnowing and chaffing see this paper: + +Ronald L. Rivest, "Chaffing and Winnowing: Confidentiality without Encryption" +http://theory.lcs.mit.edu/~rivest/chaffing.txt + +""" + +__revision__ = "$Id$" + +from Crypto.Util.number import bytes_to_long + +class Chaff: + """Class implementing the chaff adding algorithm. + + Methods for subclasses: + + _randnum(size): + Returns a randomly generated number with a byte-length equal + to size. Subclasses can use this to implement better random + data and MAC generating algorithms. The default algorithm is + probably not very cryptographically secure. It is most + important that the chaff data does not contain any patterns + that can be used to discern it from wheat data without running + the MAC. + + """ + + def __init__(self, factor=1.0, blocksper=1): + """Chaff(factor:float, blocksper:int) + + factor is the number of message blocks to add chaff to, + expressed as a percentage between 0.0 and 1.0. blocksper is + the number of chaff blocks to include for each block being + chaffed. Thus the defaults add one chaff block to every + message block. By changing the defaults, you can adjust how + computationally difficult it could be for an adversary to + brute-force crack the message. The difficulty is expressed + as: + + pow(blocksper, int(factor * number-of-blocks)) + + For ease of implementation, when factor < 1.0, only the first + int(factor*number-of-blocks) message blocks are chaffed. + """ + + if not (0.0<=factor<=1.0): + raise ValueError("'factor' must be between 0.0 and 1.0") + if blocksper < 0: + raise ValueError("'blocksper' must be zero or more") + + self.__factor = factor + self.__blocksper = blocksper + + + def chaff(self, blocks): + """chaff( [(serial-number:int, data:string, MAC:string)] ) + : [(int, string, string)] + + Add chaff to message blocks. blocks is a list of 3-tuples of the + form (serial-number, data, MAC). 
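A compact sketch of the chaff-then-winnow round trip, assuming HMAC with SHA from Crypto.Hash for the MACs (as the self-test at the bottom of this file also does); the key and packets are illustrative:

from Crypto.Hash import HMAC, SHA
from Crypto.Protocol.Chaffing import Chaff

key = b'Jefferson'                                    # shared authentication key (illustrative)
packets = [b'attack at dawn, ', b'not at noon.']

# Wheat: (serial, data, MAC) triples.
wheat = [(i, data, HMAC.new(key, data, digestmod=SHA).digest())
         for i, data in enumerate(packets)]

# Add two chaff blocks per wheat block.
mixed = Chaff(factor=1.0, blocksper=2).chaff(wheat)

# Winnow: keep only the blocks whose MAC verifies under the shared key.
kept = [(i, d) for i, d, mac in mixed
        if HMAC.new(key, d, digestmod=SHA).digest() == mac]
assert b''.join(d for i, d in sorted(kept)) == b''.join(packets)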
+ + Chaff is created by choosing a random number of the same + byte-length as data, and another random number of the same + byte-length as MAC. The message block's serial number is + placed on the chaff block and all the packet's chaff blocks + are randomly interspersed with the single wheat block. This + method then returns a list of 3-tuples of the same form. + Chaffed blocks will contain multiple instances of 3-tuples + with the same serial number, but the only way to figure out + which blocks are wheat and which are chaff is to perform the + MAC hash and compare values. + """ + + chaffedblocks = [] + + # count is the number of blocks to add chaff to. blocksper is the + # number of chaff blocks to add per message block that is being + # chaffed. + count = len(blocks) * self.__factor + blocksper = list(range(self.__blocksper)) + for i, wheat in zip(list(range(len(blocks))), blocks): + # it shouldn't matter which of the n blocks we add chaff to, so for + # ease of implementation, we'll just add them to the first count + # blocks + if i < count: + serial, data, mac = wheat + datasize = len(data) + macsize = len(mac) + addwheat = 1 + # add chaff to this block + for j in blocksper: + import sys + chaffdata = self._randnum(datasize) + chaffmac = self._randnum(macsize) + chaff = (serial, chaffdata, chaffmac) + # mix up the order, if the 5th bit is on then put the + # wheat on the list + if addwheat and bytes_to_long(self._randnum(16)) & 0x40: + chaffedblocks.append(wheat) + addwheat = 0 + chaffedblocks.append(chaff) + if addwheat: + chaffedblocks.append(wheat) + else: + # just add the wheat + chaffedblocks.append(wheat) + return chaffedblocks + + def _randnum(self, size): + from Crypto import Random + return Random.new().read(size) + + +if __name__ == '__main__': + text = """\ +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. +""" + print('Original text:\n==========') + print(text) + print('==========') + + # first transform the text into packets + blocks = [] ; size = 40 + for i in range(0, len(text), size): + blocks.append( text[i:i+size] ) + + # now get MACs for all the text blocks. The key is obvious... + print('Calculating MACs...') + from Crypto.Hash import HMAC, SHA + key = 'Jefferson' + macs = [HMAC.new(key, block, digestmod=SHA).digest() + for block in blocks] + + assert len(blocks) == len(macs) + + # put these into a form acceptable as input to the chaffing procedure + source = [] + m = list(zip(list(range(len(blocks))), blocks, macs)) + print(m) + for i, data, mac in m: + source.append((i, data, mac)) + + # now chaff these + print('Adding chaff...') + c = Chaff(factor=0.5, blocksper=2) + chaffed = c.chaff(source) + + from base64 import encodestring + + # print the chaffed message blocks. 
meanwhile, separate the wheat from + # the chaff + + wheat = [] + print('chaffed message blocks:') + for i, data, mac in chaffed: + # do the authentication + h = HMAC.new(key, data, digestmod=SHA) + pmac = h.digest() + if pmac == mac: + tag = '-->' + wheat.append(data) + else: + tag = ' ' + # base64 adds a trailing newline + print(tag, '%3d' % i, \ + repr(data), encodestring(mac)[:-1]) + + # now decode the message packets and check it against the original text + print('Undigesting wheat...') + # PY3K: This is meant to be text, do not change to bytes (data) + newtext = "".join(wheat) + if newtext == text: + print('They match!') + else: + print('They differ!') diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Protocol/KDF.py b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/KDF.py new file mode 100644 index 0000000..af4e2a6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/KDF.py @@ -0,0 +1,123 @@ +# +# KDF.py : a collection of Key Derivation Functions +# +# Part of the Python Cryptography Toolkit +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""This file contains a collection of standard key derivation functions. + +A key derivation function derives one or more secondary secret keys from +one primary secret (a master key or a pass phrase). + +This is typically done to insulate the secondary keys from each other, +to avoid that leakage of a secondary key compromises the security of the +master key, or to thwart attacks on pass phrases (e.g. via rainbow tables). + +:undocumented: __revision__ +""" + +__revision__ = "$Id$" + +import math +import struct + +from Crypto.Util.py3compat import * +from Crypto.Hash import SHA as SHA1, HMAC +from Crypto.Util.strxor import strxor + +def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): + """Derive one key from a password (or passphrase). + + This function performs key derivation according an old version of + the PKCS#5 standard (v1.5). + + This algorithm is called ``PBKDF1``. Even though it is still described + in the latest version of the PKCS#5 standard (version 2, or RFC2898), + newer applications should use the more secure and versatile `PBKDF2` instead. + + :Parameters: + password : string + The secret password or pass phrase to generate the key from. + salt : byte string + An 8 byte string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. + dkLen : integer + The length of the desired key. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`. 
+     count : integer
+        The number of iterations to carry out. It's recommended to use at least 1000.
+     hashAlgo : module
+        The hash algorithm to use, as a module or an object from the `Crypto.Hash` package.
+        The digest length must be no shorter than ``dkLen``.
+        The default algorithm is `SHA1`.
+
+    :Return: A byte string of length `dkLen` that can be used as key.
+    """
+    if not hashAlgo:
+        hashAlgo = SHA1
+    password = tobytes(password)
+    pHash = hashAlgo.new(password+salt)
+    digest = pHash.digest_size
+    if dkLen>digest:
+        raise ValueError("Selected hash algorithm has a too short digest (%d bytes)." % digest)
+    if len(salt)!=8:
+        raise ValueError("Salt is not 8 bytes long.")
+    for i in range(count-1):
+        pHash = pHash.new(pHash.digest())
+    return pHash.digest()[:dkLen]
+
+def PBKDF2(password, salt, dkLen=16, count=1000, prf=None):
+    """Derive one or more keys from a password (or passphrase).
+
+    This performs key derivation according to the PKCS#5 standard (v2.0),
+    by means of the ``PBKDF2`` algorithm.
+
+    :Parameters:
+     password : string
+        The secret password or pass phrase to generate the key from.
+     salt : string
+        A string to use for better protection from dictionary attacks.
+        This value does not need to be kept secret, but it should be randomly
+        chosen for each derivation. It is recommended to be at least 8 bytes long.
+     dkLen : integer
+        The cumulative length of the desired keys. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`.
+     count : integer
+        The number of iterations to carry out. It's recommended to use at least 1000.
+     prf : callable
+        A pseudorandom function. It must be a function that returns a pseudorandom string
+        from two parameters: a secret and a salt. If not specified, HMAC-SHA1 is used.
+
+    :Return: A byte string of length `dkLen` that can be used as key material.
+        If you wanted multiple keys, just break up this string into segments of the desired length.
+"""
+    password = tobytes(password)
+    if prf is None:
+        prf = lambda p,s: HMAC.new(p,s,SHA1).digest()
+    key = b('')
+    i = 1
+    while len(key)<dkLen:
+        U = previousU = prf(password,salt+struct.pack(">I", i))
+        for j in range(count-1):
+            previousU = t = prf(password,previousU)
+            U = strxor(U,t)
+        key += U
+        i = i + 1
+    return key[:dkLen]
+
diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Protocol/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/__init__.py
new file mode 100644
index 0000000..cacc685
--- /dev/null
+++ b/Darwin/lib/python3.5/site-packages/Crypto/Protocol/__init__.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
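# A minimal usage sketch for the PBKDF2 function defined in KDF.py above:
# derive a 16-byte key (e.g. for Crypto.Cipher.AES) from a passphrase. The
# passphrase, salt length and iteration count below are illustrative choices,
# not values mandated by this module.
from Crypto.Protocol.KDF import PBKDF2
from Crypto import Random

salt = Random.new().read(8)          # random, per-derivation salt
key = PBKDF2(b"correct horse", salt, dkLen=16, count=5000)
assert len(key) == 16                # 16 bytes of key material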
+# =================================================================== + +"""Cryptographic protocols + +Implements various cryptographic protocols. (Don't expect to find +network protocols here.) + +Crypto.Protocol.AllOrNothing + Transforms a message into a set of message blocks, such that the blocks + can be recombined to get the message back. + +Crypto.Protocol.Chaffing + Takes a set of authenticated message blocks (the wheat) and adds a number + of randomly generated blocks (the chaff). + +Crypto.Protocol.KDF + A collection of standard key derivation functions. + +:undocumented: __revision__ +""" + +__all__ = ['AllOrNothing', 'Chaffing', 'KDF'] +__revision__ = "$Id$" diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/DSA.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/DSA.py new file mode 100644 index 0000000..648f4b2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/DSA.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# +# PublicKey/DSA.py : DSA signature primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""DSA public-key signature algorithm. + +DSA_ is a widespread public-key signature algorithm. Its security is +based on the discrete logarithm problem (DLP_). Given a cyclic +group, a generator *g*, and an element *h*, it is hard +to find an integer *x* such that *g^x = h*. The problem is believed +to be difficult, and it has been proved such (and therefore secure) for +more than 30 years. + +The group is actually a sub-group over the integers modulo *p*, with *p* prime. +The sub-group order is *q*, which is prime too; it always holds that *(p-1)* is a multiple of *q*. +The cryptographic strength is linked to the magnitude of *p* and *q*. +The signer holds a value *x* (*0>> from Crypto.Random import random + >>> from Crypto.PublicKey import DSA + >>> from Crypto.Hash import SHA + >>> + >>> message = "Hello" + >>> key = DSA.generate(1024) + >>> h = SHA.new(message).digest() + >>> k = random.StrongRandom().randint(1,key.q-1) + >>> sig = key.sign(h,k) + >>> ... + >>> if key.verify(h,sig): + >>> print "OK" + >>> else: + >>> print "Incorrect signature" + +.. _DSA: http://en.wikipedia.org/wiki/Digital_Signature_Algorithm +.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf +.. 
_ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'DSAImplementation', '_DSAobj'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +from Crypto.PublicKey import _DSA, _slowmath, pubkey +from Crypto import Random + +try: + from Crypto.PublicKey import _fastmath +except ImportError: + _fastmath = None + +class _DSAobj(pubkey.pubkey): + """Class defining an actual DSA key. + + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + #: Dictionary of DSA parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: - **q**, the order of the sub-group. + #: + #: A private key will also have: + #: + #: - **x**, the private key. + keydata = ['y', 'g', 'p', 'q', 'x'] + + def __init__(self, implementation, key): + self.implementation = implementation + self.key = key + + def __getattr__(self, attrname): + if attrname in self.keydata: + # For backward compatibility, allow the user to get (not set) the + # DSA key parameters directly from this object. + return getattr(self.key, attrname) + else: + raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) + + def sign(self, M, K): + """Sign a piece of data with DSA. + + :Parameter M: The piece of data to sign with DSA. It may + not be longer in bit size than the sub-group order (*q*). + :Type M: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,q-1]*. + :Type K: long (recommended) or byte string (not recommended) + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *q* and taking the modulus by *q* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *q-1* + (e.g. *floor((q-1)/8)* random bytes) is also **not** secure. In general, + it shall not be possible for an attacker to know the value of `any + bit of K`__. + + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + + :attention: M must be a digest cryptographic hash, otherwise + an attacker may mount an existential forgery attack. + + :Return: A tuple with 2 longs. + + .. __: http://www.di.ens.fr/~pnguyen/pub_NgSh00.htm + """ + return pubkey.pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of a DSA signature. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The DSA signature to verify. + :Type signature: A tuple with 2 longs as return by `sign` + + :Return: True if the signature is correct, False otherwise. 
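# A short sign/verify sketch using the DSA API documented above. It follows
# the sign() docstring: hash the message first, and draw the per-signature
# secret k uniformly from the closed range [1, q-1]. Key size and message
# are illustrative only.
from Crypto.PublicKey import DSA
from Crypto.Hash import SHA
from Crypto.Random import random

key = DSA.generate(1024)
h = SHA.new(b"Hello").digest()                 # sign the digest, not the raw message
k = random.StrongRandom().randint(1, key.q - 1)
sig = key.sign(h, k)
assert key.verify(h, sig)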
+ """ + return pubkey.pubkey.verify(self, M, signature) + + def _encrypt(self, c, K): + raise TypeError("DSA cannot encrypt") + + def _decrypt(self, c): + raise TypeError("DSA cannot decrypt") + + def _blind(self, m, r): + raise TypeError("DSA cannot blind") + + def _unblind(self, m, r): + raise TypeError("DSA cannot unblind") + + def _sign(self, m, k): + return self.key._sign(m, k) + + def _verify(self, m, sig): + (r, s) = sig + return self.key._verify(m, r, s) + + def has_private(self): + return self.key.has_private() + + def size(self): + return self.key.size() + + def can_blind(self): + return False + + def can_encrypt(self): + return False + + def can_sign(self): + return True + + def publickey(self): + return self.implementation.construct((self.key.y, self.key.g, self.key.p, self.key.q)) + + def __getstate__(self): + d = {} + for k in self.keydata: + try: + d[k] = getattr(self.key, k) + except AttributeError: + pass + return d + + def __setstate__(self, d): + if not hasattr(self, 'implementation'): + self.implementation = DSAImplementation() + t = [] + for k in self.keydata: + if k not in d: + break + t.append(d[k]) + self.key = self.implementation._math.dsa_construct(*tuple(t)) + + def __repr__(self): + attrs = [] + for k in self.keydata: + if k == 'p': + attrs.append("p(%d)" % (self.size()+1,)) + elif hasattr(self.key, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + +class DSAImplementation(object): + """ + A DSA key factory. + + This class is only internally used to implement the methods of the + `Crypto.PublicKey.DSA` module. + """ + + def __init__(self, **kwargs): + """Create a new DSA key factory. + + :Keywords: + use_fast_math : bool + Specify which mathematic library to use: + + - *None* (default). Use fastest math available. + - *True* . Use fast math. + - *False* . Use slow math. + default_randfunc : callable + Specify how to collect random data: + + - *None* (default). Use Random.new().read(). + - not *None* . Use the specified function directly. + :Raise RuntimeError: + When **use_fast_math** =True but fast math is not available. + """ + use_fast_math = kwargs.get('use_fast_math', None) + if use_fast_math is None: # Automatic + if _fastmath is not None: + self._math = _fastmath + else: + self._math = _slowmath + + elif use_fast_math: # Explicitly select fast math + if _fastmath is not None: + self._math = _fastmath + else: + raise RuntimeError("fast math module not available") + + else: # Explicitly select slow math + self._math = _slowmath + + self.error = self._math.error + + # 'default_randfunc' parameter: + # None (default) - use Random.new().read + # not None - use the specified function + self._default_randfunc = kwargs.get('default_randfunc', None) + self._current_randfunc = None + + def _get_randfunc(self, randfunc): + if randfunc is not None: + return randfunc + elif self._current_randfunc is None: + self._current_randfunc = Random.new().read + return self._current_randfunc + + def generate(self, bits, randfunc=None, progress_func=None): + """Randomly generate a fresh, new DSA key. + + :Parameters: + bits : int + Key length, or size (in bits) of the DSA modulus + *p*. + It must be a multiple of 64, in the closed + interval [512,1024]. + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. 
+ If not specified, a new one will be instantiated + from ``Crypto.Random``. + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :Return: A DSA key object (`_DSAobj`). + + :Raise ValueError: + When **bits** is too little, too big, or not a multiple of 64. + """ + + # Check against FIPS 186-2, which says that the size of the prime p + # must be a multiple of 64 bits between 512 and 1024 + for i in (0, 1, 2, 3, 4, 5, 6, 7, 8): + if bits == 512 + 64*i: + return self._generate(bits, randfunc, progress_func) + + # The March 2006 draft of FIPS 186-3 also allows 2048 and 3072-bit + # primes, but only with longer q values. Since the current DSA + # implementation only supports a 160-bit q, we don't support larger + # values. + raise ValueError("Number of bits in p must be a multiple of 64 between 512 and 1024, not %d bits" % (bits,)) + + def _generate(self, bits, randfunc=None, progress_func=None): + rf = self._get_randfunc(randfunc) + obj = _DSA.generate_py(bits, rf, progress_func) # TODO: Don't use legacy _DSA module + key = self._math.dsa_construct(obj.y, obj.g, obj.p, obj.q, obj.x) + return _DSAobj(self, key) + + def construct(self, tup): + """Construct a DSA key from a tuple of valid DSA components. + + The modulus *p* must be a prime. + + The following equations must apply: + + - p-1 = 0 mod q + - g^x = y mod p + - 0 < x < q + - 1 < g < p + + :Parameters: + tup : tuple + A tuple of long integers, with 4 or 5 items + in the following order: + + 1. Public key (*y*). + 2. Sub-group generator (*g*). + 3. Modulus, finite field order (*p*). + 4. Sub-group order (*q*). + 5. Private key (*x*). Optional. + + :Return: A DSA key object (`_DSAobj`). + """ + key = self._math.dsa_construct(*tup) + return _DSAobj(self, key) + +_impl = DSAImplementation() +generate = _impl.generate +construct = _impl.construct +error = _impl.error + +# vim:set ts=4 sw=4 sts=4 expandtab: + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/ElGamal.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/ElGamal.py new file mode 100644 index 0000000..99af71c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/ElGamal.py @@ -0,0 +1,373 @@ +# +# ElGamal.py : ElGamal encryption/decryption and signatures +# +# Part of the Python Cryptography Toolkit +# +# Originally written by: A.M. Kuchling +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""ElGamal public-key algorithm (randomized encryption and signature). + +Signature algorithm +------------------- +The security of the ElGamal signature scheme is based (like DSA) on the discrete +logarithm problem (DLP_). Given a cyclic group, a generator *g*, +and an element *h*, it is hard to find an integer *x* such that *g^x = h*. + +The group is the largest multiplicative sub-group of the integers modulo *p*, +with *p* prime. +The signer holds a value *x* (*0>> from Crypto import Random + >>> from Crypto.Random import random + >>> from Crypto.PublicKey import ElGamal + >>> from Crypto.Util.number import GCD + >>> from Crypto.Hash import SHA + >>> + >>> message = "Hello" + >>> key = ElGamal.generate(1024, Random.new().read) + >>> h = SHA.new(message).digest() + >>> while 1: + >>> k = random.StrongRandom().randint(1,key.p-1) + >>> if GCD(k,key.p-1)==1: break + >>> sig = key.sign(h,k) + >>> ... + >>> if key.verify(h,sig): + >>> print "OK" + >>> else: + >>> print "Incorrect signature" + +.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf +.. _CDH: http://en.wikipedia.org/wiki/Computational_Diffie%E2%80%93Hellman_assumption +.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'ElGamalobj'] + +from Crypto.PublicKey.pubkey import * +from Crypto.Util import number + +class error (Exception): + pass + +# Generate an ElGamal key with N bits +def generate(bits, randfunc, progress_func=None): + """Randomly generate a fresh, new ElGamal key. + + The key will be safe for use for both encryption and signature + (although it should be used for **only one** purpose). + + :Parameters: + bits : int + Key length, or size (in bits) of the modulus *p*. + Recommended value is 2048. + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :Return: An ElGamal key object (`ElGamalobj`). 
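# A hedged sketch of generate() as documented above, followed by a textbook
# encrypt/decrypt round trip. 512 bits keeps the (slow) safe-prime search
# short for illustration; it is far too small for real use.
from Crypto import Random
from Crypto.Random import random
from Crypto.PublicKey import ElGamal
from Crypto.Util.number import bytes_to_long

key = ElGamal.generate(512, Random.new().read)
m = bytes_to_long(b"secret")                     # plaintext must be smaller than p
k = random.StrongRandom().randint(1, key.p - 2)  # fresh secret K per message
assert key.decrypt(key.encrypt(m, k)) == m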
+ """ + obj=ElGamalobj() + # Generate a safe prime p + # See Algorithm 4.86 in Handbook of Applied Cryptography + if progress_func: + progress_func('p\n') + while 1: + q = bignum(getPrime(bits-1, randfunc)) + obj.p = 2*q+1 + if number.isPrime(obj.p, randfunc=randfunc): + break + # Generate generator g + # See Algorithm 4.80 in Handbook of Applied Cryptography + # Note that the order of the group is n=p-1=2q, where q is prime + if progress_func: + progress_func('g\n') + while 1: + # We must avoid g=2 because of Bleichenbacher's attack described + # in "Generating ElGamal signatures without knowning the secret key", + # 1996 + # + obj.g = number.getRandomRange(3, obj.p, randfunc) + safe = 1 + if pow(obj.g, 2, obj.p)==1: + safe=0 + if safe and pow(obj.g, q, obj.p)==1: + safe=0 + # Discard g if it divides p-1 because of the attack described + # in Note 11.67 (iii) in HAC + if safe and divmod(obj.p-1, obj.g)[1]==0: + safe=0 + # g^{-1} must not divide p-1 because of Khadir's attack + # described in "Conditions of the generator for forging ElGamal + # signature", 2011 + ginv = number.inverse(obj.g, obj.p) + if safe and divmod(obj.p-1, ginv)[1]==0: + safe=0 + if safe: + break + # Generate private key x + if progress_func: + progress_func('x\n') + obj.x=number.getRandomRange(2, obj.p-1, randfunc) + # Generate public key y + if progress_func: + progress_func('y\n') + obj.y = pow(obj.g, obj.x, obj.p) + return obj + +def construct(tup): + """Construct an ElGamal key from a tuple of valid ElGamal components. + + The modulus *p* must be a prime. + + The following conditions must apply: + + - 1 < g < p-1 + - g^{p-1} = 1 mod p + - 1 < x < p-1 + - g^x = y mod p + + :Parameters: + tup : tuple + A tuple of long integers, with 3 or 4 items + in the following order: + + 1. Modulus (*p*). + 2. Generator (*g*). + 3. Public key (*y*). + 4. Private key (*x*). Optional. + + :Return: An ElGamal key object (`ElGamalobj`). + """ + + obj=ElGamalobj() + if len(tup) not in [3,4]: + raise ValueError('argument for construct() wrong length') + for i in range(len(tup)): + field = obj.keydata[i] + setattr(obj, field, tup[i]) + return obj + +class ElGamalobj(pubkey): + """Class defining an ElGamal key. + + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + + #: Dictionary of ElGamal parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: + #: A private key will also have: + #: + #: - **x**, the private key. + keydata=['p', 'g', 'y', 'x'] + + def encrypt(self, plaintext, K): + """Encrypt a piece of data with ElGamal. + + :Parameter plaintext: The piece of data to encrypt with ElGamal. + It must be numerically smaller than the module (*p*). + :Type plaintext: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,p-2]*. + :Type K: long (recommended) or byte string (not recommended) + + :Return: A tuple with two items. Each item is of the same type as the + plaintext (string or long). + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *p-1* and taking the modulus by *p-1* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *p-1* + (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. + In general, it shall not be possible for an attacker to know + the value of any bit of K. 
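# The raw mathematics behind _encrypt()/_decrypt() further down this class,
# written out with toy integers (deliberately insecure, for illustration only):
# the ciphertext is (a, b) = (g^K mod p, M * y^K mod p), and the receiver
# recovers M = b * (a^x)^-1 mod p.
p, g = 467, 2
x = 127                               # private key
y = pow(g, x, p)                      # public key
M, K = 100, 213                       # message and one-time secret K
a, b = pow(g, K, p), (M * pow(y, K, p)) % p
ax_inv = pow(pow(a, x, p), p - 2, p)  # modular inverse via Fermat, since p is prime
assert (b * ax_inv) % p == M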
+ + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + """ + return pubkey.encrypt(self, plaintext, K) + + def decrypt(self, ciphertext): + """Decrypt a piece of data with ElGamal. + + :Parameter ciphertext: The piece of data to decrypt with ElGamal. + :Type ciphertext: byte string, long or a 2-item tuple as returned + by `encrypt` + + :Return: A byte string if ciphertext was a byte string or a tuple + of byte strings. A long otherwise. + """ + return pubkey.decrypt(self, ciphertext) + + def sign(self, M, K): + """Sign a piece of data with ElGamal. + + :Parameter M: The piece of data to sign with ElGamal. It may + not be longer in bit size than *p-1*. + :Type M: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,p-2]* and such that *gcd(k,p-1)=1*. + :Type K: long (recommended) or byte string (not recommended) + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *p-1* and taking the modulus by *p-1* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *p-1* + (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. + In general, it shall not be possible for an attacker to know + the value of any bit of K. + + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + + :attention: M must be be a cryptographic hash, otherwise an + attacker may mount an existential forgery attack. + + :Return: A tuple with 2 longs. + """ + return pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of an ElGamal signature. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The ElGamal signature to verify. + :Type signature: A tuple with 2 longs as return by `sign` + + :Return: True if the signature is correct, False otherwise. + """ + return pubkey.verify(self, M, signature) + + def _encrypt(self, M, K): + a=pow(self.g, K, self.p) + b=( M*pow(self.y, K, self.p) ) % self.p + return ( a,b ) + + def _decrypt(self, M): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + ax=pow(M[0], self.x, self.p) + plaintext=(M[1] * inverse(ax, self.p ) ) % self.p + return plaintext + + def _sign(self, M, K): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + p1=self.p-1 + if (GCD(K, p1)!=1): + raise ValueError('Bad K value: GCD(K,p-1)!=1') + a=pow(self.g, K, self.p) + t=(M-self.x*a) % p1 + while t<0: t=t+p1 + b=(t*inverse(K, p1)) % p1 + return (a, b) + + def _verify(self, M, sig): + if sig[0]<1 or sig[0]>self.p-1: + return 0 + v1=pow(self.y, sig[0], self.p) + v1=(v1*pow(sig[0], sig[1], self.p)) % self.p + v2=pow(self.g, M, self.p) + if v1==v2: + return 1 + return 0 + + def size(self): + return number.size(self.p) - 1 + + def has_private(self): + if hasattr(self, 'x'): + return 1 + else: + return 0 + + def publickey(self): + return construct((self.p, self.g, self.y)) + + +object=ElGamalobj diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/RSA.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/RSA.py new file mode 100644 index 0000000..debe39e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/RSA.py @@ -0,0 +1,719 @@ +# -*- coding: utf-8 -*- +# +# PublicKey/RSA.py : RSA public key primitive +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA public-key cryptography algorithm (signature and encryption). + +RSA_ is the most widespread and used public key algorithm. Its security is +based on the difficulty of factoring large integers. The algorithm has +withstood attacks for 30 years, and it is therefore considered reasonably +secure for new designs. + +The algorithm can be used for both confidentiality (encryption) and +authentication (digital signature). It is worth noting that signing and +decryption are significantly slower than verification and encryption. +The cryptograhic strength is primarily linked to the length of the modulus *n*. +In 2012, a sufficient length is deemed to be 2048 bits. For more information, +see the most recent ECRYPT_ report. + +Both RSA ciphertext and RSA signature are as big as the modulus *n* (256 +bytes if *n* is 2048 bit long). + +This module provides facilities for generating fresh, new RSA keys, constructing +them from known components, exporting them, and importing them. + + >>> from Crypto.PublicKey import RSA + >>> + >>> key = RSA.generate(2048) + >>> f = open('mykey.pem','w') + >>> f.write(RSA.exportKey('PEM')) + >>> f.close() + ... + >>> f = open('mykey.pem','r') + >>> key = RSA.importKey(f.read()) + +Even though you may choose to directly use the methods of an RSA key object +to perform the primitive cryptographic operations (e.g. `_RSAobj.encrypt`), +it is recommended to use one of the standardized schemes instead (like +`Crypto.Cipher.PKCS1_v1_5` or `Crypto.Signature.PKCS1_v1_5`). + +.. _RSA: http://en.wikipedia.org/wiki/RSA_%28algorithm%29 +.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf + +:sort: generate,construct,importKey,error +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'importKey', 'RSAImplementation', '_RSAobj'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * +#from Crypto.Util.python_compat import * +from Crypto.Util.number import getRandomRange, bytes_to_long, long_to_bytes + +from Crypto.PublicKey import _RSA, _slowmath, pubkey +from Crypto import Random + +from Crypto.Util.asn1 import DerObject, DerSequence, DerNull +import binascii +import struct + +from Crypto.Util.number import inverse + +from Crypto.Util.number import inverse + +try: + from Crypto.PublicKey import _fastmath +except ImportError: + _fastmath = None + +class _RSAobj(pubkey.pubkey): + """Class defining an actual RSA key. 
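# The module docstring above recommends a standardized scheme over the raw
# encrypt()/decrypt() primitives of this class. A minimal sketch using
# PKCS#1 OAEP from the same package (Crypto.Cipher.PKCS1_OAEP); key size and
# message are illustrative.
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP

key = RSA.generate(2048)
ct = PKCS1_OAEP.new(key.publickey()).encrypt(b"attack at dawn")   # encrypt with public half
assert PKCS1_OAEP.new(key).decrypt(ct) == b"attack at dawn"       # decrypt with private key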
+ + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + #: Dictionary of RSA parameters. + #: + #: A public key will only have the following entries: + #: + #: - **n**, the modulus. + #: - **e**, the public exponent. + #: + #: A private key will also have: + #: + #: - **d**, the private exponent. + #: - **p**, the first factor of n. + #: - **q**, the second factor of n. + #: - **u**, the CRT coefficient (1/p) mod q. + keydata = ['n', 'e', 'd', 'p', 'q', 'u'] + + def __init__(self, implementation, key, randfunc=None): + self.implementation = implementation + self.key = key + if randfunc is None: + randfunc = Random.new().read + self._randfunc = randfunc + + def __getattr__(self, attrname): + if attrname in self.keydata: + # For backward compatibility, allow the user to get (not set) the + # RSA key parameters directly from this object. + return getattr(self.key, attrname) + else: + raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) + + def encrypt(self, plaintext, K): + """Encrypt a piece of data with RSA. + + :Parameter plaintext: The piece of data to encrypt with RSA. It may not + be numerically larger than the RSA module (**n**). + :Type plaintext: byte string or long + + :Parameter K: A random parameter (*for compatibility only. This + value will be ignored*) + :Type K: byte string or long + + :attention: this function performs the plain, primitive RSA encryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly encrypt data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. + + :Return: A tuple with two items. The first item is the ciphertext + of the same type as the plaintext (string or long). The second item + is always None. + """ + return pubkey.pubkey.encrypt(self, plaintext, K) + + def decrypt(self, ciphertext): + """Decrypt a piece of data with RSA. + + Decryption always takes place with blinding. + + :attention: this function performs the plain, primitive RSA decryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly decrypt data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. + + :Parameter ciphertext: The piece of data to decrypt with RSA. It may + not be numerically larger than the RSA module (**n**). If a tuple, + the first item is the actual ciphertext; the second item is ignored. + + :Type ciphertext: byte string, long or a 2-item tuple as returned by + `encrypt` + + :Return: A byte string if ciphertext was a byte string or a tuple + of byte strings. A long otherwise. + """ + return pubkey.pubkey.decrypt(self, ciphertext) + + def sign(self, M, K): + """Sign a piece of data with RSA. + + Signing always takes place with blinding. + + :attention: this function performs the plain, primitive RSA decryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly sign data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. + + :Parameter M: The piece of data to sign with RSA. 
It may + not be numerically larger than the RSA module (**n**). + :Type M: byte string or long + + :Parameter K: A random parameter (*for compatibility only. This + value will be ignored*) + :Type K: byte string or long + + :Return: A 2-item tuple. The first item is the actual signature (a + long). The second item is always None. + """ + return pubkey.pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of an RSA signature. + + :attention: this function performs the plain, primitive RSA encryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly verify data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The RSA signature to verify. The first item of + the tuple is the actual signature (a long not larger than the modulus + **n**), whereas the second item is always ignored. + :Type signature: A 2-item tuple as return by `sign` + + :Return: True if the signature is correct, False otherwise. + """ + return pubkey.pubkey.verify(self, M, signature) + + def _encrypt(self, c, K): + return (self.key._encrypt(c),) + + def _decrypt(self, c): + #(ciphertext,) = c + (ciphertext,) = c[:1] # HACK - We should use the previous line + # instead, but this is more compatible and we're + # going to replace the Crypto.PublicKey API soon + # anyway. + + # Blinded RSA decryption (to prevent timing attacks): + # Step 1: Generate random secret blinding factor r, such that 0 < r < n-1 + r = getRandomRange(1, self.key.n-1, randfunc=self._randfunc) + # Step 2: Compute c' = c * r**e mod n + cp = self.key._blind(ciphertext, r) + # Step 3: Compute m' = c'**d mod n (ordinary RSA decryption) + mp = self.key._decrypt(cp) + # Step 4: Compute m = m**(r-1) mod n + return self.key._unblind(mp, r) + + def _blind(self, m, r): + return self.key._blind(m, r) + + def _unblind(self, m, r): + return self.key._unblind(m, r) + + def _sign(self, m, K=None): + return (self.key._sign(m),) + + def _verify(self, m, sig): + #(s,) = sig + (s,) = sig[:1] # HACK - We should use the previous line instead, but + # this is more compatible and we're going to replace + # the Crypto.PublicKey API soon anyway. 
+ return self.key._verify(m, s) + + def has_private(self): + return self.key.has_private() + + def size(self): + return self.key.size() + + def can_blind(self): + return True + + def can_encrypt(self): + return True + + def can_sign(self): + return True + + def publickey(self): + return self.implementation.construct((self.key.n, self.key.e)) + + def __getstate__(self): + d = {} + for k in self.keydata: + try: + d[k] = getattr(self.key, k) + except AttributeError: + pass + return d + + def __setstate__(self, d): + if not hasattr(self, 'implementation'): + self.implementation = RSAImplementation() + t = [] + for k in self.keydata: + if k not in d: + break + t.append(d[k]) + self.key = self.implementation._math.rsa_construct(*tuple(t)) + + def __repr__(self): + attrs = [] + for k in self.keydata: + if k == 'n': + attrs.append("n(%d)" % (self.size()+1,)) + elif hasattr(self.key, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + + def exportKey(self, format='PEM', passphrase=None, pkcs=1): + """Export this RSA key. + + :Parameter format: The format to use for wrapping the key. + + - *'DER'*. Binary encoding, always unencrypted. + - *'PEM'*. Textual encoding, done according to `RFC1421`_/`RFC1423`_. + Unencrypted (default) or encrypted. + - *'OpenSSH'*. Textual encoding, done according to OpenSSH specification. + Only suitable for public keys (not private keys). + :Type format: string + + :Parameter passphrase: In case of PEM, the pass phrase to derive the encryption key from. + :Type passphrase: string + + :Parameter pkcs: The PKCS standard to follow for assembling the key. + You have two choices: + + - with **1**, the public key is embedded into an X.509 `SubjectPublicKeyInfo` DER SEQUENCE. + The private key is embedded into a `PKCS#1`_ `RSAPrivateKey` DER SEQUENCE. + This mode is the default. + - with **8**, the private key is embedded into a `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE. + This mode is not available for public keys. + + PKCS standards are not relevant for the *OpenSSH* format. + :Type pkcs: integer + + :Return: A byte string with the encoded public or private half. + :Raise ValueError: + When the format is unknown. + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + if passphrase is not None: + passphrase = tobytes(passphrase) + if format=='OpenSSH': + eb = long_to_bytes(self.e) + nb = long_to_bytes(self.n) + if bord(eb[0]) & 0x80: eb=bchr(0x00)+eb + if bord(nb[0]) & 0x80: nb=bchr(0x00)+nb + keyparts = [ 'ssh-rsa', eb, nb ] + keystring = ''.join([ struct.pack(">I",len(kp))+kp for kp in keyparts]) + return 'ssh-rsa '+binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. 
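# A round trip through exportKey()/importKey() as documented above, including
# a passphrase-protected PEM private key (3DES-CBC is the only PEM encryption
# supported here). The passphrase is an illustrative value.
from Crypto.PublicKey import RSA

key = RSA.generate(2048)
pem_private = key.exportKey('PEM', passphrase='swordfish')
pem_public = key.publickey().exportKey('PEM')
restored = RSA.importKey(pem_private, passphrase='swordfish')
assert restored.n == key.n and restored.d == key.d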
+ der = DerSequence() + if self.has_private(): + keyType= { 1: 'RSA PRIVATE', 8: 'PRIVATE' }[pkcs] + der[:] = [ 0, self.n, self.e, self.d, self.p, self.q, + self.d % (self.p-1), self.d % (self.q-1), + inverse(self.q, self.p) ] + if pkcs==8: + derkey = der.encode() + der = DerSequence([0]) + der.append(algorithmIdentifier) + der.append(DerObject('OCTET STRING', derkey).encode()) + else: + keyType = "PUBLIC" + der.append(algorithmIdentifier) + bitmap = DerObject('BIT STRING') + derPK = DerSequence( [ self.n, self.e ] ) + bitmap.payload = bchr(0x00) + derPK.encode() + der.append(bitmap.encode()) + if format=='DER': + return der.encode() + if format=='PEM': + pem = b("-----BEGIN " + keyType + " KEY-----\n") + objenc = None + if passphrase and keyType.endswith('PRIVATE'): + # We only support 3DES for encryption + import Crypto.Hash.MD5 + from Crypto.Cipher import DES3 + from Crypto.Protocol.KDF import PBKDF1 + salt = self._randfunc(8) + key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) + key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) + objenc = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) + pem += b('Proc-Type: 4,ENCRYPTED\n') + pem += b('DEK-Info: DES-EDE3-CBC,') + binascii.b2a_hex(salt).upper() + b('\n\n') + + binaryKey = der.encode() + if objenc: + # Add PKCS#7-like padding + padding = objenc.block_size-len(binaryKey)%objenc.block_size + binaryKey = objenc.encrypt(binaryKey+bchr(padding)*padding) + + # Each BASE64 line can take up to 64 characters (=48 bytes of data) + chunks = [ binascii.b2a_base64(binaryKey[i:i+48]) for i in range(0, len(binaryKey), 48) ] + pem += b('').join(chunks) + pem += b("-----END " + keyType + " KEY-----") + return pem + return ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) + +class RSAImplementation(object): + """ + An RSA key factory. + + This class is only internally used to implement the methods of the `Crypto.PublicKey.RSA` module. + + :sort: __init__,generate,construct,importKey + :undocumented: _g*, _i* + """ + + def __init__(self, **kwargs): + """Create a new RSA key factory. + + :Keywords: + use_fast_math : bool + Specify which mathematic library to use: + + - *None* (default). Use fastest math available. + - *True* . Use fast math. + - *False* . Use slow math. + default_randfunc : callable + Specify how to collect random data: + + - *None* (default). Use Random.new().read(). + - not *None* . Use the specified function directly. + :Raise RuntimeError: + When **use_fast_math** =True but fast math is not available. + """ + use_fast_math = kwargs.get('use_fast_math', None) + if use_fast_math is None: # Automatic + if _fastmath is not None: + self._math = _fastmath + else: + self._math = _slowmath + + elif use_fast_math: # Explicitly select fast math + if _fastmath is not None: + self._math = _fastmath + else: + raise RuntimeError("fast math module not available") + + else: # Explicitly select slow math + self._math = _slowmath + + self.error = self._math.error + + self._default_randfunc = kwargs.get('default_randfunc', None) + self._current_randfunc = None + + def _get_randfunc(self, randfunc): + if randfunc is not None: + return randfunc + elif self._current_randfunc is None: + self._current_randfunc = Random.new().read + return self._current_randfunc + + def generate(self, bits, randfunc=None, progress_func=None, e=65537): + """Randomly generate a fresh, new RSA key. + + :Parameters: + bits : int + Key length, or size (in bits) of the RSA modulus. + It must be a multiple of 256, and no smaller than 1024. 
+ + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. + If not specified, a new one will be instantiated + from ``Crypto.Random``. + + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + e : int + Public RSA exponent. It must be an odd positive integer. + It is typically a small number with very few ones in its + binary representation. + The default value 65537 (= ``0b10000000000000001`` ) is a safe + choice: other common values are 5, 7, 17, and 257. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :attention: Exponent 3 is also widely used, but it requires very special care when padding + the message. + + :Return: An RSA key object (`_RSAobj`). + + :Raise ValueError: + When **bits** is too little or not a multiple of 256, or when + **e** is not odd or smaller than 2. + """ + if bits < 1024 or (bits & 0xff) != 0: + # pubkey.getStrongPrime doesn't like anything that's not a multiple of 256 and >= 1024 + raise ValueError("RSA modulus length must be a multiple of 256 and >= 1024") + if e%2==0 or e<3: + raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") + rf = self._get_randfunc(randfunc) + obj = _RSA.generate_py(bits, rf, progress_func, e) # TODO: Don't use legacy _RSA module + key = self._math.rsa_construct(obj.n, obj.e, obj.d, obj.p, obj.q, obj.u) + return _RSAobj(self, key) + + def construct(self, tup): + """Construct an RSA key from a tuple of valid RSA components. + + The modulus **n** must be the product of two primes. + The public exponent **e** must be odd and larger than 1. + + In case of a private key, the following equations must apply: + + - e != 1 + - p*q = n + - e*d = 1 mod (p-1)(q-1) + - p*u = 1 mod q + + :Parameters: + tup : tuple + A tuple of long integers, with at least 2 and no + more than 6 items. The items come in the following order: + + 1. RSA modulus (n). + 2. Public exponent (e). + 3. Private exponent (d). Only required if the key is private. + 4. First factor of n (p). Optional. + 5. Second factor of n (q). Optional. + 6. CRT coefficient, (1/p) mod q (u). Optional. + + :Return: An RSA key object (`_RSAobj`). + """ + key = self._math.rsa_construct(*tup) + return _RSAobj(self, key) + + def _importKeyDER(self, externKey): + """Import an RSA key (public or private half), encoded in DER form.""" + + try: + + der = DerSequence() + der.decode(externKey, True) + + # Try PKCS#1 first, for a private key + if len(der)==9 and der.hasOnlyInts() and der[0]==0: + # ASN.1 RSAPrivateKey element + del der[6:] # Remove d mod (p-1), d mod (q-1), and q^{-1} mod p + der.append(inverse(der[4],der[5])) # Add p^{-1} mod q + del der[0] # Remove version + return self.construct(der[:]) + + # Keep on trying PKCS#1, but now for a public key + if len(der)==2: + # The DER object is an RSAPublicKey SEQUENCE with two elements + if der.hasOnlyInts(): + return self.construct(der[:]) + # The DER object is a SubjectPublicKeyInfo SEQUENCE with two elements: + # an 'algorithm' (or 'algorithmIdentifier') SEQUENCE and a 'subjectPublicKey' BIT STRING. + # 'algorithm' takes the value given a few lines above. 
+ # 'subjectPublicKey' encapsulates the actual ASN.1 RSAPublicKey element. + if der[0]==algorithmIdentifier: + bitmap = DerObject() + bitmap.decode(der[1], True) + if bitmap.isType('BIT STRING') and bord(bitmap.payload[0])==0x00: + der.decode(bitmap.payload[1:], True) + if len(der)==2 and der.hasOnlyInts(): + return self.construct(der[:]) + + # Try unencrypted PKCS#8 + if der[0]==0: + # The second element in the SEQUENCE is algorithmIdentifier. + # It must say RSA (see above for description). + if der[1]==algorithmIdentifier: + privateKey = DerObject() + privateKey.decode(der[2], True) + if privateKey.isType('OCTET STRING'): + return self._importKeyDER(privateKey.payload) + + except ValueError as IndexError: + pass + + raise ValueError("RSA key format is not supported") + + def importKey(self, externKey, passphrase=None): + """Import an RSA key (public or private half), encoded in standard form. + + :Parameter externKey: + The RSA key to import, encoded as a string. + + An RSA public key can be in any of the following formats: + + - X.509 `subjectPublicKeyInfo` DER SEQUENCE (binary or PEM encoding) + - `PKCS#1`_ `RSAPublicKey` DER SEQUENCE (binary or PEM encoding) + - OpenSSH (textual public key only) + + An RSA private key can be in any of the following formats: + + - PKCS#1 `RSAPrivateKey` DER SEQUENCE (binary or PEM encoding) + - `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE (binary or PEM encoding) + - OpenSSH (textual public key only) + + For details about the PEM encoding, see `RFC1421`_/`RFC1423`_. + + In case of PEM encoding, the private key can be encrypted with DES or 3TDES according to a certain ``pass phrase``. + Only OpenSSL-compatible pass phrases are supported. + :Type externKey: string + + :Parameter passphrase: + In case of an encrypted PEM key, this is the pass phrase from which the encryption key is derived. + :Type passphrase: string + + :Return: An RSA key object (`_RSAobj`). + + :Raise ValueError/IndexError/TypeError: + When the given key cannot be parsed (possibly because the pass phrase is wrong). + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. 
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + externKey = tobytes(externKey) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if externKey.startswith(b('-----')): + # This is probably a PEM encoded key + lines = externKey.replace(b(" "),b('')).split() + keyobj = None + + # The encrypted PEM format + if lines[1].startswith(b('Proc-Type:4,ENCRYPTED')): + DEK = lines[2].split(b(':')) + if len(DEK)!=2 or DEK[0]!=b('DEK-Info') or not passphrase: + raise ValueError("PEM encryption format not supported.") + algo, salt = DEK[1].split(b(',')) + salt = binascii.a2b_hex(salt) + import Crypto.Hash.MD5 + from Crypto.Cipher import DES, DES3 + from Crypto.Protocol.KDF import PBKDF1 + if algo==b("DES-CBC"): + # This is EVP_BytesToKey in OpenSSL + key = PBKDF1(passphrase, salt, 8, 1, Crypto.Hash.MD5) + keyobj = DES.new(key, Crypto.Cipher.DES.MODE_CBC, salt) + elif algo==b("DES-EDE3-CBC"): + # Note that EVP_BytesToKey is note exactly the same as PBKDF1 + key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) + key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) + keyobj = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) + else: + raise ValueError("Unsupport PEM encryption algorithm.") + lines = lines[2:] + + der = binascii.a2b_base64(b('').join(lines[1:-1])) + if keyobj: + der = keyobj.decrypt(der) + padding = bord(der[-1]) + der = der[:-padding] + return self._importKeyDER(der) + + if externKey.startswith(b('ssh-rsa ')): + # This is probably an OpenSSH key + keystring = binascii.a2b_base64(externKey.split(b(' '))[1]) + keyparts = [] + while len(keystring)>4: + l = struct.unpack(">I",keystring[:4])[0] + keyparts.append(keystring[4:4+l]) + keystring = keystring[4+l:] + e = bytes_to_long(keyparts[1]) + n = bytes_to_long(keyparts[2]) + return self.construct([n, e]) + if bord(externKey[0])==0x30: + # This is probably a DER encoded key + return self._importKeyDER(externKey) + + raise ValueError("RSA key format is not supported") + +#: This is the ASN.1 DER object that qualifies an algorithm as +#: compliant to PKCS#1 (that is, the standard RSA). +# It is found in all 'algorithm' fields (also called 'algorithmIdentifier'). +# It is a SEQUENCE with the oid assigned to RSA and with its parameters (none). +# 0x06 0x09 OBJECT IDENTIFIER, 9 bytes of payload +# 0x2A 0x86 0x48 0x86 0xF7 0x0D 0x01 0x01 0x01 +# rsaEncryption (1 2 840 113549 1 1 1) (PKCS #1) +# 0x05 0x00 NULL +algorithmIdentifier = DerSequence( + [ b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01'), + DerNull().encode() ] + ).encode() + +_impl = RSAImplementation() +#: +#: Randomly generate a fresh, new RSA key object. +#: +#: See `RSAImplementation.generate`. +#: +generate = _impl.generate +#: +#: Construct an RSA key object from a tuple of valid RSA components. +#: +#: See `RSAImplementation.construct`. +#: +construct = _impl.construct +#: +#: Import an RSA key (public or private half), encoded in standard form. +#: +#: See `RSAImplementation.importKey`. 
+#: +importKey = _impl.importKey +error = _impl.error + +# vim:set ts=4 sw=4 sts=4 expandtab: + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_DSA.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_DSA.py new file mode 100644 index 0000000..1787ced --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_DSA.py @@ -0,0 +1,115 @@ + +# +# DSA.py : Digital Signature Algorithm +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling, Paul Swartz, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +__revision__ = "$Id$" + +from Crypto.PublicKey.pubkey import * +from Crypto.Util import number +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.Hash import SHA +from Crypto.Util.py3compat import * + +class error (Exception): + pass + +def generateQ(randfunc): + S=randfunc(20) + hash1=SHA.new(S).digest() + hash2=SHA.new(long_to_bytes(bytes_to_long(S)+1)).digest() + q = bignum(0) + for i in range(0,20): + c=bord(hash1[i])^bord(hash2[i]) + if i==0: + c=c | 128 + if i==19: + c= c | 1 + q=q*256+c + while (not isPrime(q)): + q=q+2 + if pow(2,159) < q < pow(2,160): + return S, q + raise RuntimeError('Bad q value generated') + +def generate_py(bits, randfunc, progress_func=None): + """generate(bits:int, randfunc:callable, progress_func:callable) + + Generate a DSA key of length 'bits', using 'randfunc' to get + random data and 'progress_func', if present, to display + the progress of the key generation. 
+ """ + + if bits<160: + raise ValueError('Key length < 160 bits') + obj=DSAobj() + # Generate string S and prime q + if progress_func: + progress_func('p,q\n') + while (1): + S, obj.q = generateQ(randfunc) + n=divmod(bits-1, 160)[0] + C, N, V = 0, 2, {} + b=(obj.q >> 5) & 15 + powb=pow(bignum(2), b) + powL1=pow(bignum(2), bits-1) + while C<4096: + for k in range(0, n+1): + V[k]=bytes_to_long(SHA.new(S+bstr(N)+bstr(k)).digest()) + W=V[n] % powb + for k in range(n-1, -1, -1): + W=(W<<160)+V[k] + X=W+powL1 + p=X-(X%(2*obj.q)-1) + if powL1<=p and isPrime(p): + break + C, N = C+1, N+n+1 + if C<4096: + break + if progress_func: + progress_func('4096 multiples failed\n') + + obj.p = p + power=divmod(p-1, obj.q)[0] + if progress_func: + progress_func('h,g\n') + while (1): + h=bytes_to_long(randfunc(bits)) % (p-1) + g=pow(h, power, p) + if 11: + break + obj.g=g + if progress_func: + progress_func('x,y\n') + while (1): + x=bytes_to_long(randfunc(20)) + if 0 < x < obj.q: + break + obj.x, obj.y = x, pow(g, x, p) + return obj + +class DSAobj: + pass + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_RSA.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_RSA.py new file mode 100644 index 0000000..601ab7c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_RSA.py @@ -0,0 +1,81 @@ +# +# RSA.py : RSA encryption/decryption +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling, Paul Swartz, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +__revision__ = "$Id$" + +from Crypto.PublicKey import pubkey +from Crypto.Util import number + +def generate_py(bits, randfunc, progress_func=None, e=65537): + """generate(bits:int, randfunc:callable, progress_func:callable, e:int) + + Generate an RSA key of length 'bits', public exponent 'e'(which must be + odd), using 'randfunc' to get random data and 'progress_func', + if present, to display the progress of the key generation. + """ + obj=RSAobj() + obj.e = int(e) + + # Generate the prime factors of n + if progress_func: + progress_func('p,q\n') + p = q = 1 + while number.size(p*q) < bits: + # Note that q might be one bit longer than p if somebody specifies an odd + # number of bits for the key. (Why would anyone do that? You don't get + # more security.) + p = pubkey.getStrongPrime(bits>>1, obj.e, 1e-12, randfunc) + q = pubkey.getStrongPrime(bits - (bits>>1), obj.e, 1e-12, randfunc) + + # It's OK for p to be larger than q, but let's be + # kind to the function that will invert it for + # th calculation of u. 
+ if p > q: + (p, q)=(q, p) + obj.p = p + obj.q = q + + if progress_func: + progress_func('u\n') + obj.u = pubkey.inverse(obj.p, obj.q) + obj.n = obj.p*obj.q + + if progress_func: + progress_func('d\n') + obj.d=pubkey.inverse(obj.e, (obj.p-1)*(obj.q-1)) + + assert bits <= 1+obj.size(), "Generated key is too small" + + return obj + +class RSAobj(pubkey.pubkey): + + def size(self): + """size() : int + Return the maximum number of bits that can be handled by this key. + """ + return number.size(self.n) - 1 + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/__init__.py new file mode 100644 index 0000000..503809f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Public-key encryption and signature algorithms. + +Public-key encryption uses two different keys, one for encryption and +one for decryption. The encryption key can be made public, and the +decryption key is kept private. Many public-key algorithms can also +be used to sign messages, and some can *only* be used for signatures. + +======================== ============================================= +Module Description +======================== ============================================= +Crypto.PublicKey.DSA Digital Signature Algorithm (Signature only) +Crypto.PublicKey.ElGamal (Signing and encryption) +Crypto.PublicKey.RSA (Signing, encryption, and blinding) +======================== ============================================= + +:undocumented: _DSA, _RSA, _fastmath, _slowmath, pubkey +""" + +__all__ = ['RSA', 'DSA', 'ElGamal'] +__revision__ = "$Id$" + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_slowmath.py b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_slowmath.py new file mode 100644 index 0000000..c87bdd2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/PublicKey/_slowmath.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# +# PubKey/RSA/_slowmath.py : Pure Python implementation of the RSA portions of _fastmath +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. 
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Pure Python implementation of the RSA-related portions of Crypto.PublicKey._fastmath.""" + +__revision__ = "$Id$" + +__all__ = ['rsa_construct'] + +import sys + +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.number import size, inverse, GCD + +class error(Exception): + pass + +class _RSAKey(object): + def _blind(self, m, r): + # compute r**e * m (mod n) + return m * pow(r, self.e, self.n) + + def _unblind(self, m, r): + # compute m / r (mod n) + return inverse(r, self.n) * m % self.n + + def _decrypt(self, c): + # compute c**d (mod n) + if not self.has_private(): + raise TypeError("No private key") + if (hasattr(self,'p') and hasattr(self,'q') and hasattr(self,'u')): + m1 = pow(c, self.d % (self.p-1), self.p) + m2 = pow(c, self.d % (self.q-1), self.q) + h = m2 - m1 + if (h<0): + h = h + self.q + h = h*self.u % self.q + return h*self.p+m1 + return pow(c, self.d, self.n) + + def _encrypt(self, m): + # compute m**d (mod n) + return pow(m, self.e, self.n) + + def _sign(self, m): # alias for _decrypt + if not self.has_private(): + raise TypeError("No private key") + return self._decrypt(m) + + def _verify(self, m, sig): + return self._encrypt(sig) == m + + def has_private(self): + return hasattr(self, 'd') + + def size(self): + """Return the maximum number of bits that can be encrypted""" + return size(self.n) - 1 + +def rsa_construct(n, e, d=None, p=None, q=None, u=None): + """Construct an RSAKey object""" + assert isinstance(n, int) + assert isinstance(e, int) + assert isinstance(d, (int, type(None))) + assert isinstance(p, (int, type(None))) + assert isinstance(q, (int, type(None))) + assert isinstance(u, (int, type(None))) + obj = _RSAKey() + obj.n = n + obj.e = e + if d is None: + return obj + obj.d = d + if p is not None and q is not None: + obj.p = p + obj.q = q + else: + # Compute factors p and q from the private exponent d. + # We assume that n has no more than two factors. + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d*e-1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t%2==0: + t=divmod(t,2)[0] + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = 0 + a = 2 + while not spotted and a<100: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from binascii import b2a_hex +import time +import warnings + +from Crypto.pct_warnings import ClockRewindWarning +from . import SHAd256 + +from . import FortunaGenerator + +class FortunaPool(object): + """Fortuna pool type + + This object acts like a hash object, with the following differences: + + - It keeps a count (the .length attribute) of the number of bytes that + have been added to the pool + - It supports a .reset() method for in-place reinitialization + - The method to add bytes to the pool is .append(), not .update(). + """ + + digest_size = SHAd256.digest_size + + def __init__(self): + self.reset() + + def append(self, data): + self._h.update(data) + self.length += len(data) + + def digest(self): + return self._h.digest() + + def hexdigest(self): + if sys.version_info[0] == 2: + return b2a_hex(self.digest()) + else: + return b2a_hex(self.digest()).decode() + + def reset(self): + self._h = SHAd256.new() + self.length = 0 + +def which_pools(r): + """Return a list of pools indexes (in range(32)) that are to be included during reseed number r. + + According to _Practical Cryptography_, chapter 10.5.2 "Pools": + + "Pool P_i is included if 2**i is a divisor of r. Thus P_0 is used + every reseed, P_1 every other reseed, P_2 every fourth reseed, etc." + """ + # This is a separate function so that it can be unit-tested. + assert r >= 1 + retval = [] + mask = 0 + for i in range(32): + # "Pool P_i is included if 2**i is a divisor of [reseed_count]" + if (r & mask) == 0: + retval.append(i) + else: + break # optimization. once this fails, it always fails + mask = (mask << 1) | 1 + return retval + +class FortunaAccumulator(object): + + # An estimate of how many bytes we must append to pool 0 before it will + # contain 128 bits of entropy (with respect to an attack). We reseed the + # generator only after pool 0 contains `min_pool_size` bytes. Note that + # unlike with some other PRNGs, Fortuna's security does not rely on the + # accuracy of this estimate---we can accord to be optimistic here. + min_pool_size = 64 # size in bytes + + # If an attacker can predict some (but not all) of our entropy sources, the + # `min_pool_size` check may not be sufficient to prevent a successful state + # compromise extension attack. To resist this attack, Fortuna spreads the + # input across 32 pools, which are then consumed (to reseed the output + # generator) with exponentially decreasing frequency. 
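    #
    # (Editor's note, not part of the vendored file: as a worked example
    #  of the schedule implemented by which_pools() above, reseed r=1
    #  drains pool [0], r=2 drains [0, 1], r=3 drains [0], r=4 drains
    #  [0, 1, 2], and r=8 drains [0, 1, 2, 3] -- that is, pool P_i is
    #  consumed only on every 2**i-th reseed, so higher-numbered pools
    #  accumulate entropy for progressively longer periods.)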
+ # + # In order to prevent an attacker from gaining knowledge of all 32 pools + # before we have a chance to fill them with enough information that the + # attacker cannot predict, we impose a rate limit of 10 reseeds/second (one + # per 100 ms). This ensures that a hypothetical 33rd pool would only be + # needed after a minimum of 13 years of sustained attack. + reseed_interval = 0.100 # time in seconds + + def __init__(self): + self.reseed_count = 0 + self.generator = FortunaGenerator.AESGenerator() + self.last_reseed = None + + # Initialize 32 FortunaPool instances. + # NB: This is _not_ equivalent to [FortunaPool()]*32, which would give + # us 32 references to the _same_ FortunaPool instance (and cause the + # assertion below to fail). + self.pools = [FortunaPool() for i in range(32)] # 32 pools + assert(self.pools[0] is not self.pools[1]) + + def _forget_last_reseed(self): + # This is not part of the standard Fortuna definition, and using this + # function frequently can weaken Fortuna's ability to resist a state + # compromise extension attack, but we need this in order to properly + # implement Crypto.Random.atfork(). Otherwise, forked child processes + # might continue to use their parent's PRNG state for up to 100ms in + # some cases. (e.g. CVE-2013-1445) + self.last_reseed = None + + def random_data(self, bytes): + current_time = time.time() + if (self.last_reseed is not None and self.last_reseed > current_time): # Avoid float comparison to None to make Py3k happy + warnings.warn("Clock rewind detected. Resetting last_reseed.", ClockRewindWarning) + self.last_reseed = None + if (self.pools[0].length >= self.min_pool_size and + (self.last_reseed is None or + current_time > self.last_reseed + self.reseed_interval)): + self._reseed(current_time) + # The following should fail if we haven't seeded the pool yet. + return self.generator.pseudo_random_data(bytes) + + def _reseed(self, current_time=None): + if current_time is None: + current_time = time.time() + seed = [] + self.reseed_count += 1 + self.last_reseed = current_time + for i in which_pools(self.reseed_count): + seed.append(self.pools[i].digest()) + self.pools[i].reset() + + seed = b("").join(seed) + self.generator.reseed(seed) + + def add_random_event(self, source_number, pool_number, data): + assert 1 <= len(data) <= 32 + assert 0 <= source_number <= 255 + assert 0 <= pool_number <= 31 + self.pools[pool_number].append(bchr(source_number)) + self.pools[pool_number].append(bchr(len(data))) + self.pools[pool_number].append(data) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py new file mode 100644 index 0000000..489c81e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py @@ -0,0 +1,132 @@ +# -*- coding: ascii -*- +# +# FortunaGenerator.py : Fortuna's internal PRNG +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] is 2 and sys.version_info[1] is 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import struct + +from Crypto.Util.number import ceil_shift, exact_log2, exact_div +from Crypto.Util import Counter +from Crypto.Cipher import AES + +from . import SHAd256 + +class AESGenerator(object): + """The Fortuna "generator" + + This is used internally by the Fortuna PRNG to generate arbitrary amounts + of pseudorandom data from a smaller amount of seed data. + + The output is generated by running AES-256 in counter mode and re-keying + after every mebibyte (2**16 blocks) of output. + """ + + block_size = AES.block_size # output block size in octets (128 bits) + key_size = 32 # key size in octets (256 bits) + + # Because of the birthday paradox, we expect to find approximately one + # collision for every 2**64 blocks of output from a real random source. + # However, this code generates pseudorandom data by running AES in + # counter mode, so there will be no collisions until the counter + # (theoretically) wraps around at 2**128 blocks. Thus, in order to prevent + # Fortuna's pseudorandom output from deviating perceptibly from a true + # random source, Ferguson and Schneier specify a limit of 2**16 blocks + # without rekeying. 
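    # (Editor's note, not part of the vendored file: with AES's 16-byte
    #  block, 2**16 blocks is 2**20 bytes, i.e. exactly the 1 MiB per
    #  request that max_bytes_per_request enforces below.)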
+ max_blocks_per_request = 2**16 # Allow no more than this number of blocks per _pseudo_random_data request + + _four_kiblocks_of_zeros = b("\0") * block_size * 4096 + + def __init__(self): + self.counter = Counter.new(nbits=self.block_size*8, initial_value=0, little_endian=True) + self.key = None + + # Set some helper constants + self.block_size_shift = exact_log2(self.block_size) + assert (1 << self.block_size_shift) == self.block_size + + self.blocks_per_key = exact_div(self.key_size, self.block_size) + assert self.key_size == self.blocks_per_key * self.block_size + + self.max_bytes_per_request = self.max_blocks_per_request * self.block_size + + def reseed(self, seed): + if self.key is None: + self.key = b("\0") * self.key_size + + self._set_key(SHAd256.new(self.key + seed).digest()) + self.counter() # increment counter + assert len(self.key) == self.key_size + + def pseudo_random_data(self, bytes): + assert bytes >= 0 + + num_full_blocks = bytes >> 20 + remainder = bytes & ((1<<20)-1) + + retval = [] + for i in range(num_full_blocks): + retval.append(self._pseudo_random_data(1<<20)) + retval.append(self._pseudo_random_data(remainder)) + + return b("").join(retval) + + def _set_key(self, key): + self.key = key + self._cipher = AES.new(key, AES.MODE_CTR, counter=self.counter) + + def _pseudo_random_data(self, bytes): + if not (0 <= bytes <= self.max_bytes_per_request): + raise AssertionError("You cannot ask for more than 1 MiB of data per request") + + num_blocks = ceil_shift(bytes, self.block_size_shift) # num_blocks = ceil(bytes / self.block_size) + + # Compute the output + retval = self._generate_blocks(num_blocks)[:bytes] + + # Switch to a new key to avoid later compromises of this output (i.e. + # state compromise extension attacks) + self._set_key(self._generate_blocks(self.blocks_per_key)) + + assert len(retval) == bytes + assert len(self.key) == self.key_size + + return retval + + def _generate_blocks(self, num_blocks): + if self.key is None: + raise AssertionError("generator must be seeded before use") + assert 0 <= num_blocks <= self.max_blocks_per_request + retval = [] + for i in range(num_blocks >> 12): # xrange(num_blocks / 4096) + retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros)) + remaining_bytes = (num_blocks & 4095) << self.block_size_shift # (num_blocks % 4095) * self.block_size + retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros[:remaining_bytes])) + return b("").join(retval) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/SHAd256.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/SHAd256.py new file mode 100644 index 0000000..2e135c9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/SHAd256.py @@ -0,0 +1,98 @@ +# -*- coding: ascii -*- +# +# Random/Fortuna/SHAd256.py : SHA_d-256 hash function implementation +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""\ +SHA_d-256 hash function implementation. + +This module should comply with PEP 247. +""" + +__revision__ = "$Id$" +__all__ = ['new', 'digest_size'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from binascii import b2a_hex + +from Crypto.Hash import SHA256 + +assert SHA256.digest_size == 32 + +class _SHAd256(object): + """SHA-256, doubled. + + Returns SHA-256(SHA-256(data)). + """ + + digest_size = SHA256.digest_size + + _internal = object() + + def __init__(self, internal_api_check, sha256_hash_obj): + if internal_api_check is not self._internal: + raise AssertionError("Do not instantiate this class directly. Use %s.new()" % (__name__,)) + self._h = sha256_hash_obj + + # PEP 247 "copy" method + def copy(self): + """Return a copy of this hashing object""" + return _SHAd256(SHAd256._internal, self._h.copy()) + + # PEP 247 "digest" method + def digest(self): + """Return the hash value of this object as a binary string""" + retval = SHA256.new(self._h.digest()).digest() + assert len(retval) == 32 + return retval + + # PEP 247 "hexdigest" method + def hexdigest(self): + """Return the hash value of this object as a (lowercase) hexadecimal string""" + retval = b2a_hex(self.digest()) + assert len(retval) == 64 + if sys.version_info[0] == 2: + return retval + else: + return retval.decode() + + # PEP 247 "update" method + def update(self, data): + self._h.update(data) + +# PEP 247 module-level "digest_size" variable +digest_size = _SHAd256.digest_size + +# PEP 247 module-level "new" function +def new(data=None): + """Return a new SHAd256 hashing object""" + if not data: + data=b("") + sha = _SHAd256(_SHAd256._internal, SHA256.new(data)) + sha.new = globals()['new'] + return sha + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.4/site-packages/lxml/includes/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/__init__.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/lxml/includes/__init__.py rename to Darwin/lib/python3.5/site-packages/Crypto/Random/Fortuna/__init__.py diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/__init__.py new file mode 100644 index 0000000..2fbbecb --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/__init__.py @@ -0,0 +1,40 @@ +# +# Random/OSRNG/__init__.py : Platform-independent OS RNG API +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. 
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Provides a platform-independent interface to the random number generators +supplied by various operating systems.""" + +__revision__ = "$Id$" + +import os + +if os.name == 'posix': + from Crypto.Random.OSRNG.posix import new +elif os.name == 'nt': + from Crypto.Random.OSRNG.nt import new +elif hasattr(os, 'urandom'): + from Crypto.Random.OSRNG.fallback import new +else: + raise ImportError("Not implemented") + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/fallback.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/fallback.py new file mode 100644 index 0000000..6d4130d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/fallback.py @@ -0,0 +1,46 @@ +# +# Random/OSRNG/fallback.py : Fallback entropy source for systems with os.urandom +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +__revision__ = "$Id$" +__all__ = ['PythonOSURandomRNG'] + +import os + +from .rng_base import BaseRNG + +class PythonOSURandomRNG(BaseRNG): + + name = "" + + def __init__(self): + self._read = os.urandom + BaseRNG.__init__(self) + + def _close(self): + self._read = None + +def new(*args, **kwargs): + return PythonOSURandomRNG(*args, **kwargs) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/posix.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/posix.py new file mode 100644 index 0000000..ceea7b7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/posix.py @@ -0,0 +1,86 @@ +# +# Random/OSRNG/posix.py : OS entropy source for POSIX systems +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +__revision__ = "$Id$" +__all__ = ['DevURandomRNG'] + +import errno +import os +import stat + +from .rng_base import BaseRNG +from Crypto.Util.py3compat import b + +class DevURandomRNG(BaseRNG): + + def __init__(self, devname=None): + if devname is None: + self.name = "/dev/urandom" + else: + self.name = devname + + # Test that /dev/urandom is a character special device + f = open(self.name, "rb", 0) + fmode = os.fstat(f.fileno())[stat.ST_MODE] + if not stat.S_ISCHR(fmode): + f.close() + raise TypeError("%r is not a character special device" % (self.name,)) + + self.__file = f + + BaseRNG.__init__(self) + + def _close(self): + self.__file.close() + + def _read(self, N): + # Starting with Python 3 open with buffering=0 returns a FileIO object. + # FileIO.read behaves like read(2) and not like fread(3) and thus we + # have to handle the case that read returns less data as requested here + # more carefully. + data = b("") + while len(data) < N: + try: + d = self.__file.read(N - len(data)) + except IOError as e: + # read(2) has been interrupted by a signal; redo the read + if e.errno == errno.EINTR: + continue + raise + + if d is None: + # __file is in non-blocking mode and no data is available + return data + if len(d) == 0: + # __file is in blocking mode and arrived at EOF + return data + + data += d + return data + +def new(*args, **kwargs): + return DevURandomRNG(*args, **kwargs) + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/rng_base.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/rng_base.py new file mode 100644 index 0000000..546f2e9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/OSRNG/rng_base.py @@ -0,0 +1,88 @@ +# +# Random/OSRNG/rng_base.py : Base class for OSRNG +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +class BaseRNG(object): + + def __init__(self): + self.closed = False + self._selftest() + + def __del__(self): + self.close() + + def _selftest(self): + # Test that urandom can return data + data = self.read(16) + if len(data) != 16: + raise AssertionError("read truncated") + + # Test that we get different data every time (if we don't, the RNG is + # probably malfunctioning) + data2 = self.read(16) + if data == data2: + raise AssertionError("OS RNG returned duplicate data") + + # PEP 343: Support for the "with" statement + def __enter__(self): + pass + def __exit__(self): + """PEP 343 support""" + self.close() + + def close(self): + if not self.closed: + self._close() + self.closed = True + + def flush(self): + pass + + def read(self, N=-1): + """Return N bytes from the RNG.""" + if self.closed: + raise ValueError("I/O operation on closed file") + if not isinstance(N, int): + raise TypeError("an integer is required") + if N < 0: + raise ValueError("cannot read to end of infinite stream") + elif N == 0: + return "" + data = self._read(N) + if len(data) != N: + raise AssertionError("%s produced truncated output (requested %d, got %d)" % (self.name, N, len(data))) + return data + + def _close(self): + raise NotImplementedError("child class must implement this") + + def _read(self, N): + raise NotImplementedError("child class must implement this") + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/_UserFriendlyRNG.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/_UserFriendlyRNG.py new file mode 100644 index 0000000..937c17d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/_UserFriendlyRNG.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +# +# Random/_UserFriendlyRNG.py : A user-friendly random number generator +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +import os +import threading +import struct +import time +from math import floor + +from Crypto.Random import OSRNG +from Crypto.Random.Fortuna import FortunaAccumulator + +class _EntropySource(object): + def __init__(self, accumulator, src_num): + self._fortuna = accumulator + self._src_num = src_num + self._pool_num = 0 + + def feed(self, data): + self._fortuna.add_random_event(self._src_num, self._pool_num, data) + self._pool_num = (self._pool_num + 1) & 31 + +class _EntropyCollector(object): + + def __init__(self, accumulator): + self._osrng = OSRNG.new() + self._osrng_es = _EntropySource(accumulator, 255) + self._time_es = _EntropySource(accumulator, 254) + self._clock_es = _EntropySource(accumulator, 253) + + def reinit(self): + # Add 256 bits to each of the 32 pools, twice. (For a total of 16384 + # bits collected from the operating system.) + for i in range(2): + block = self._osrng.read(32*32) + for p in range(32): + self._osrng_es.feed(block[p*32:(p+1)*32]) + block = None + self._osrng.flush() + + def collect(self): + # Collect 64 bits of entropy from the operating system and feed it to Fortuna. + self._osrng_es.feed(self._osrng.read(8)) + + # Add the fractional part of time.time() + t = time.time() + self._time_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) + + # Add the fractional part of time.clock() + t = time.clock() + self._clock_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) + + +class _UserFriendlyRNG(object): + + def __init__(self): + self.closed = False + self._fa = FortunaAccumulator.FortunaAccumulator() + self._ec = _EntropyCollector(self._fa) + self.reinit() + + def reinit(self): + """Initialize the random number generator and seed it with entropy from + the operating system. + """ + + # Save the pid (helps ensure that Crypto.Random.atfork() gets called) + self._pid = os.getpid() + + # Collect entropy from the operating system and feed it to + # FortunaAccumulator + self._ec.reinit() + + # Override FortunaAccumulator's 100ms minimum re-seed interval. This + # is necessary to avoid a race condition between this function and + # self.read(), which that can otherwise cause forked child processes to + # produce identical output. (e.g. CVE-2013-1445) + # + # Note that if this function can be called frequently by an attacker, + # (and if the bits from OSRNG are insufficiently random) it will weaken + # Fortuna's ability to resist a state compromise extension attack. + self._fa._forget_last_reseed() + + def close(self): + self.closed = True + self._osrng = None + self._fa = None + + def flush(self): + pass + + def read(self, N): + """Return N bytes from the RNG.""" + if self.closed: + raise ValueError("I/O operation on closed file") + if not isinstance(N, int): + raise TypeError("an integer is required") + if N < 0: + raise ValueError("cannot read to end of infinite stream") + + # Collect some entropy and feed it to Fortuna + self._ec.collect() + + # Ask Fortuna to generate some bytes + retval = self._fa.random_data(N) + + # Check that we haven't forked in the meantime. (If we have, we don't + # want to use the data, because it might have been duplicated in the + # parent process. + self._check_pid() + + # Return the random data. 
+ return retval + + def _check_pid(self): + # Lame fork detection to remind developers to invoke Random.atfork() + # after every call to os.fork(). Note that this check is not reliable, + # since process IDs can be reused on most operating systems. + # + # You need to do Random.atfork() in the child process after every call + # to os.fork() to avoid reusing PRNG state. If you want to avoid + # leaking PRNG state to child processes (for example, if you are using + # os.setuid()) then you should also invoke Random.atfork() in the + # *parent* process. + if os.getpid() != self._pid: + raise AssertionError("PID check failed. RNG must be re-initialized after fork(). Hint: Try Random.atfork()") + + +class _LockingUserFriendlyRNG(_UserFriendlyRNG): + def __init__(self): + self._lock = threading.Lock() + _UserFriendlyRNG.__init__(self) + + def close(self): + self._lock.acquire() + try: + return _UserFriendlyRNG.close(self) + finally: + self._lock.release() + + def reinit(self): + self._lock.acquire() + try: + return _UserFriendlyRNG.reinit(self) + finally: + self._lock.release() + + def read(self, bytes): + self._lock.acquire() + try: + return _UserFriendlyRNG.read(self, bytes) + finally: + self._lock.release() + +class RNGFile(object): + def __init__(self, singleton): + self.closed = False + self._singleton = singleton + + # PEP 343: Support for the "with" statement + def __enter__(self): + """PEP 343 support""" + def __exit__(self): + """PEP 343 support""" + self.close() + + def close(self): + # Don't actually close the singleton, just close this RNGFile instance. + self.closed = True + self._singleton = None + + def read(self, bytes): + if self.closed: + raise ValueError("I/O operation on closed file") + return self._singleton.read(bytes) + + def flush(self): + if self.closed: + raise ValueError("I/O operation on closed file") + +_singleton_lock = threading.Lock() +_singleton = None +def _get_singleton(): + global _singleton + _singleton_lock.acquire() + try: + if _singleton is None: + _singleton = _LockingUserFriendlyRNG() + return _singleton + finally: + _singleton_lock.release() + +def new(): + return RNGFile(_get_singleton()) + +def reinit(): + _get_singleton().reinit() + +def get_random_bytes(n): + """Return the specified number of cryptographically-strong random bytes.""" + return _get_singleton().read(n) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/__init__.py new file mode 100644 index 0000000..659ffee --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# +# Random/__init__.py : PyCrypto random number generation +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" +__all__ = ['new'] + +from Crypto.Random import OSRNG +from Crypto.Random import _UserFriendlyRNG + +def new(*args, **kwargs): + """Return a file-like object that outputs cryptographically random bytes.""" + return _UserFriendlyRNG.new(*args, **kwargs) + +def atfork(): + """Call this whenever you call os.fork()""" + _UserFriendlyRNG.reinit() + +def get_random_bytes(n): + """Return the specified number of cryptographically-strong random bytes.""" + return _UserFriendlyRNG.get_random_bytes(n) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Random/random.py b/Darwin/lib/python3.5/site-packages/Crypto/Random/random.py new file mode 100644 index 0000000..cd9a221 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Random/random.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# +# Random/random.py : Strong alternative for the standard 'random' module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""A cryptographically strong version of Python's standard "random" module.""" + +__revision__ = "$Id$" +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] + +from Crypto import Random +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +class StrongRandom(object): + def __init__(self, rng=None, randfunc=None): + if randfunc is None and rng is None: + self._randfunc = None + elif randfunc is not None and rng is None: + self._randfunc = randfunc + elif randfunc is None and rng is not None: + self._randfunc = rng.read + else: + raise ValueError("Cannot specify both 'rng' and 'randfunc'") + + def getrandbits(self, k): + """Return a python long integer with k random bits.""" + if self._randfunc is None: + self._randfunc = Random.new().read + mask = (1 << k) - 1 + return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) + + def randrange(self, *args): + """randrange([start,] stop[, step]): + Return a randomly-selected element from range(start, stop, step).""" + if len(args) == 3: + (start, stop, step) = args + elif len(args) == 2: + (start, stop) = args + step = 1 + elif len(args) == 1: + (stop,) = args + start = 0 + step = 1 + else: + raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) + if (not isinstance(start, int) + or not isinstance(stop, int) + or not isinstance(step, int)): + raise TypeError("randrange requires integer arguments") + if step == 0: + raise ValueError("randrange step argument must not be zero") + + num_choices = ceil_div(stop - start, step) + if num_choices < 0: + num_choices = 0 + if num_choices < 1: + raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) + + # Pick a random number in the range of possible numbers + r = num_choices + while r >= num_choices: + r = self.getrandbits(size(num_choices)) + + return start + (step * r) + + def randint(self, a, b): + """Return a random integer N such that a <= N <= b.""" + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("randint requires integer arguments") + N = self.randrange(a, b+1) + assert a <= N <= b + return N + + def choice(self, seq): + """Return a random element from a (non-empty) sequence. + + If the seqence is empty, raises IndexError. + """ + if len(seq) == 0: + raise IndexError("empty sequence") + return seq[self.randrange(len(seq))] + + def shuffle(self, x): + """Shuffle the sequence in place.""" + # Make a (copy) of the list of objects we want to shuffle + items = list(x) + + # Choose a random item (without replacement) until all the items have been + # chosen. 
+ for i in range(len(x)): + x[i] = items.pop(self.randrange(len(items))) + + def sample(self, population, k): + """Return a k-length list of unique elements chosen from the population sequence.""" + + num_choices = len(population) + if k > num_choices: + raise ValueError("sample larger than population") + + retval = [] + selected = {} # we emulate a set using a dict here + for i in range(k): + r = None + while r is None or r in selected: + r = self.randrange(num_choices) + retval.append(population[r]) + selected[r] = 1 + return retval + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample + +# These are at the bottom to avoid problems with recursive imports +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes, size + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/__init__.py new file mode 100644 index 0000000..63e9c57 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/__init__.py: Self-test for cipher modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for cipher modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_XOR; tests += test_XOR.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/common.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/common.py new file mode 100644 index 0000000..94183d1 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/common.py @@ -0,0 +1,399 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +__revision__ = "$Id$" + +import sys +import unittest +from binascii import a2b_hex, b2a_hex +from Crypto.Util.py3compat import * + +# For compatibility with Python 2.1 and Python 2.2 +if sys.hexversion < 0x02030000: + # Python 2.1 doesn't have a dict() function + # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') + def dict(**kwargs): + return kwargs.copy() +else: + dict = dict + +class _NoDefault: pass # sentinel object +def _extract(d, k, default=_NoDefault): + """Get an item from a dictionary, and remove it from the dictionary.""" + try: + retval = d[k] + except KeyError: + if default is _NoDefault: + raise + return default + del d[k] + return retval + +# Generic cipher test case +class CipherSelfTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + self.iv = _extract(params, 'iv', None) + if self.iv is not None: self.iv = b(self.iv) + + # Only relevant for OPENPGP mode + self.encrypted_iv = _extract(params, 'encrypted_iv', None) + if self.encrypted_iv is not None: + self.encrypted_iv = b(self.encrypted_iv) + else: + # Stream cipher + self.mode = None + self.iv = None + + self.extra_params = params + + def shortDescription(self): + return self.description + + def _new(self, do_decryption=0): + params = self.extra_params.copy() + + # Handle CTR mode parameters. 
By default, we use Counter.new(self.module.block_size) + if hasattr(self.module, "MODE_CTR") and self.mode == self.module.MODE_CTR: + from Crypto.Util import Counter + ctr_class = _extract(params, 'ctr_class', Counter.new) + ctr_params = _extract(params, 'ctr_params', {}).copy() + if 'prefix' in ctr_params: ctr_params['prefix'] = a2b_hex(b(ctr_params['prefix'])) + if 'suffix' in ctr_params: ctr_params['suffix'] = a2b_hex(b(ctr_params['suffix'])) + if 'nbits' not in ctr_params: + ctr_params['nbits'] = 8*(self.module.block_size - len(ctr_params.get('prefix', '')) - len(ctr_params.get('suffix', ''))) + params['counter'] = ctr_class(**ctr_params) + + if self.mode is None: + # Stream cipher + return self.module.new(a2b_hex(self.key), **params) + elif self.iv is None: + # Block cipher without iv + return self.module.new(a2b_hex(self.key), self.mode, **params) + else: + # Block cipher with iv + if do_decryption and self.mode == self.module.MODE_OPENPGP: + # In PGP mode, the IV to feed for decryption is the *encrypted* one + return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.encrypted_iv), **params) + else: + return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.iv), **params) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + ct1 = b2a_hex(self._new().encrypt(plaintext)) + pt1 = b2a_hex(self._new(1).decrypt(ciphertext)) + ct2 = b2a_hex(self._new().encrypt(plaintext)) + pt2 = b2a_hex(self._new(1).decrypt(ciphertext)) + + if hasattr(self.module, "MODE_OPENPGP") and self.mode == self.module.MODE_OPENPGP: + # In PGP mode, data returned by the first encrypt() + # is prefixed with the encrypted IV. + # Here we check it and then remove it from the ciphertexts. + eilen = len(self.encrypted_iv) + self.assertEqual(self.encrypted_iv, ct1[:eilen]) + self.assertEqual(self.encrypted_iv, ct2[:eilen]) + ct1 = ct1[eilen:] + ct2 = ct2[eilen:] + + self.assertEqual(self.ciphertext, ct1) # encrypt + self.assertEqual(self.ciphertext, ct2) # encrypt (second time) + self.assertEqual(self.plaintext, pt1) # decrypt + self.assertEqual(self.plaintext, pt2) # decrypt (second time) + +class CipherStreamingSelfTest(CipherSelfTest): + + def shortDescription(self): + desc = self.module_name + if self.mode is not None: + desc += " in %s mode" % (self.mode_name,) + return "%s should behave like a stream cipher" % (desc,) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # The cipher should work like a stream cipher + + # Test counter mode encryption, 3 bytes at a time + ct3 = [] + cipher = self._new() + for i in range(0, len(plaintext), 3): + ct3.append(cipher.encrypt(plaintext[i:i+3])) + ct3 = b2a_hex(b("").join(ct3)) + self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) + + # Test counter mode decryption, 3 bytes at a time + pt3 = [] + cipher = self._new() + for i in range(0, len(ciphertext), 3): + pt3.append(cipher.encrypt(ciphertext[i:i+3])) + # PY3K: This is meant to be text, do not change to bytes (data) + pt3 = b2a_hex(b("").join(pt3)) + self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) + +class CTRSegfaultTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """Regression test: %s.new(key, %s.MODE_CTR) should raise TypeError, not segfault""" % 
(self.module_name, self.module_name) + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), self.module.MODE_CTR) + +class CTRWraparoundTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """Regression test: %s with MODE_CTR should raise OverflowError on wraparound when shortcut used""" % (self.module_name,) + + def runTest(self): + from Crypto.Util import Counter + + for disable_shortcut in (0, 1): # (False, True) Test CTR-mode shortcut and PyObject_CallObject code paths + for little_endian in (0, 1): # (False, True) Test both endiannesses + ctr = Counter.new(8*self.module.block_size, initial_value=2**(8*self.module.block_size)-1, little_endian=little_endian, disable_shortcut=disable_shortcut) + cipher = self.module.new(a2b_hex(self.key), self.module.MODE_CTR, counter=ctr) + block = b("\x00") * self.module.block_size + cipher.encrypt(block) + self.assertRaises(OverflowError, cipher.encrypt, block) + +class CFBSegmentSizeTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.description = params['description'] + + def shortDescription(self): + return self.description + + def runTest(self): + """Regression test: m.new(key, m.MODE_CFB, segment_size=N) should require segment_size to be a multiple of 8 bits""" + for i in range(1, 8): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), self.module.MODE_CFB, segment_size=i) + self.module.new(a2b_hex(self.key), self.module.MODE_CFB, "\0"*self.module.block_size, segment_size=8) # should succeed + +class RoundtripTest(unittest.TestCase): + def __init__(self, module, params): + from Crypto import Random + unittest.TestCase.__init__(self) + self.module = module + self.iv = Random.get_random_bytes(module.block_size) + self.key = b(params['key']) + self.plaintext = 100 * b(params['plaintext']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) + + def runTest(self): + for mode in (self.module.MODE_ECB, self.module.MODE_CBC, self.module.MODE_CFB, self.module.MODE_OFB, self.module.MODE_OPENPGP): + encryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) + ciphertext = encryption_cipher.encrypt(self.plaintext) + + if mode != self.module.MODE_OPENPGP: + decryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) + else: + eiv = ciphertext[:self.module.block_size+2] + ciphertext = ciphertext[self.module.block_size+2:] + decryption_cipher = self.module.new(a2b_hex(self.key), mode, eiv) + decrypted_plaintext = decryption_cipher.decrypt(ciphertext) + self.assertEqual(self.plaintext, decrypted_plaintext) + +class PGPTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "MODE_PGP was implemented incorrectly and insecurely. It's completely banished now." 
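+        # Note: MODE_OPENPGP (exercised by CipherSelfTest and RoundtripTest
+        # above) is the supported replacement; requesting MODE_PGP itself
+        # must raise ValueError, which runTest below verifies.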
+ + def runTest(self): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_PGP) + +class IVLengthTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" + + def runTest(self): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_CBC, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_CFB, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_OFB, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_OPENPGP, "") + self.module.new(a2b_hex(self.key), self.module.MODE_ECB, "") + self.module.new(a2b_hex(self.key), self.module.MODE_CTR, "", counter=self._dummy_counter) + + def _dummy_counter(self): + return "\0" * self.module.block_size + +def make_block_tests(module, module_name, test_data): + tests = [] + extra_tests_added = 0 + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {'mode': 'ECB'} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + p_mode = p2.get('mode', 'ECB') + if p_mode == 'ECB': + _extract(p2, 'mode', 'ECB') + + if p_description is not None: + description = p_description + elif p_mode == 'ECB' and not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add extra test(s) to the test suite before the current test + if not extra_tests_added: + tests += [ + CTRSegfaultTest(module, params), + CTRWraparoundTest(module, params), + CFBSegmentSizeTest(module, params), + RoundtripTest(module, params), + PGPTest(module, params), + IVLengthTest(module, params), + ] + extra_tests_added = 1 + + # Add the current test to the test suite + tests.append(CipherSelfTest(module, params)) + + # When using CTR mode, test that the interface behaves like a stream cipher + if p_mode == 'CTR': + tests.append(CipherStreamingSelfTest(module, params)) + + # When using CTR mode, test the non-shortcut code path. 
+ if p_mode == 'CTR' and 'ctr_class' not in params: + params2 = params.copy() + params2['description'] += " (shortcut disabled)" + ctr_params2 = params.get('ctr_params', {}).copy() + params2['ctr_params'] = ctr_params2 + if 'disable_shortcut' not in params2['ctr_params']: + params2['ctr_params']['disable_shortcut'] = 1 + tests.append(CipherSelfTest(module, params2)) + return tests + +def make_stream_tests(module, module_name, test_data): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add the test to the test suite + tests.append(CipherSelfTest(module, params)) + tests.append(CipherStreamingSelfTest(module, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_AES.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_AES.py new file mode 100644 index 0000000..63c56d0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_AES.py @@ -0,0 +1,1433 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/AES.py: Self-test for the AES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.AES""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. 
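+# Plaintext, ciphertext and key are hex strings; the optional trailing params
+# dict is passed straight through to CipherSelfTest (e.g. {'mode': 'CBC',
+# 'iv': ...}), with ECB as the default mode in make_block_tests.
+#
+# A minimal sketch of how these vectors are typically consumed, assuming the
+# usual PyCrypto layout in which a get_tests() helper further down hands them
+# to the factory defined in .common:
+#
+#     import unittest
+#     from Crypto.Cipher import AES
+#     from Crypto.SelfTest.Cipher.common import make_block_tests
+#
+#     suite = unittest.TestSuite(make_block_tests(AES, "AES", test_data))
+#     unittest.TextTestRunner().run(suite)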
+test_data = [ + # FIPS PUB 197 test vectors + # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + + ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', + '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), + + ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', + '000102030405060708090a0b0c0d0e0f1011121314151617', + 'FIPS 197 C.2 (AES-192)'), + + ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'FIPS 197 C.3 (AES-256)'), + + # Rijndael128 test vectors + # Downloaded 2008-09-13 from + # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz + + # ecb_tbl.txt, KEYSIZE=128 + ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=1'), + ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=2'), + ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=3'), + ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=4'), + ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=5'), + ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=6'), + ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=7'), + ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=8'), + ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=9'), + ('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=10'), + ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=11'), + ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=12'), + ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=13'), + ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=14'), + ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=15'), + ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: I=16'), + ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=17'), + ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=18'), + ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=19'), + ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=20'), + ('bdc2691d4f1b73d2700679c3bcbf9c6e', 
'c84113d68b666ab2a50a8bdb222e91b9', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=21'), + ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=22'), + ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=23'), + ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=24'), + ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=25'), + ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=26'), + ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=27'), + ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=28'), + ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=29'), + ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=30'), + ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=31'), + ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=32'), + ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=33'), + ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', + '48494a4b4d4e4f50525354555758595a', + 'ecb-tbl-128: I=34'), + ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=35'), + ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=36'), + ('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=37'), + ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=38'), + ('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=39'), + ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=40'), + ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=41'), + ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=42'), + ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=43'), + ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=44'), + ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=45'), + ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=46'), + 
('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=47'), + ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=48'), + ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=49'), + ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=50'), + ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=51'), + ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=52'), + ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=53'), + ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=54'), + ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=55'), + ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=56'), + ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=57'), + ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=58'), + ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=59'), + ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=60'), + ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=61'), + ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=62'), + ('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=63'), + ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=64'), + ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=65'), + ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=66'), + ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=68'), + ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=69'), + ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', + '18191a1b1d1e1f20222324252728292a', + 'ecb-tbl-128: I=70'), + ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=71'), + ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: 
I=72'), + ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=73'), + ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=74'), + ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=75'), + ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', + '90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-128: I=76'), + ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=77'), + ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-128: I=78'), + ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-128: I=79'), + ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=80'), + ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', + 'f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-128: I=81'), + ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=82'), + ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', + '1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-128: I=83'), + ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', + '30313233353637383a3b3c3d3f404142', + 'ecb-tbl-128: I=84'), + ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', + '44454647494a4b4c4e4f505153545556', + 'ecb-tbl-128: I=85'), + ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', + '58595a5b5d5e5f60626364656768696a', + 'ecb-tbl-128: I=86'), + ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=87'), + ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=88'), + ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=89'), + ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=90'), + ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=91'), + ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=92'), + ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=93'), + ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=94'), + ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=95'), + ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=96'), + ('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=97'), + ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', + '48494a4b4d4e4f50525354555758595a', + 
'ecb-tbl-128: I=98'), + ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=99'), + ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=100'), + ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=101'), + ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=102'), + ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=103'), + ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=105'), + ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=106'), + ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=107'), + ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=108'), + ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=109'), + ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=110'), + ('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=111'), + ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=112'), + ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=113'), + ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=114'), + ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=115'), + ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=116'), + ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=117'), + ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=118'), + ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=119'), + ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=120'), + ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=121'), + ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=122'), + ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=123'), + ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', + 
'50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=124'), + ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=125'), + ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=126'), + ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=127'), + ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=128'), + + # ecb_tbl.txt, KEYSIZE=192 + ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=1'), + ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=2'), + ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=3'), + ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=4'), + ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=5'), + ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=6'), + ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=7'), + ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=8'), + ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=9'), + ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=10'), + ('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=11'), + ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=12'), + ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=13'), + ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=14'), + ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', + 'ecb-tbl-192: I=15'), + ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', + 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-192: I=16'), + ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', + 'ecb-tbl-192: I=17'), + ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', + 'fefe01010304050608090a0b0d0e0f10121314151718191a', + 'ecb-tbl-192: I=18'), + ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', + '1c1d1e1f21222324262728292b2c2d2e3031323335363738', + 'ecb-tbl-192: I=19'), + 
('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', + '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-192: I=20'), + ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', + 'ecb-tbl-192: I=21'), + ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', + '767778797b7c7d7e80818283858687888a8b8c8d8f909192', + 'ecb-tbl-192: I=22'), + ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', + '94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', + 'ecb-tbl-192: I=23'), + ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', + 'ecb-tbl-192: I=24'), + ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', + '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-192: I=25'), + ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', + 'ecb-tbl-192: I=26'), + ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', + '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', + 'ecb-tbl-192: I=27'), + ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', + 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-192: I=28'), + ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', + 'ecb-tbl-192: I=29'), + ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', + '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-192: I=30'), + ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', + 'ecb-tbl-192: I=31'), + ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', + '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-192: I=32'), + ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', + '464748494b4c4d4e50515253555657585a5b5c5d5f606162', + 'ecb-tbl-192: I=33'), + ('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', + '828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-192: I=34'), + ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', + 'ecb-tbl-192: I=35'), + ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', + 'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-192: I=36'), + ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', + 'ecb-tbl-192: I=37'), + ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', + 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-192: I=38'), + ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', + 'ecb-tbl-192: I=39'), + ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', + '363738393b3c3d3e40414243454647484a4b4c4d4f505152', + 'ecb-tbl-192: I=40'), + ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', + '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', + 'ecb-tbl-192: I=41'), + ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', + '727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-192: I=42'), + 
('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', + 'ecb-tbl-192: I=43'), + ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', + 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-192: I=44'), + ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', + 'ecb-tbl-192: I=45'), + ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', + 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-192: I=46'), + ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', + 'ecb-tbl-192: I=47'), + ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', + '262728292b2c2d2e30313233353637383a3b3c3d3f404142', + 'ecb-tbl-192: I=48'), + ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', + '44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', + 'ecb-tbl-192: I=49'), + ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', + '626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-192: I=50'), + ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c', + 'ecb-tbl-192: I=51'), + ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', + '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-192: I=52'), + ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', + 'ecb-tbl-192: I=53'), + ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', + 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-192: I=54'), + ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', + 'ecb-tbl-192: I=55'), + ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', + '161718191b1c1d1e20212223252627282a2b2c2d2f303132', + 'ecb-tbl-192: I=56'), + ('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', + '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', + 'ecb-tbl-192: I=57'), + ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', + '525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-192: I=58'), + ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', + '70717273757677787a7b7c7d7f80818284858687898a8b8c', + 'ecb-tbl-192: I=59'), + ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', + '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-192: I=60'), + ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', + 'ecb-tbl-192: I=61'), + ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', + 'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-192: I=62'), + ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', + 'ecb-tbl-192: I=63'), + ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', + '060708090b0c0d0e10111213151617181a1b1c1d1f202122', + 'ecb-tbl-192: I=64'), + ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', + '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', + 'ecb-tbl-192: I=65'), + 
('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', + '424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-192: I=66'), + ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c', + 'ecb-tbl-192: I=67'), + ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', + '7e7f80818384858688898a8b8d8e8f90929394959798999a', + 'ecb-tbl-192: I=68'), + ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', + 'ecb-tbl-192: I=69'), + ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', + 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-192: I=70'), + ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', + 'ecb-tbl-192: I=71'), + ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', + 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', + 'ecb-tbl-192: I=72'), + ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', + '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', + 'ecb-tbl-192: I=73'), + ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', + '323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-192: I=74'), + ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', + '50515253555657585a5b5c5d5f60616264656667696a6b6c', + 'ecb-tbl-192: I=75'), + ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', + '6e6f70717374757678797a7b7d7e7f80828384858788898a', + 'ecb-tbl-192: I=76'), + ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', + '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', + 'ecb-tbl-192: I=77'), + ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', + 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-192: I=78'), + ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', + 'ecb-tbl-192: I=79'), + ('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', + 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-192: I=80'), + ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', + '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', + 'ecb-tbl-192: I=81'), + ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', + '222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-192: I=82'), + ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', + '40414243454647484a4b4c4d4f50515254555657595a5b5c', + 'ecb-tbl-192: I=83'), + ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', + '5e5f60616364656668696a6b6d6e6f70727374757778797a', + 'ecb-tbl-192: I=84'), + ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', + '7c7d7e7f81828384868788898b8c8d8e9091929395969798', + 'ecb-tbl-192: I=85'), + ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', + '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-192: I=86'), + ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', + 'ecb-tbl-192: I=87'), + ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', + 'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-192: I=88'), + 
('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', + 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', + 'ecb-tbl-192: I=89'), + ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', + '121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-192: I=90'), + ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', + '30313233353637383a3b3c3d3f40414244454647494a4b4c', + 'ecb-tbl-192: I=91'), + ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', + '4e4f50515354555658595a5b5d5e5f60626364656768696a', + 'ecb-tbl-192: I=92'), + ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', + '6c6d6e6f71727374767778797b7c7d7e8081828385868788', + 'ecb-tbl-192: I=93'), + ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', + '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-192: I=94'), + ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', + 'ecb-tbl-192: I=95'), + ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', + 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-192: I=96'), + ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', + 'ecb-tbl-192: I=97'), + ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', + '020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-192: I=98'), + ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', + '20212223252627282a2b2c2d2f30313234353637393a3b3c', + 'ecb-tbl-192: I=99'), + ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', + '3e3f40414344454648494a4b4d4e4f50525354555758595a', + 'ecb-tbl-192: I=100'), + ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', + '5c5d5e5f61626364666768696b6c6d6e7071727375767778', + 'ecb-tbl-192: I=101'), + ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', + '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-192: I=102'), + ('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', + 'ecb-tbl-192: I=103'), + ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', + 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-192: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', + 'ecb-tbl-192: I=105'), + ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', + 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-192: I=106'), + ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', + '10111213151617181a1b1c1d1f20212224252627292a2b2c', + 'ecb-tbl-192: I=107'), + ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', + '2e2f30313334353638393a3b3d3e3f40424344454748494a', + 'ecb-tbl-192: I=108'), + ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', + '4c4d4e4f51525354565758595b5c5d5e6061626365666768', + 'ecb-tbl-192: I=109'), + ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', + '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-192: I=110'), + ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', + 'ecb-tbl-192: I=111'), + 
('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', + 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-192: I=112'), + ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', + 'ecb-tbl-192: I=113'), + ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', + 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-192: I=114'), + ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=115'), + ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=116'), + ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=117'), + ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=118'), + ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=119'), + ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=120'), + ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=121'), + ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=122'), + ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=123'), + ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=124'), + ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=125'), + ('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=126'), + ('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=127'), + ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=128'), + + # ecb_tbl.txt, KEYSIZE=256 + ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=1'), + ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=2'), + ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=3'), + ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=4'), + ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=5'), + ('e6248f55c5fdcbca9cbbb01c88a2ea77', 
'85399c01f59fffb5204f19f8482f00b8', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=6'), + ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=7'), + ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=8'), + ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=9'), + ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=10'), + ('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=11'), + ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=12'), + ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=13'), + ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=14'), + ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=15'), + ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=16'), + ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=17'), + ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=18'), + ('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=19'), + ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=20'), + ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=21'), + ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=22'), + ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=23'), + ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=24'), + ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=25'), + ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=26'), + ('d9f6ff44646c4725bd4c0103ff5552a7', 
'430bf9570804185e1ab6365fc6a6860c', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=27'), + ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=28'), + ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=29'), + ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=30'), + ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=31'), + ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=32'), + ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=33'), + ('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=34'), + ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=35'), + ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=36'), + ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=37'), + ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=38'), + ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=39'), + ('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=40'), + ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=41'), + ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=42'), + ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=43'), + ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=44'), + ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=45'), + ('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=46'), + ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=47'), + ('77b1ce386b551b995f2f2a1da994eef8', 
'697c9245b9937f32f5d1c82319f0363a', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=48'), + ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=49'), + ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=50'), + ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=51'), + ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=52'), + ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=53'), + ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=54'), + ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=55'), + ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=56'), + ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=57'), + ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=58'), + ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=59'), + ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=60'), + ('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=61'), + ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=62'), + ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=63'), + ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=64'), + ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=65'), + ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=66'), + ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=68'), + ('64656667fefdfcc31b1a1d1ca5aaaba8', 
'b192bd472a4d2eafb786e97458967626', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=69'), + ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=70'), + ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=71'), + ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=72'), + ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=73'), + ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=74'), + ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=75'), + ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=76'), + ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=77'), + ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=78'), + ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=79'), + ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=80'), + ('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=81'), + ('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=82'), + ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=83'), + ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=84'), + ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=85'), + ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=86'), + ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=87'), + ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=88'), + ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=89'), + ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 
'e0d343e14da75c917b4a5cec4810d7c2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=90'), + ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=91'), + ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=92'), + ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=93'), + ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=94'), + ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=95'), + ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=96'), + ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=97'), + ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=98'), + ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=99'), + ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=100'), + ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=101'), + ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=102'), + ('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=103'), + ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=104'), + ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=105'), + ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=106'), + ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=107'), + ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=108'), + ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=109'), + ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=110'), + 
('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=111'), + ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=112'), + ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=113'), + ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=114'), + ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=115'), + ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=116'), + ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=117'), + ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=118'), + ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=119'), + ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=120'), + ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=121'), + ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=122'), + ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=123'), + ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=124'), + ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=125'), + ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=126'), + ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=127'), + ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=128'), + + # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. 
+ + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ + '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.1.1, ECB and AES-128'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ + 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.1.3, ECB and AES-192'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ + 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.1.3, ECB and AES-256'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '7649abac8119b246cee98e9b12e9197d'+'5086cb9b507219ee95db113a917678b2'+ + '73bed6b8e3c1743b7116e69e22229516'+'3ff1caa1681fac09120eca307586e1a7', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.2.1, CBC and AES-128', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '4f021db243bc633d7178183a9fa071e8'+'b4d9ada9ad7dedf4e5e738763f69145a'+ + '571b242012fb7ae07fa9baac3df102e0'+'08b0e27988598881d920a9e64f5615cd', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.2.1, CBC and AES-192', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f58c4c04d6e5f1ba779eabfb5f7bfbd6'+'9cfc4e967edb808d679f777bc6702c7d'+ + '39f23369a9d9bacfa530e26304231461'+'b2eb05e2c39be9fcda6c19078c6a9d1b', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.2.1, CBC and AES-256', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + # Skip CFB-1 since it is not supported by PyCrypto + + ('6bc1bee22e409f96e93d7e117393172aae2d','3b79424c9c0dd436bace9e0ed4586a4f32b9', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.3.7, CFB-8 and AES-128', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172aae2d','cda2521ef0a905ca44cd057cbf0d47a0678a', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.3.9, CFB-8 and AES-192', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172aae2d','dc1f1a8520a64db55fcc8ac554844e889700', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.3.11, CFB-8 and AES-256', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 
'3b3fd92eb72dad20333449f8e83cfb4a'+'c8a64537a0b3a93fcde3cdad9f1ce58b'+ + '26751f67a3cbb140b1808cf187a4f4df'+'c04b05357c5d1c0eeac4c66f9ff7f2e6', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.3.13, CFB-128 and AES-128', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'cdc80d6fddf18cab34c25909c99a4174'+'67ce7f7f81173621961a2b70171d3d7a'+ + '2e1e8a1dd59b88b1c8e60fed1efac4c9'+'c05f9f9ca9834fa042ae8fba584b09ff', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.3.15, CFB-128 and AES-192', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'dc7e84bfda79164b7ecd8486985d3860'+'39ffed143b28b1c832113c6331e5407b'+ + 'df10132415e54b92a13ed0a8267ae2f9'+'75a385741ab9cef82031623d55b1e471', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.3.17, CFB-128 and AES-256', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3b3fd92eb72dad20333449f8e83cfb4a'+'7789508d16918f03f53c52dac54ed825'+ + '9740051e9c5fecf64344f7a82260edcc'+'304c6528f659c77866a510d9c1d6ae5e', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.4.1, OFB and AES-128', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'cdc80d6fddf18cab34c25909c99a4174'+'fcc28b8d4c63837c09e81700c1100401'+ + '8d9a9aeac0f6596f559c6d4daf59a5f2'+'6d9f200857ca6c3e9cac524bd9acc92a', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.4.3, OFB and AES-192', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'dc7e84bfda79164b7ecd8486985d3860'+'4febdc6740d20b3ac88f6ad82a4fb08d'+ + '71ab47a086e86eedf39d1c5bba97c408'+'0126141d67f37be8538f5a8be740e484', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.4.5, OFB and AES-256', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '874d6191b620e3261bef6864990db6ce'+'9806f66b7970fdff8617187bb9fffdff'+ + '5ae4df3edbd5d35e5b4f09020db03eab'+'1e031dda2fbe03d1792170a0f3009cee', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.5.1, CTR and AES-128', + dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '1abc932417521ca24f2b0459fe7e6e0b'+'090339ec0aa6faefd5ccc2c6f4ce8e94'+ + '1e36b26bd1ebc670d1bd1d665620abf7'+'4f78a7f6d29809585a97daec58c6b050', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.5.3, CTR and AES-192', + dict(mode='CTR', ctr_params=dict(nbits=16, 
prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '601ec313775789a5b7a7f504bbf3d228'+'f443e3ca4d62b59aca84e990cacaf5c5'+ + '2b0930daa23de94ce87017ba2d84988d'+'dfc9c58db67aada613c2dd08457941a6', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.5.5, CTR and AES-256', + dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + # RFC 3686 test vectors + # This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. + ('53696e676c6520626c6f636b206d7367', 'e4095d4fb7a7b3792d6175a3261311b8', + 'ae6852f8121067cc4bf7a5765577f39e', + 'RFC 3686 Test Vector #1: Encrypting 16 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000030'+'0000000000000000'))), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', + '7e24067817fae0d743d6ce1f32539163', + 'RFC 3686 Test Vector #2: Encrypting 32 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='006cb6db'+'c0543b59da48d90b'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', + 'c1cf48a89f2ffdd9cf4652e9efdb72d7'+'4540a42bde6d7836d59a5ceaaef31053'+'25b2072f', + '7691be035e5020a8ac6e618529f9a0dc', + 'RFC 3686 Test Vector #3: Encrypting 36 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00e0017b'+'27777f3f4a1786f0'))), + ('53696e676c6520626c6f636b206d7367', + '4b55384fe259c9c84e7935a003cbe928', + '16af5b145fc9f579c175f93e3bfb0eed'+'863d06ccfdb78515', + 'RFC 3686 Test Vector #4: Encrypting 16 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000048'+'36733c147d6d93cb'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', + '453243fc609b23327edfaafa7131cd9f'+'8490701c5ad4a79cfc1fe0ff42f4fb00', + '7c5cb2401b3dc33c19e7340819e0f69c'+'678c3db8e6f6a91a', + 'RFC 3686 Test Vector #5: Encrypting 32 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0096b03b'+'020c6eadc2cb500d'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', + '96893fc55e5c722f540b7dd1ddf7e758'+'d288bc95c69165884536c811662f2188'+'abee0935', + '02bf391ee8ecb159b959617b0965279b'+'f59b60a786d3e0fe', + 'RFC 3686 Test Vector #6: Encrypting 36 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0007bdfd'+'5cbd60278dcc0912'))), + ('53696e676c6520626c6f636b206d7367', + '145ad01dbf824ec7560863dc71e3e0c0', + '776beff2851db06f4c8a0542c8696f6c'+'6a81af1eec96b4d37fc1d689e6c1c104', + 'RFC 3686 Test Vector #7: Encrypting 16 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000060'+'db5672c97aa8f0b2'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', + 'f05e231b3894612c49ee000b804eb2a9'+'b8306b508f839d6a5530831d9344af1c', + 'f6d66d6bd52d59bb0796365879eff886'+'c66dd51a5b6a99744b50590c87a23884', + 'RFC 3686 Test Vector #8: Encrypting 32 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00faac24'+'c1585ef15a43d875'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', 
+ 'eb6c52821d0bbbf7ce7594462aca4faa'+'b407df866569fd07f48cc0b583d6071f'+'1ec0e6b8', + 'ff7a617ce69148e4f1726e2f43581de2'+'aa62d9f805532edff1eed687fb54153d', + 'RFC 3686 Test Vector #9: Encrypting 36 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='001cc5b7'+'51a51d70a1c11148'))), + + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # + # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # + # As result, the content of the file 'pt' is encrypted with a key derived + # from 'secret_passphrase' and written to file 'ct'. + # Test vectors must be extracted from 'ct', which is a collection of + # TLVs (see RFC4880 for all details): + # - the encrypted data (with the encrypted IV as prefix) is the payload + # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). + # This is the ciphertext in the test vector. + # - inside the encrypted part, there is a further layer of TLVs. One must + # look for tag 11 (Literal Data Packet); in its payload, after a short + # but time dependent header, there is the content of file 'pt'. + # In the test vector, the plaintext is the complete set of TLVs that gets + # encrypted. It is not just the content of 'pt'. + # - the key is the leftmost 16 bytes of the SHA1 digest of the password. + # The test vector contains such shortened digest. + # + # Note that encryption uses a clear IV, and decryption an encrypted IV + ( 'ac18620270744fb4f647426c61636b4361745768697465436174', # Plaintext, 'BlackCatWhiteCat' + 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb', # Ciphertext + '5baa61e4c9b93f3f0682250b6cf8331b', # Key (hash of 'password') + 'GPG Test Vector #1', + dict(mode='OPENPGP', iv='3d7d3e62282add7eb203eeba5c800733', encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' ) ), +] + +def get_tests(config={}): + from Crypto.Cipher import AES + from .common import make_block_tests + return make_block_tests(AES, "AES", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py new file mode 100644 index 0000000..eadcca4 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC2""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 + +import unittest +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. +test_data = [ + # Test vectors from RFC 2268 + + # 63-bit effective key length + ('0000000000000000', 'ebb773f993278eff', '0000000000000000', + 'RFC2268-1', dict(effective_keylen=63)), + + # 64-bit effective key length + ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', + 'RFC2268-2', dict(effective_keylen=64)), + ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', + 'RFC2268-3', dict(effective_keylen=64)), + ('0000000000000000', '61a8a244adacccf0', '88', + 'RFC2268-4', dict(effective_keylen=64)), + ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', + 'RFC2268-5', dict(effective_keylen=64)), + ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', + 'RFC2268-6', dict(effective_keylen=64)), + + # 128-bit effective key length + ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', + "RFC2268-7", dict(effective_keylen=128)), + ('0000000000000000', '5b78d3a43dfff1f1', + '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', + "RFC2268-8", dict(effective_keylen=129)), + + # Test vectors from PyCrypto 2.0.1's testdata.py + # 1024-bit effective key length + ('0000000000000000', '624fb3e887419e48', '5068696c6970476c617373', + 'PCTv201-0'), + ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', + 'PCTv201-1'), + ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', + 'PCTv201-2'), + ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', + 'PCTv201-3'), + ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', + 'PCTv201-4'), + ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', + 'PCTv201-5'), + ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', + 'PCTv201-6'), + ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', + 'PCTv201-7'), + ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-8'), + ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-9'), + ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-10'), + ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-11'), + ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', + 'PCTv201-12'), + ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', + 'PCTv201-13'), + ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', + 'PCTv201-14'), + ('0011223344556677', '584644c34503122c', '53e5ffe553', + 'PCTv201-15'), +] + +class BufferOverflowTest(unittest.TestCase): + # Test a buffer overflow found in older versions of PyCrypto + + def setUp(self): + global ARC2 + from Crypto.Cipher import ARC2 + + def runTest(self): + """ARC2 with keylength > 128""" + key = "x" * 16384 + mode = ARC2.MODE_ECB + self.assertRaises(ValueError, ARC2.new, key, mode) + +def 
get_tests(config={}): + from Crypto.Cipher import ARC2 + from .common import make_block_tests + + tests = make_block_tests(ARC2, "ARC2", test_data) + tests.append(BufferOverflowTest()) + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py new file mode 100644 index 0000000..2ab400e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key[, description]) tuples. +test_data = [ + # Test vectors from Eric Rescorla's message with the subject + # "RC4 compatibility testing", sent to the cipherpunks mailing list on + # September 13, 1994. 
+ # http://cypherpunks.venona.com/date/1994/09/msg00420.html + + ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', + 'Test vector 0'), + + ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', + 'Test vector 1'), + + ('0000000000000000', 'de188941a3375d3a', '0000000000000000', + 'Test vector 2'), + + ('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', + 'Test vector 3'), + + ('01' * 512, + '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' + + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' + + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' + + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' + + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' + + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' + + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' + + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' + + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' + + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' + + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' + + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' + + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' + + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' + + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' + + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', + '0123456789abcdef', + "Test vector 4"), +] + +def get_tests(config={}): + from Crypto.Cipher import ARC4 + from .common import make_stream_tests + return make_stream_tests(ARC4, "ARC4", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py new file mode 100644 index 0000000..f0e6592 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Blowfish""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. 
+test_data = [ + # Test vectors from http://www.schneier.com/code/vectors.txt + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), + ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), + ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), + ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), + ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), + ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), + ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), + ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), + ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), + ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), + ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), + ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), + ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), + ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), + ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), + ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), + ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), + ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), + ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), + ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), + ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), + ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), + ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), + ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), + ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), + ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), + ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), + ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), + ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), + ('0000000000000000', '245946885754369a', '0123456789abcdef'), + ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), + ('fedcba9876543210', 'f9ad597c49db005e', 'f0'), + ('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), + ('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), + ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), + ('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'), + ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), + ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), + ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), + ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), + ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), + ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), + ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), + ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), + ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), + ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), + ('fedcba9876543210', '93142887ee3be15c', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), + ('fedcba9876543210', '03429e838ce2d14b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), + ('fedcba9876543210', 'a4299e27469ff67b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), + ('fedcba9876543210', 'afd5aed1c1bc96a8', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), 
+ ('fedcba9876543210', '10851c0e3858da9f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), + ('fedcba9876543210', 'e6f51ed79b9db21f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), + ('fedcba9876543210', '64a6e14afd36b46f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), + ('fedcba9876543210', '80c7d7d45a5479ad', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), + ('fedcba9876543210', '05044b62fa52d080', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), +] + +def get_tests(config={}): + from Crypto.Cipher import Blowfish + from .common import make_block_tests + return make_block_tests(Blowfish, "Blowfish", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_CAST.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_CAST.py new file mode 100644 index 0000000..fee5c99 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_CAST.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.CAST""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors from RFC 2144, B.1 + ('0123456789abcdef', '238b4fe5847e44b2', + '0123456712345678234567893456789a', + '128-bit key'), + + ('0123456789abcdef', 'eb6a711a2c02271b', + '01234567123456782345', + '80-bit key'), + + ('0123456789abcdef', '7ac816d16e9b302e', + '0123456712', + '40-bit key'), +] + +def get_tests(config={}): + from Crypto.Cipher import CAST + from .common import make_block_tests + return make_block_tests(CAST, "CAST", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES.py new file mode 100644 index 0000000..f31678f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +import unittest + +# This is a list of (plaintext, ciphertext, key, description) tuples. +SP800_17_B1_KEY = '01' * 8 +SP800_17_B2_PT = '00' * 8 +test_data = [ + # Test vectors from Appendix A of NIST SP 800-17 + # "Modes of Operation Validation System (MOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf + + # Appendix A - "Sample Round Outputs for the DES" + ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', + "NIST SP800-17 A"), + + # Table B.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #16'), + ('0000400000000000', '889de068a16f0be6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 
#20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #23'), + ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #39'), + ('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #51'), + ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #60'), + ('0000000000000004', 
'd2fd8867d50d2dfe', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #63'), + + # Table B.2 - Variable Key Known Answer Test + (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', + 'NIST SP800-17 B.2 #0'), + (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', + 'NIST SP800-17 B.2 #1'), + (SP800_17_B2_PT, '7ad16ffb79c45926', '2001010101010101', + 'NIST SP800-17 B.2 #2'), + (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', + 'NIST SP800-17 B.2 #3'), + (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', + 'NIST SP800-17 B.2 #4'), + (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', + 'NIST SP800-17 B.2 #5'), + (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', + 'NIST SP800-17 B.2 #6'), + (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', + 'NIST SP800-17 B.2 #7'), + (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', + 'NIST SP800-17 B.2 #8'), + (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', + 'NIST SP800-17 B.2 #9'), + (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', + 'NIST SP800-17 B.2 #10'), + (SP800_17_B2_PT, '178c83ce2b399d94', '0108010101010101', + 'NIST SP800-17 B.2 #11'), + (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', + 'NIST SP800-17 B.2 #12'), + (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', + 'NIST SP800-17 B.2 #13'), + (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', + 'NIST SP800-17 B.2 #14'), + (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', + 'NIST SP800-17 B.2 #15'), + (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', + 'NIST SP800-17 B.2 #16'), + (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', + 'NIST SP800-17 B.2 #17'), + (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', + 'NIST SP800-17 B.2 #18'), + (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', + 'NIST SP800-17 B.2 #19'), + (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', + 'NIST SP800-17 B.2 #20'), + (SP800_17_B2_PT, '5199c29a52c9f059', '0101018001010101', + 'NIST SP800-17 B.2 #21'), + (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', + 'NIST SP800-17 B.2 #22'), + (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', + 'NIST SP800-17 B.2 #23'), + (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', + 'NIST SP800-17 B.2 #24'), + (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', + 'NIST SP800-17 B.2 #25'), + (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', + 'NIST SP800-17 B.2 #26'), + (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', + 'NIST SP800-17 B.2 #27'), + (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', + 'NIST SP800-17 B.2 #28'), + (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', + 'NIST SP800-17 B.2 #29'), + (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', + 'NIST SP800-17 B.2 #30'), + (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', + 'NIST SP800-17 B.2 #31'), + (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', + 'NIST SP800-17 B.2 #32'), + (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', + 'NIST SP800-17 B.2 #33'), + (SP800_17_B2_PT, '5570530829705592', '0101010102010101', + 'NIST SP800-17 B.2 #34'), + (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', + 'NIST SP800-17 B.2 #35'), + (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', + 'NIST SP800-17 B.2 #36'), + (SP800_17_B2_PT, '7a9be42f2009a892', 
'0101010101200101', + 'NIST SP800-17 B.2 #37'), + (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', + 'NIST SP800-17 B.2 #38'), + (SP800_17_B2_PT, '5495c6abf1e5df51', '0101010101080101', + 'NIST SP800-17 B.2 #39'), + (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', + 'NIST SP800-17 B.2 #40'), + (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', + 'NIST SP800-17 B.2 #41'), + (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', + 'NIST SP800-17 B.2 #42'), + (SP800_17_B2_PT, '14c1d7c1cffec79e', '0101010101014001', + 'NIST SP800-17 B.2 #43'), + (SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', + 'NIST SP800-17 B.2 #44'), + (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', + 'NIST SP800-17 B.2 #45'), + (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', + 'NIST SP800-17 B.2 #46'), + (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', + 'NIST SP800-17 B.2 #47'), + (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', + 'NIST SP800-17 B.2 #48'), + (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', + 'NIST SP800-17 B.2 #49'), + (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', + 'NIST SP800-17 B.2 #50'), + (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', + 'NIST SP800-17 B.2 #51'), + (SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', + 'NIST SP800-17 B.2 #52'), + (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', + 'NIST SP800-17 B.2 #53'), + (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', + 'NIST SP800-17 B.2 #54'), + (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', + 'NIST SP800-17 B.2 #55'), +] + +class RonRivestTest(unittest.TestCase): + """ Ronald L. Rivest's DES test, see + http://people.csail.mit.edu/rivest/Destest.txt + ABSTRACT + -------- + + We present a simple way to test the correctness of a DES implementation: + Use the recurrence relation: + + X0 = 9474B8E8C73BCA7D (hexadecimal) + + X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) + + to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here + E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes + the DES decryption of X using key K. If you obtain + + X16 = 1B1A2DDB4C642438 + + your implementation does not have any of the 36,568 possible single-fault + errors described herein. + """ + def runTest(self): + from Crypto.Cipher import DES + from binascii import b2a_hex + + X = [] + X[0:] = [b('\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D')] + + for i in range(16): + c = DES.new(X[i],DES.MODE_ECB) + if not (i&1): # (num&1) returns 1 for odd numbers + X[i+1:] = [c.encrypt(X[i])] # even + else: + X[i+1:] = [c.decrypt(X[i])] # odd + + self.assertEqual(b2a_hex(X[16]), + b2a_hex(b('\x1B\x1A\x2D\xDB\x4C\x64\x24\x38'))) + +def get_tests(config={}): + from Crypto.Cipher import DES + from .common import make_block_tests + return make_block_tests(DES, "DES", test_data) + [RonRivestTest()] + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES3.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES3.py new file mode 100644 index 0000000..83d1f47 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_DES3.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES3""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key, description) tuples. +SP800_20_A1_KEY = '01' * 24 +SP800_20_A2_PT = '00' * 8 +test_data = [ + # Test vector from Appendix B of NIST SP 800-67 + # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block + # Cipher" + # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + ('54686520717566636b2062726f776e20666f78206a756d70', + 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', + '0123456789abcdef23456789abcdef01456789abcdef0123', + 'NIST SP800-67 B.1'), + + # Test vectors "The Multi-block Message Test (MMT) for DES and TDES" + # http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf + ('326a494cd33fe756', 'b22b8d66de970692', + '627f460e08104a1043cd265d5840eaf1313edf97df2a8a8c', + 'DESMMT #1', dict(mode='CBC', iv='8e29f75ea77e5475')), + + ('84401f78fe6c10876d8ea23094ea5309', '7b1f7c7e3b1c948ebd04a75ffba7d2f5', + '37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae', + 'DESMMT #2', dict(mode='CBC', iv='3d1de3cc132e3b65')), + + # Test vectors from Appendix A of NIST SP 800-20 + # "Modes of Operation Validation System for the Triple Data Encryption + # Algorithm (TMOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-20/800-20.pdf + + # Table A.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_20_A1_KEY, + 'NIST SP800-20 
A.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #16'), + ('0000400000000000', '889de068a16f0be6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #23'), + ('0000008000000000', '750d079407521363', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #39'), + ('0000000000800000', '25eb5fc3f8cf0621', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #51'), + ('0000000000000800', 
'8405d1abe24fb942', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #60'), + ('0000000000000004', 'd2fd8867d50d2dfe', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #63'), + + # Table A.2 - Variable Key Known Answer Test + (SP800_20_A2_PT, '95a8d72813daa94d', '8001010101010101'*3, + 'NIST SP800-20 A.2 #0'), + (SP800_20_A2_PT, '0eec1487dd8c26d5', '4001010101010101'*3, + 'NIST SP800-20 A.2 #1'), + (SP800_20_A2_PT, '7ad16ffb79c45926', '2001010101010101'*3, + 'NIST SP800-20 A.2 #2'), + (SP800_20_A2_PT, 'd3746294ca6a6cf3', '1001010101010101'*3, + 'NIST SP800-20 A.2 #3'), + (SP800_20_A2_PT, '809f5f873c1fd761', '0801010101010101'*3, + 'NIST SP800-20 A.2 #4'), + (SP800_20_A2_PT, 'c02faffec989d1fc', '0401010101010101'*3, + 'NIST SP800-20 A.2 #5'), + (SP800_20_A2_PT, '4615aa1d33e72f10', '0201010101010101'*3, + 'NIST SP800-20 A.2 #6'), + (SP800_20_A2_PT, '2055123350c00858', '0180010101010101'*3, + 'NIST SP800-20 A.2 #7'), + (SP800_20_A2_PT, 'df3b99d6577397c8', '0140010101010101'*3, + 'NIST SP800-20 A.2 #8'), + (SP800_20_A2_PT, '31fe17369b5288c9', '0120010101010101'*3, + 'NIST SP800-20 A.2 #9'), + (SP800_20_A2_PT, 'dfdd3cc64dae1642', '0110010101010101'*3, + 'NIST SP800-20 A.2 #10'), + (SP800_20_A2_PT, '178c83ce2b399d94', '0108010101010101'*3, + 'NIST SP800-20 A.2 #11'), + (SP800_20_A2_PT, '50f636324a9b7f80', '0104010101010101'*3, + 'NIST SP800-20 A.2 #12'), + (SP800_20_A2_PT, 'a8468ee3bc18f06d', '0102010101010101'*3, + 'NIST SP800-20 A.2 #13'), + (SP800_20_A2_PT, 'a2dc9e92fd3cde92', '0101800101010101'*3, + 'NIST SP800-20 A.2 #14'), + (SP800_20_A2_PT, 'cac09f797d031287', '0101400101010101'*3, + 'NIST SP800-20 A.2 #15'), + (SP800_20_A2_PT, '90ba680b22aeb525', '0101200101010101'*3, + 'NIST SP800-20 A.2 #16'), + (SP800_20_A2_PT, 'ce7a24f350e280b6', '0101100101010101'*3, + 'NIST SP800-20 A.2 #17'), + (SP800_20_A2_PT, '882bff0aa01a0b87', '0101080101010101'*3, + 'NIST SP800-20 A.2 #18'), + (SP800_20_A2_PT, '25610288924511c2', '0101040101010101'*3, + 'NIST SP800-20 A.2 #19'), + (SP800_20_A2_PT, 'c71516c29c75d170', '0101020101010101'*3, + 'NIST SP800-20 A.2 #20'), + (SP800_20_A2_PT, '5199c29a52c9f059', '0101018001010101'*3, + 'NIST SP800-20 A.2 #21'), + (SP800_20_A2_PT, 'c22f0a294a71f29f', '0101014001010101'*3, + 'NIST SP800-20 A.2 #22'), + (SP800_20_A2_PT, 'ee371483714c02ea', '0101012001010101'*3, + 'NIST SP800-20 A.2 #23'), + (SP800_20_A2_PT, 'a81fbd448f9e522f', '0101011001010101'*3, + 'NIST SP800-20 A.2 #24'), + (SP800_20_A2_PT, '4f644c92e192dfed', '0101010801010101'*3, + 'NIST SP800-20 A.2 #25'), + (SP800_20_A2_PT, '1afa9a66a6df92ae', '0101010401010101'*3, + 'NIST SP800-20 A.2 #26'), + (SP800_20_A2_PT, 'b3c1cc715cb879d8', '0101010201010101'*3, + 'NIST SP800-20 
A.2 #27'), + (SP800_20_A2_PT, '19d032e64ab0bd8b', '0101010180010101'*3, + 'NIST SP800-20 A.2 #28'), + (SP800_20_A2_PT, '3cfaa7a7dc8720dc', '0101010140010101'*3, + 'NIST SP800-20 A.2 #29'), + (SP800_20_A2_PT, 'b7265f7f447ac6f3', '0101010120010101'*3, + 'NIST SP800-20 A.2 #30'), + (SP800_20_A2_PT, '9db73b3c0d163f54', '0101010110010101'*3, + 'NIST SP800-20 A.2 #31'), + (SP800_20_A2_PT, '8181b65babf4a975', '0101010108010101'*3, + 'NIST SP800-20 A.2 #32'), + (SP800_20_A2_PT, '93c9b64042eaa240', '0101010104010101'*3, + 'NIST SP800-20 A.2 #33'), + (SP800_20_A2_PT, '5570530829705592', '0101010102010101'*3, + 'NIST SP800-20 A.2 #34'), + (SP800_20_A2_PT, '8638809e878787a0', '0101010101800101'*3, + 'NIST SP800-20 A.2 #35'), + (SP800_20_A2_PT, '41b9a79af79ac208', '0101010101400101'*3, + 'NIST SP800-20 A.2 #36'), + (SP800_20_A2_PT, '7a9be42f2009a892', '0101010101200101'*3, + 'NIST SP800-20 A.2 #37'), + (SP800_20_A2_PT, '29038d56ba6d2745', '0101010101100101'*3, + 'NIST SP800-20 A.2 #38'), + (SP800_20_A2_PT, '5495c6abf1e5df51', '0101010101080101'*3, + 'NIST SP800-20 A.2 #39'), + (SP800_20_A2_PT, 'ae13dbd561488933', '0101010101040101'*3, + 'NIST SP800-20 A.2 #40'), + (SP800_20_A2_PT, '024d1ffa8904e389', '0101010101020101'*3, + 'NIST SP800-20 A.2 #41'), + (SP800_20_A2_PT, 'd1399712f99bf02e', '0101010101018001'*3, + 'NIST SP800-20 A.2 #42'), + (SP800_20_A2_PT, '14c1d7c1cffec79e', '0101010101014001'*3, + 'NIST SP800-20 A.2 #43'), + (SP800_20_A2_PT, '1de5279dae3bed6f', '0101010101012001'*3, + 'NIST SP800-20 A.2 #44'), + (SP800_20_A2_PT, 'e941a33f85501303', '0101010101011001'*3, + 'NIST SP800-20 A.2 #45'), + (SP800_20_A2_PT, 'da99dbbc9a03f379', '0101010101010801'*3, + 'NIST SP800-20 A.2 #46'), + (SP800_20_A2_PT, 'b7fc92f91d8e92e9', '0101010101010401'*3, + 'NIST SP800-20 A.2 #47'), + (SP800_20_A2_PT, 'ae8e5caa3ca04e85', '0101010101010201'*3, + 'NIST SP800-20 A.2 #48'), + (SP800_20_A2_PT, '9cc62df43b6eed74', '0101010101010180'*3, + 'NIST SP800-20 A.2 #49'), + (SP800_20_A2_PT, 'd863dbb5c59a91a0', '0101010101010140'*3, + 'NIST SP800-20 A.2 #50'), + (SP800_20_A2_PT, 'a1ab2190545b91d7', '0101010101010120'*3, + 'NIST SP800-20 A.2 #51'), + (SP800_20_A2_PT, '0875041e64c570f7', '0101010101010110'*3, + 'NIST SP800-20 A.2 #52'), + (SP800_20_A2_PT, '5a594528bebef1cc', '0101010101010108'*3, + 'NIST SP800-20 A.2 #53'), + (SP800_20_A2_PT, 'fcdb3291de21f0c0', '0101010101010104'*3, + 'NIST SP800-20 A.2 #54'), + (SP800_20_A2_PT, '869efd7f9f265a09', '0101010101010102'*3, + 'NIST SP800-20 A.2 #55'), + + # "Two-key 3DES". Test vector generated using PyCrypto 2.0.1. + # This test is designed to test the DES3 API, not the correctness of the + # output. + ('21e81b7ade88a259', '5c577d4d9b20c0f8', + '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), + + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # For an explanation, see test_AES.py . 
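Editor's note on the "Two-key 3DES" entry above: DES3 accepts a 16-byte key K1||K2 and treats it as the two-key variant, effectively the 24-byte key K1||K2||K1, with ECB encryption being E_K1(D_K2(E_K1(P))). A minimal standalone check of that keying relation; the subkeys below are arbitrary illustrations, not taken from the vectors:

from binascii import unhexlify
from Crypto.Cipher import DES, DES3

k1 = unhexlify('0123456789abcdef')   # arbitrary example subkeys
k2 = unhexlify('fedcba9876543210')
pt = unhexlify('21e81b7ade88a259')   # plaintext of the "Two-key 3DES" vector

two_key   = DES3.new(k1 + k2,      DES3.MODE_ECB).encrypt(pt)
three_key = DES3.new(k1 + k2 + k1, DES3.MODE_ECB).encrypt(pt)
ede = DES.new(k1, DES.MODE_ECB).encrypt(
          DES.new(k2, DES.MODE_ECB).decrypt(
              DES.new(k1, DES.MODE_ECB).encrypt(pt)))
assert two_key == three_key == ede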
+ ( 'ac1762037074324fb53ba3596f73656d69746556616c6c6579', # Plaintext, 'YosemiteValley' + '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d', # Ciphertext + '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2', # Key (hash of 'BearsAhead') + 'GPG Test Vector #1', + dict(mode='OPENPGP', iv='cd47e2afb8b7e4b0', encrypted_iv='6a7eef0b58050e8b904a' ) ), +] + +def get_tests(config={}): + from Crypto.Cipher import DES3 + from .common import make_block_tests + return make_block_tests(DES3, "DES3", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_XOR.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_XOR.py new file mode 100644 index 0000000..ffd082c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_XOR.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/XOR.py: Self-test for the XOR "cipher" +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.XOR""" + +import unittest + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors written from scratch. (Nobody posts XOR test vectors on the web? How disappointing.) + ('01', '01', + '00', + 'zero key'), + + ('0102040810204080', '0003050911214181', + '01', + '1-byte key'), + + ('0102040810204080', 'cda8c8a2dc8a8c2a', + 'ccaa', + '2-byte key'), + + ('ff'*64, 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0'*2, + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '32-byte key'), +] + +class TruncationSelfTest(unittest.TestCase): + + def runTest(self): + """33-byte key (should raise ValueError under current implementation)""" + # Crypto.Cipher.XOR previously truncated its inputs at 32 bytes. Now + # it should raise a ValueError if the length is too long. 
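For reference alongside the XOR vectors above: Crypto.Cipher.XOR is a plain repeating-key XOR, so every table entry can be reproduced by hand. A small sketch cross-checking the 2-byte-key vector against the module (illustrative only, not part of the test suite):

from binascii import unhexlify
from Crypto.Cipher import XOR

key = unhexlify('ccaa')
pt  = unhexlify('0102040810204080')
expected = unhexlify('cda8c8a2dc8a8c2a')

by_hand = bytes(p ^ key[i % len(key)] for i, p in enumerate(pt))  # repeating-key XOR
assert by_hand == expected                   # matches the table entry
assert XOR.new(key).encrypt(pt) == expected  # and the module agrees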
+ self.assertRaises(ValueError, XOR.new, "x"*33) + +def get_tests(config={}): + global XOR + from Crypto.Cipher import XOR + from .common import make_stream_tests + return make_stream_tests(XOR, "XOR", test_data) + [TruncationSelfTest()] + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py new file mode 100644 index 0000000..b6f3802 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +import sys + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.Cipher import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import * + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + print(clean) + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_15_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 v1.5. 
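The hex strings in the tuples that follow are written with embedded spaces and newlines for readability; the rws()/t2b() helpers defined above strip that whitespace and decode the result to bytes. A quick standalone illustration of the equivalent transformation (hypothetical input):

from binascii import a2b_hex

raw = '''3f dc
         fd 3c'''                              # hex broken up for readability
clean = ''.join(raw.split())                   # equivalent to what rws() does
assert a2b_hex(clean) == b'\x3f\xdc\xfd\x3c'   # what t2b() would return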
+ # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data + + _testData = ( + + # + # Generated with openssl 0.9.8o + # + ( + # Private key + '''-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY +W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA +zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB +AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk +y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 +atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG +uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k +IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 +d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ +8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a +1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT +NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs +HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= +-----END RSA PRIVATE KEY-----''', + # Plaintext + '''THIS IS PLAINTEXT\x0A''', + # Ciphertext + '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 + 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c + 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 + 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 + f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 + 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f + da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 + 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', + # Random data + '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d + 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a + 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc + f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d + 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c + bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 + 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' + ), + ) + + def testEncrypt1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + # The real test + key._randfunc = randGen(t2b(test[3])) + cipher = PKCS.new(key) + ct = cipher.encrypt(b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fail if plaintext is too long + pt = '\x00'*(128-11+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testVerify1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # The real test + cipher = PKCS.new(key) + pt = cipher.decrypt(t2b(test[2]), "---") + self.assertEqual(pt, b(test[1])) + + def testVerify2(self): + # Verify that decryption fails if ciphertext is not as long as + # RSA modulus + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") + self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") + + # Verify that decryption fails if there are less then 8 non-zero padding + # bytes + pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) + ct = self.key1024.encrypt(pt, 0)[0] + ct = b('\x00'*(128-len(ct))) + ct + self.assertEqual("---", cipher.decrypt(ct, "---")) + + def testEncryptVerify1(self): + # Encrypt/Verify messages of length [0..RSAlen-11] + # and 
therefore padding [8..117] + for pt_len in range(0,128-11+1): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct, "---") + self.assertEqual(pt,pt2) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py new file mode 100644 index 0000000..3613415 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
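The v1.5 limits exercised above (plaintexts up to k - 11 bytes, at least 8 non-zero padding bytes) follow from the shape of the encryption block EB = 0x00 || 0x02 || PS || 0x00 || M, where PS is non-zero random filler. A rough sketch of that padding step, for illustration only and not the library's implementation:

import os

def pkcs1_v1_5_pad(message, k):
    """Build 0x00 0x02 || PS || 0x00 || message for a k-byte modulus."""
    if len(message) > k - 11:
        raise ValueError("message too long")
    ps_len = k - 3 - len(message)          # at least 8 because of the check above
    ps = b''
    while len(ps) < ps_len:
        ps += bytes([x for x in os.urandom(16) if x != 0])   # PS must be non-zero
    return b'\x00\x02' + ps[:ps_len] + b'\x00' + message

eb = pkcs1_v1_5_pad(b'THIS IS PLAINTEXT\n', 128)
assert len(eb) == 128
assert eb[:2] == b'\x00\x02' and 0 not in eb[2:2 + 128 - 3 - 18]   # PS is non-zero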
+# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +from Crypto.Util.py3compat import * +from Crypto.PublicKey import RSA +from Crypto.Cipher import PKCS1_OAEP as PKCS +from Crypto.Hash import MD2,MD5,SHA as SHA1,SHA256,RIPEMD +from Crypto import Random + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = rws(t) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_OAEP_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 OAEP + # Each tuple is made up by: + # Item #0: dictionary with RSA key component + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data (=seed) + # Item #4: hash object + + _testData = ( + + # + # From in oaep-int.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', + # Public key + 'e':'11', + # In the test vector, only p and q were given... + # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 + 668b42784813801579641b29410b3c7998d6bc465745e5c3 + 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 + 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef + 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b + 8883fe4463a4bc85b1cb3c1''' + } + , + # Plaintext + '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', + # Ciphertext + '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', + # Random + '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 + f0 6c b5 8f''', + # Hash + SHA1, + ), + + # + # From in oaep-vect.txt to be found in Example 1.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 + 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab + c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 + 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 + f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 + c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 + 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 + 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', + 'e':'''01 00 01''', + 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c + 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd + 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b + 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 + fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 + 
2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf + b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de + f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' + } + , + # Plaintext + '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 + 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', + # Ciphertext + '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f + 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb + 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 + 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f + a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 + d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 + d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c + 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', + # Random + '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 + dd a0 a5 ef''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 2.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 + e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e + 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba + 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d + 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d + de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 + 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 + 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f + 45''', + 'e':'''01 00 01''', + 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa + 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 + c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 + 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc + d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 + 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 + 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 + 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 24 62 fe 39''' + }, + # Plaintext + '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 + 52 b5 8c f1 d9 2f ec 57 0b ee e7''', + # Ciphertext + '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 + 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a + 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a + ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 + 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d + 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a + bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f + ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 + 0e''', + # Random + '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 + e7 1b 17 82''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 10.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d + db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 + df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 + 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 + 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d + 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f + a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 + 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 + 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a + 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 + c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 + 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef + a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 + 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 + d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc + 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', + 'e':'''01 00 01''', + 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 + 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 + 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 
5f c0 + d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d + 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 + 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be + be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 + 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 + 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 + 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 + fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 + 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 + 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 + 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 + 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e + 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' + }, + # Plaintext + '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 + b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', + # Ciphertext + '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 + 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 + ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee + e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 + 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a + 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc + c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb + ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 + 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec + 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 + fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 + 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 + ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 + ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 + 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 + bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', + # Random + '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 + aa 63 bd 33''', + SHA1 + ), + ) + + def testEncrypt1(self): + # Verify encryption using all test vectors + for test in self._testData: + # Build the key + comps = [ int(rws(test[0][x]),16) for x in ('n','e') ] + key = RSA.construct(comps) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + # The real test + key._randfunc = randGen(t2b(test[3])) + cipher = PKCS.new(key, test[4]) + ct = cipher.encrypt(t2b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fails if plaintext is too long + pt = '\x00'*(128-2*20-2+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testDecrypt1(self): + # Verify decryption using all test vectors + for test in self._testData: + # Build the key + comps = [ int(rws(test[0][x]),16) for x in ('n','e','d') ] + key = RSA.construct(comps) + # The real test + cipher = PKCS.new(key, test[4]) + pt = cipher.decrypt(t2b(test[2])) + self.assertEqual(pt, t2b(test[1])) + + def testDecrypt2(self): + # Simplest possible negative tests + for ct_size in (127,128,129): + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) + + def testEncryptDecrypt1(self): + # Encrypt/Decrypt messages of length [0..128-2*20-2] + for pt_len in range(0,128-2*20-2): + pt = self.rng(pt_len) + ct = PKCS.encrypt(pt, self.key1024) + pt2 = PKCS.decrypt(ct, self.key1024) + self.assertEqual(pt,pt2) + + def testEncryptDecrypt1(self): + # Helper function to monitor what's requested from RNG + global asked + def localRng(N): + global asked + asked += N + return self.rng(N) + # Verify that OAEP is friendly to all hashes + for hashmod in 
(MD2,MD5,SHA1,SHA256,RIPEMD): + # Verify that encrypt() asks for as many random bytes + # as the hash output size + asked = 0 + pt = self.rng(40) + self.key1024._randfunc = localRng + cipher = PKCS.new(self.key1024, hashmod) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + self.assertTrue(asked > hashmod.digest_size) + + def testEncryptDecrypt2(self): + # Verify that OAEP supports labels + pt = self.rng(35) + xlabel = self.rng(22) + cipher = PKCS.new(self.key1024, label=xlabel) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + + def testEncryptDecrypt3(self): + # Verify that encrypt() uses the custom MGF + global mgfcalls + # Helper function to monitor what's requested from MGF + def newMGF(seed,maskLen): + global mgfcalls + mgfcalls += 1 + return bchr(0x00)*maskLen + mgfcalls = 0 + pt = self.rng(32) + cipher = PKCS.new(self.key1024, mgfunc=newMGF) + ct = cipher.encrypt(pt) + self.assertEqual(mgfcalls, 2) + self.assertEqual(cipher.decrypt(ct), pt) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_OAEP_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/__init__.py new file mode 100644 index 0000000..bb19f9b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/__init__.py: Self-test for hash modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
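The OAEP tests above encode the size relation mLen <= k - 2*hLen - 2 (hence "128 - 2*20 - 2" for a 1024-bit key with the default SHA-1) and show that the hash, the MGF and the label are all parameters of the scheme. A rough usage sketch along the same lines, using the same PyCrypto API as the tests; illustrative only:

from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Hash import SHA256

key = RSA.generate(1024)
cipher = PKCS1_OAEP.new(key, SHA256, label=b'example label')

max_len = 128 - 2 * SHA256.digest_size - 2      # 62 bytes with SHA-256
ct = cipher.encrypt(b'x' * max_len)
assert len(ct) == 128                           # ciphertext is always modulus-sized
assert cipher.decrypt(ct) == b'x' * max_len     # a cipher built with the same label decrypts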
+# =================================================================== + +"""Self-test for hash modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) + from Crypto.SelfTest.Hash import test_RIPEMD; tests += test_RIPEMD.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA; tests += test_SHA.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) + try: + from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) + except ImportError: + import sys + sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/common.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/common.py new file mode 100644 index 0000000..12e169f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/common.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +__revision__ = "$Id$" + +import sys +import unittest +import binascii +from Crypto.Util.py3compat import * + +# For compatibility with Python 2.1 and Python 2.2 +if sys.hexversion < 0x02030000: + # Python 2.1 doesn't have a dict() function + # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') + def dict(**kwargs): + return kwargs.copy() +else: + dict = dict + + +class HashDigestSizeSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + self.assertTrue(hasattr(self.hashmod, "digest_size")) + self.assertEqual(self.hashmod.digest_size, self.expected) + h = self.hashmod.new() + self.assertTrue(hasattr(h, "digest_size")) + self.assertEqual(h.digest_size, self.expected) + + +class HashSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, input): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.input = input + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + h = self.hashmod.new() + h.update(self.input) + + out1 = binascii.b2a_hex(h.digest()) + out2 = h.hexdigest() + + h = self.hashmod.new(self.input) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() + if sys.version_info[0] == 2: + self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() + else: + self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() + self.assertEqual(self.expected, out4) # h = .new(data); h.digest() + + # Verify that new() object method produces a fresh hash object + h2 = h.new() + h2.update(self.input) + out5 = binascii.b2a_hex(h2.digest()) + self.assertEqual(self.expected, out5) + +class HashTestOID(unittest.TestCase): + def __init__(self, hashmod, oid): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.oid = oid + + def runTest(self): + h = self.hashmod.new() + if self.oid==None: + try: + raised = 0 + a = h.oid + except AttributeError: + raised = 1 + self.assertEqual(raised,1) + else: + self.assertEqual(h.oid, self.oid) + +class MACSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected_dict, input, key, hashmods): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected_dict = expected_dict + self.input = input + self.key = key + self.hashmods = hashmods + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + for hashname in list(self.expected_dict.keys()): + hashmod = self.hashmods[hashname] + key = binascii.a2b_hex(b(self.key)) + data = binascii.a2b_hex(b(self.input)) + + # Strip whitespace from the expected string (which should be in lowercase-hex) + expected = b("".join(self.expected_dict[hashname].split())) + + h = self.hashmod.new(key, digestmod=hashmod) + h.update(data) + out1 = binascii.b2a_hex(h.digest()) + out2 = 
h.hexdigest() + + h = self.hashmod.new(key, data, hashmod) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # Test .copy() + h2 = h.copy() + h.update(b("blah blah blah")) # Corrupt the original hash object + out5 = binascii.b2a_hex(h2.digest()) # The copied hash object should return the correct result + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(expected, out1) + if sys.version_info[0] == 2: + self.assertEqual(expected, out2) + self.assertEqual(expected, out3) + else: + self.assertEqual(expected.decode(), out2) + self.assertEqual(expected.decode(), out3) + self.assertEqual(expected, out4) + self.assertEqual(expected, out5) + +def make_hash_tests(module, module_name, test_data, digest_size, oid=None): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (expected, input) = list(map(b,row[0:2])) + if len(row) < 3: + description = repr(input) + else: + description = row[2].encode('latin-1') + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(HashSelfTest(module, name, expected, input)) + if oid is not None: + oid = b(oid) + name = "%s #%d: digest_size" % (module_name, i+1) + tests.append(HashDigestSizeSelfTest(module, name, digest_size)) + tests.append(HashTestOID(module, oid)) + return tests + +def make_mac_tests(module, module_name, test_data, hashmods): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (key, data, results, description) = row + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(MACSelfTest(module, name, results, data, key, hashmods)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_HMAC.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_HMAC.py new file mode 100644 index 0000000..8a1123e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_HMAC.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/HMAC.py: Self-test for the HMAC module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.HMAC""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * + +# This is a list of (key, data, results, description) tuples. 
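The MAC harness above drives Crypto.Hash.HMAC through HMAC.new(key, data, digestmod) and compares hexdigest()/digest() against these tables. The same vectors can also be cross-checked against the standard library; for example RFC 2202 test case 1 (HMAC-MD5) from the list below:

import hmac, hashlib
from binascii import unhexlify

key = unhexlify('0b' * 16)
msg = unhexlify('4869205468657265')               # "Hi There"
tag = hmac.new(key, msg, hashlib.md5).hexdigest()
assert tag == '9294727a3638bb1c13f48ef8158bfc9d'  # RFC 2202 test case 1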
+test_data = [ + ## Test vectors from RFC 2202 ## + # Test that the default hashmod is MD5 + ('0b' * 16, + '4869205468657265', + dict(default='9294727a3638bb1c13f48ef8158bfc9d'), + 'default-is-MD5'), + + # Test case 1 (MD5) + ('0b' * 16, + '4869205468657265', + dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), + 'RFC 2202 #1-MD5 (HMAC-MD5)'), + + # Test case 1 (SHA1) + ('0b' * 20, + '4869205468657265', + dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), + 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), + + # Test case 2 + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(MD5='750c783e6ab0b503eaa86e310a5db738', + SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), + 'RFC 2202 #2 (HMAC-MD5/SHA1)'), + + # Test case 3 (MD5) + ('aa' * 16, + 'dd' * 50, + dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), + 'RFC 2202 #3-MD5 (HMAC-MD5)'), + + # Test case 3 (SHA1) + ('aa' * 20, + 'dd' * 50, + dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), + 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), + + # Test case 4 + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', + SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), + 'RFC 2202 #4 (HMAC-MD5/SHA1)'), + + # Test case 5 (MD5) + ('0c' * 16, + '546573742057697468205472756e636174696f6e', + dict(MD5='56461ef2342edc00f9bab995690efd4c'), + 'RFC 2202 #5-MD5 (HMAC-MD5)'), + + # Test case 5 (SHA1) + # NB: We do not implement hash truncation, so we only test the full hash here. + ('0c' * 20, + '546573742057697468205472756e636174696f6e', + dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), + 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), + + # Test case 6 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', + SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), + 'RFC 2202 #6 (HMAC-MD5/SHA1)'), + + # Test case 7 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' + + '53697a652044617461', + dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', + SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), + 'RFC 2202 #7 (HMAC-MD5/SHA1)'), + + ## Test vectors from RFC 4231 ## + # 4.2. Test Case 1 + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + '4869205468657265', + dict(SHA256=''' + b0344c61d8db38535ca8afceaf0bf12b + 881dc200c9833da726e9376c2e32cff7 + '''), + 'RFC 4231 #1 (HMAC-SHA256)'), + + # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC + # output. + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(SHA256=''' + 5bdcc146bf60754e6a042426089575c7 + 5a003f089d2739839dec58b964ec3843 + '''), + 'RFC 4231 #2 (HMAC-SHA256)'), + + # 4.4. Test Case 3 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('aa' * 20, + 'dd' * 50, + dict(SHA256=''' + 773ea91e36800e46854db8ebd09181a7 + 2959098b3ef8c122d9635514ced565fe + '''), + 'RFC 4231 #3 (HMAC-SHA256)'), + + # 4.5. Test Case 4 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(SHA256=''' + 82558a389a443c0ea4cc819899f2083a + 85f0faa3e578f8077a2e3ff46729665b + '''), + 'RFC 4231 #4 (HMAC-SHA256)'), + + # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. 
+ # + # Not included because we do not implement hash truncation. + # + + # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of + # SHA-384 and SHA-512). + ('aa' * 131, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(SHA256=''' + 60e431591ee0b67f0d8a26aacbf5b77f + 8e0bc6213728c5140546040f0ee37f54 + '''), + 'RFC 4231 #6 (HMAC-SHA256)'), + + # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes + # (= block-size of SHA-384 and SHA-512). + ('aa' * 131, + '5468697320697320612074657374207573696e672061206c6172676572207468' + + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' + + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' + + '647320746f20626520686173686564206265666f7265206265696e6720757365' + + '642062792074686520484d414320616c676f726974686d2e', + dict(SHA256=''' + 9b09ffa71b942fcb27635fbcd5b0e944 + bfdc63644f0713938a7f51535c3a35e2 + '''), + 'RFC 4231 #7 (HMAC-SHA256)'), +] + +hashlib_test_data = [ + # Test case 8 (SHA224) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), + 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), + + # Test case 9 (SHA384) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), + 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), + + # Test case 10 (SHA512) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), + 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), + +] + +def get_tests(config={}): + global test_data + from Crypto.Hash import HMAC, MD5, SHA as SHA1, SHA256 + from .common import make_mac_tests + hashmods = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256, default=None) + try: + from Crypto.Hash import SHA224, SHA384, SHA512 + hashmods.update(dict(SHA224=SHA224, SHA384=SHA384, SHA512=SHA512)) + test_data += hashlib_test_data + except ImportError: + import sys + sys.stderr.write("SelfTest: warning: not testing HMAC-SHA224/384/512 (not available)\n") + return make_mac_tests(HMAC, "HMAC", test_data, hashmods) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD2.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD2.py new file mode 100644 index 0000000..8653d1a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD2.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD2""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors from RFC 1319 + ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), + ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), + ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), + ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), + + ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('da33def2a42df13975352846c30338cd', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('d5976f79d83d3a0dc9806c3c66f3efd8', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD2 + from .common import make_hash_tests + return make_hash_tests(MD2, "MD2", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD4.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD4.py new file mode 100644 index 0000000..11259ef --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD4.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
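make_hash_tests() also receives the algorithm's DER-encoded OBJECT IDENTIFIER prefix (tag 0x06, a length byte, then base-128 arcs); the MD2 value passed above decodes to 1.2.840.113549.2.2. A small decoder sketch showing how those bytes map to the dotted form:

def decode_der_oid(blob):
    """Decode a DER OBJECT IDENTIFIER (tag 0x06) into dotted-decimal form."""
    assert blob[0] == 0x06 and blob[1] == len(blob) - 2
    body = blob[2:]
    arcs = [body[0] // 40, body[0] % 40]      # first octet packs the first two arcs
    value = 0
    for octet in body[1:]:
        value = (value << 7) | (octet & 0x7F)
        if not octet & 0x80:                  # high bit clear ends the arc
            arcs.append(value)
            value = 0
    return '.'.join(str(a) for a in arcs)

# The MD2 OID used above: 1.2.840.113549.2.2 (RSA Data Security digestAlgorithm md2)
assert decode_der_oid(b'\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02') == '1.2.840.113549.2.2'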
+test_data = [ + # Test vectors from RFC 1320 + ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), + ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), + ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), + ('d9130a8164549fe818874806e1c7014b', 'message digest'), + + ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('043f8582f241db351ce627e153e7f0e4', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('e33b4ddc9c38f2199c3e7b164fcc0536', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD4 + from .common import make_hash_tests + return make_hash_tests(MD4, "MD4", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD5.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD5.py new file mode 100644 index 0000000..724dea0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_MD5.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD5""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
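The harness exercises both the incremental and the one-shot constructor forms; for instance the RFC 1320 'abc' vector above can be checked directly. A sketch using the same Crypto.Hash.MD4 module:

from Crypto.Hash import MD4

h = MD4.new()
h.update(b'abc')
assert h.hexdigest() == 'a448017aaf21d8525fc10ae87aa6729d'   # RFC 1320 "abc" vector
assert MD4.new(b'abc').hexdigest() == h.hexdigest()          # one-shot form agrees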
+test_data = [ + # Test vectors from RFC 1321 + ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), + ('0cc175b9c0f1b6a831c399e269772661', 'a'), + ('900150983cd24fb0d6963f7d28e17f72', 'abc'), + ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), + + ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('d174ab98d277d9f5a5611c2c9f419d9f', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('57edf4a22be3c955ac49da2e2107b67a', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD5 + from .common import make_hash_tests + return make_hash_tests(MD5, "MD5", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py new file mode 100644 index 0000000..6c58876 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_RIPEMD.py: Self-test for the RIPEMD-160 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +#"""Self-test suite for Crypto.Hash.RIPEMD""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # Test vectors downloaded 2008-09-12 from + # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html + ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), + ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), + ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), + ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), + + ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', + 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', + 'abcdbcd...pnopq'), + + ('b0e20b6e3116640286ed3a87a5713079b21f5189', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', + '1234567890' * 8, + "'1234567890' * 8"), + + ('52783243c1697bdbe16d37f97f68f08325dc1528', + 'a' * 10**6, + '"a" * 10**6'), +] + +def get_tests(config={}): + from Crypto.Hash import RIPEMD + from .common import make_hash_tests + return make_hash_tests(RIPEMD, "RIPEMD", test_data, + digest_size=20, + oid="\x06\x05\x2b\x24\x03\02\x01") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA.py new file mode 100644 index 0000000..bd52a46 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/SHA.py: Self-test for the SHA-1 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # FIPS PUB 180-2, A.1 - "One-Block Message" + ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), + + # FIPS PUB 180-2, A.2 - "Multi-Block Message" + ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, A.3 - "Long Message" +# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', +# 'a' * 10**6, +# '"a" * 10**6'), + + # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) + ('dea356a2cddd90c7a7ecedc5ebb563934f460452', + '01234567' * 80, + '"01234567" * 80'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA + from .common import make_hash_tests + return make_hash_tests(SHA, "SHA", test_data, + digest_size=20, + oid="\x06\x05\x2B\x0E\x03\x02\x1A") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA224.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA224.py new file mode 100644 index 0000000..1db501b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA224.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA224""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 3874: Section 3.1, "Test Vector #1 + ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), + + # RFC 3874: Section 3.2, "Test Vector #2 + ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # RFC 3874: Section 3.3, "Test Vector #3 + ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), + + # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), + + ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + + ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', + 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA224 + from .common import make_hash_tests + return make_hash_tests(SHA224, "SHA224", test_data, + digest_size=28, + oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA256.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA256.py new file mode 100644 index 0000000..9ea9cc6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA256.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA256""" + +__revision__ = "$Id$" + +import unittest +from Crypto.Util.py3compat import * + +class LargeSHA256Test(unittest.TestCase): + def runTest(self): + """SHA256: 512/520 MiB test""" + from Crypto.Hash import SHA256 + zeros = bchr(0x00) * (1024*1024) + + h = SHA256.new(zeros) + for i in range(511): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB + + for i in range(8): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. 
+ self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB + +def get_tests(config={}): + # Test vectors from FIPS PUB 180-2 + # This is a list of (expected_result, input[, description]) tuples. + test_data = [ + # FIPS PUB 180-2, B.1 - "One-Block Message" + ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', + 'abc'), + + # FIPS PUB 180-2, B.2 - "Multi-Block Message" + ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, B.3 - "Long Message" + ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', + 'a' * 10**6, + '"a" * 10**6'), + + # Test for an old PyCrypto bug. + ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', + 'This message is precisely 55 bytes long, to test a bug.', + 'Length = 55 (mod 64)'), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), + + ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + ] + + from Crypto.Hash import SHA256 + from .common import make_hash_tests + tests = make_hash_tests(SHA256, "SHA256", test_data, + digest_size=32, + oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01") + + if config.get('slow_tests'): + tests += [LargeSHA256Test()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA384.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA384.py new file mode 100644 index 0000000..0c2f79c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA384.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA384""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.2" + ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA384 + from .common import make_hash_tests + return make_hash_tests(SHA384, "SHA384", test_data, + digest_size=48, + oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA512.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA512.py new file mode 100644 index 0000000..31ace3d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Hash/test_SHA512.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA512""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.1" + ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), + + ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA512 + from .common import make_hash_tests + return make_hash_tests(SHA512, "SHA512", test_data, + digest_size=64, + oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/__init__.py new file mode 100644 index 0000000..a62c670 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/__init__.py: Self-tests for Crypto.Protocol +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for Crypto.Protocol""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Protocol import test_chaffing; tests += test_chaffing.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_AllOrNothing; tests += test_AllOrNothing.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py new file mode 100644 index 0000000..a211eab --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py @@ -0,0 +1,76 @@ +# +# Test script for Crypto.Protocol.AllOrNothing +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from Crypto.Protocol import AllOrNothing +from Crypto.Util.py3compat import * + +text = b("""\ +When in the Course of human events, it becomes necessary for one people to +dissolve the political bands which have connected them with another, and to +assume among the powers of the earth, the separate and equal station to which +the Laws of Nature and of Nature's God entitle them, a decent respect to the +opinions of mankind requires that they should declare the causes which impel +them to the separation. + +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. 
+""") + +class AllOrNothingTest (unittest.TestCase): + + def runTest(self): + "Simple test of AllOrNothing" + + from Crypto.Cipher import AES + import base64 + + # The current AllOrNothing will fail + # every so often. Repeat the test + # several times to force this. + for i in range(50): + x = AllOrNothing.AllOrNothing(AES) + + msgblocks = x.digest(text) + + # get a new undigest-only object so there's no leakage + y = AllOrNothing.AllOrNothing(AES) + text2 = y.undigest(msgblocks) + self.assertEqual(text, text2) + +def get_tests(config={}): + return [AllOrNothingTest()] + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_KDF.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_KDF.py new file mode 100644 index 0000000..d3e3be9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_KDF.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA as SHA1,HMAC + +from Crypto.Protocol.KDF import * + +def t2b(t): return unhexlify(b(t)) + +class PBKDF1_Tests(unittest.TestCase): + + # List of tuples with test data. + # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (8 bytes encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",16,1000,"DC19847E05C64D2FAF10EBFB4A3D2A20"), + ) + + def test1(self): + v = self._testData[0] + res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) + self.assertEqual(res, t2b(v[4])) + +class PBKDF2_Tests(unittest.TestCase): + + # List of tuples with test data. 
+ # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",24,2048,"BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), + # From RFC 6050 + ("password","73616c74", 20, 1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), + ("password","73616c74", 20, 2, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + ("password","73616c74", 20, 4096, "4b007901b765489abead49d926f721d065a429c1"), + ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", + 25, 4096, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + ( 'pass\x00word',"7361006c74",16,4096, "56fa6aa75548099dcc37d7f03425e0c3"), + ) + + def test1(self): + # Test only for HMAC-SHA1 as PRF + + def prf(p,s): + return HMAC.new(p,s,SHA1).digest() + + for i in range(len(self._testData)): + v = self._testData[i] + res = PBKDF2(v[0], t2b(v[1]), v[2], v[3]) + res2 = PBKDF2(v[0], t2b(v[1]), v[2], v[3], prf) + self.assertEqual(res, t2b(v[4])) + self.assertEqual(res, res2) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PBKDF1_Tests) + tests += list_test_cases(PBKDF2_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py new file mode 100644 index 0000000..5fa0120 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py @@ -0,0 +1,74 @@ +# +# Test script for Crypto.Protocol.Chaffing +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from Crypto.Protocol import Chaffing + +text = """\ +When in the Course of human events, it becomes necessary for one people to +dissolve the political bands which have connected them with another, and to +assume among the powers of the earth, the separate and equal station to which +the Laws of Nature and of Nature's God entitle them, a decent respect to the +opinions of mankind requires that they should declare the causes which impel +them to the separation. 
+ +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. +""" + +class ChaffingTest (unittest.TestCase): + + def runTest(self): + "Simple tests of chaffing and winnowing" + # Test constructors + Chaffing.Chaff() + Chaffing.Chaff(0.5, 1) + self.assertRaises(ValueError, Chaffing.Chaff, factor=-1) + self.assertRaises(ValueError, Chaffing.Chaff, blocksper=-1) + + data = [(1, 'data1', 'data1'), (2, 'data2', 'data2')] + c = Chaffing.Chaff(1.0, 1) + c.chaff(data) + chaff = c.chaff(data) + self.assertEqual(len(chaff), 4) + + c = Chaffing.Chaff(0.0, 1) + chaff = c.chaff(data) + self.assertEqual(len(chaff), 2) + +def get_tests(config={}): + return [ChaffingTest()] + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py new file mode 100644 index 0000000..0878cc5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py @@ -0,0 +1,62 @@ +# +# Test script for Crypto.Util.RFC1751. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__revision__ = "$Id$" + +import binascii +import unittest +from Crypto.Util import RFC1751 +from Crypto.Util.py3compat import * + +test_data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', + 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', + 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + +class RFC1751Test_k2e (unittest.TestCase): + + def runTest (self): + "Check converting keys to English" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.key_to_english(key), words) + +class RFC1751Test_e2k (unittest.TestCase): + + def runTest (self): + "Check converting English strings to keys" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.english_to_key(words), key) + +# class RFC1751Test + +def get_tests(config={}): + return [RFC1751Test_k2e(), RFC1751Test_e2k()] + +if __name__ == "__main__": + unittest.main() diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/__init__.py new file mode 100644 index 0000000..61ba53f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/__init__.py: Self-test for public key crypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for public-key crypto""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.PublicKey import test_DSA; tests += test_DSA.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_RSA; tests += test_RSA.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_importKey; tests += test_importKey.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_ElGamal; tests += test_ElGamal.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py new file mode 100644 index 0000000..b05f69a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.DSA""" + +__revision__ = "$Id$" + +import sys +import os +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +def _sws(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return "".join(s.split()) + else: + return b("").join(s.split()) + +class DSATest(unittest.TestCase): + # Test vector from "Appendix 5. Example of the DSA" of + # "Digital Signature Standard (DSS)", + # U.S. Department of Commerce/National Institute of Standards and Technology + # FIPS 186-2 (+Change Notice), 2000 January 27. 
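The FIPS 186-2 vector cited above (URL follows), together with the sanity checks later in this file, rests on the standard DSA domain relations: q divides p-1, g has order q modulo p, y = g**x mod p, and 0 < x < q. The toy sketch below illustrates those relations with tiny made-up parameters in plain Python; none of these numbers come from the FIPS vector:

# Illustrative toy parameters only; real DSA uses a 1024-bit p and 160-bit q.
p, q, g = 23, 11, 4        # q divides p-1 (22 = 2*11); g = 2**((p-1)//q) % p
x, y = 7, 8                # private key x (0 < x < q) and public key y

assert (p - 1) % q == 0    # q is a divisor of p-1
assert pow(g, q, p) == 1   # g has order dividing q
assert 0 < x < q
assert y == pow(g, x, p)   # y == g**x mod p, the same check _check_private_key makes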
+ # http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf + + y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85 + 9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74 + 858fba33 f44c0669 9630a76b 030ee333""") + + g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb + 3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c + c42e9f6f 464b088c c572af53 e6d78802""") + + p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7 + cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac + 49693dfb f83724c2 ec0736ee 31c80291""") + + q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""") + + x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""") + + k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""") + k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""") + m = b2a_hex(b("abc")) + m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""") + r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""") + s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""") + + def setUp(self): + global DSA, Random, bytes_to_long, size + from Crypto.PublicKey import DSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse, size + + self.dsa = DSA + + def test_generate_1arg(self): + """DSA (default implementation) generated key (1 argument)""" + dsaObj = self.dsa.generate(1024) + self._check_private_key(dsaObj) + pub = dsaObj.publickey() + self._check_public_key(pub) + + def test_generate_2arg(self): + """DSA (default implementation) generated key (2 arguments)""" + dsaObj = self.dsa.generate(1024, Random.new().read) + self._check_private_key(dsaObj) + pub = dsaObj.publickey() + self._check_public_key(pub) + + def test_construct_4tuple(self): + """DSA (default implementation) constructed key (4-tuple)""" + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + dsaObj = self.dsa.construct((y, g, p, q)) + self._test_verification(dsaObj) + + def test_construct_5tuple(self): + """DSA (default implementation) constructed key (5-tuple)""" + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + dsaObj = self.dsa.construct((y, g, p, q, x)) + self._test_signing(dsaObj) + self._test_verification(dsaObj) + + def _check_private_key(self, dsaObj): + # Check capabilities + self.assertEqual(1, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + self.assertEqual(0, dsaObj.can_blind()) + + # Check dsaObj.[ygpqx] -> dsaObj.key.[ygpqx] mapping + self.assertEqual(dsaObj.y, dsaObj.key.y) + self.assertEqual(dsaObj.g, dsaObj.key.g) + self.assertEqual(dsaObj.p, dsaObj.key.p) + self.assertEqual(dsaObj.q, dsaObj.key.q) + self.assertEqual(dsaObj.x, dsaObj.key.x) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p + self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q + + def _check_public_key(self, dsaObj): + k = a2b_hex(self.k) + m_hash = a2b_hex(self.m_hash) + + # Check capabilities + self.assertEqual(0, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + self.assertEqual(0, dsaObj.can_blind()) + + # Check dsaObj.[ygpq] -> dsaObj.key.[ygpq] mapping + 
self.assertEqual(dsaObj.y, dsaObj.key.y) + self.assertEqual(dsaObj.g, dsaObj.key.g) + self.assertEqual(dsaObj.p, dsaObj.key.p) + self.assertEqual(dsaObj.q, dsaObj.key.q) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(dsaObj, 'x')) + self.assertEqual(0, hasattr(dsaObj.key, 'x')) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + + # Public-only key objects should raise an error when .sign() is called + self.assertRaises(TypeError, dsaObj.sign, m_hash, k) + + # Check __eq__ and __ne__ + self.assertEqual(dsaObj.publickey() == dsaObj.publickey(),True) # assert_ + self.assertEqual(dsaObj.publickey() != dsaObj.publickey(),False) # failIf + + def _test_signing(self, dsaObj): + k = a2b_hex(self.k) + m_hash = a2b_hex(self.m_hash) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + (r_out, s_out) = dsaObj.sign(m_hash, k) + self.assertEqual((r, s), (r_out, s_out)) + + def _test_verification(self, dsaObj): + m_hash = a2b_hex(self.m_hash) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + self.assertEqual(1, dsaObj.verify(m_hash, (r, s))) + self.assertEqual(0, dsaObj.verify(m_hash + b("\0"), (r, s))) + +class DSAFastMathTest(DSATest): + def setUp(self): + DSATest.setUp(self) + self.dsa = DSA.DSAImplementation(use_fast_math=True) + + def test_generate_1arg(self): + """DSA (_fastmath implementation) generated key (1 argument)""" + DSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """DSA (_fastmath implementation) generated key (2 arguments)""" + DSATest.test_generate_2arg(self) + + def test_construct_4tuple(self): + """DSA (_fastmath implementation) constructed key (4-tuple)""" + DSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """DSA (_fastmath implementation) constructed key (5-tuple)""" + DSATest.test_construct_5tuple(self) + +class DSASlowMathTest(DSATest): + def setUp(self): + DSATest.setUp(self) + self.dsa = DSA.DSAImplementation(use_fast_math=False) + + def test_generate_1arg(self): + """DSA (_slowmath implementation) generated key (1 argument)""" + DSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """DSA (_slowmath implementation) generated key (2 arguments)""" + DSATest.test_generate_2arg(self) + + def test_construct_4tuple(self): + """DSA (_slowmath implementation) constructed key (4-tuple)""" + DSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """DSA (_slowmath implementation) constructed key (5-tuple)""" + DSATest.test_construct_5tuple(self) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(DSATest) + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(DSAFastMathTest) + except ImportError: + from distutils.sysconfig import get_config_var + import inspect + _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + +"/../../PublicKey/_fastmath"+get_config_var("SO")) + if os.path.exists(_fm_path): + raise ImportError("While the _fastmath module exists, importing "+ + "it failed. This may point to the gmp or mpir shared library "+ + "not being in the path. 
_fastmath was found at "+_fm_path) + tests += list_test_cases(DSASlowMathTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py new file mode 100644 index 0000000..cebab30 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.ElGamal""" + +__revision__ = "$Id$" + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.PublicKey import ElGamal +from Crypto.Util.number import * +from Crypto.Util.py3compat import * + +class ElGamalTest(unittest.TestCase): + + # + # Test vectors + # + # There seem to be no real ElGamal test vectors available in the + # public domain. The following test vectors have been generated + # with libgcrypt 1.5.0. 
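The encryption vectors below pair a key (p, g, y, x) and ephemeral secret k with the two ciphertext components ct1 and ct2. The toy sketch that follows walks through the textbook ElGamal relations they are built on, using made-up small numbers and plain Python rather than the ElGamal module itself:

p, g = 467, 2                  # illustrative small prime and generator
x = 153                        # private key
y = pow(g, x, p)               # public key: y = g**x mod p
k, m = 197, 331                # ephemeral secret and integer-encoded message

c1 = pow(g, k, p)              # ct1 = g**k mod p
c2 = (pow(y, k, p) * m) % p    # ct2 = (y**k * m) mod p

s = pow(c1, x, p)              # shared secret g**(x*k) mod p
assert (c2 * pow(s, p - 2, p)) % p == m   # dividing it out recovers m (p is prime)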
+ # + # Encryption + tve=[ + { + # 256 bits + 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', + 'g' :'05', + 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', + 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', + 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', + 'pt' :'48656C6C6F207468657265', + 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', + 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' + }, + + { + # 512 bits + 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', + 'g' :'07', + 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', + 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', + 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', + 'pt' :'48656C6C6F207468657265', + 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', + 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' + } + ] + + # Signature + tvs=[ + { + # 256 bits + 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', + 'g' :'05', + 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', + 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', + 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', + 'h' :'48656C6C6F207468657265', + 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', + 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', + }, + { + # 512 bits + 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', + 'g' :'0B', + 'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', + 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', + 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', + 'h' :'48656C6C6F207468657265', + 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', + 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', + } + ] + + def test_generate_128(self): + self._test_random_key(128) + + def test_generate_512(self): + self._test_random_key(512) + + def test_encryption(self): + for tv in self.tve: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + ct = key.encrypt(d['pt'], d['k']) + self.assertEqual(ct[0], d['ct1']) + self.assertEqual(ct[1], d['ct2']) + + def test_decryption(self): + for tv in self.tve: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + pt = key.decrypt((d['ct1'], d['ct2'])) + self.assertEqual(pt, d['pt']) + + def test_signing(self): + for tv in self.tvs: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + sig1, sig2 = key.sign(d['h'], d['k']) + self.assertEqual(sig1, d['sig1']) + self.assertEqual(sig2, d['sig2']) + + def test_verification(self): + for tv in self.tvs: + for as_longs in (0,1): 
+ d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + # Positive test + res = key.verify( d['h'], (d['sig1'],d['sig2']) ) + self.assertTrue(res) + # Negative test + res = key.verify( d['h'], (d['sig1']+1,d['sig2']) ) + self.assertFalse(res) + + def convert_tv(self, tv, as_longs=0): + """Convert a test vector from textual form (hexadecimal ascii + to either integers or byte strings.""" + key_comps = 'p','g','y','x' + tv2 = {} + for c in list(tv.keys()): + tv2[c] = a2b_hex(tv[c]) + if as_longs or c in key_comps or c in ('sig1','sig2'): + tv2[c] = bytes_to_long(tv2[c]) + tv2['key']=[] + for c in key_comps: + tv2['key'] += [tv2[c]] + del tv2[c] + return tv2 + + def _test_random_key(self, bits): + elgObj = ElGamal.generate(bits, Random.new().read) + self._check_private_key(elgObj) + self._exercise_primitive(elgObj) + pub = elgObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(elgObj) + + def _check_private_key(self, elgObj): + + # Check capabilities + self.assertTrue(elgObj.has_private()) + self.assertTrue(elgObj.can_sign()) + self.assertTrue(elgObj.can_encrypt()) + + # Sanity check key data + self.assertTrue(1 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.RSA""" + +__revision__ = "$Id$" + +import sys +import os +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +class RSATest(unittest.TestCase): + # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # See RSADSI's PKCS#1 page at + # http://www.rsa.com/rsalabs/node.asp?id=2125 + + # from oaep-int.txt + + # TODO: PyCrypto treats the message as starting *after* the leading "00" + # TODO: That behaviour should probably be changed in the future. 
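The hex blocks that follow are raw PKCS#1 integers, and the tests drive PyCrypto's bare key objects, whose encrypt/decrypt/sign/verify operations amount to modular exponentiation; the key components are expected to satisfy n = p*q, e*d == 1 (mod (p-1)(q-1)) and p*u == 1 (mod q). Below is a toy sketch of those relations with made-up small primes, reusing the Crypto.Util.number.inverse helper that setUp() also imports; the constants are illustrative, not the vectors from this file:

from Crypto.Util.number import inverse   # the helper setUp() also imports

p, q = 61, 53                       # illustrative primes only
n, e = p * q, 17
d = inverse(e, (p - 1) * (q - 1))   # same formula setUp() uses for d
u = inverse(p, q)                   # same formula setUp() uses for u

assert (e * d) % ((p - 1) * (q - 1)) == 1   # ed == 1 (mod (p-1)(q-1))
assert (p * u) % q == 1                     # pu == 1 (mod q)

m = 65                          # message encoded as an integer < n
c = pow(m, e, n)                # raw encrypt (and verify) direction
assert pow(c, d, n) == m        # raw decrypt (and sign) round-trips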
+ plaintext = """ + eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 + ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 + c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af + f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db + 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a + b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 + 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f + 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d + """ + + ciphertext = """ + 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 + """ + + modulus = """ + bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb + """ + + e = 0x11 # public exponent + + prime_factor = """ + c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 + 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 + 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf + ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 + """ + + def setUp(self): + global RSA, Random, bytes_to_long + from Crypto.PublicKey import RSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse + self.n = bytes_to_long(a2b_hex(self.modulus)) + self.p = bytes_to_long(a2b_hex(self.prime_factor)) + + # Compute q, d, and u from n, e, and p + self.q = divmod(self.n, self.p)[0] + self.d = inverse(self.e, (self.p-1)*(self.q-1)) + self.u = inverse(self.p, self.q) # u = e**-1 (mod q) + + self.rsa = RSA + + def test_generate_1arg(self): + """RSA (default implementation) generated key (1 argument)""" + rsaObj = self.rsa.generate(1024) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_2arg(self): + """RSA (default implementation) generated key (2 arguments)""" + rsaObj = self.rsa.generate(1024, Random.new().read) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_3args(self): + rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + self.assertEqual(65537,rsaObj.e) + + def test_construct_2tuple(self): + """RSA (default implementation) constructed key (2-tuple)""" + pub = self.rsa.construct((self.n, self.e)) + self._check_public_key(pub) + self._check_encryption(pub) + self._check_verification(pub) + + def test_construct_3tuple(self): + """RSA (default implementation) constructed key (3-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_4tuple(self): + """RSA (default implementation) constructed key 
(4-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_5tuple(self): + """RSA (default implementation) constructed key (5-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_6tuple(self): + """RSA (default implementation) constructed key (6-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_factoring(self): + rsaObj = self.rsa.construct([self.n, self.e, self.d]) + self.assertTrue(rsaObj.p==self.p or rsaObj.p==self.q) + self.assertTrue(rsaObj.q==self.p or rsaObj.q==self.q) + self.assertTrue(rsaObj.q*rsaObj.p == self.n) + + self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) + + def _check_private_key(self, rsaObj): + # Check capabilities + self.assertEqual(1, rsaObj.has_private()) + self.assertEqual(1, rsaObj.can_sign()) + self.assertEqual(1, rsaObj.can_encrypt()) + self.assertEqual(1, rsaObj.can_blind()) + + # Check rsaObj.[nedpqu] -> rsaObj.key.[nedpqu] mapping + self.assertEqual(rsaObj.n, rsaObj.key.n) + self.assertEqual(rsaObj.e, rsaObj.key.e) + self.assertEqual(rsaObj.d, rsaObj.key.d) + self.assertEqual(rsaObj.p, rsaObj.key.p) + self.assertEqual(rsaObj.q, rsaObj.key.q) + self.assertEqual(rsaObj.u, rsaObj.key.u) + + # Sanity check key data + self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq + self.assertEqual(1, rsaObj.d * rsaObj.e % ((rsaObj.p-1) * (rsaObj.q-1))) # ed = 1 (mod (p-1)(q-1)) + self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) + self.assertEqual(1, rsaObj.p > 1) # p > 1 + self.assertEqual(1, rsaObj.q > 1) # q > 1 + self.assertEqual(1, rsaObj.e > 1) # e > 1 + self.assertEqual(1, rsaObj.d > 1) # d > 1 + + def _check_public_key(self, rsaObj): + ciphertext = a2b_hex(self.ciphertext) + + # Check capabilities + self.assertEqual(0, rsaObj.has_private()) + self.assertEqual(1, rsaObj.can_sign()) + self.assertEqual(1, rsaObj.can_encrypt()) + self.assertEqual(1, rsaObj.can_blind()) + + # Check rsaObj.[ne] -> rsaObj.key.[ne] mapping + self.assertEqual(rsaObj.n, rsaObj.key.n) + self.assertEqual(rsaObj.e, rsaObj.key.e) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(rsaObj, 'd')) + self.assertEqual(0, hasattr(rsaObj, 'p')) + self.assertEqual(0, hasattr(rsaObj, 'q')) + self.assertEqual(0, hasattr(rsaObj, 'u')) + self.assertEqual(0, hasattr(rsaObj.key, 'd')) + self.assertEqual(0, hasattr(rsaObj.key, 'p')) + self.assertEqual(0, hasattr(rsaObj.key, 'q')) + self.assertEqual(0, hasattr(rsaObj.key, 'u')) + + # Sanity check key data + self.assertEqual(1, rsaObj.e > 1) # e > 1 + + # Public keys should not be able to sign or decrypt + self.assertRaises(TypeError, rsaObj.sign, ciphertext, b("")) + self.assertRaises(TypeError, rsaObj.decrypt, ciphertext) + + # Check __eq__ and __ne__ + self.assertEqual(rsaObj.publickey() == rsaObj.publickey(),True) # assert_ + self.assertEqual(rsaObj.publickey() != rsaObj.publickey(),False) # failIf + + def _exercise_primitive(self, rsaObj): + # Since we're using a 
randomly-generated key, we can't check the test + # vector, but we can make sure encryption and decryption are inverse + # operations. + ciphertext = a2b_hex(self.ciphertext) + + # Test decryption + plaintext = rsaObj.decrypt((ciphertext,)) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) + + # Test blinded decryption + blinding_factor = Random.new().read(len(ciphertext)-1) + blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) + blinded_ptext = rsaObj.decrypt((blinded_ctext,)) + unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) + self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) + + # Test signing (2 arguments) + signature2 = rsaObj.sign(ciphertext, b("")) + self.assertEqual((bytes_to_long(plaintext),), signature2) + + # Test verification + self.assertEqual(1, rsaObj.verify(ciphertext, (bytes_to_long(plaintext),))) + + def _exercise_public_primitive(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + + # Exercise verification + rsaObj.verify(new_ciphertext2, (bytes_to_long(plaintext),)) + + def _check_encryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) + + def _check_decryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test plain decryption + new_plaintext = rsaObj.decrypt((ciphertext,)) + self.assertEqual(b2a_hex(plaintext), b2a_hex(new_plaintext)) + + # Test blinded decryption + blinding_factor = Random.new().read(len(ciphertext)-1) + blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) + blinded_ptext = rsaObj.decrypt((blinded_ctext,)) + unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) + self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) + + def _check_verification(self, rsaObj): + signature = bytes_to_long(a2b_hex(self.plaintext)) + message = a2b_hex(self.ciphertext) + + # Test verification + t = (signature,) # rsaObj.verify expects a tuple + self.assertEqual(1, rsaObj.verify(message, t)) + + # Test verification with overlong tuple (this is a + # backward-compatibility hack to support some harmless misuse of the + # API) + t2 = (signature, '') + self.assertEqual(1, rsaObj.verify(message, t2)) # extra garbage at end of tuple + + def _check_signing(self, rsaObj): + signature = bytes_to_long(a2b_hex(self.plaintext)) + message = a2b_hex(self.ciphertext) + + # Test signing (2 argument) + self.assertEqual((signature,), rsaObj.sign(message, b(""))) + +class RSAFastMathTest(RSATest): + def setUp(self): + RSATest.setUp(self) + self.rsa = RSA.RSAImplementation(use_fast_math=True) + + def test_generate_1arg(self): + """RSA (_fastmath implementation) generated key (1 argument)""" + RSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """RSA (_fastmath implementation) generated key (2 arguments)""" + RSATest.test_generate_2arg(self) + + def test_construct_2tuple(self): + """RSA (_fastmath implementation) constructed key (2-tuple)""" + RSATest.test_construct_2tuple(self) + + def test_construct_3tuple(self): + """RSA (_fastmath implementation) constructed key (3-tuple)""" + RSATest.test_construct_3tuple(self) + + def 
test_construct_4tuple(self): + """RSA (_fastmath implementation) constructed key (4-tuple)""" + RSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """RSA (_fastmath implementation) constructed key (5-tuple)""" + RSATest.test_construct_5tuple(self) + + def test_construct_6tuple(self): + """RSA (_fastmath implementation) constructed key (6-tuple)""" + RSATest.test_construct_6tuple(self) + + def test_factoring(self): + RSATest.test_factoring(self) + +class RSASlowMathTest(RSATest): + def setUp(self): + RSATest.setUp(self) + self.rsa = RSA.RSAImplementation(use_fast_math=False) + + def test_generate_1arg(self): + """RSA (_slowmath implementation) generated key (1 argument)""" + RSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """RSA (_slowmath implementation) generated key (2 arguments)""" + RSATest.test_generate_2arg(self) + + def test_construct_2tuple(self): + """RSA (_slowmath implementation) constructed key (2-tuple)""" + RSATest.test_construct_2tuple(self) + + def test_construct_3tuple(self): + """RSA (_slowmath implementation) constructed key (3-tuple)""" + RSATest.test_construct_3tuple(self) + + def test_construct_4tuple(self): + """RSA (_slowmath implementation) constructed key (4-tuple)""" + RSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """RSA (_slowmath implementation) constructed key (5-tuple)""" + RSATest.test_construct_5tuple(self) + + def test_construct_6tuple(self): + """RSA (_slowmath implementation) constructed key (6-tuple)""" + RSATest.test_construct_6tuple(self) + + def test_factoring(self): + RSATest.test_factoring(self) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(RSATest) + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(RSAFastMathTest) + except ImportError: + from distutils.sysconfig import get_config_var + import inspect + _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + +"/../../PublicKey/_fastmath"+get_config_var("SO")) + if os.path.exists(_fm_path): + raise ImportError("While the _fastmath module exists, importing "+ + "it failed. This may point to the gmp or mpir shared library "+ + "not being in the path. _fastmath was found at "+_fm_path) + if config.get('slow_tests',1): + tests += list_test_cases(RSASlowMathTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py new file mode 100644 index 0000000..01fbdf8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import * +from Crypto.Util.py3compat import * +from Crypto.Util.number import inverse +from Crypto.Util import asn1 + +def der2pem(der, text='PUBLIC'): + import binascii + chunks = [ binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48) ] + pem = b('-----BEGIN %s KEY-----\n' % text) + pem += b('').join(chunks) + pem += b('-----END %s KEY-----' % text) + return pem + +class ImportKeyTests(unittest.TestCase): + # 512-bit RSA key generated with openssl + rsaKeyPEM = '''-----BEGIN RSA PRIVATE KEY----- +MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII +q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 +Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI +OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr ++rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK +JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 +n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== +-----END RSA PRIVATE KEY-----''' + + # As above, but this is actually an unencrypted PKCS#8 key + rsaKeyPEM8 = '''-----BEGIN PRIVATE KEY----- +MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS +ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj +wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK +5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk +B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC +NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx +yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 +BX85JB8zqwHB +-----END PRIVATE KEY-----''' + + # The same RSA private key as in rsaKeyPEM, but now encrypted + rsaKeyEncryptedPEM=( + + # With DES and passphrase 'test' + ('test', '''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-CBC,AF8F9A40BD2FA2FC + +Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA +u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs +C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP +BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy +9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY +IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp +dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= +-----END RSA PRIVATE KEY-----''', + "\xAF\x8F\x9A\x40\xBD\x2F\xA2\xFC"), + + # With Triple-DES and passphrase 'rocking' + ('rocking', '''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,C05D6C07F7FC02F6 + +w4lwQrXaVoTTJ0GgwY566htTA2/t1YlimhxkxYt9AEeCcidS5M0Wq9ClPiPz9O7F +m6K5QpM1rxo1RUE/ZyI85gglRNPdNwkeTOqit+kum7nN73AToX17+irVmOA4Z9E+ +4O07t91GxGMcjUSIFk0ucwEU4jgxRvYscbvOMvNbuZszGdVNzBTVddnShKCsy9i7 +nJbPlXeEKYi/OkRgO4PtfqqWQu5GIEFVUf9ev1QV7AvC+kyWTR1wWYnHX265jU5c +sopxQQtP8XEHIJEdd5/p1oieRcWTCNyY8EkslxDSsrf0OtZp6mZH9N+KU47cgQtt 
+9qGORmlWnsIoFFKcDohbtOaWBTKhkj5h6OkLjFjfU/sBeV1c+7wDT3dAy5tawXjG +YSxC7qDQIT/RECvV3+oQKEcmpEujn45wAnkTi12BH30= +-----END RSA PRIVATE KEY-----''', + "\xC0\x5D\x6C\x07\xF7\xFC\x02\xF6"), + ) + + rsaPublicKeyPEM = '''-----BEGIN PUBLIC KEY----- +MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T +Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== +-----END PUBLIC KEY-----''' + + # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' + rsaPublicKeyOpenSSH = '''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''' + + # The private key, in PKCS#1 format encoded with DER + rsaKeyDER = a2b_hex( + '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe + 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 + 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 + 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c + 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f + 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 + 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 + a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca + 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b + c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 + e4defe43ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + # The private key, in unencrypted PKCS#8 format encoded with DER + rsaKeyDER8 = a2b_hex( + '''30820155020100300d06092a864886f70d01010105000482013f3082013 + b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 + ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 + 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 + 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb + f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da + 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c + a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 + 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a + f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf + 433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 + 3ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + rsaPublicKeyDER = a2b_hex( + '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a + a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c + b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 + 03010001 + '''.replace(" ","")) + + n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) + e = 65537 + d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) + p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" ",""),16) + q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) + + # This is q^{-1} mod p). fastmath and slowmath use pInv (p^{-1} + # mod q) instead! 
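# Editorial sketch (not part of the patch) of the distinction drawn in the
# comment above: PKCS#1 stores qInv = q^-1 mod p as the CRT coefficient,
# while PyCrypto's key objects carry u = p^-1 mod q.  Toy primes only, using
# the same inverse() this module already imports from Crypto.Util.number:
from Crypto.Util.number import inverse

_p, _q = 11, 19             # stand-in primes, not the key factors above
_qInv = inverse(_q, _p)     # q^-1 mod p, the PKCS#1 "coefficient"
_u = inverse(_p, _q)        # p^-1 mod q, what _fastmath/_slowmath expect
assert _q * _qInv % _p == 1
assert _p * _u % _q == 1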
+ qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) + pInv = inverse(p,q) + + def testImportKey1(self): + """Verify import of RSAPrivateKey DER SEQUENCE""" + key = self.rsa.importKey(self.rsaKeyDER) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey2(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" + key = self.rsa.importKey(self.rsaPublicKeyDER) + self.assertFalse(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey3unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaKeyPEM) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey3bytes(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaKeyPEM)) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey4unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaPublicKeyPEM) + self.assertEqual(key.has_private(),False) # failIf + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey4bytes(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaPublicKeyPEM)) + self.assertEqual(key.has_private(),False) # failIf + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey5(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyPEM) + idem = key.encrypt(key.decrypt(b("Test")),0) + self.assertEqual(idem[0],b("Test")) + + def testImportKey6(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyDER) + idem = key.encrypt(key.decrypt(b("Test")),0) + self.assertEqual(idem[0],b("Test")) + + def testImportKey7(self): + """Verify import of OpenSSH public key""" + key = self.rsa.importKey(self.rsaPublicKeyOpenSSH) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey8(self): + """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" + for t in self.rsaKeyEncryptedPEM: + key = self.rsa.importKey(t[1], t[0]) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey9(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" + key = self.rsa.importKey(self.rsaKeyDER8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey10(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" + key = 
self.rsa.importKey(self.rsaKeyPEM8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey11(self): + """Verify import of RSAPublicKey DER SEQUENCE""" + der = asn1.DerSequence([17, 3]).encode() + key = self.rsa.importKey(der) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def testImportKey12(self): + """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM""" + der = asn1.DerSequence([17, 3]).encode() + pem = der2pem(der) + key = self.rsa.importKey(pem) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + ### + def testExportKey1(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.exportKey("DER") + self.assertEqual(derKey, self.rsaKeyDER) + + def testExportKey2(self): + key = self.rsa.construct([self.n, self.e]) + derKey = key.exportKey("DER") + self.assertEqual(derKey, self.rsaPublicKeyDER) + + def testExportKey3(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.exportKey("PEM") + self.assertEqual(pemKey, b(self.rsaKeyPEM)) + + def testExportKey4(self): + key = self.rsa.construct([self.n, self.e]) + pemKey = key.exportKey("PEM") + self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) + + def testExportKey5(self): + key = self.rsa.construct([self.n, self.e]) + openssh_1 = key.exportKey("OpenSSH").split() + openssh_2 = self.rsaPublicKeyOpenSSH.split() + self.assertEqual(openssh_1[0], openssh_2[0]) + self.assertEqual(openssh_1[1], openssh_2[1]) + + def testExportKey4(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + # Tuple with index #1 is encrypted with 3DES + t = list(map(b,self.rsaKeyEncryptedPEM[1])) + # Force the salt being used when exporting + key._randfunc = lambda N: (t[2]*divmod(N+len(t[2]),len(t[2]))[0])[:N] + pemKey = key.exportKey("PEM", t[0]) + self.assertEqual(pemKey, t[1]) + + def testExportKey5(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.exportKey("DER", pkcs=8) + self.assertEqual(derKey, self.rsaKeyDER8) + + def testExportKey6(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.exportKey("PEM", pkcs=8) + self.assertEqual(pemKey, b(self.rsaKeyPEM8)) + +class ImportKeyTestsSlow(ImportKeyTests): + def setUp(self): + self.rsa = RSA.RSAImplementation(use_fast_math=0) + +class ImportKeyTestsFast(ImportKeyTests): + def setUp(self): + self.rsa = RSA.RSAImplementation(use_fast_math=1) + +if __name__ == '__main__': + unittest.main() + +def get_tests(config={}): + tests = [] + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(ImportKeyTestsFast) + except ImportError: + pass + tests += list_test_cases(ImportKeyTestsSlow) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py new file mode 100644 index 0000000..81a0e13 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/__init__.py: Self-test for Fortuna modules +# +# 
Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for the Crypto.Random.Fortuna package""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random.Fortuna import test_FortunaAccumulator; tests += test_FortunaAccumulator.get_tests(config=config) + from Crypto.SelfTest.Random.Fortuna import test_FortunaGenerator; tests += test_FortunaGenerator.get_tests(config=config) + from Crypto.SelfTest.Random.Fortuna import test_SHAd256; tests += test_SHAd256.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py new file mode 100644 index 0000000..4d288a0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_FortunaAccumulator.py: Self-test for the FortunaAccumulator module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-tests for Crypto.Random.Fortuna.FortunaAccumulator""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from binascii import b2a_hex + +class FortunaAccumulatorTests(unittest.TestCase): + def setUp(self): + global FortunaAccumulator + from Crypto.Random.Fortuna import FortunaAccumulator + + def test_FortunaPool(self): + """FortunaAccumulator.FortunaPool""" + pool = FortunaAccumulator.FortunaPool() + self.assertEqual(0, pool.length) + self.assertEqual("5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456", pool.hexdigest()) + + pool.append(b('abc')) + + self.assertEqual(3, pool.length) + self.assertEqual("4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358", pool.hexdigest()) + + pool.append(b("dbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")) + + self.assertEqual(56, pool.length) + self.assertEqual(b('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af'), b2a_hex(pool.digest())) + + pool.reset() + + self.assertEqual(0, pool.length) + + pool.append(b('a') * 10**6) + + self.assertEqual(10**6, pool.length) + self.assertEqual(b('80d1189477563e1b5206b2749f1afe4807e5705e8bd77887a60187a712156688'), b2a_hex(pool.digest())) + + def test_which_pools(self): + """FortunaAccumulator.which_pools""" + + # which_pools(0) should fail + self.assertRaises(AssertionError, FortunaAccumulator.which_pools, 0) + + self.assertEqual(FortunaAccumulator.which_pools(1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2), [0, 1]) + self.assertEqual(FortunaAccumulator.which_pools(3), [0]) + self.assertEqual(FortunaAccumulator.which_pools(4), [0, 1, 2]) + self.assertEqual(FortunaAccumulator.which_pools(5), [0]) + self.assertEqual(FortunaAccumulator.which_pools(6), [0, 1]) + self.assertEqual(FortunaAccumulator.which_pools(7), [0]) + self.assertEqual(FortunaAccumulator.which_pools(8), [0, 1, 2, 3]) + for i in range(1, 32): + self.assertEqual(FortunaAccumulator.which_pools(2**i-1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2**i), list(range(i+1))) + self.assertEqual(FortunaAccumulator.which_pools(2**i+1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2**31), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**32), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**33), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**34), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**35), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**36), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**64), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**128), list(range(32))) + + def test_accumulator(self): + """FortunaAccumulator.FortunaAccumulator""" + fa = FortunaAccumulator.FortunaAccumulator() + + # This should fail, because we haven't seeded the PRNG yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Spread some test data across the pools (source number 42) + # This would be horribly insecure in a real system. 
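# Editorial sketch (not part of the patch): the which_pools() assertions
# earlier in this class encode the Fortuna pool-selection rule -- on the
# r-th reseed, pool i takes part whenever 2**i divides r, with 32 pools in
# total.  A standalone illustration of that rule, not the implementation in
# Crypto.Random.Fortuna.FortunaAccumulator:
def which_pools_sketch(r):
    assert r > 0                 # which_pools(0) is rejected, as tested above
    return [i for i in range(32) if r % (2 ** i) == 0]

# which_pools_sketch(1) == [0], which_pools_sketch(8) == [0, 1, 2, 3], and
# any multiple of 2**31 selects all 32 pools, matching the assertions above.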
+ for p in range(32): + fa.add_random_event(42, p, b("X") * 32) + self.assertEqual(32+2, fa.pools[p].length) + + # This should still fail, because we haven't seeded the PRNG with 64 bytes yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add more data + for p in range(32): + fa.add_random_event(42, p, b("X") * 32) + self.assertEqual((32+2)*2, fa.pools[p].length) + + # The underlying RandomGenerator should get seeded with Pool 0 + # s = SHAd256(chr(42) + chr(32) + "X"*32 + chr(42) + chr(32) + "X"*32) + # = SHA256(h'edd546f057b389155a31c32e3975e736c1dec030ddebb137014ecbfb32ed8c6f') + # = h'aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064' + # The counter and the key before reseeding is: + # C_0 = 0 + # K_0 = "\x00" * 32 + # The counter after reseeding is 1, and the new key after reseeding is + # C_1 = 1 + # K_1 = SHAd256(K_0 || s) + # = SHA256(h'0eae3e401389fab86640327ac919ecfcb067359d95469e18995ca889abc119a6') + # = h'aafe9d0409fbaaafeb0a1f2ef2014a20953349d3c1c6e6e3b962953bea6184dd' + # The first block of random data, therefore, is + # r_1 = AES-256(K_1, 1) + # = AES-256(K_1, h'01000000000000000000000000000000') + # = h'b7b86bd9a27d96d7bb4add1b6b10d157' + # The second block of random data is + # r_2 = AES-256(K_1, 2) + # = AES-256(K_1, h'02000000000000000000000000000000') + # = h'2350b1c61253db2f8da233be726dc15f' + # The third and fourth blocks of random data (which become the new key) are + # r_3 = AES-256(K_1, 3) + # = AES-256(K_1, h'03000000000000000000000000000000') + # = h'f23ad749f33066ff53d307914fbf5b21' + # r_4 = AES-256(K_1, 4) + # = AES-256(K_1, h'04000000000000000000000000000000') + # = h'da9667c7e86ba247655c9490e9d94a7c' + # K_2 = r_3 || r_4 + # = h'f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c' + # The final counter value is 5. + self.assertEqual("aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064", + fa.pools[0].hexdigest()) + self.assertEqual(None, fa.generator.key) + self.assertEqual(0, fa.generator.counter.next_value()) + + result = fa.random_data(32) + + self.assertEqual(b("b7b86bd9a27d96d7bb4add1b6b10d157" "2350b1c61253db2f8da233be726dc15f"), b2a_hex(result)) + self.assertEqual(b("f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c"), b2a_hex(fa.generator.key)) + self.assertEqual(5, fa.generator.counter.next_value()) + + def test_accumulator_pool_length(self): + """FortunaAccumulator.FortunaAccumulator minimum pool length""" + fa = FortunaAccumulator.FortunaAccumulator() + + # This test case is hard-coded to assume that FortunaAccumulator.min_pool_size is 64. 
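# Editorial sketch (not part of the patch): the long comment in
# test_accumulator above works one reseed by hand.  The code below retraces
# it under the stated assumptions -- SHAd256 is SHA-256 applied twice, output
# blocks are AES-256 encryptions of a 16-byte little-endian counter, and the
# seed is pool 0's SHAd256 digest.  If those assumptions hold, the printed
# values line up with the constants worked out in that comment.
from binascii import hexlify
from Crypto.Cipher import AES
from Crypto.Hash import SHA256

def shad256(data):
    return SHA256.new(SHA256.new(data).digest()).digest()

pool0 = (bytes([42, 32]) + b"X" * 32) * 2      # two events: source 42, length 32
s = shad256(pool0)                             # seed taken from pool 0
K0 = b"\x00" * 32
K1 = shad256(K0 + s)                           # key right after the reseed

def block(key, counter):
    return AES.new(key, AES.MODE_ECB).encrypt(counter.to_bytes(16, "little"))

r1, r2, r3, r4 = (block(K1, c) for c in (1, 2, 3, 4))
print(hexlify(r1 + r2))     # the 32 bytes returned by random_data(32)
print(hexlify(r3 + r4))     # the next key, K_2; the counter is then 5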
+ self.assertEqual(fa.min_pool_size, 64) + + # The PRNG should not allow us to get random data from it yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add 60 bytes, 4 at a time (2 header + 2 payload) to each of the 32 pools + for i in range(15): + for p in range(32): + # Add the bytes to the pool + fa.add_random_event(2, p, b("XX")) + + # The PRNG should not allow us to get random data from it yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add 4 more bytes to pool 0 + fa.add_random_event(2, 0, b("XX")) + + # We should now be able to get data from the accumulator + fa.random_data(1) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(FortunaAccumulatorTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py new file mode 100644 index 0000000..d41bb02 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_FortunaGenerator.py: Self-test for the FortunaGenerator module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Random.Fortuna.FortunaGenerator""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from binascii import b2a_hex + +class FortunaGeneratorTests(unittest.TestCase): + def setUp(self): + global FortunaGenerator + from Crypto.Random.Fortuna import FortunaGenerator + + def test_generator(self): + """FortunaGenerator.AESGenerator""" + fg = FortunaGenerator.AESGenerator() + + # We shouldn't be able to read data until we've seeded the generator + self.assertRaises(Exception, fg.pseudo_random_data, 1) + self.assertEqual(0, fg.counter.next_value()) + + # Seed the generator, which should set the key and increment the counter. 
+ fg.reseed(b("Hello")) + self.assertEqual(b("0ea6919d4361551364242a4ba890f8f073676e82cf1a52bb880f7e496648b565"), b2a_hex(fg.key)) + self.assertEqual(1, fg.counter.next_value()) + + # Read 2 full blocks from the generator + self.assertEqual(b("7cbe2c17684ac223d08969ee8b565616") + # counter=1 + b("717661c0d2f4758bd6ba140bf3791abd"), # counter=2 + b2a_hex(fg.pseudo_random_data(32))) + + # Meanwhile, the generator will have re-keyed itself and incremented its counter + self.assertEqual(b("33a1bb21987859caf2bbfc5615bef56d") + # counter=3 + b("e6b71ff9f37112d0c193a135160862b7"), # counter=4 + b2a_hex(fg.key)) + self.assertEqual(5, fg.counter.next_value()) + + # Read another 2 blocks from the generator + self.assertEqual(b("fd6648ba3086e919cee34904ef09a7ff") + # counter=5 + b("021f77580558b8c3e9248275f23042bf"), # counter=6 + b2a_hex(fg.pseudo_random_data(32))) + + + # Try to read more than 2**20 bytes using the internal function. This should fail. + self.assertRaises(AssertionError, fg._pseudo_random_data, 2**20+1) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(FortunaGeneratorTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py new file mode 100644 index 0000000..f94db8a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_SHAd256.py: Self-test for the SHAd256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.Fortuna.SHAd256""" + +__revision__ = "$Id$" +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # I could not find any test vectors for SHAd256, so I made these vectors by + # feeding some sample data into several plain SHA256 implementations + # (including OpenSSL, the "sha256sum" tool, and this implementation). + # This is a subset of the resulting test vectors. 
The complete list can be + # found at: http://www.dlitz.net/crypto/shad256-test-vectors/ + ('5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456', + '', "'' (empty string)"), + ('4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358', + 'abc'), + ('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq') +] + +def get_tests(config={}): + from Crypto.Random.Fortuna import SHAd256 + from Crypto.SelfTest.Hash.common import make_hash_tests + return make_hash_tests(SHAd256, "SHAd256", test_data, 32) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py new file mode 100644 index 0000000..44b3fa1 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/OSRNG/__init__.py: Self-test for OSRNG modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for Crypto.Random.OSRNG package""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + if os.name == 'nt': + from Crypto.SelfTest.Random.OSRNG import test_nt; tests += test_nt.get_tests(config=config) + from Crypto.SelfTest.Random.OSRNG import test_winrandom; tests += test_winrandom.get_tests(config=config) + elif os.name == 'posix': + from Crypto.SelfTest.Random.OSRNG import test_posix; tests += test_posix.get_tests(config=config) + if hasattr(os, 'urandom'): + from Crypto.SelfTest.Random.OSRNG import test_fallback; tests += test_fallback.get_tests(config=config) + from Crypto.SelfTest.Random.OSRNG import test_generic; tests += test_generic.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py new file mode 100644 index 0000000..41909b0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_fallback.py: Self-test for the OSRNG.fallback.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.fallback""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.fallback.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.fallback + randobj = Crypto.Random.OSRNG.fallback.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py new file mode 100644 index 0000000..2a40974 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the OSRNG.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.new()""" + # Import the OSRNG module and try to use it + import Crypto.Random.OSRNG + randobj = Crypto.Random.OSRNG.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py new file mode 100644 index 0000000..a7a8338 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the OSRNG.nt.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.nt""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.nt.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.nt + randobj = Crypto.Random.OSRNG.nt.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py new file mode 100644 index 0000000..2224afe --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_posix.py: Self-test for the OSRNG.posix.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.posix""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.posix.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.posix + randobj = Crypto.Random.OSRNG.posix.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py new file mode 100644 index 0000000..3010eb7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.winrandom""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.winrandom""" + # Import the winrandom module and try to use it + from Crypto.Random.OSRNG import winrandom + randobj = winrandom.new() + x = randobj.get_bytes(16) + y = randobj.get_bytes(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/__init__.py new file mode 100644 index 0000000..f972bf0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/__init__.py: Self-test for random number generation modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for random number generators""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random import Fortuna; tests += Fortuna.get_tests(config=config) + from Crypto.SelfTest.Random import OSRNG; tests += OSRNG.get_tests(config=config) + from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) + from Crypto.SelfTest.Random import test_rpoolcompat; tests += test_rpoolcompat.get_tests(config=config) + from Crypto.SelfTest.Random import test__UserFriendlyRNG; tests += test__UserFriendlyRNG.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py new file mode 100644 index 0000000..1a13345 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Self-tests for the user-friendly Crypto.Random interface +# +# Written in 2013 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for generic Crypto.Random stuff """ + + + +__revision__ = "$Id$" + +import binascii +import pprint +import unittest +import os +import time +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +try: + import multiprocessing +except ImportError: + multiprocessing = None + +import Crypto.Random._UserFriendlyRNG +import Crypto.Random.random + +class RNGForkTest(unittest.TestCase): + + def _get_reseed_count(self): + """ + Get `FortunaAccumulator.reseed_count`, the global count of the + number of times that the PRNG has been reseeded. + """ + rng_singleton = Crypto.Random._UserFriendlyRNG._get_singleton() + rng_singleton._lock.acquire() + try: + return rng_singleton._fa.reseed_count + finally: + rng_singleton._lock.release() + + def runTest(self): + # Regression test for CVE-2013-1445. We had a bug where, under the + # right conditions, two processes might see the same random sequence. + + if sys.platform.startswith('win'): # windows can't fork + assert not hasattr(os, 'fork') # ... right? + return + + # Wait 150 ms so that we don't trigger the rate-limit prematurely. + time.sleep(0.15) + + reseed_count_before = self._get_reseed_count() + + # One or both of these calls together should trigger a reseed right here. + Crypto.Random._UserFriendlyRNG._get_singleton().reinit() + Crypto.Random.get_random_bytes(1) + + reseed_count_after = self._get_reseed_count() + self.assertNotEqual(reseed_count_before, reseed_count_after) # sanity check: test should reseed parent before forking + + rfiles = [] + for i in range(10): + rfd, wfd = os.pipe() + if os.fork() == 0: + # child + os.close(rfd) + f = os.fdopen(wfd, "wb") + + Crypto.Random.atfork() + + data = Crypto.Random.get_random_bytes(16) + + f.write(data) + f.close() + os._exit(0) + # parent + os.close(wfd) + rfiles.append(os.fdopen(rfd, "rb")) + + results = [] + results_dict = {} + for f in rfiles: + data = binascii.hexlify(f.read()) + results.append(data) + results_dict[data] = 1 + f.close() + + if len(results) != len(list(results_dict.keys())): + raise AssertionError("RNG output duplicated across fork():\n%s" % + (pprint.pformat(results))) + + +# For RNGMultiprocessingForkTest +def _task_main(q): + a = Crypto.Random.get_random_bytes(16) + time.sleep(0.1) # wait 100 ms + b = Crypto.Random.get_random_bytes(16) + q.put(binascii.b2a_hex(a)) + q.put(binascii.b2a_hex(b)) + q.put(None) # Wait for acknowledgment + + +class RNGMultiprocessingForkTest(unittest.TestCase): + + def runTest(self): + # Another regression test for CVE-2013-1445. This is basically the + # same as RNGForkTest, but less compatible with old versions of Python, + # and a little easier to read. + + n_procs = 5 + manager = multiprocessing.Manager() + queues = [manager.Queue(1) for i in range(n_procs)] + + # Reseed the pool + time.sleep(0.15) + Crypto.Random._UserFriendlyRNG._get_singleton().reinit() + Crypto.Random.get_random_bytes(1) + + # Start the child processes + pool = multiprocessing.Pool(processes=n_procs, initializer=Crypto.Random.atfork) + map_result = pool.map_async(_task_main, queues) + + # Get the results, ensuring that no pool processes are reused. 
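# Each manager.Queue(1) holds at most one item, so a worker blocks on its
# second put() until the parent drains the first value; this keeps every
# worker occupied and prevents the pool from reusing a process for a second
# queue before the parent has read from all of them.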
+ aa = [queues[i].get(30) for i in range(n_procs)] + bb = [queues[i].get(30) for i in range(n_procs)] + res = list(zip(aa, bb)) + + # Shut down the pool + map_result.get(30) + pool.close() + pool.join() + + # Check that the results are unique + if len(set(aa)) != len(aa) or len(set(res)) != len(res): + raise AssertionError("RNG output duplicated across fork():\n%s" % + (pprint.pformat(res),)) + + +def get_tests(config={}): + tests = [] + tests += [RNGForkTest()] + if multiprocessing is not None: + tests += [RNGMultiprocessingForkTest()] + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_random.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_random.py new file mode 100644 index 0000000..7fed44f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_random.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
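The two regression tests above (RNGForkTest and RNGMultiprocessingForkTest) exercise the fork-safety pattern applications are expected to follow; a minimal sketch, using only calls that already appear in those tests:

import os
import Crypto.Random

pid = os.fork()
if pid == 0:
    # Child: re-key the PRNG before drawing any bytes, otherwise parent and
    # child could emit identical output (the bug tracked as CVE-2013-1445).
    Crypto.Random.atfork()
    child_bytes = Crypto.Random.get_random_bytes(16)
    os._exit(0)
else:
    parent_bytes = Crypto.Random.get_random_bytes(16)
    os.waitpid(pid, 0)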
+# =================================================================== + +"""Self-test suite for Crypto.Random.new()""" + +__revision__ = "$Id$" + +import unittest +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.new()""" + # Import the Random module and try to use it + from Crypto import Random + randobj = Random.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + z = Random.get_random_bytes(16) + self.assertNotEqual(x, z) + self.assertNotEqual(y, z) + # Test the Random.random module, which + # implements a subset of Python's random API + # Not implemented: + # seed(), getstate(), setstate(), jumpahead() + # random(), uniform(), triangular(), betavariate() + # expovariate(), gammavariate(), gauss(), + # longnormvariate(), normalvariate(), + # vonmisesvariate(), paretovariate() + # weibullvariate() + # WichmannHill(), whseed(), SystemRandom() + from Crypto.Random import random + x = random.getrandbits(16*8) + y = random.getrandbits(16*8) + self.assertNotEqual(x, y) + # Test randrange + if x>y: + start = y + stop = x + else: + start = x + stop = y + for step in range(1,10): + x = random.randrange(start,stop,step) + y = random.randrange(start,stop,step) + self.assertNotEqual(x, y) + self.assertEqual(start <= x < stop, True) + self.assertEqual(start <= y < stop, True) + self.assertEqual((x - start) % step, 0) + self.assertEqual((y - start) % step, 0) + for i in range(10): + self.assertEqual(random.randrange(1,2), 1) + self.assertRaises(ValueError, random.randrange, start, start) + self.assertRaises(ValueError, random.randrange, stop, start, step) + self.assertRaises(TypeError, random.randrange, start, stop, step, step) + self.assertRaises(TypeError, random.randrange, start, stop, "1") + self.assertRaises(TypeError, random.randrange, "1", stop, step) + self.assertRaises(TypeError, random.randrange, 1, "2", step) + self.assertRaises(ValueError, random.randrange, start, stop, 0) + # Test randint + x = random.randint(start,stop) + y = random.randint(start,stop) + self.assertNotEqual(x, y) + self.assertEqual(start <= x <= stop, True) + self.assertEqual(start <= y <= stop, True) + for i in range(10): + self.assertEqual(random.randint(1,1), 1) + self.assertRaises(ValueError, random.randint, stop, start) + self.assertRaises(TypeError, random.randint, start, stop, step) + self.assertRaises(TypeError, random.randint, "1", stop) + self.assertRaises(TypeError, random.randint, 1, "2") + # Test choice + seq = list(range(10000)) + x = random.choice(seq) + y = random.choice(seq) + self.assertNotEqual(x, y) + self.assertEqual(x in seq, True) + self.assertEqual(y in seq, True) + for i in range(10): + self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) + self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) + if sys.version_info[0] is 3: + self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) + self.assertEqual(1, random.choice([1])) + self.assertRaises(IndexError, random.choice, []) + self.assertRaises(TypeError, random.choice, 1) + # Test shuffle. Lacks random parameter to specify function. 
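# (That is, unlike the standard library's random.shuffle(x[, random]),
# Crypto.Random.random.shuffle() takes only the sequence to be shuffled.)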
+ # Make copies of seq + seq = list(range(500)) + x = list(seq) + y = list(seq) + random.shuffle(x) + random.shuffle(y) + self.assertNotEqual(x, y) + self.assertEqual(len(seq), len(x)) + self.assertEqual(len(seq), len(y)) + for i in range(len(seq)): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + self.assertEqual(seq[i] in x, True) + self.assertEqual(seq[i] in y, True) + z = [1] + random.shuffle(z) + self.assertEqual(z, [1]) + if sys.version_info[0] == 3: + z = bytearray(b('12')) + random.shuffle(z) + self.assertEqual(b('1') in z, True) + self.assertRaises(TypeError, random.shuffle, b('12')) + self.assertRaises(TypeError, random.shuffle, 1) + self.assertRaises(TypeError, random.shuffle, "1") + self.assertRaises(TypeError, random.shuffle, (1,2)) + # 2to3 wraps a list() around it, alas - but I want to shoot + # myself in the foot here! :D + # if sys.version_info[0] == 3: + # self.assertRaises(TypeError, random.shuffle, range(3)) + # Test sample + x = random.sample(seq, 20) + y = random.sample(seq, 20) + self.assertNotEqual(x, y) + for i in range(20): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + z = random.sample([1], 1) + self.assertEqual(z, [1]) + z = random.sample((1,2,3), 1) + self.assertEqual(z[0] in (1,2,3), True) + z = random.sample("123", 1) + self.assertEqual(z[0] in "123", True) + z = random.sample(list(range(3)), 1) + self.assertEqual(z[0] in range(3), True) + if sys.version_info[0] == 3: + z = random.sample(b("123"), 1) + self.assertEqual(z[0] in b("123"), True) + z = random.sample(bytearray(b("123")), 1) + self.assertEqual(z[0] in bytearray(b("123")), True) + self.assertRaises(TypeError, random.sample, 1) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py new file mode 100644 index 0000000..be538da --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for the Crypto.Util.randpool.RandomPool wrapper class""" + +__revision__ = "$Id$" + +import sys +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Util.randpool.RandomPool""" + # Import the winrandom module and try to use it + from Crypto.Util.randpool import RandomPool + sys.stderr.write("SelfTest: You can ignore the RandomPool_DeprecationWarning that follows.\n") + rpool = RandomPool() + x = rpool.get_bytes(16) + y = rpool.get_bytes(16) + self.assertNotEqual(x, y) + self.assertNotEqual(rpool.entropy, 0) + + rpool.randomize() + rpool.stir('foo') + rpool.add_event('foo') + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/__init__.py new file mode 100644 index 0000000..862763a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/__init__.py: Self-test for signature modules +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for signature modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from . import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from . import test_pkcs1_pss; tests += test_pkcs1_pss.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py new file mode 100644 index 0000000..cf09e81 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 signatures +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto.Hash import * +from Crypto import Random +from Crypto.Signature import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import * + +def isStr(s): + t = '' + try: + t += s + except TypeError: + return 0 + return 1 + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_15_Tests(unittest.TestCase): + + # List of tuples with test data for PKCS#1 v1.5. + # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: data to hash and sign + # Item #2: signature of the data #1, done with the key #0, after + # hashing it with #3 + # Item #3: hash object generator + + _testData = ( + + # + # Taken from ftp://ftp.rsa.com/pub/pkcs/ascii/examples.asc + # "Some Examples of the PKCS Standards", 1999 + # + ( + + # Private key, from 2.1 + { + 'n':'''0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 c0 01 c6 + 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 d0 53 b3 e3 78 2a 1d + e5 dc 5a f4 eb e9 94 68 17 01 14 a1 df e6 7c dc 9a 9a f5 5d + 65 56 20 bb ab''', + 'e':'''01 00 + 01''', + 'd':'''01 23 c5 b6 1b a3 6e db 1d 36 79 90 41 99 a8 9e a8 0c 09 + b9 12 2e 14 00 c0 9a dc f7 78 46 76 d0 1d 23 35 6a 7d 44 d6 + bd 8b d5 0e 94 bf c7 23 fa 87 d8 86 2b 75 17 76 91 c1 1d 75 + 76 92 df 88 81''' + }, + # Data to sign, from 3.1 + '''30 81 a4 02 01 00 30 42 31 0b 30 09 06 + 03 55 04 06 13 02 55 53 31 1d 30 1b 06 03 55 04 0a 13 14 + 45 78 61 6d 70 6c 65 20 4f 72 67 61 6e 69 7a 61 74 69 6f + 6e 31 14 30 12 06 03 55 04 03 13 0b 54 65 73 74 20 55 73 + 65 72 20 31 30 5b 30 0d 06 09 2a 86 48 86 f7 0d 01 01 01 + 05 00 03 4a 00 30 47 02 40 + 0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 + c0 01 c6 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 + d0 53 b3 e3 78 2a 1d e5 dc 5a f4 eb e9 94 68 17 + 01 14 a1 df e6 7c dc 9a 9a f5 5d 65 56 20 bb ab + 02 03 01 00 01''', + # Signature, from 3.2 (at the very end) + '''06 db 36 cb 18 d3 47 5b 9c 01 db 3c 78 95 28 08 + 02 79 bb ae ff 2b 7d 55 8e d6 61 59 87 c8 51 86 + 3f 8a 6c 2c ff bc 89 c3 f7 5a 18 d9 6b 12 7c 71 + 7d 54 d0 d8 04 8d a8 a0 54 46 26 d1 7a 2a 8f be''', + MD2 + ), + + # + # RSA keypair generated with openssl + # + ( + """-----BEGIN RSA PRIVATE KEY----- + MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII + 
q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 + Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI + OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr + +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK + JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 + n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== + -----END RSA PRIVATE KEY-----""", + "This is a test\x0a", + # + # PKCS#1 signature computed with openssl + # + '''4a700a16432a291a3194646952687d5316458b8b86fb0a25aa30e0dcecdb + 442676759ac63d56ec1499c3ae4c0013c2053cabd5b5804848994541ac16 + fa243a4d''', + SHA + ), + + # + # Test vector from http://www.di-mgt.com.au/rsa_alg.html#signpkcs1 + # + ( + { + 'n':'''E08973398DD8F5F5E88776397F4EB005BB5383DE0FB7ABDC7DC775290D052E6D + 12DFA68626D4D26FAA5829FC97ECFA82510F3080BEB1509E4644F12CBBD832CF + C6686F07D9B060ACBEEE34096A13F5F7050593DF5EBA3556D961FF197FC981E6 + F86CEA874070EFAC6D2C749F2DFA553AB9997702A648528C4EF357385774575F''', + 'e':'''010001''', + 'd':'''00A403C327477634346CA686B57949014B2E8AD2C862B2C7D748096A8B91F736 + F275D6E8CD15906027314735644D95CD6763CEB49F56AC2F376E1CEE0EBF282D + F439906F34D86E085BD5656AD841F313D72D395EFE33CBFF29E4030B3D05A28F + B7F18EA27637B07957D32F2BDE8706227D04665EC91BAF8B1AC3EC9144AB7F21''' + }, + "abc", + '''60AD5A78FB4A4030EC542C8974CD15F55384E836554CEDD9A322D5F4135C6267 + A9D20970C54E6651070B0144D43844C899320DD8FA7819F7EBC6A7715287332E + C8675C136183B3F8A1F81EF969418267130A756FDBB2C71D9A667446E34E0EAD + 9CF31BFB66F816F319D0B7E430A5F2891553986E003720261C7E9022C0D9F11F''', + SHA + ) + + ) + + def testSign1(self): + for i in range(len(self._testData)): + row = self._testData[i] + # Build the key + if isStr(row[0]): + key = RSA.importKey(row[0]) + else: + comps = [ int(rws(row[0][x]),16) for x in ('n','e','d') ] + key = RSA.construct(comps) + h = row[3].new() + # Data to sign can either be in hex form or not + try: + h.update(t2b(row[1])) + except: + h.update(b(row[1])) + # The real test + signer = PKCS.new(key) + self.assertTrue(signer.can_sign()) + s = signer.sign(h) + self.assertEqual(s, t2b(row[2])) + + def testVerify1(self): + for i in range(len(self._testData)): + row = self._testData[i] + # Build the key + if isStr(row[0]): + key = RSA.importKey(row[0]).publickey() + else: + comps = [ int(rws(row[0][x]),16) for x in ('n','e') ] + key = RSA.construct(comps) + h = row[3].new() + # Data to sign can either be in hex form or not + try: + h.update(t2b(row[1])) + except: + h.update(b(row[1])) + # The real test + verifier = PKCS.new(key) + self.assertFalse(verifier.can_sign()) + result = verifier.verify(h, t2b(row[2])) + self.assertTrue(result) + + def testSignVerify(self): + rng = Random.new().read + key = RSA.generate(1024, rng) + + for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,SHA512,RIPEMD): + h = hashmod.new() + h.update(b('blah blah blah')) + + signer = PKCS.new(key) + s = signer.sign(h) + result = signer.verify(h, s) + self.assertTrue(result) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py new file mode 100644 index 0000000..3636ef1 --- /dev/null +++ 
b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/test_pkcs1_pss.py: Self-test for PKCS#1 PSS signatures +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto import Random +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto.Hash import * +from Crypto.Signature import PKCS1_PSS as PKCS +from Crypto.Util.py3compat import * + +def isStr(s): + t = '' + try: + t += s + except TypeError: + return 0 + return 1 + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\t', '\n', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +# Helper class to count how many bytes have been requested +# from the key's private RNG, w/o counting those used for blinding +class MyKey: + def __init__(self, key): + self._key = key + self.n = key.n + self.asked = 0 + def _randfunc(self, N): + self.asked += N + return self._key._randfunc(N) + def sign(self, m): + return self._key.sign(m) + def has_private(self): + return self._key.has_private() + def decrypt(self, m): + return self._key.decrypt(m) + def verify(self, m, p): + return self._key.verify(m, p) + def encrypt(self, m, p): + return self._key.encrypt(m, p) + +class PKCS1_PSS_Tests(unittest.TestCase): + + # List of tuples with test data for PKCS#1 PSS + # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: data to hash and sign + # Item #2: signature of the data #1, done with the key #0, + # and salt #3 after hashing it with #4 + # Item #3: salt + # Item #4: hash object generator + + _testData = ( + + # + # From in pss-vect.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a2 ba 40 ee 07 e3 b2 bd 2f 02 ce 22 7f 36 a1 95 + 02 44 86 e4 9c 19 cb 41 bb bd fb ba 98 b2 2b 0e + 57 7c 2e ea ff a2 0d 88 3a 76 e6 5e 39 4c 69 d4 + b3 c0 5a 1e 8f ad da 27 ed b2 a4 2b c0 00 fe 88 + 8b 9b 32 c2 2d 15 ad d0 cd 76 b3 e7 93 6e 19 95 + 5b 22 0d d1 7d 4e a9 04 b1 ec 10 2b 2e 4d e7 75 + 12 22 aa 99 15 10 24 c7 cb 41 cc 5e a2 1d 00 ee + b4 1f 7c 80 08 34 d2 c6 e0 6b ce 3b ce 7e a9 a5''', + 'e':'''01 00 01''', + # In the test vector, only p and q were given... 
+ # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''50e2c3e38d886110288dfc68a9533e7e12e27d2aa56 + d2cdb3fb6efa990bcff29e1d2987fb711962860e7391b1ce01 + ebadb9e812d2fbdfaf25df4ae26110a6d7a26f0b810f54875e + 17dd5c9fb6d641761245b81e79f8c88f0e55a6dcd5f133abd3 + 5f8f4ec80adf1bf86277a582894cb6ebcd2162f1c7534f1f49 + 47b129151b71''' + }, + + # Data to sign + '''85 9e ef 2f d7 8a ca 00 30 8b dc 47 11 93 bf 55 + bf 9d 78 db 8f 8a 67 2b 48 46 34 f3 c9 c2 6e 64 + 78 ae 10 26 0f e0 dd 8c 08 2e 53 a5 29 3a f2 17 + 3c d5 0c 6d 5d 35 4f eb f7 8b 26 02 1c 25 c0 27 + 12 e7 8c d4 69 4c 9f 46 97 77 e4 51 e7 f8 e9 e0 + 4c d3 73 9c 6b bf ed ae 48 7f b5 56 44 e9 ca 74 + ff 77 a5 3c b7 29 80 2f 6e d4 a5 ff a8 ba 15 98 + 90 fc''', + # Signature + '''8d aa 62 7d 3d e7 59 5d 63 05 6c 7e c6 59 e5 44 + 06 f1 06 10 12 8b aa e8 21 c8 b2 a0 f3 93 6d 54 + dc 3b dc e4 66 89 f6 b7 95 1b b1 8e 84 05 42 76 + 97 18 d5 71 5d 21 0d 85 ef bb 59 61 92 03 2c 42 + be 4c 29 97 2c 85 62 75 eb 6d 5a 45 f0 5f 51 87 + 6f c6 74 3d ed dd 28 ca ec 9b b3 0e a9 9e 02 c3 + 48 82 69 60 4f e4 97 f7 4c cd 7c 7f ca 16 71 89 + 71 23 cb d3 0d ef 5d 54 a2 b5 53 6a d9 0a 74 7e''', + # Salt + '''e3 b5 d5 d0 02 c1 bc e5 0c 2b 65 ef 88 a1 88 d8 + 3b ce 7e 61''', + # Hash algorithm + SHA + ), + + # + # Example 1.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 + 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 + d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 + 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df + d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 + c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 + 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 + ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', + 'e':'''01 00 01''', + 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 + 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 + 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 + c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a + e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 + a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c + 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b + d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' + }, + # Message + '''cd c8 7d a2 23 d7 86 df 3b 45 e0 bb bc 72 13 26 + d1 ee 2a f8 06 cc 31 54 75 cc 6f 0d 9c 66 e1 b6 + 23 71 d4 5c e2 39 2e 1a c9 28 44 c3 10 10 2f 15 + 6a 0d 8d 52 c1 f4 c4 0b a3 aa 65 09 57 86 cb 76 + 97 57 a6 56 3b a9 58 fe d0 bc c9 84 e8 b5 17 a3 + d5 f5 15 b2 3b 8a 41 e7 4a a8 67 69 3f 90 df b0 + 61 a6 e8 6d fa ae e6 44 72 c0 0e 5f 20 94 57 29 + cb eb e7 7f 06 ce 78 e0 8f 40 98 fb a4 1f 9d 61 + 93 c0 31 7e 8b 60 d4 b6 08 4a cb 42 d2 9e 38 08 + a3 bc 37 2d 85 e3 31 17 0f cb f7 cc 72 d0 b7 1c + 29 66 48 b3 a4 d1 0f 41 62 95 d0 80 7a a6 25 ca + b2 74 4f d9 ea 8f d2 23 c4 25 37 02 98 28 bd 16 + be 02 54 6f 13 0f d2 e3 3b 93 6d 26 76 e0 8a ed + 1b 73 31 8b 75 0a 01 67 d0''', + # Signature + '''90 74 30 8f b5 98 e9 70 1b 22 94 38 8e 52 f9 71 + fa ac 2b 60 a5 14 5a f1 85 df 52 87 b5 ed 28 87 + e5 7c e7 fd 44 dc 86 34 e4 07 c8 e0 e4 36 0b c2 + 26 f3 ec 22 7f 9d 9e 54 63 8e 8d 31 f5 05 12 15 + df 6e bb 9c 2f 95 79 aa 77 59 8a 38 f9 14 b5 b9 + c1 bd 83 c4 e2 f9 f3 82 a0 d0 aa 35 42 ff ee 65 + 98 4a 60 1b c6 9e b2 8d eb 27 dc a1 2c 82 c2 d4 + c3 f6 6c d5 00 f1 ff 2b 99 4d 8a 4e 30 cb b3 3c''', + # Salt + '''de e9 59 c7 e0 64 11 36 14 20 ff 80 18 5e d5 7f + 3e 67 76 af''', + # Hash + SHA + ), + + # + # Example 1.2 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + 
# + ( + # Private key + { + 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 + 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 + d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 + 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df + d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 + c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 + 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 + ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', + 'e':'''01 00 01''', + 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 + 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 + 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 + c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a + e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 + a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c + 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b + d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' + }, + # Message + '''85 13 84 cd fe 81 9c 22 ed 6c 4c cb 30 da eb 5c + f0 59 bc 8e 11 66 b7 e3 53 0c 4c 23 3e 2b 5f 8f + 71 a1 cc a5 82 d4 3e cc 72 b1 bc a1 6d fc 70 13 + 22 6b 9e''', + # Signature + '''3e f7 f4 6e 83 1b f9 2b 32 27 41 42 a5 85 ff ce + fb dc a7 b3 2a e9 0d 10 fb 0f 0c 72 99 84 f0 4e + f2 9a 9d f0 78 07 75 ce 43 73 9b 97 83 83 90 db + 0a 55 05 e6 3d e9 27 02 8d 9d 29 b2 19 ca 2c 45 + 17 83 25 58 a5 5d 69 4a 6d 25 b9 da b6 60 03 c4 + cc cd 90 78 02 19 3b e5 17 0d 26 14 7d 37 b9 35 + 90 24 1b e5 1c 25 05 5f 47 ef 62 75 2c fb e2 14 + 18 fa fe 98 c2 2c 4d 4d 47 72 4f db 56 69 e8 43''', + # Salt + '''ef 28 69 fa 40 c3 46 cb 18 3d ab 3d 7b ff c9 8f + d5 6d f4 2d''', + # Hash + SHA + ), + + # + # Example 2.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 d4 0c 1b cf 97 a6 8a e7 cd bd 8a 7b f3 e3 4f + a1 9d cc a4 ef 75 a4 74 54 37 5f 94 51 4d 88 fe + d0 06 fb 82 9f 84 19 ff 87 d6 31 5d a6 8a 1f f3 + a0 93 8e 9a bb 34 64 01 1c 30 3a d9 91 99 cf 0c + 7c 7a 8b 47 7d ce 82 9e 88 44 f6 25 b1 15 e5 e9 + c4 a5 9c f8 f8 11 3b 68 34 33 6a 2f d2 68 9b 47 + 2c bb 5e 5c ab e6 74 35 0c 59 b6 c1 7e 17 68 74 + fb 42 f8 fc 3d 17 6a 01 7e dc 61 fd 32 6c 4b 33 + c9''', + 'e':'''01 00 01''', + 'd':'''02 7d 14 7e 46 73 05 73 77 fd 1e a2 01 56 57 72 + 17 6a 7d c3 83 58 d3 76 04 56 85 a2 e7 87 c2 3c + 15 57 6b c1 6b 9f 44 44 02 d6 bf c5 d9 8a 3e 88 + ea 13 ef 67 c3 53 ec a0 c0 dd ba 92 55 bd 7b 8b + b5 0a 64 4a fd fd 1d d5 16 95 b2 52 d2 2e 73 18 + d1 b6 68 7a 1c 10 ff 75 54 5f 3d b0 fe 60 2d 5f + 2b 7f 29 4e 36 01 ea b7 b9 d1 ce cd 76 7f 64 69 + 2e 3e 53 6c a2 84 6c b0 c2 dd 48 6a 39 fa 75 b1''' + }, + # Message + '''da ba 03 20 66 26 3f ae db 65 98 48 11 52 78 a5 + 2c 44 fa a3 a7 6f 37 51 5e d3 36 32 10 72 c4 0a + 9d 9b 53 bc 05 01 40 78 ad f5 20 87 51 46 aa e7 + 0f f0 60 22 6d cb 7b 1f 1f c2 7e 93 60''', + # Signature + '''01 4c 5b a5 33 83 28 cc c6 e7 a9 0b f1 c0 ab 3f + d6 06 ff 47 96 d3 c1 2e 4b 63 9e d9 13 6a 5f ec + 6c 16 d8 88 4b dd 99 cf dc 52 14 56 b0 74 2b 73 + 68 68 cf 90 de 09 9a db 8d 5f fd 1d ef f3 9b a4 + 00 7a b7 46 ce fd b2 2d 7d f0 e2 25 f5 46 27 dc + 65 46 61 31 72 1b 90 af 44 53 63 a8 35 8b 9f 60 + 76 42 f7 8f ab 0a b0 f4 3b 71 68 d6 4b ae 70 d8 + 82 78 48 d8 ef 1e 42 1c 57 54 dd f4 2c 25 89 b5 + b3''', + # Salt + '''57 bf 16 0b cb 02 bb 1d c7 28 0c f0 45 85 30 b7 + d2 83 2f f7''', + SHA + ), + + # + # Example 8.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''49 53 70 a1 fb 18 54 3c 16 d3 63 1e 31 63 25 5d + f6 2b e6 ee e8 90 d5 f2 55 09 e4 f7 78 a8 ea 6f + bb bc df 85 df f6 4e 0d 97 20 
03 ab 36 81 fb ba + 6d d4 1f d5 41 82 9b 2e 58 2d e9 f2 a4 a4 e0 a2 + d0 90 0b ef 47 53 db 3c ee 0e e0 6c 7d fa e8 b1 + d5 3b 59 53 21 8f 9c ce ea 69 5b 08 66 8e de aa + dc ed 94 63 b1 d7 90 d5 eb f2 7e 91 15 b4 6c ad + 4d 9a 2b 8e fa b0 56 1b 08 10 34 47 39 ad a0 73 + 3f''', + 'e':'''01 00 01''', + 'd':'''6c 66 ff e9 89 80 c3 8f cd ea b5 15 98 98 83 61 + 65 f4 b4 b8 17 c4 f6 a8 d4 86 ee 4e a9 13 0f e9 + b9 09 2b d1 36 d1 84 f9 5f 50 4a 60 7e ac 56 58 + 46 d2 fd d6 59 7a 89 67 c7 39 6e f9 5a 6e ee bb + 45 78 a6 43 96 6d ca 4d 8e e3 de 84 2d e6 32 79 + c6 18 15 9c 1a b5 4a 89 43 7b 6a 61 20 e4 93 0a + fb 52 a4 ba 6c ed 8a 49 47 ac 64 b3 0a 34 97 cb + e7 01 c2 d6 26 6d 51 72 19 ad 0e c6 d3 47 db e9''' + }, + # Message + '''81 33 2f 4b e6 29 48 41 5e a1 d8 99 79 2e ea cf + 6c 6e 1d b1 da 8b e1 3b 5c ea 41 db 2f ed 46 70 + 92 e1 ff 39 89 14 c7 14 25 97 75 f5 95 f8 54 7f + 73 56 92 a5 75 e6 92 3a f7 8f 22 c6 99 7d db 90 + fb 6f 72 d7 bb 0d d5 74 4a 31 de cd 3d c3 68 58 + 49 83 6e d3 4a ec 59 63 04 ad 11 84 3c 4f 88 48 + 9f 20 97 35 f5 fb 7f da f7 ce c8 ad dc 58 18 16 + 8f 88 0a cb f4 90 d5 10 05 b7 a8 e8 4e 43 e5 42 + 87 97 75 71 dd 99 ee a4 b1 61 eb 2d f1 f5 10 8f + 12 a4 14 2a 83 32 2e db 05 a7 54 87 a3 43 5c 9a + 78 ce 53 ed 93 bc 55 08 57 d7 a9 fb''', + # Signature + '''02 62 ac 25 4b fa 77 f3 c1 ac a2 2c 51 79 f8 f0 + 40 42 2b 3c 5b af d4 0a 8f 21 cf 0f a5 a6 67 cc + d5 99 3d 42 db af b4 09 c5 20 e2 5f ce 2b 1e e1 + e7 16 57 7f 1e fa 17 f3 da 28 05 2f 40 f0 41 9b + 23 10 6d 78 45 aa f0 11 25 b6 98 e7 a4 df e9 2d + 39 67 bb 00 c4 d0 d3 5b a3 55 2a b9 a8 b3 ee f0 + 7c 7f ec db c5 42 4a c4 db 1e 20 cb 37 d0 b2 74 + 47 69 94 0e a9 07 e1 7f bb ca 67 3b 20 52 23 80 + c5''', + # Salt + '''1d 65 49 1d 79 c8 64 b3 73 00 9b e6 f6 f2 46 7b + ac 4c 78 fa''', + SHA + ) + ) + + def testSign1(self): + for i in range(len(self._testData)): + # Build the key + comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e','d') ] + key = MyKey(RSA.construct(comps)) + # Hash function + h = self._testData[i][4].new() + # Data to sign + h.update(t2b(self._testData[i][1])) + # Salt + test_salt = t2b(self._testData[i][3]) + key._randfunc = lambda N: test_salt + # The real test + signer = PKCS.new(key) + self.assertTrue(signer.can_sign()) + s = signer.sign(h) + self.assertEqual(s, t2b(self._testData[i][2])) + + def testVerify1(self): + for i in range(len(self._testData)): + # Build the key + comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e') ] + key = MyKey(RSA.construct(comps)) + # Hash function + h = self._testData[i][4].new() + # Data to sign + h.update(t2b(self._testData[i][1])) + # Salt + test_salt = t2b(self._testData[i][3]) + # The real test + key._randfunc = lambda N: test_salt + verifier = PKCS.new(key) + self.assertFalse(verifier.can_sign()) + result = verifier.verify(h, t2b(self._testData[i][2])) + self.assertTrue(result) + + def testSignVerify(self): + h = SHA.new() + h.update(b('blah blah blah')) + + rng = Random.new().read + key = MyKey(RSA.generate(1024,rng)) + + # Helper function to monitor what's request from MGF + global mgfcalls + def newMGF(seed,maskLen): + global mgfcalls + mgfcalls += 1 + return bchr(0x00)*maskLen + + # Verify that PSS is friendly to all ciphers + for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,RIPEMD): + h = hashmod.new() + h.update(b('blah blah blah')) + + # Verify that sign() asks for as many random bytes + # as the hash output size + key.asked = 0 + signer = PKCS.new(key) + s = signer.sign(h) + self.assertTrue(signer.verify(h, s)) + 
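# MyKey._randfunc counts every byte that sign() requests from the key's RNG
# in key.asked; with the default PSS salt length (one salt byte per digest
# byte), exactly h.digest_size bytes should have been drawn.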
self.assertEqual(key.asked, h.digest_size) + + h = SHA.new() + h.update(b('blah blah blah')) + + # Verify that sign() uses a different salt length + for sLen in (0,3,21): + key.asked = 0 + signer = PKCS.new(key, saltLen=sLen) + s = signer.sign(h) + self.assertEqual(key.asked, sLen) + self.assertTrue(signer.verify(h, s)) + + # Verify that sign() uses the custom MGF + mgfcalls = 0 + signer = PKCS.new(key, newMGF) + s = signer.sign(h) + self.assertEqual(mgfcalls, 1) + self.assertTrue(signer.verify(h, s)) + + # Verify that sign() does not call the RNG + # when salt length is 0, even when a new MGF is provided + key.asked = 0 + mgfcalls = 0 + signer = PKCS.new(key, newMGF, 0) + s = signer.sign(h) + self.assertEqual(key.asked,0) + self.assertEqual(mgfcalls, 1) + self.assertTrue(signer.verify(h, s)) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_PSS_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/__init__.py new file mode 100644 index 0000000..abd640a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/__init__.py: Self-test for utility modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for utility modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + if os.name == 'nt': + from Crypto.SelfTest.Util import test_winrandom; tests += test_winrandom.get_tests(config=config) + from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) + from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_Counter.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_Counter.py new file mode 100644 index 0000000..339ce60 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_Counter.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_Counter: Self-test for the Crypto.Util.Counter module +# +# Written in 2009 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Util.Counter""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest + +class CounterTests(unittest.TestCase): + def setUp(self): + global Counter + from Crypto.Util import Counter + + def test_BE_shortcut(self): + """Big endian, shortcut enabled""" + c = Counter.new(128) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=False, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + + def test_LE_shortcut(self): + """Little endian, shortcut enabled""" + c = Counter.new(128, little_endian=True) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=True, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + + def test_BE_no_shortcut(self): + """Big endian, shortcut disabled""" + c = Counter.new(128, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + c = Counter.new(128, little_endian=False, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + + def test_LE_no_shortcut(self): + """Little endian, shortcut disabled""" + c = Counter.new(128, little_endian=True, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + + def test_BE_defaults(self): + """128-bit, Big endian, defaults""" + c = Counter.new(128) + self.assertEqual(1, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01"), c()) + self.assertEqual(2, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02"), c()) + for i in range(3, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")+bchr(i), c()) + self.assertEqual(256, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"), c()) + + def test_LE_defaults(self): + """128-bit, Little endian, defaults""" + c = Counter.new(128, little_endian=True) + self.assertEqual(1, c.next_value()) + 
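# Little-endian layout: the least significant byte comes first, so a counter
# value of 1 serializes as 0x01 followed by fifteen zero bytes.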
self.assertEqual(b("\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + self.assertEqual(2, c.next_value()) + self.assertEqual(b("\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + for i in range(3, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i)+b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + self.assertEqual(256, c.next_value()) + self.assertEqual(b("\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + + def test_BE8_wraparound(self): + """8-bit, Big endian, wraparound""" + c = Counter.new(8) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + + def test_LE8_wraparound(self): + """8-bit, Little endian, wraparound""" + c = Counter.new(8, little_endian=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + + def test_BE8_wraparound_allowed(self): + """8-bit, Big endian, wraparound with allow_wraparound=True""" + c = Counter.new(8, allow_wraparound=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(0, c.next_value()) + self.assertEqual(b("\x00"), c()) + self.assertEqual(1, c.next_value()) + + def test_LE8_wraparound_allowed(self): + """8-bit, Little endian, wraparound with allow_wraparound=True""" + c = Counter.new(8, little_endian=True, allow_wraparound=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(0, c.next_value()) + self.assertEqual(b("\x00"), c()) + self.assertEqual(1, c.next_value()) + + def test_BE8_carry(self): + """8-bit, Big endian, carry attribute""" + c = Counter.new(8) + for i in range(1, 256): + self.assertEqual(0, c.carry) + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(1, c.carry) + + def test_LE8_carry(self): + """8-bit, Little endian, carry attribute""" + c = Counter.new(8, little_endian=True) + for i in range(1, 256): + self.assertEqual(0, c.carry) + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(1, c.carry) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(CounterTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_asn1.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_asn1.py new file mode 100644 index 0000000..fff9286 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_asn1.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Util.asn1""" + +__revision__ = "$Id$" + +import unittest +import sys + +from Crypto.Util.py3compat import * +from Crypto.Util.asn1 import DerSequence, DerObject + +class DerObjectTests(unittest.TestCase): + + def testObjEncode1(self): + # No payload + der = DerObject(b('\x33')) + self.assertEqual(der.encode(), b('\x33\x00')) + # Small payload + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x33\x01\x45')) + # Invariant + self.assertEqual(der.encode(), b('\x33\x01\x45')) + # Initialize with numerical tag + der = DerObject(b(0x33)) + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x33\x01\x45')) + + def testObjEncode2(self): + # Known types + der = DerObject('SEQUENCE') + self.assertEqual(der.encode(), b('\x30\x00')) + der = DerObject('BIT STRING') + self.assertEqual(der.encode(), b('\x03\x00')) + + def testObjEncode3(self): + # Long payload + der = DerObject(b('\x34')) + der.payload = b("0")*128 + self.assertEqual(der.encode(), b('\x34\x81\x80' + "0"*128)) + + def testObjDecode1(self): + # Decode short payload + der = DerObject() + der.decode(b('\x20\x02\x01\x02')) + self.assertEqual(der.payload, b("\x01\x02")) + self.assertEqual(der.typeTag, 0x20) + + def testObjDecode2(self): + # Decode short payload + der = DerObject() + der.decode(b('\x22\x81\x80' + "1"*128)) + self.assertEqual(der.payload, b("1")*128) + self.assertEqual(der.typeTag, 0x22) + +class DerSequenceTests(unittest.TestCase): + + def testEncode1(self): + # Empty sequence + der = DerSequence() + self.assertEqual(der.encode(), b('0\x00')) + self.assertFalse(der.hasOnlyInts()) + # One single-byte integer (zero) + der.append(0) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + self.assertTrue(der.hasOnlyInts()) + # Invariant + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + + def testEncode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.append(127) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x7f')) + # Indexing + der[0] = 1 + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + self.assertEqual(der[-1],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + # + der[:] = [1] + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + + def testEncode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.append(0x180) + self.assertEqual(der.encode(), b('0\x04\x02\x02\x01\x80')) + + def testEncode4(self): + # One very long integer + der = DerSequence() + der.append(2**2048) + self.assertEqual(der.encode(), b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode5(self): + # One single-byte integer (looks negative) + der = DerSequence() + der.append(0xFF) + self.assertEqual(der.encode(), b('0\x04\x02\x02\x00\xff')) + + def testEncode6(self): + # Two integers + der = DerSequence() + der.append(0x180) + der.append(0xFF) + self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertTrue(der.hasOnlyInts()) + # + der.append(0x01) + der[1:] = [9,8] + self.assertEqual(len(der),3) + self.assertEqual(der[1:],[9,8]) + self.assertEqual(der[1:-1],[9]) + self.assertEqual(der.encode(), b('0\x0A\x02\x02\x01\x80\x02\x01\x09\x02\x01\x08')) + + def testEncode6(self): + # One integer and another type (no matter what it is) + der = DerSequence() + der.append(0x180) + der.append(b('\x00\x02\x00\x00')) + self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x00\x02\x00\x00')) + self.assertFalse(der.hasOnlyInts()) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSequence() + der.decode(b('0\x00')) + self.assertEqual(len(der),0) + # One single-byte integer (zero) + der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + # Invariant + der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + + def testDecode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x03\x02\x01\x7f')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],127) + + def testDecode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x04\x02\x02\x01\x80')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0x180) + + def testDecode4(self): + # One very long integer + der = DerSequence() + der.decode(b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ 
+ b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],2**2048) + + def testDecode5(self): + # One single-byte integer (looks negative) + der = DerSequence() + der.decode(b('0\x04\x02\x02\x00\xff')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0xFF) + + def testDecode6(self): + # Two integers + der = DerSequence() + der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],0xFF) + + def testDecode7(self): + # One integer and 2 other types + der = DerSequence() + der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),3) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],b('\x24\x02\xb6\x63')) + self.assertEqual(der[2],b('\x12\x00')) + + def testDecode8(self): + # Only 2 other types + der = DerSequence() + der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],b('\x24\x02\xb6\x63')) + self.assertEqual(der[1],b('\x12\x00')) + + def testErrDecode1(self): + # Not a sequence + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('')) + self.assertRaises(ValueError, der.decode, b('\x00')) + self.assertRaises(ValueError, der.decode, b('\x30')) + + def testErrDecode2(self): + # Wrong payload type + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('\x30\x00\x00'), True) + + def testErrDecode3(self): + # Wrong length format + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) + self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) + + def testErrDecode4(self): + # Wrong integer format + der = DerSequence() + # Multi-byte encoding for zero + #self.assertRaises(ValueError, der.decode, '\x30\x04\x02\x02\x00\x00') + # Negative integer + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\xFF')) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(DerObjectTests) + listTests += list_test_cases(DerSequenceTests) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_number.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_number.py new file mode 100644 index 0000000..d7d3024 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_number.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_number.py: Self-test for parts of the Crypto.Util.number module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for (some of) Crypto.Util.number""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +import unittest + +# NB: In some places, we compare tuples instead of just output values so that +# if any inputs cause a test failure, we'll be able to tell which ones. + +class MiscTests(unittest.TestCase): + def setUp(self): + global number, math + from Crypto.Util import number + import math + + def test_ceil_shift(self): + """Util.number.ceil_shift""" + self.assertRaises(AssertionError, number.ceil_shift, -1, 1) + self.assertRaises(AssertionError, number.ceil_shift, 1, -1) + + # b = 0 + self.assertEqual(0, number.ceil_shift(0, 0)) + self.assertEqual(1, number.ceil_shift(1, 0)) + self.assertEqual(2, number.ceil_shift(2, 0)) + self.assertEqual(3, number.ceil_shift(3, 0)) + + # b = 1 + self.assertEqual(0, number.ceil_shift(0, 1)) + self.assertEqual(1, number.ceil_shift(1, 1)) + self.assertEqual(1, number.ceil_shift(2, 1)) + self.assertEqual(2, number.ceil_shift(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_shift(0, 2)) + self.assertEqual(1, number.ceil_shift(1, 2)) + self.assertEqual(1, number.ceil_shift(2, 2)) + self.assertEqual(1, number.ceil_shift(3, 2)) + self.assertEqual(1, number.ceil_shift(4, 2)) + self.assertEqual(2, number.ceil_shift(5, 2)) + self.assertEqual(2, number.ceil_shift(6, 2)) + self.assertEqual(2, number.ceil_shift(7, 2)) + self.assertEqual(2, number.ceil_shift(8, 2)) + self.assertEqual(3, number.ceil_shift(9, 2)) + + for b in range(3, 1+129, 3): # 3, 6, ... 
, 129 + self.assertEqual(0, number.ceil_shift(0, b)) + + n = 1 + while n <= 2**(b+2): + (q, r) = divmod(n-1, 2**b) + expected = q + int(not not r) + self.assertEqual((n-1, b, expected), + (n-1, b, number.ceil_shift(n-1, b))) + + (q, r) = divmod(n, 2**b) + expected = q + int(not not r) + self.assertEqual((n, b, expected), + (n, b, number.ceil_shift(n, b))) + + (q, r) = divmod(n+1, 2**b) + expected = q + int(not not r) + self.assertEqual((n+1, b, expected), + (n+1, b, number.ceil_shift(n+1, b))) + + n *= 2 + + def test_ceil_div(self): + """Util.number.ceil_div""" + self.assertRaises(TypeError, number.ceil_div, "1", 1) + self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) + self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) + + # b = -1 + self.assertEqual(0, number.ceil_div(0, -1)) + self.assertEqual(-1, number.ceil_div(1, -1)) + self.assertEqual(-2, number.ceil_div(2, -1)) + self.assertEqual(-3, number.ceil_div(3, -1)) + + # b = 1 + self.assertEqual(0, number.ceil_div(0, 1)) + self.assertEqual(1, number.ceil_div(1, 1)) + self.assertEqual(2, number.ceil_div(2, 1)) + self.assertEqual(3, number.ceil_div(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_div(0, 2)) + self.assertEqual(1, number.ceil_div(1, 2)) + self.assertEqual(1, number.ceil_div(2, 2)) + self.assertEqual(2, number.ceil_div(3, 2)) + self.assertEqual(2, number.ceil_div(4, 2)) + self.assertEqual(3, number.ceil_div(5, 2)) + + # b = 3 + self.assertEqual(0, number.ceil_div(0, 3)) + self.assertEqual(1, number.ceil_div(1, 3)) + self.assertEqual(1, number.ceil_div(2, 3)) + self.assertEqual(1, number.ceil_div(3, 3)) + self.assertEqual(2, number.ceil_div(4, 3)) + self.assertEqual(2, number.ceil_div(5, 3)) + self.assertEqual(2, number.ceil_div(6, 3)) + self.assertEqual(3, number.ceil_div(7, 3)) + + # b = 4 + self.assertEqual(0, number.ceil_div(0, 4)) + self.assertEqual(1, number.ceil_div(1, 4)) + self.assertEqual(1, number.ceil_div(2, 4)) + self.assertEqual(1, number.ceil_div(3, 4)) + self.assertEqual(1, number.ceil_div(4, 4)) + self.assertEqual(2, number.ceil_div(5, 4)) + self.assertEqual(2, number.ceil_div(6, 4)) + self.assertEqual(2, number.ceil_div(7, 4)) + self.assertEqual(2, number.ceil_div(8, 4)) + self.assertEqual(3, number.ceil_div(9, 4)) + + # b = -4 + self.assertEqual(3, number.ceil_div(-9, -4)) + self.assertEqual(2, number.ceil_div(-8, -4)) + self.assertEqual(2, number.ceil_div(-7, -4)) + self.assertEqual(2, number.ceil_div(-6, -4)) + self.assertEqual(2, number.ceil_div(-5, -4)) + self.assertEqual(1, number.ceil_div(-4, -4)) + self.assertEqual(1, number.ceil_div(-3, -4)) + self.assertEqual(1, number.ceil_div(-2, -4)) + self.assertEqual(1, number.ceil_div(-1, -4)) + self.assertEqual(0, number.ceil_div(0, -4)) + self.assertEqual(0, number.ceil_div(1, -4)) + self.assertEqual(0, number.ceil_div(2, -4)) + self.assertEqual(0, number.ceil_div(3, -4)) + self.assertEqual(-1, number.ceil_div(4, -4)) + self.assertEqual(-1, number.ceil_div(5, -4)) + self.assertEqual(-1, number.ceil_div(6, -4)) + self.assertEqual(-1, number.ceil_div(7, -4)) + self.assertEqual(-2, number.ceil_div(8, -4)) + self.assertEqual(-2, number.ceil_div(9, -4)) + + def test_exact_log2(self): + """Util.number.exact_log2""" + self.assertRaises(TypeError, number.exact_log2, "0") + self.assertRaises(ValueError, number.exact_log2, -1) + self.assertRaises(ValueError, number.exact_log2, 0) + self.assertEqual(0, number.exact_log2(1)) + self.assertEqual(1, number.exact_log2(2)) + self.assertRaises(ValueError, number.exact_log2, 3) + self.assertEqual(2, 
number.exact_log2(4)) + self.assertRaises(ValueError, number.exact_log2, 5) + self.assertRaises(ValueError, number.exact_log2, 6) + self.assertRaises(ValueError, number.exact_log2, 7) + e = 3 + n = 8 + while e < 16: + if n == 2**e: + self.assertEqual(e, number.exact_log2(n), "expected=2**%d, n=%d" % (e, n)) + e += 1 + else: + self.assertRaises(ValueError, number.exact_log2, n) + n += 1 + + for e in range(16, 1+64, 2): + self.assertRaises(ValueError, number.exact_log2, 2**e-1) + self.assertEqual(e, number.exact_log2(2**e)) + self.assertRaises(ValueError, number.exact_log2, 2**e+1) + + def test_exact_div(self): + """Util.number.exact_div""" + + # Positive numbers + self.assertEqual(1, number.exact_div(1, 1)) + self.assertRaises(ValueError, number.exact_div, 1, 2) + self.assertEqual(1, number.exact_div(2, 2)) + self.assertRaises(ValueError, number.exact_div, 3, 2) + self.assertEqual(2, number.exact_div(4, 2)) + + # Negative numbers + self.assertEqual(-1, number.exact_div(-1, 1)) + self.assertEqual(-1, number.exact_div(1, -1)) + self.assertRaises(ValueError, number.exact_div, -1, 2) + self.assertEqual(1, number.exact_div(-2, -2)) + self.assertEqual(-2, number.exact_div(-4, 2)) + + # Zero dividend + self.assertEqual(0, number.exact_div(0, 1)) + self.assertEqual(0, number.exact_div(0, 2)) + + # Zero divisor (allow_divzero == False) + self.assertRaises(ZeroDivisionError, number.exact_div, 0, 0) + self.assertRaises(ZeroDivisionError, number.exact_div, 1, 0) + + # Zero divisor (allow_divzero == True) + self.assertEqual(0, number.exact_div(0, 0, allow_divzero=True)) + self.assertRaises(ValueError, number.exact_div, 1, 0, allow_divzero=True) + + def test_floor_div(self): + """Util.number.floor_div""" + self.assertRaises(TypeError, number.floor_div, "1", 1) + for a in range(-10, 10): + for b in range(-10, 10): + if b == 0: + self.assertRaises(ZeroDivisionError, number.floor_div, a, b) + else: + self.assertEqual((a, b, int(math.floor(float(a) / b))), + (a, b, number.floor_div(a, b))) + + def test_getStrongPrime(self): + """Util.number.getStrongPrime""" + self.assertRaises(ValueError, number.getStrongPrime, 256) + self.assertRaises(ValueError, number.getStrongPrime, 513) + bits = 512 + x = number.getStrongPrime(bits) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+1 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD(x-1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+2 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD((x-1)>>1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + + def test_isPrime(self): + """Util.number.isPrime""" + self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCrypto to crash. 
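        # A brief usage sketch for getStrongPrime, tested just above (assuming
        # the Crypto.Util.number API imported in setUp); the bit size and public
        # exponent below are illustrative values only:
        #   p = number.getStrongPrime(512, 65537)
        #   assert p % 2 == 1 and p.bit_length() == 512
        #   assert number.GCD(p - 1, 65537) == 1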
+ self.assertEqual(number.isPrime(2), True) + self.assertEqual(number.isPrime(3), True) + self.assertEqual(number.isPrime(4), False) + self.assertEqual(number.isPrime(2**1279-1), True) + self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime + # test some known gmp pseudo-primes taken from + # http://www.trnicely.net/misc/mpzspsp.html + for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, + 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, + 4859419 * 9718837, 2730439 * 5460877, + 245127919 * 490255837, 963939391 * 1927878781, + 4186358431 * 8372716861, 1576820467 * 3153640933): + self.assertEqual(number.isPrime(int(composite)), False) + + def test_size(self): + self.assertEqual(number.size(2),2) + self.assertEqual(number.size(3),2) + self.assertEqual(number.size(0xa2),8) + self.assertEqual(number.size(0xa2ba40),8*3) + self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) + + def test_negative_number_roundtrip_mpzToLongObj_longObjToMPZ(self): + """Test that mpzToLongObj and longObjToMPZ (internal functions) roundtrip negative numbers correctly.""" + n = -100000000000000000000000000000000000 + e = 2 + k = number._fastmath.rsa_construct(n, e) + self.assertEqual(n, k.n) + self.assertEqual(e, k.e) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(MiscTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_winrandom.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_winrandom.py new file mode 100644 index 0000000..3fc5145 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/Util/test_winrandom.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Util.winrandom""" + +__revision__ = "$Id$" + +import unittest + +class WinRandomImportTest(unittest.TestCase): + def runTest(self): + """winrandom: simple test""" + # Import the winrandom module and try to use it + from Crypto.Util import winrandom + randobj = winrandom.new() + x = randobj.get_bytes(16) + y = randobj.get_bytes(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [WinRandomImportTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/__init__.py new file mode 100644 index 0000000..c3281eb --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/__init__.py: Self-test for PyCrypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self tests + +These tests should perform quickly and can ideally be used every time an +application runs. +""" + +__revision__ = "$Id$" + +import sys +import unittest +from io import StringIO + +class SelfTestError(Exception): + def __init__(self, message, result): + Exception.__init__(self, message, result) + self.message = message + self.result = result + +def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): + """Execute self-tests. + + This raises SelfTestError if any test is unsuccessful. + + You may optionally pass in a sub-module of SelfTest if you only want to + perform some of the tests. 
For example, the following would test only the + hash modules: + + Crypto.SelfTest.run(Crypto.SelfTest.Hash) + + """ + if config is None: + config = {} + suite = unittest.TestSuite() + if module is None: + if tests is None: + tests = get_tests(config=config) + suite.addTests(tests) + else: + if tests is None: + suite.addTests(module.get_tests(config=config)) + else: + raise ValueError("'module' and 'tests' arguments are mutually exclusive") + if stream is None: + kwargs['stream'] = StringIO() + runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) + result = runner.run(suite) + if not result.wasSuccessful(): + if stream is None: + sys.stderr.write(stream.getvalue()) + raise SelfTestError("Self-test failed", result) + return result + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest import Cipher; tests += Cipher.get_tests(config=config) + from Crypto.SelfTest import Hash; tests += Hash.get_tests(config=config) + from Crypto.SelfTest import Protocol; tests += Protocol.get_tests(config=config) + from Crypto.SelfTest import PublicKey; tests += PublicKey.get_tests(config=config) + from Crypto.SelfTest import Random; tests += Random.get_tests(config=config) + from Crypto.SelfTest import Util; tests += Util.get_tests(config=config) + from Crypto.SelfTest import Signature; tests += Signature.get_tests(config=config) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/st_common.py b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/st_common.py new file mode 100644 index 0000000..c56eac5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/SelfTest/st_common.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/st_common.py: Common functions for SelfTest modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Common functions for SelfTest modules""" + +__revision__ = "$Id$" + +import unittest +import binascii +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +class _list_testloader(unittest.TestLoader): + suiteClass = list + +def list_test_cases(class_): + """Return a list of TestCase instances given a TestCase class + + This is useful when you have defined test* methods on your TestCase class. 
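
    A short usage sketch (the TestCase class below is hypothetical):

        >>> class MyTests(unittest.TestCase):
        ...     def test_one(self):
        ...         self.assertEqual(1 + 1, 2)
        >>> cases = list_test_cases(MyTests)
        >>> isinstance(cases, list) and len(cases) == 1
        True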
+ """ + return _list_testloader().loadTestsFromTestCase(class_) + +def strip_whitespace(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return b("".join(s.split())) + else: + return b("").join(s.split()) + +def a2b_hex(s): + """Convert hexadecimal to binary, ignoring whitespace""" + return binascii.a2b_hex(strip_whitespace(s)) + +def b2a_hex(s): + """Convert binary to hexadecimal""" + # For completeness + return binascii.b2a_hex(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_PSS.py b/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_PSS.py new file mode 100644 index 0000000..319851e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_PSS.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# +# Signature/PKCS1_PSS.py : PKCS#1 PPS +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA digital signature protocol with appendix according to PKCS#1 PSS. + +See RFC3447__ or the `original RSA Labs specification`__. + +This scheme is more properly called ``RSASSA-PSS``. + +For example, a sender may authenticate a message using SHA-1 and PSS like +this: + + >>> from Crypto.Signature import PKCS1_PSS + >>> from Crypto.Hash import SHA + >>> from Crypto.PublicKey import RSA + >>> from Crypto import Random + >>> + >>> message = 'To be signed' + >>> key = RSA.importKey(open('privkey.der').read()) + >>> h = SHA.new() + >>> h.update(message) + >>> signer = PKCS1_PSS.new(key) + >>> signature = PKCS1_PSS.sign(key) + +At the receiver side, verification can be done like using the public part of +the RSA key: + + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> h = SHA.new() + >>> h.update(message) + >>> verifier = PKCS1_PSS.new(key) + >>> if verifier.verify(h, signature): + >>> print "The signature is authentic." + >>> else: + >>> print "The signature is not authentic." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. 
__: http://www.rsa.com/rsalabs/node.asp?id=2125 +""" + +# Allow nested scopes in Python 2.1 +# See http://oreilly.com/pub/a/python/2001/04/19/pythonnews.html + + +__revision__ = "$Id$" +__all__ = [ 'new', 'PSS_SigScheme' ] + +from Crypto.Util.py3compat import * +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +import Crypto.Util.number +from Crypto.Util.number import ceil_shift, ceil_div, long_to_bytes +from Crypto.Util.strxor import strxor + +class PSS_SigScheme: + """This signature scheme can perform PKCS#1 PSS RSA signature or verification.""" + + def __init__(self, key, mgfunc, saltLen): + """Initialize this PKCS#1 PSS signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and verification are possible. + If a public half is given, only verification is possible. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + saltLen : int + Length of the salt, in bytes. + """ + self._key = key + self._saltLen = saltLen + self._mgfunc = mgfunc + + def can_sign(self): + """Return True if this cipher object can be used for signing messages.""" + return self._key.has_private() + + def sign(self, mhash): + """Produce the PKCS#1 PSS signature of a message. + + This function is named ``RSASSA-PSS-SIGN``, and is specified in + section 8.1.1 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + + :Return: The PSS signature encoded as a string. + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + hash algorithm. + :Raise TypeError: + If the RSA key has no private half. + + :attention: Modify the salt length and the mask generation function only + if you know what you are doing. + The receiver must use the same parameters too. + """ + # TODO: Verify the key is RSA + + randfunc = self._key._randfunc + + # Set defaults for salt length and mask generation function + if self._saltLen == None: + sLen = mhash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x,y: MGF1(x,y,mhash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.1 in RFC3447 + k = ceil_div(modBits,8) # Convert from bits to bytes + # Step 1 + em = EMSA_PSS_ENCODE(mhash, modBits-1, randfunc, mgf, sLen) + # Step 2a (OS2IP) and 2b (RSASP1) + m = self._key.decrypt(em) + # Step 2c (I2OSP) + S = bchr(0x00)*(k-len(m)) + m + return S + + def verify(self, mhash, S): + """Verify that a certain PKCS#1 PSS signature is authentic. + + This function checks if the party holding the private half of the given + RSA key has really signed the message. + + This function is called ``RSASSA-PSS-VERIFY``, and is specified in section + 8.1.2 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + S : string + The signature that needs to be validated. + + :Return: True if verification is correct. False otherwise. 
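
        A round-trip sketch (the key size below is illustrative only):

            >>> from Crypto.PublicKey import RSA
            >>> from Crypto.Signature import PKCS1_PSS
            >>> from Crypto.Hash import SHA
            >>> key = RSA.generate(1024)
            >>> h = SHA.new(b'To be signed')
            >>> signature = PKCS1_PSS.new(key).sign(h)
            >>> PKCS1_PSS.new(key.publickey()).verify(h, signature)
            True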
+ """ + # TODO: Verify the key is RSA + + # Set defaults for salt length and mask generation function + if self._saltLen == None: + sLen = mhash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x,y: MGF1(x,y,mhash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.2 in RFC3447 + k = ceil_div(modBits,8) # Convert from bits to bytes + # Step 1 + if len(S) != k: + return False + # Step 2a (O2SIP), 2b (RSAVP1), and partially 2c (I2OSP) + # Note that signature must be smaller than the module + # but RSA.py won't complain about it. + # TODO: Fix RSA object; don't do it here. + em = self._key.encrypt(S, 0)[0] + # Step 2c + emLen = ceil_div(modBits-1,8) + em = bchr(0x00)*(emLen-len(em)) + em + # Step 3 + try: + result = EMSA_PSS_VERIFY(mhash, em, modBits-1, mgf, sLen) + except ValueError: + return False + # Step 4 + return result + +def MGF1(mgfSeed, maskLen, hash): + """Mask Generation Function, described in B.2.1""" + T = b("") + for counter in range(ceil_div(maskLen, hash.digest_size)): + c = long_to_bytes(counter, 4) + T = T + hash.new(mgfSeed + c).digest() + assert(len(T)>=maskLen) + return T[:maskLen] + +def EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): + """ + Implement the ``EMSA-PSS-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.1). + + The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message being signed. + emBits : int + Maximum length of the final encoding, in bits. + randFunc : callable + An RNG function that accepts as only parameter an int, and returns + a string of random bytes, to be used as salt. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: An ``emLen`` byte long string that encodes the hash + (with ``emLen = \ceil(emBits/8)``). + + :Raise ValueError: + When digest or salt length are too big. + """ + + emLen = ceil_div(emBits,8) + + # Bitmask of digits that fill up + lmask = 0 + for i in range(8*emLen-emBits): + lmask = lmask>>1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Digest or salt length are too long for given key size.") + # Step 4 + salt = b("") + if randFunc and sLen>0: + salt = randFunc(sLen) + # Step 5 and 6 + h = mhash.new(bchr(0x00)*8 + mhash.digest() + salt) + # Step 7 and 8 + db = bchr(0x00)*(emLen-sLen-mhash.digest_size-2) + bchr(0x01) + salt + # Step 9 + dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) + # Step 10 + maskedDB = strxor(db,dbMask) + # Step 11 + maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] + # Step 12 + em = maskedDB + h.digest() + bchr(0xBC) + return em + +def EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): + """ + Implement the ``EMSA-PSS-VERIFY`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.2). + + ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message to be verified. + em : string + The signature to verify, therefore proving that the sender really signed + the message that was received. 
+ emBits : int + Length of the final encoding (em), in bits. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: 0 if the encoding is consistent, 1 if it is inconsistent. + + :Raise ValueError: + When digest or salt length are too big. + """ + + emLen = ceil_div(emBits,8) + + # Bitmask of digits that fill up + lmask = 0 + for i in range(8*emLen-emBits): + lmask = lmask>>1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + return False + # Step 4 + if ord(em[-1:])!=0xBC: + return False + # Step 5 + maskedDB = em[:emLen-mhash.digest_size-1] + h = em[emLen-mhash.digest_size-1:-1] + # Step 6 + if lmask & bord(em[0]): + return False + # Step 7 + dbMask = mgf(h, emLen-mhash.digest_size-1) + # Step 8 + db = strxor(maskedDB, dbMask) + # Step 9 + db = bchr(bord(db[0]) & ~lmask) + db[1:] + # Step 10 + if not db.startswith(bchr(0x00)*(emLen-mhash.digest_size-sLen-2) + bchr(0x01)): + return False + # Step 11 + salt = b("") + if sLen: salt = db[-sLen:] + # Step 12 and 13 + hp = mhash.new(bchr(0x00)*8 + mhash.digest() + salt).digest() + # Step 14 + if h!=hp: + return False + return True + +def new(key, mgfunc=None, saltLen=None): + """Return a signature scheme object `PSS_SigScheme` that + can be used to perform PKCS#1 PSS signature or verification. + + :Parameters: + key : RSA key object + The key to use to sign or verify the message. This is a `Crypto.PublicKey.RSA` object. + Signing is only possible if *key* is a private RSA key. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 is used. + saltLen : int + Length of the salt, in bytes. If not specified, it matches the output + size of the hash function. + + """ + return PSS_SigScheme(key, mgfunc, saltLen) + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_v1_5.py b/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_v1_5.py new file mode 100644 index 0000000..73ac251 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Signature/PKCS1_v1_5.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +# +# Signature/PKCS1-v1_5.py : PKCS#1 v1.5 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +RSA digital signature protocol according to PKCS#1 v1.5 + +See RFC3447__ or the `original RSA Labs specification`__. + +This scheme is more properly called ``RSASSA-PKCS1-v1_5``. 
+ +For example, a sender may authenticate a message using SHA-1 like +this: + + >>> from Crypto.Signature import PKCS1_v1_5 + >>> from Crypto.Hash import SHA + >>> from Crypto.PublicKey import RSA + >>> + >>> message = 'To be signed' + >>> key = RSA.importKey(open('privkey.der').read()) + >>> h = SHA.new(message) + >>> signer = PKCS1_v1_5.new(key) + >>> signature = signer.sign(h) + +At the receiver side, verification can be done using the public part of +the RSA key: + + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> h = SHA.new(message) + >>> verifier = PKCS1_v1_5.new(key) + >>> if verifier.verify(h, signature): + >>> print "The signature is authentic." + >>> else: + >>> print "The signature is not authentic." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125 +""" + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS115_SigScheme' ] + +import Crypto.Util.number +from Crypto.Util.number import ceil_div +from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString +from Crypto.Util.py3compat import * + +class PKCS115_SigScheme: + """This signature scheme can perform PKCS#1 v1.5 RSA signature or verification.""" + + def __init__(self, key): + """Initialize this PKCS#1 v1.5 signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and verification are possible. + If a public half is given, only verification is possible. + """ + self._key = key + + def can_sign(self): + """Return True if this cipher object can be used for signing messages.""" + return self._key.has_private() + + def sign(self, mhash): + """Produce the PKCS#1 v1.5 signature of a message. + + This function is named ``RSASSA-PKCS1-V1_5-SIGN``, and is specified in + section 8.2.1 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + + :Return: The signature encoded as a string. + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + hash algorithm. + :Raise TypeError: + If the RSA key has no private half. + """ + # TODO: Verify the key is RSA + + # See 8.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + em = EMSA_PKCS1_V1_5_ENCODE(mhash, k) + # Step 2a (OS2IP) and 2b (RSASP1) + m = self._key.decrypt(em) + # Step 2c (I2OSP) + S = bchr(0x00)*(k-len(m)) + m + return S + + def verify(self, mhash, S): + """Verify that a certain PKCS#1 v1.5 signature is authentic. + + This function checks if the party holding the private half of the key + really signed the message. + + This function is named ``RSASSA-PKCS1-V1_5-VERIFY``, and is specified in + section 8.2.2 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + S : string + The signature that needs to be validated. + + :Return: True if verification is correct. False otherwise. + """ + # TODO: Verify the key is RSA + + # See 8.2.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + if len(S) != k: + return 0 + # Step 2a (O2SIP) and 2b (RSAVP1) + # Note that signature must be smaller than the module + # but RSA.py won't complain about it. + # TODO: Fix RSA object; don't do it here. 
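        # Background sketch (assuming SHA-1 is the hash in use): the DigestInfo
        # built by EMSA_PKCS1_V1_5_ENCODE in Step 3 below starts with the fixed
        # 15-byte prefix 30 21 30 09 06 05 2b 0e 03 02 1a 05 00 04 14, followed
        # by the 20-byte digest, so the em1 == em2 comparison below succeeds
        # only when the whole padded encoding matches byte for byte.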
+ m = self._key.encrypt(S, 0)[0] + # Step 2c (I2OSP) + em1 = bchr(0x00)*(k-len(m)) + m + # Step 3 + try: + em2 = EMSA_PKCS1_V1_5_ENCODE(mhash, k) + except ValueError: + return 0 + # Step 4 + # By comparing the full encodings (as opposed to checking each + # of its components one at a time) we avoid attacks to the padding + # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). + # + return em1==em2 + +def EMSA_PKCS1_V1_5_ENCODE(hash, emLen): + """ + Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.2). + + ``EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + hash : hash object + The hash object that holds the digest of the message being signed. + emLen : int + The length the final encoding must have, in bytes. + + :attention: the early standard (RFC2313) stated that ``DigestInfo`` + had to be BER-encoded. This means that old signatures + might have length tags in indefinite form, which + is not supported in DER. Such encoding cannot be + reproduced by this function. + + :attention: the same standard defined ``DigestAlgorithm`` to be + of ``AlgorithmIdentifier`` type, where the PARAMETERS + item is optional. Encodings for ``MD2/4/5`` without + ``PARAMETERS`` cannot be reproduced by this function. + + :Return: An ``emLen`` byte long string that encodes the hash. + """ + + # First, build the ASN.1 DER object DigestInfo: + # + # DigestInfo ::= SEQUENCE { + # digestAlgorithm AlgorithmIdentifier, + # digest OCTET STRING + # } + # + # where digestAlgorithm identifies the hash function and shall be an + # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. + # + # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { + # { OID id-md2 PARAMETERS NULL }| + # { OID id-md5 PARAMETERS NULL }| + # { OID id-sha1 PARAMETERS NULL }| + # { OID id-sha256 PARAMETERS NULL }| + # { OID id-sha384 PARAMETERS NULL }| + # { OID id-sha512 PARAMETERS NULL } + # } + # + digestAlgo = DerSequence([hash.oid, DerNull().encode()]) + digest = DerOctetString(hash.digest()) + digestInfo = DerSequence([ + digestAlgo.encode(), + digest.encode() + ]).encode() + + # We need at least 11 bytes for the remaining data: 3 fixed bytes and + # at least 8 bytes of padding). + if emLen +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Fast counter functions for CTR cipher modes. + +CTR is a chaining mode for symmetric block encryption or decryption. 
+Messages are divideded into blocks, and the cipher operation takes +place on each block using the secret key and a unique *counter block*. + +The most straightforward way to fulfil the uniqueness property is +to start with an initial, random *counter block* value, and increment it as +the next block is processed. + +The block ciphers from `Crypto.Cipher` (when configured in *MODE_CTR* mode) +invoke a callable object (the *counter* parameter) to get the next *counter block*. +Unfortunately, the Python calling protocol leads to major performance degradations. + +The counter functions instantiated by this module will be invoked directly +by the ciphers in `Crypto.Cipher`. The fact that the Python layer is bypassed +lead to more efficient (and faster) execution of CTR cipher modes. + +An example of usage is the following: + + >>> from Crypto.Cipher import AES + >>> from Crypto.Util import Counter + >>> + >>> pt = b'\x00'*1000000 + >>> ctr = Counter.new(128) + >>> cipher = AES.new(b'\x00'*16, AES.MODE_CTR, counter=ctr) + >>> ct = cipher.encrypt(pt) + +:undocumented: __package__ +""" +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from Crypto.Util import _counter +import struct + +# Factory function +def new(nbits, prefix=b(""), suffix=b(""), initial_value=1, overflow=0, little_endian=False, allow_wraparound=False, disable_shortcut=False): + """Create a stateful counter block function suitable for CTR encryption modes. + + Each call to the function returns the next counter block. + Each counter block is made up by three parts:: + + prefix || counter value || postfix + + The counter value is incremented by one at each call. + + :Parameters: + nbits : integer + Length of the desired counter, in bits. It must be a multiple of 8. + prefix : byte string + The constant prefix of the counter block. By default, no prefix is + used. + suffix : byte string + The constant postfix of the counter block. By default, no suffix is + used. + initial_value : integer + The initial value of the counter. Default value is 1. + little_endian : boolean + If True, the counter number will be encoded in little endian format. + If False (default), in big endian format. + allow_wraparound : boolean + If True, the function will raise an *OverflowError* exception as soon + as the counter wraps around. If False (default), the counter will + simply restart from zero. + disable_shortcut : boolean + If True, do not make ciphers from `Crypto.Cipher` bypass the Python + layer when invoking the counter block function. + If False (default), bypass the Python layer. + :Returns: + The counter block function. + """ + + # Sanity-check the message size + (nbytes, remainder) = divmod(nbits, 8) + if remainder != 0: + # In the future, we might support arbitrary bit lengths, but for now we don't. 
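        # A typical CTR setup sketch (hedged; `key_bytes` and the 8-byte nonce
        # below are placeholders): pair a per-message random prefix with a
        # 64-bit big-endian counter so that prefix||counter never repeats under
        # the same key.
        #   nonce = Random.new().read(8)        # assumes Crypto.Random is imported
        #   ctr = Counter.new(64, prefix=nonce, initial_value=0)
        #   cipher = AES.new(key_bytes, AES.MODE_CTR, counter=ctr)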
+ raise ValueError("nbits must be a multiple of 8; got %d" % (nbits,)) + if nbytes < 1: + raise ValueError("nbits too small") + elif nbytes > 0xffff: + raise ValueError("nbits too large") + + initval = _encode(initial_value, nbytes, little_endian) + + if little_endian: + return _counter._newLE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) + else: + return _counter._newBE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) + +def _encode(n, nbytes, little_endian=False): + retval = [] + n = int(n) + for i in range(nbytes): + if little_endian: + retval.append(bchr(n & 0xff)) + else: + retval.insert(0, bchr(n & 0xff)) + n >>= 8 + return b("").join(retval) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/RFC1751.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/RFC1751.py new file mode 100644 index 0000000..1c10c4a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/RFC1751.py @@ -0,0 +1,365 @@ +# rfc1751.py : Converts between 128-bit strings and a human-readable +# sequence of words, as defined in RFC1751: "A Convention for +# Human-Readable 128-bit Keys", by Daniel L. McDonald. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + + +import binascii +from Crypto.Util.py3compat import * +from functools import reduce + +binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101', + 6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011', + 12:'1100', 13:'1101', 14:'1110', 15:'1111'} + +def _key2bin(s): + "Convert a key into a string of binary digits" + kl=[bord(x) for x in s] + kl=[binary[x>>4]+binary[x&15] for x in kl] + return ''.join(kl) + +def _extract(key, start, length): + """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its + numeric value.""" + k=key[start:start+length] + return reduce(lambda x,y: x*2+ord(y)-48, k, 0) + +def key_to_english (key): + """key_to_english(key:string(2.x)/bytes(3.x)) : string + Transform an arbitrary key into a string containing English words. + The key length must be a multiple of 8. 
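
    Example, using one of the vectors from this module's self-test block:

        >>> key_to_english(binascii.a2b_hex(b'EB33F77EE73D4053'))
        'TIDE ITCH SLOW REIN RULE MOT'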
+ """ + english='' + for index in range(0, len(key), 8): # Loop over 8-byte subkeys + subkey=key[index:index+8] + # Compute the parity of the key + skbin=_key2bin(subkey) ; p=0 + for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) + # Append parity bits to the subkey + skbin=_key2bin(subkey+bchr((p<<6) & 255)) + for i in range(0, 64, 11): + english=english+wordlist[_extract(skbin, i, 11)]+' ' + + return english[:-1] # Remove the trailing space + +def english_to_key (s): + """english_to_key(string):string(2.x)/bytes(2.x) + Transform a string into a corresponding key. + The string must contain words separated by whitespace; the number + of words must be a multiple of 6. + """ + + L=s.upper().split() ; key=b('') + for index in range(0, len(L), 6): + sublist=L[index:index+6] ; char=9*[0] ; bits=0 + for i in sublist: + index = wordlist.index(i) + shift = (8-(bits+11)%8) %8 + y = index << shift + cl, cc, cr = (y>>16), (y>>8)&0xff, y & 0xff + if (shift>5): + char[bits>>3] = char[bits>>3] | cl + char[(bits>>3)+1] = char[(bits>>3)+1] | cc + char[(bits>>3)+2] = char[(bits>>3)+2] | cr + elif shift>-3: + char[bits>>3] = char[bits>>3] | cc + char[(bits>>3)+1] = char[(bits>>3)+1] | cr + else: char[bits>>3] = char[bits>>3] | cr + bits=bits+11 + subkey=reduce(lambda x,y:x+bchr(y), char, b('')) + + # Check the parity of the resulting key + skbin=_key2bin(subkey) + p=0 + for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) + if (p&3) != _extract(skbin, 64, 2): + raise ValueError("Parity error in resulting key") + key=key+subkey[0:8] + return key + +wordlist=[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", + "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", + "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", + "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", + "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", + "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", + "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", + "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", + "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", + "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", + "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN", + "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", + "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", + "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", + "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", + "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", + "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", + "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", + "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", + "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", + "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", + "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", + "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", + "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", + "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", + "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", + "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", + "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", + "LAC", "LAD", 
"LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", + "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", + "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", + "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", + "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", + "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", + "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", + "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", + "NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", + "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", + "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT", + "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", + "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", + "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", + "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", + "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", + "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", + "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", + "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", + "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", + "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", + "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", + "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", + "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", + "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", + "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", + "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", + "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", + "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", + "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", + "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", + "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", + "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", + "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", + "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", + "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", + "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", + "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", + "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", + "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM", + "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", + "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", + "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", + "BEAU", "BECK", "BEEF", "BEEN", "BEER", + "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", + "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", + "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", + "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", + "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", + "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", + "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", + "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", + "BOTH", 
"BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", + "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", + "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", + "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", + "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", + "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", + "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", + "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", + "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG", + "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", + "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", + "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", + "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", + "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", + "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", + "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", + "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", + "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", + "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", + "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", + "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", + "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", + "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", + "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", + "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", + "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", + "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", + "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", + "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", + "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", + "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", + "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", + "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", + "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", + "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", + "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", + "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", + "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", + "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", + "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", + "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", + "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", + "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", + "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", + "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", + "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", + "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", + "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", + "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", + "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", + "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", + "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", + "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", + "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", 
+ "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", + "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", + "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", + "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", + "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", + "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", + "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", + "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", + "HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK", + "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", + "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", + "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", + "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", + "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", + "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", + "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", + "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", + "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", + "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", + "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", + "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", + "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", + "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", + "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", + "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", + "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", + "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", + "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", + "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", + "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", + "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", + "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", + "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", + "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", + "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", + "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", + "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", + "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", + "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", + "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", + "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", + "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", + "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", + "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", + "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", + "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", + "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", + "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", + "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", + "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", + "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", + "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", + "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", + "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", 
"OSLO", "OTIS", + "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", + "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", + "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", + "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", + "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", + "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", + "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", + "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", + "ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS", + "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", + "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", + "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", + "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", + "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", + "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", + "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", + "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", + "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", + "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", + "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", + "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", + "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", + "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", + "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", + "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", + "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", + "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", + "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", + "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", + "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", + "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", + "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", + "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", + "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", + "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", + "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", + "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", + "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", + "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", + "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", + "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", + "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", + "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", + "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", + "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", + "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", + "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", + "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", + "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", + "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", + "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", + "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", + "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", + "YANG", "YANK", "YARD", "YARN", 
"YAWL", "YAWN", "YEAH", "YEAR", + "YELL", "YOGA", "YOKE" ] + +if __name__=='__main__': + data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', + 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', + 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + + for key, words in data: + print('Trying key', key) + key=binascii.a2b_hex(key) + w2=key_to_english(key) + if w2!=words: + print('key_to_english fails on key', repr(key), ', producing', str(w2)) + k2=english_to_key(words) + if k2!=key: + print('english_to_key fails on key', repr(key), ', producing', repr(k2)) + + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/__init__.py new file mode 100644 index 0000000..a3bef8a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/__init__.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Miscellaneous modules + +Contains useful modules that don't belong into any of the +other Crypto.* subpackages. + +Crypto.Util.number Number-theoretic functions (primality testing, etc.) +Crypto.Util.randpool Random number generation +Crypto.Util.RFC1751 Converts between 128-bit keys and human-readable + strings of words. +Crypto.Util.asn1 Minimal support for ASN.1 DER encoding + +""" + +__all__ = ['randpool', 'RFC1751', 'number', 'strxor', 'asn1' ] + +__revision__ = "$Id$" + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/_counter.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Util/_counter.cpython-35m-darwin.so new file mode 100755 index 0000000..0dabc28 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Util/_counter.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/_number_new.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/_number_new.py new file mode 100644 index 0000000..5f29176 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/_number_new.py @@ -0,0 +1,119 @@ +# -*- coding: ascii -*- +# +# Util/_number_new.py : utility functions +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +## NOTE: Do not import this module directly. Import these functions from Crypto.Util.number. + +__revision__ = "$Id$" +__all__ = ['ceil_shift', 'ceil_div', 'floor_div', 'exact_log2', 'exact_div'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +def ceil_shift(n, b): + """Return ceil(n / 2**b) without performing any floating-point or division operations. + + This is done by right-shifting n by b bits and incrementing the result by 1 + if any '1' bits were shifted out. + """ + if not isinstance(n, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(n).__name__, type(b).__name__)) + + assert n >= 0 and b >= 0 # I haven't tested or even thought about negative values + mask = (1 << b) - 1 + if n & mask: + return (n >> b) + 1 + else: + return n >> b + +def ceil_div(a, b): + """Return ceil(a / b) without performing any floating-point operations.""" + + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) + + (q, r) = divmod(a, b) + if r: + return q + 1 + else: + return q + +def floor_div(a, b): + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) + + (q, r) = divmod(a, b) + return q + +def exact_log2(num): + """Find and return an integer i >= 0 such that num == 2**i. + + If no such integer exists, this function raises ValueError. + """ + + if not isinstance(num, int): + raise TypeError("unsupported operand type: %r" % (type(num).__name__,)) + + n = int(num) + if n <= 0: + raise ValueError("cannot compute logarithm of non-positive number") + + i = 0 + while n != 0: + if (n & 1) and n != 1: + raise ValueError("No solution could be found") + i += 1 + n >>= 1 + i -= 1 + + assert num == (1 << i) + return i + +def exact_div(p, d, allow_divzero=False): + """Find and return an integer n such that p == n * d + + If no such integer exists, this function raises ValueError. + + Both operands must be integers. + + If the second operand is zero, this function will raise ZeroDivisionError + unless allow_divzero is true (default: False). 
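(Aside on _number_new.py: per the note in the file itself, these helpers are meant to be imported via Crypto.Util.number, which re-exports them with "from ._number_new import *" further down in this diff. A short sketch of the expected behaviour, under that assumption:

    from Crypto.Util.number import ceil_div, ceil_shift, exact_log2, exact_div

    assert ceil_div(7, 2) == 4      # ceil(7/2) with no floating point
    assert ceil_shift(9, 3) == 2    # ceil(9 / 2**3)
    assert exact_log2(1024) == 10   # raises ValueError if not a power of two
    assert exact_div(21, 7) == 3    # raises ValueError if 7 does not divide 21
)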
+ """ + + if not isinstance(p, int) or not isinstance(d, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(p).__name__, type(d).__name__)) + + if d == 0 and allow_divzero: + n = 0 + if p != n * d: + raise ValueError("No solution could be found") + else: + (n, r) = divmod(p, d) + if r != 0: + raise ValueError("No solution could be found") + + assert p == n * d + return n + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/asn1.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/asn1.py new file mode 100644 index 0000000..9a97d97 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/asn1.py @@ -0,0 +1,286 @@ +# -*- coding: ascii -*- +# +# Util/asn1.py : Minimal support for ASN.1 DER binary encoding. +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.number import long_to_bytes, bytes_to_long +import sys +from Crypto.Util.py3compat import * + +__all__ = [ 'DerObject', 'DerInteger', 'DerOctetString', 'DerNull', 'DerSequence', 'DerObjectId' ] + +class DerObject: + """Base class for defining a single DER object. + + Instantiate this class ONLY when you have to decode a DER element. + """ + + # Known TAG types + typeTags = { 'SEQUENCE': 0x30, 'BIT STRING': 0x03, 'INTEGER': 0x02, + 'OCTET STRING': 0x04, 'NULL': 0x05, 'OBJECT IDENTIFIER': 0x06 } + + def __init__(self, ASN1Type=None, payload=b('')): + """Initialize the DER object according to a specific type. + + The ASN.1 type is either specified as the ASN.1 string (e.g. + 'SEQUENCE'), directly with its numerical tag or with no tag + at all (None).""" + if isInt(ASN1Type) or ASN1Type is None: + self.typeTag = ASN1Type + else: + if len(ASN1Type)==1: + self.typeTag = ord(ASN1Type) + else: + self.typeTag = self.typeTags.get(ASN1Type) + self.payload = payload + + def isType(self, ASN1Type): + return self.typeTags[ASN1Type]==self.typeTag + + def _lengthOctets(self, payloadLen): + """Return a byte string that encodes the given payload length (in + bytes) in a format suitable for a DER length tag (L). + """ + if payloadLen>127: + encoding = long_to_bytes(payloadLen) + return bchr(len(encoding)+128) + encoding + return bchr(payloadLen) + + def encode(self): + """Return a complete DER element, fully encoded as a TLV.""" + return bchr(self.typeTag) + self._lengthOctets(len(self.payload)) + self.payload + + def _decodeLen(self, idx, der): + """Given a (part of a) DER element, and an index to the first byte of + a DER length tag (L), return a tuple with the payload size, + and the index of the first byte of the such payload (V). 
+ + Raises a ValueError exception if the DER length is invalid. + Raises an IndexError exception if the DER element is too short. + """ + length = bord(der[idx]) + if length<=127: + return (length,idx+1) + payloadLength = bytes_to_long(der[idx+1:idx+1+(length & 0x7F)]) + if payloadLength<=127: + raise ValueError("Not a DER length tag.") + return (payloadLength, idx+1+(length & 0x7F)) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete DER element, and re-initializes this + object with it. + + @param derEle A complete DER element. It must start with a DER T + tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + Raises a ValueError exception in case of parsing errors. + Raises an IndexError exception if the DER element is too short. + """ + try: + self.typeTag = bord(derEle[0]) + if (self.typeTag & 0x1F)==0x1F: + raise ValueError("Unsupported DER tag") + (length,idx) = self._decodeLen(1, derEle) + if noLeftOvers and len(derEle) != (idx+length): + raise ValueError("Not a DER structure") + self.payload = derEle[idx:idx+length] + except IndexError: + raise ValueError("Not a valid DER SEQUENCE.") + return idx+length + +class DerInteger(DerObject): + def __init__(self, value = 0): + """Class to model an INTEGER DER element. + + Limitation: only non-negative values are supported. + """ + DerObject.__init__(self, 'INTEGER') + self.value = value + + def encode(self): + """Return a complete INTEGER DER element, fully encoded as a TLV.""" + self.payload = long_to_bytes(self.value) + if bord(self.payload[0])>127: + self.payload = bchr(0x00) + self.payload + return DerObject.encode(self) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete INTEGER DER element, and re-initializes this + object with it. + + @param derEle A complete INTEGER DER element. It must start with a DER + INTEGER tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + Raises a ValueError exception if the DER element is not a + valid non-negative INTEGER. + Raises an IndexError exception if the DER element is too short. + """ + tlvLength = DerObject.decode(self, derEle, noLeftOvers) + if self.typeTag!=self.typeTags['INTEGER']: + raise ValueError ("Not a DER INTEGER.") + if bord(self.payload[0])>127: + raise ValueError ("Negative INTEGER.") + self.value = bytes_to_long(self.payload) + return tlvLength + +class DerSequence(DerObject): + """Class to model a SEQUENCE DER element. + + This object behave like a dynamic Python sequence. + Sub-elements that are INTEGERs, look like Python integers. + Any other sub-element is a binary string encoded as the complete DER + sub-element (TLV). + """ + + def __init__(self, startSeq=None): + """Initialize the SEQUENCE DER object. 
Always empty + initially.""" + DerObject.__init__(self, 'SEQUENCE') + if startSeq==None: + self._seq = [] + else: + self._seq = startSeq + + ## A few methods to make it behave like a python sequence + + def __delitem__(self, n): + del self._seq[n] + def __getitem__(self, n): + return self._seq[n] + def __setitem__(self, key, value): + self._seq[key] = value + def __setslice__(self,i,j,sequence): + self._seq[i:j] = sequence + def __delslice__(self,i,j): + del self._seq[i:j] + def __getslice__(self, i, j): + return self._seq[max(0, i):max(0, j)] + def __len__(self): + return len(self._seq) + def append(self, item): + return self._seq.append(item) + + def hasInts(self): + """Return the number of items in this sequence that are numbers.""" + return len(list(filter(isInt, self._seq))) + + def hasOnlyInts(self): + """Return True if all items in this sequence are numbers.""" + return self._seq and self.hasInts()==len(self._seq) + + def encode(self): + """Return the DER encoding for the ASN.1 SEQUENCE, containing + the non-negative integers and longs added to this object. + + Limitation: Raises a ValueError exception if it some elements + in the sequence are neither Python integers nor complete DER INTEGERs. + """ + self.payload = b('') + for item in self._seq: + try: + self.payload += item + except: + try: + self.payload += DerInteger(item).encode() + except: + raise ValueError("Trying to DER encode an unknown object") + return DerObject.encode(self) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete SEQUENCE DER element, and re-initializes this + object with it. + + @param derEle A complete SEQUENCE DER element. It must start with a DER + SEQUENCE tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + DER INTEGERs are decoded into Python integers. Any other DER + element is not decoded. Its validity is not checked. + + Raises a ValueError exception if the DER element is not a + valid DER SEQUENCE. + Raises an IndexError exception if the DER element is too short. + """ + + self._seq = [] + try: + tlvLength = DerObject.decode(self, derEle, noLeftOvers) + if self.typeTag!=self.typeTags['SEQUENCE']: + raise ValueError("Not a DER SEQUENCE.") + # Scan one TLV at once + idx = 0 + while idx= 5 to avoid timing attack vulnerability.", PowmInsecureWarning) + +# New functions +from ._number_new import * + +# Commented out and replaced with faster versions below +## def long2str(n): +## s='' +## while n>0: +## s=chr(n & 255)+s +## n=n>>8 +## return s + +## import types +## def str2long(s): +## if type(s)!=types.StringType: return s # Integers will be left alone +## return reduce(lambda x,y : x*256+ord(y), s, 0L) + +def size (N): + """size(N:long) : int + Returns the size of the number N in bits. + """ + bits = 0 + while N >> bits: + bits += 1 + return bits + +def getRandomNumber(N, randfunc=None): + """Deprecated. Use getRandomInteger or getRandomNBitInteger instead.""" + warnings.warn("Crypto.Util.number.getRandomNumber has confusing semantics"+ + "and has been deprecated. Use getRandomInteger or getRandomNBitInteger instead.", + GetRandomNumber_DeprecationWarning) + return getRandomNBitInteger(N, randfunc) + +def getRandomInteger(N, randfunc=None): + """getRandomInteger(N:int, randfunc:callable):long + Return a random number with at most N bits. + + If randfunc is omitted, then Random.new().read is used. 
+ + This function is for internal use only and may be renamed or removed in + the future. + """ + if randfunc is None: + _import_Random() + randfunc = Random.new().read + + S = randfunc(N>>3) + odd_bits = N % 8 + if odd_bits != 0: + char = ord(randfunc(1)) >> (8-odd_bits) + S = bchr(char) + S + value = bytes_to_long(S) + return value + +def getRandomRange(a, b, randfunc=None): + """getRandomRange(a:int, b:int, randfunc:callable):long + Return a random number n so that a <= n < b. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + range_ = b - a - 1 + bits = size(range_) + value = getRandomInteger(bits, randfunc) + while value > range_: + value = getRandomInteger(bits, randfunc) + return a + value + +def getRandomNBitInteger(N, randfunc=None): + """getRandomInteger(N:int, randfunc:callable):long + Return a random number with exactly N-bits, i.e. a random number + between 2**(N-1) and (2**N)-1. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + value = getRandomInteger (N-1, randfunc) + value |= 2 ** (N-1) # Ensure high bit is set + assert size(value) >= N + return value + +def GCD(x,y): + """GCD(x:long, y:long): long + Return the GCD of x and y. + """ + x = abs(x) ; y = abs(y) + while x > 0: + x, y = y % x, x + return y + +def inverse(u, v): + """inverse(u:long, v:long):long + Return the inverse of u mod v. + """ + u3, v3 = int(u), int(v) + u1, v1 = 1, 0 + while v3 > 0: + q=divmod(u3, v3)[0] + u1, v1 = v1, u1 - v1*q + u3, v3 = v3, u3 - v3*q + while u1<0: + u1 = u1 + v + return u1 + +# Given a number of bits to generate and a random generation function, +# find a prime number of the appropriate size. + +def getPrime(N, randfunc=None): + """getPrime(N:int, randfunc:callable):long + Return a random N-bit prime number. + + If randfunc is omitted, then Random.new().read is used. + """ + if randfunc is None: + _import_Random() + randfunc = Random.new().read + + number=getRandomNBitInteger(N, randfunc) | 1 + while (not isPrime(number, randfunc=randfunc)): + number=number+2 + return number + + +def _rabinMillerTest(n, rounds, randfunc=None): + """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int + Tests if n is prime. + Returns 0 when n is definitly composite. + Returns 1 when n is probably prime. + Returns 2 when n is definitly prime. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + # check special cases (n==2, n even, n < 2) + if n < 3 or (n & 1) == 0: + return n == 2 + # n might be very large so it might be beneficial to precalculate n-1 + n_1 = n - 1 + # determine m and b so that 2**b * m = n - 1 and b maximal + b = 0 + m = n_1 + while (m & 1) == 0: + b += 1 + m >>= 1 + + tested = [] + # we need to do at most n-2 rounds. 
+ for i in range (min (rounds, n-2)): + # randomly choose a < n and make sure it hasn't been tested yet + a = getRandomRange (2, n, randfunc) + while a in tested: + a = getRandomRange (2, n, randfunc) + tested.append (a) + # do the rabin-miller test + z = pow (a, m, n) # (a**m) % n + if z == 1 or z == n_1: + continue + composite = 1 + for r in range (b): + z = (z * z) % n + if z == 1: + return 0 + elif z == n_1: + composite = 0 + break + if composite: + return 0 + return 1 + +def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): + """getStrongPrime(N:int, e:int, false_positive_prob:float, randfunc:callable):long + Return a random strong N-bit prime number. + In this context p is a strong prime if p-1 and p+1 have at + least one large prime factor. + N should be a multiple of 128 and > 512. + + If e is provided the returned prime p-1 will be coprime to e + and thus suitable for RSA where e is the public exponent. + + The optional false_positive_prob is the statistical probability + that true is returned even though it is not (pseudo-prime). + It defaults to 1e-6 (less than 1:1000000). + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. + + randfunc should take a single int parameter and return that + many random bytes as a string. + If randfunc is omitted, then Random.new().read is used. + """ + # This function was implemented following the + # instructions found in the paper: + # "FAST GENERATION OF RANDOM, STRONG RSA PRIMES" + # by Robert D. Silverman + # RSA Laboratories + # May 17, 1997 + # which by the time of writing could be freely downloaded here: + # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf + + # Use the accelerator if available + if _fastmath is not None: + return _fastmath.getStrongPrime(int(N), int(e), false_positive_prob, + randfunc) + + if (N < 512) or ((N % 128) != 0): + raise ValueError ("bits must be multiple of 128 and > 512") + + rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + + # calculate range for X + # lower_bound = sqrt(2) * 2^{511 + 128*x} + # upper_bound = 2^{512 + 128*x} - 1 + x = (N - 512) >> 7; + # We need to approximate the sqrt(2) in the lower_bound by an integer + # expression because floating point math overflows with these numbers + lower_bound = divmod(14142135623730950489 * (2 ** (511 + 128*x)), + 10000000000000000000)[0] + upper_bound = (1 << (512 + 128*x)) - 1 + # Randomly choose X in calculated range + X = getRandomRange (lower_bound, upper_bound, randfunc) + + # generate p1 and p2 + p = [0, 0] + for i in (0, 1): + # randomly choose 101-bit y + y = getRandomNBitInteger (101, randfunc) + # initialize the field for sieving + field = [0] * 5 * len (sieve_base) + # sieve the field + for prime in sieve_base: + offset = y % prime + for j in range ((prime - offset) % prime, len (field), prime): + field[j] = 1 + + # look for suitable p[i] starting at y + result = 0 + for j in range(len(field)): + composite = field[j] + # look for next canidate + if composite: + continue + tmp = y + j + result = _rabinMillerTest (tmp, rabin_miller_rounds) + if result > 0: + p[i] = tmp + break + if result == 0: + raise RuntimeError ("Couln't find prime in field. 
" + "Developer: Increase field_size") + + # Calculate R + # R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1 + tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2 + tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1 + R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1 + + # search for final prime number starting by Y0 + # Y0 = X + (R - X mod p1p2) + increment = p[0] * p[1] + X = X + (R - (X % increment)) + while 1: + is_possible_prime = 1 + # first check candidate against sieve_base + for prime in sieve_base: + if (X % prime) == 0: + is_possible_prime = 0 + break + # if e is given make sure that e and X-1 are coprime + # this is not necessarily a strong prime criterion but useful when + # creating them for RSA where the p-1 and q-1 should be coprime to + # the public exponent e + if e and is_possible_prime: + if e & 1: + if GCD (e, X-1) != 1: + is_possible_prime = 0 + else: + if GCD (e, divmod((X-1),2)[0]) != 1: + is_possible_prime = 0 + + # do some Rabin-Miller-Tests + if is_possible_prime: + result = _rabinMillerTest (X, rabin_miller_rounds) + if result > 0: + break + X += increment + # abort when X has more bits than requested + # TODO: maybe we shouldn't abort but rather start over. + if X >= 1 << N: + raise RuntimeError ("Couln't find prime in field. " + "Developer: Increase field_size") + return X + +def isPrime(N, false_positive_prob=1e-6, randfunc=None): + """isPrime(N:long, false_positive_prob:float, randfunc:callable):bool + Return true if N is prime. + + The optional false_positive_prob is the statistical probability + that true is returned even though it is not (pseudo-prime). + It defaults to 1e-6 (less than 1:1000000). + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. + + If randfunc is omitted, then Random.new().read is used. + """ + if _fastmath is not None: + return _fastmath.isPrime(int(N), false_positive_prob, randfunc) + + if N < 3 or N & 1 == 0: + return N == 2 + for p in sieve_base: + if N == p: + return 1 + if N % p == 0: + return 0 + + rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + return _rabinMillerTest(N, rounds, randfunc) + + +# Improved conversion functions contributed by Barry Warsaw, after +# careful benchmarking + +import struct + +def long_to_bytes(n, blocksize=0): + """long_to_bytes(n:long, blocksize:int) : string + Convert a long integer to a byte string. + + If optional blocksize is given and greater than zero, pad the front of the + byte string with binary zeros so that the length is a multiple of + blocksize. + """ + # after much testing, this algorithm was deemed to be the fastest + s = b('') + n = int(n) + pack = struct.pack + while n > 0: + s = pack('>I', n & 0xffffffff) + s + n = n >> 32 + # strip off leading zeros + for i in range(len(s)): + if s[i] != b('\000')[0]: + break + else: + # only happens when n == 0 + s = b('\000') + i = 0 + s = s[i:] + # add back some pad bytes. this could be done more efficiently w.r.t. the + # de-padding being done above, but sigh... + if blocksize > 0 and len(s) % blocksize: + s = (blocksize - len(s) % blocksize) * b('\000') + s + return s + +def bytes_to_long(s): + """bytes_to_long(string) : long + Convert a byte string to a long integer. + + This is (essentially) the inverse of long_to_bytes(). 
+ """ + acc = 0 + unpack = struct.unpack + length = len(s) + if length % 4: + extra = (4 - length % 4) + s = b('\000') * extra + s + length = length + extra + for i in range(0, length, 4): + acc = (acc << 32) + unpack('>I', s[i:i+4])[0] + return acc + +# For backwards compatibility... +import warnings +def long2str(n, blocksize=0): + warnings.warn("long2str() has been replaced by long_to_bytes()") + return long_to_bytes(n, blocksize) +def str2long(s): + warnings.warn("str2long() has been replaced by bytes_to_long()") + return bytes_to_long(s) + +def _import_Random(): + # This is called in a function instead of at the module level in order to + # avoid problems with recursive imports + global Random, StrongRandom + from Crypto import Random + from Crypto.Random.random import StrongRandom + + + +# The first 10000 primes used for checking primality. +# This should be enough to eliminate most of the odd +# numbers before needing to do a Rabin-Miller test at all. +sieve_base = ( + 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, + 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, + 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, + 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, + 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, + 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, + 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, + 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, + 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, + 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, + 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, + 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, + 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, + 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, + 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, + 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, + 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, + 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, + 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, + 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, + 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, + 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, + 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, + 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, + 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, + 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, + 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, + 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, + 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, + 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, + 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, + 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, + 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, + 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, + 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, + 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, + 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, + 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, + 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, + 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, + 3083, 3089, 3109, 3119, 3121, 
3137, 3163, 3167, 3169, 3181, + 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, + 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, + 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, + 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, + 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, + 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, + 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, + 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, + 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, + 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, + 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, + 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, + 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, + 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, + 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, + 4421, 4423, 4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, + 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, + 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, + 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, + 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, + 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, + 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, + 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, + 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, + 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, + 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, + 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, + 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, + 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, + 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, + 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, + 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, + 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, + 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, + 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, + 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, + 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, + 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, + 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, + 6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, + 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, + 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, + 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, + 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, + 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, + 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, + 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, + 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, + 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, + 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, + 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, + 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, + 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, + 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, + 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, + 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, + 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 
8101, 8111, + 8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, + 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, + 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, + 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, + 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, + 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, + 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, + 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, + 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, + 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, + 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, + 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, + 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, + 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, + 9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, + 9461, 9463, 9467, 9473, 9479, 9491, 9497, 9511, 9521, 9533, + 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, + 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, + 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, + 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, + 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, + 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, + 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, + 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, + 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, + 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, + 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, + 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, + 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, + 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, + 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, + 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, + 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, + 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, + 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, + 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, + 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, + 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, + 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, + 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, + 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, + 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, + 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, + 12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, + 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, + 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, + 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, + 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, + 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, + 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, + 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, + 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, + 
12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007, + 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, + 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, + 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, + 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, + 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, + 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, + 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, + 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, + 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, + 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, + 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, + 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, + 14207, 14221, 14243, 14249, 14251, 14281, 14293, 14303, 14321, 14323, + 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, + 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, + 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, + 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, + 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767, + 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, + 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, + 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, + 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, + 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, + 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, + 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, + 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, + 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, + 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, + 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, + 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, + 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, + 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, + 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, + 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, + 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, + 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, + 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, + 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, + 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, + 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, + 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, + 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, + 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, + 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, + 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, + 17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477, + 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, + 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, + 17681, 17683, 17707, 
17713, 17729, 17737, 17747, 17749, 17761, 17783, + 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, + 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, + 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, + 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, + 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, + 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, + 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, + 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, + 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, + 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, + 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, + 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, + 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 19121, + 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, + 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, + 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, + 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, + 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, + 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, + 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, + 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, + 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, + 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, + 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, + 20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, + 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, + 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, + 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, + 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, + 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, + 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, + 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, + 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, + 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, + 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, + 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, + 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, + 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, + 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, + 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, + 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, + 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, + 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, + 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, + 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, + 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, + 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, + 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, + 22549, 22567, 22571, 22573, 22613, 22619, 
22621, 22637, 22639, 22643, + 22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, + 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, + 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, + 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, + 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, + 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, + 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, + 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, + 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, + 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, + 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, + 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, + 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, + 23911, 23917, 23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, + 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, + 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, + 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, + 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, + 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, + 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, + 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, + 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, + 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, + 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, + 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, + 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, + 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, + 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, + 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, + 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, + 25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799, + 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, + 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, + 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, + 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, + 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, + 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, + 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, + 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, + 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, + 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, + 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, + 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, + 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, + 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, + 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, + 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, + 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, + 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 
27691, + 27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767, + 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, + 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, + 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, + 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, + 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, + 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, + 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, + 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, + 28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649, + 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, + 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, + 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, + 28949, 28961, 28979, 29009, 29017, 29021, 29023, 29027, 29033, 29059, + 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, + 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, + 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, + 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, + 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, + 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, + 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, + 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, + 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, + 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, + 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, + 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, + 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, + 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, + 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, + 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, + 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, + 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, + 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, + 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, + 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, + 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, + 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, + 31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, + 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, + 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, + 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, + 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, + 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, + 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, + 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, + 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, + 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, + 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, + 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, + 32719, 32749, 
32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831, + 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, + 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, + 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, + 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, + 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, + 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, + 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, + 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, + 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, + 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, + 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, + 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, + 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 34147, 34157, + 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, + 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337, + 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, + 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, + 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, + 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, + 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, + 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, + 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, + 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, + 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, + 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, + 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, + 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, + 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, + 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, + 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, + 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, + 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, + 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, + 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, + 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, + 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, + 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, + 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, + 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, + 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, + 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, + 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, + 37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309, + 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, + 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, + 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, + 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, + 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, + 37831, 37847, 37853, 37861, 37871, 
37879, 37889, 37897, 37907, 37951, + 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, + 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, + 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, + 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, + 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, + 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, + 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, + 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, + 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, + 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, + 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, + 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, + 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, + 39341, 39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, + 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, + 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, + 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, + 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, + 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, + 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, + 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, + 40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283, + 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, + 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, + 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, + 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, + 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, + 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, + 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, + 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, + 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, + 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, + 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, + 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, + 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, + 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, + 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, + 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, + 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, + 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, + 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, + 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, + 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, + 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, + 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, + 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, + 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, + 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, + 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 
43049, 43051, + 43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, + 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, + 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, + 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, + 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, + 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, + 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, + 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, + 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, + 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, + 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, + 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, + 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, + 44543, 44549, 44563, 44579, 44587, 44617, 44621, 44623, 44633, 44641, + 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, + 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, + 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, + 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, + 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, + 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, + 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, + 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, + 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, + 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, + 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, + 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, + 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, + 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199, + 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, + 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, + 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, + 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, + 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, + 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, + 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, + 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, + 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, + 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, + 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, + 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, + 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, + 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, + 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, + 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, + 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, + 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, + 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, + 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, + 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, + 48523, 
48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611, + 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, + 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, + 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, + 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, + 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, + 49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211, + 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, + 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, + 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, + 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, + 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, + 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, + 49871, 49877, 49891, 49919, 49921, 49927, 49937, 49939, 49943, 49957, + 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, + 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, + 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, + 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, + 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, + 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, + 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, + 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, + 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, + 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, + 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, + 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, + 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, + 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, + 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, + 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, + 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, + 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, + 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, + 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, + 52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, + 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, + 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, + 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, + 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, + 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, + 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, + 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, + 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, + 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, + 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, + 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, + 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, + 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, + 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, + 53791, 53813, 53819, 53831, 
53849, 53857, 53861, 53881, 53887, 53891, + 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, + 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, + 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, + 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, + 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, + 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, + 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, + 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, + 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, + 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, + 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, + 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, + 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, + 55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, + 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, + 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, + 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, + 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, + 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, + 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, + 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, + 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, + 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, + 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, + 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, + 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, + 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, + 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, + 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, + 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, + 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, + 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, + 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, + 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, + 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, + 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, + 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, + 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, + 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, + 58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169, + 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, + 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, + 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, + 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, + 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, + 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, + 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, + 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, + 59063, 59069, 59077, 59083, 59093, 59107, 59113, 
59119, 59123, 59141, + 59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, + 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, + 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, + 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, + 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, + 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, + 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, + 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, + 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, + 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, + 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, + 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, + 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, + 60631, 60637, 60647, 60649, 60659, 60661, 60679, 60689, 60703, 60719, + 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, + 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, + 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, + 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, + 61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333, + 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, + 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, + 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, + 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, + 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, + 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, + 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, + 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, + 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, + 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, + 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, + 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, + 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, + 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, + 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, + 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, + 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, + 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, + 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, + 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, + 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, + 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, + 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, + 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, + 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, + 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, + 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, + 64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, + 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, + 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, + 
64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849, + 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, + 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, + 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, + 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, + 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, + 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, + 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, + 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, + 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, + 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, + 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, + 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, + 66107, 66109, 66137, 66161, 66169, 66173, 66179, 66191, 66221, 66239, + 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, + 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, + 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, + 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, + 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, + 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, + 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, + 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, + 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, + 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, + 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453, + 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, + 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, + 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, + 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, + 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, + 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, + 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, + 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, + 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, + 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, + 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, + 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, + 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, + 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, + 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, + 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, + 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, + 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, + 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, + 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, + 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, + 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, + 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, + 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, + 70229, 70237, 70241, 
70249, 70271, 70289, 70297, 70309, 70313, 70321, + 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, + 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, + 70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657, + 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, + 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, + 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, + 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, + 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, + 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, + 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, + 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, + 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, + 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 71713, + 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, + 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, + 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, + 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, + 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, + 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, + 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, + 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, + 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, + 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, + 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, + 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, + 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, + 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, + 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, + 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, + 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, + 73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721, + 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, + 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, + 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, + 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, + 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, + 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, + 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, + 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, + 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, + 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, + 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, + 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, + 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, + 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, + 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, + 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, + 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, + 75629, 75641, 75653, 75659, 75679, 75683, 
75689, 75703, 75707, 75709, + 75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, + 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, + 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, + 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, + 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, + 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, + 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, + 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, + 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, + 76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, + 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, + 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, + 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, + 77263, 77267, 77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, + 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, + 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, + 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, + 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, + 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, + 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, + 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, + 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, + 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, + 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, + 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, + 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, + 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, + 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, + 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, + 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, + 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, + 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, + 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, + 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, + 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, + 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, + 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939, + 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, + 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, + 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, + 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, + 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, + 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, + 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, + 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, + 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, + 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, + 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, + 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 
81223, + 81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349, + 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, + 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, + 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, + 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, + 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, + 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, + 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, + 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, + 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, + 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, + 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, + 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, + 82657, 82699, 82721, 82723, 82727, 82729, 82757, 82759, 82763, 82781, + 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, + 82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047, + 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, + 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, + 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, + 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, + 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, + 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, + 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, + 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, + 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, + 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, + 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, + 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, + 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, + 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, + 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, + 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, + 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, + 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, + 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, + 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, + 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, + 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, + 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, + 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, + 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, + 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, + 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, + 86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, + 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, + 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, + 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, + 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, + 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, + 86869, 86923, 
86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993, + 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, + 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, + 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, + 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, + 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, + 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, + 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, + 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, + 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, + 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, + 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, + 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, + 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 88523, 88547, + 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, + 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, + 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, + 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, + 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, + 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, + 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, + 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, + 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, + 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, + 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, + 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, + 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, + 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, + 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, + 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, + 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, + 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, + 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, + 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, + 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, + 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, + 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, + 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, + 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, + 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, + 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, + 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, + 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, + 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, + 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, + 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, + 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, + 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, + 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, + 92459, 92461, 92467, 92479, 92489, 
92503, 92507, 92551, 92557, 92567, + 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, + 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, + 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, + 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, + 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, + 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, + 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, + 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, + 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, + 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, + 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, + 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, + 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, + 94007, 94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, + 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, + 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, + 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, + 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, + 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, + 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, + 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, + 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, + 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, + 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, + 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, + 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, + 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, + 95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633, + 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, + 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, + 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, + 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, + 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, + 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, + 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, + 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, + 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, + 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, + 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, + 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, + 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, + 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, + 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, + 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, + 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, + 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, + 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, + 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, + 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 
98143, 98179, + 98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, + 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, + 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, + 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, + 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, + 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, + 98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, + 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, + 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, + 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, + 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, + 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, + 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, + 99661, 99667, 99679, 99689, 99707, 99709, 99713, 99719, 99721, 99733, + 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, + 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, + 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, +100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, +100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, +100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, +100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, +100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, +100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, +100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, +100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, +101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, +101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, +101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, +101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, +101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, +101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, +101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833, +101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, +101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, +102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, +102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, +102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, +102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, +102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, +102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, +102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, +102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, +102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, +103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, +103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, +103391, 103393, 103399, 103409, 103421, 
103423, 103451, 103457, 103471, 103483, +103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, +103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, +103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, +103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, +103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, +104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, +104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, +104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, +104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, +104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, +104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, +) diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/py3compat.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/py3compat.py new file mode 100644 index 0000000..f8367c9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/py3compat.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# +# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x +# +# Written in 2010 by Thorsten Behrens +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k + +In Python 2.x, strings (of type ''str'') contain binary data, including encoded +Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. +Unicode literals are specified via the u'...' prefix. Indexing or slicing +either type always produces a string of the same type as the original. +Data read from a file is always of '''str'' type. + +In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' +prefix and the ''unicode'' type are now redundant. A new type (called +''bytes'') has to be used for binary data (including any particular +''encoding'' of a string). The b'...' prefix allows one to specify a binary +literal. Indexing or slicing a string produces another string. Slicing a byte +string produces another byte string, but the indexing operation produces an +integer. Data read from a file is of '''str'' type if the file was opened in +text mode, or of ''bytes'' type otherwise. + +Since PyCrypto aims at supporting both Python 2.x and 3.x, the following helper +functions are used to keep the rest of the library as independent as possible +from the actual Python version. 
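The tuple closed out above appears to be a precomputed table of small primes ending at 104729, which the vendored Crypto utility code can use for fast trial division. As a sanity check, such a table can be regenerated with a short Sieve of Eratosthenes; this is an illustrative sketch (the function and variable names are not taken from the diff), and it relies on 104729 being the 10000th prime:

def small_primes(limit):
    """Sieve of Eratosthenes: all primes strictly below `limit`."""
    sieve = bytearray([1]) * limit
    sieve[0:2] = b"\x00\x00"                      # 0 and 1 are not prime
    for n in range(2, int(limit ** 0.5) + 1):
        if sieve[n]:
            sieve[n * n::n] = bytearray(len(sieve[n * n::n]))
    return [n for n in range(limit) if sieve[n]]

primes = small_primes(104730)
assert primes[-1] == 104729      # matches the last entry of the table above
assert len(primes) == 10000      # 104729 is the 10000th prime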
+ +In general, the code should always deal with binary strings, and use integers +instead of 1-byte character strings. + +b(s) + Take a text string literal (with no prefix or with u'...' prefix) and + make a byte string. +bchr(c) + Take an integer and make a 1-character byte string. +bord(c) + Take the result of indexing on a byte string and make an integer. +tobytes(s) + Take a text string, a byte string, or a sequence of character taken from + a byte string and make a byte string. +""" + +__revision__ = "$Id$" + +import sys + +if sys.version_info[0] == 2: + def b(s): + return s + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + if sys.version_info[1] == 1: + def tobytes(s): + try: + return s.encode('latin-1') + except: + return ''.join(s) + else: + def tobytes(s): + if isinstance(s, str): + return s.encode("latin-1") + else: + return ''.join(s) +else: + def b(s): + return s.encode("latin-1") # utf-8 would cause some side-effects we don't want + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s,str): + return bytes(s,"latin-1") + else: + return bytes(s) + def bord(s): + return s + def tobytes(s): + if isinstance(s,bytes): + return s + else: + if isinstance(s,str): + return s.encode("latin-1") + else: + return bytes(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/randpool.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/randpool.py new file mode 100644 index 0000000..8b5a0b7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/randpool.py @@ -0,0 +1,82 @@ +# +# randpool.py : Cryptographically strong random number generation +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Mark Moraes, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +__revision__ = "$Id$" + +from Crypto.pct_warnings import RandomPool_DeprecationWarning +import Crypto.Random +import warnings + +class RandomPool: + """Deprecated. Use Random.new() instead. + + See http://www.pycrypto.org/randpool-broken + """ + def __init__(self, numbytes = 160, cipher=None, hash=None, file=None): + warnings.warn("This application uses RandomPool, which is BROKEN in older releases. 
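The py3compat helpers defined just above exist so the rest of the vendored library can treat all data as byte strings regardless of interpreter version. A brief illustrative session showing what they return under Python 3 (the values in the comments follow directly from the definitions above):

from Crypto.Util.py3compat import b, bchr, bord, tobytes

data = b("secret")        # text literal -> byte string: b'secret'
one = bchr(65)            # integer -> 1-byte string: b'A'
first = bord(data[0])     # indexing bytes gives an int; bord passes it through: 115
raw = tobytes("caf\xe9")  # text -> bytes via latin-1: b'caf\xe9'

assert isinstance(data, bytes) and isinstance(raw, bytes)
assert one == b"A" and first == 115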
See http://www.pycrypto.org/randpool-broken", + RandomPool_DeprecationWarning) + self.__rng = Crypto.Random.new() + self.bytes = numbytes + self.bits = self.bytes * 8 + self.entropy = self.bits + + def get_bytes(self, N): + return self.__rng.read(N) + + def _updateEntropyEstimate(self, nbits): + self.entropy += nbits + if self.entropy < 0: + self.entropy = 0 + elif self.entropy > self.bits: + self.entropy = self.bits + + def _randomize(self, N=0, devname="/dev/urandom"): + """Dummy _randomize() function""" + self.__rng.flush() + + def randomize(self, N=0): + """Dummy randomize() function""" + self.__rng.flush() + + def stir(self, s=''): + """Dummy stir() function""" + self.__rng.flush() + + def stir_n(self, N=3): + """Dummy stir_n() function""" + self.__rng.flush() + + def add_event(self, s=''): + """Dummy add_event() function""" + self.__rng.flush() + + def getBytes(self, N): + """Dummy getBytes() function""" + return self.get_bytes(N) + + def addEvent(self, event, s=""): + """Dummy addEvent() function""" + return self.add_event() diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/strxor.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/Crypto/Util/strxor.cpython-35m-darwin.so new file mode 100755 index 0000000..8658d0a Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/Crypto/Util/strxor.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/Crypto/Util/winrandom.py b/Darwin/lib/python3.5/site-packages/Crypto/Util/winrandom.py new file mode 100644 index 0000000..0242815 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/Util/winrandom.py @@ -0,0 +1,28 @@ +# +# Util/winrandom.py : Stub for Crypto.Random.OSRNG.winrandom +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +from Crypto.Random.OSRNG.winrandom import * + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.5/site-packages/Crypto/__init__.py b/Darwin/lib/python3.5/site-packages/Crypto/__init__.py new file mode 100644 index 0000000..c27402e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
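Every method of the RandomPool shim above ultimately reads from a single Crypto.Random source, so the migration requested by its deprecation warning is mechanical. A minimal sketch of the two call paths (both return N random bytes from the OS RNG; only the second triggers RandomPool_DeprecationWarning):

import Crypto.Random

# Preferred, as the class docstring recommends:
rng = Crypto.Random.new()
key = rng.read(32)                # 32 random bytes

# Deprecated path kept only for backwards compatibility:
from Crypto.Util.randpool import RandomPool
pool = RandomPool()               # warns: RandomPool_DeprecationWarning
legacy_key = pool.get_bytes(32)   # internally just the wrapped rng.read(32)

assert len(key) == len(legacy_key) == 32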
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Python Cryptography Toolkit + +A collection of cryptographic modules implementing various algorithms +and protocols. + +Subpackages: + +Crypto.Cipher + Secret-key (AES, DES, ARC4) and public-key encryption (RSA PKCS#1) algorithms +Crypto.Hash + Hashing algorithms (MD5, SHA, HMAC) +Crypto.Protocol + Cryptographic protocols (Chaffing, all-or-nothing transform, key derivation + functions). This package does not contain any network protocols. +Crypto.PublicKey + Public-key encryption and signature algorithms (RSA, DSA) +Crypto.Signature + Public-key signature algorithms (RSA PKCS#1) +Crypto.Util + Various useful modules and functions (long-to-string conversion, random number + generation, number theoretic functions) +""" + +__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature'] + +__version__ = '2.6.1' # See also below and setup.py +__revision__ = "$Id$" + +# New software should look at this instead of at __version__ above. +version_info = (2, 6, 1, 'final', 0) # See also above and setup.py + diff --git a/Darwin/lib/python3.5/site-packages/Crypto/pct_warnings.py b/Darwin/lib/python3.5/site-packages/Crypto/pct_warnings.py new file mode 100644 index 0000000..9b4361e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Crypto/pct_warnings.py @@ -0,0 +1,60 @@ +# -*- coding: ascii -*- +# +# pct_warnings.py : PyCrypto warnings file +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +# +# Base classes. All our warnings inherit from one of these in order to allow +# the user to specifically filter them. 
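The package docstring above enumerates the PyCrypto 2.6.1 subpackages being vendored for the new python3.5 tree. Purely as orientation, a hypothetical round trip through two of them might look like the following; the AES/CFB choice, key size, and message are illustrative assumptions, not anything specified by this diff:

from Crypto.Cipher import AES
from Crypto import Random

key = Random.new().read(16)             # AES-128 key from the OS RNG
iv = Random.new().read(AES.block_size)  # 16-byte initialisation vector

ciphertext = AES.new(key, AES.MODE_CFB, iv).encrypt(b"attack at dawn")
plaintext = AES.new(key, AES.MODE_CFB, iv).decrypt(ciphertext)
assert plaintext == b"attack at dawn"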
+# + +class CryptoWarning(Warning): + """Base class for PyCrypto warnings""" + +class CryptoDeprecationWarning(DeprecationWarning, CryptoWarning): + """Base PyCrypto DeprecationWarning class""" + +class CryptoRuntimeWarning(RuntimeWarning, CryptoWarning): + """Base PyCrypto RuntimeWarning class""" + +# +# Warnings that we might actually use +# + +class RandomPool_DeprecationWarning(CryptoDeprecationWarning): + """Issued when Crypto.Util.randpool.RandomPool is instantiated.""" + +class ClockRewindWarning(CryptoRuntimeWarning): + """Warning for when the system clock moves backwards.""" + +class GetRandomNumber_DeprecationWarning(CryptoDeprecationWarning): + """Issued when Crypto.Util.number.getRandomNumber is invoked.""" + +class PowmInsecureWarning(CryptoRuntimeWarning): + """Warning for when _fastmath is built without mpz_powm_sec""" + +# By default, we want this warning to be shown every time we compensate for +# clock rewinding. +import warnings as _warnings +_warnings.filterwarnings('always', category=ClockRewindWarning, append=1) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/SSL.py b/Darwin/lib/python3.5/site-packages/OpenSSL/SSL.py similarity index 65% rename from Darwin/lib/python3.4/site-packages/OpenSSL/SSL.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/SSL.py index a257f16..d0cc933 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/SSL.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/SSL.py @@ -1,29 +1,39 @@ from sys import platform from functools import wraps, partial -from itertools import count +from itertools import count, chain from weakref import WeakValueDictionary from errno import errorcode from six import text_type as _text_type +from six import binary_type as _binary_type from six import integer_types as integer_types +from six import int2byte, indexbytes from OpenSSL._util import ( ffi as _ffi, lib as _lib, exception_from_error_queue as _exception_from_error_queue, - native as _native) + native as _native, + text_to_bytes_and_warn as _text_to_bytes_and_warn, + path_string as _path_string, + UNSPECIFIED as _UNSPECIFIED, +) from OpenSSL.crypto import ( FILETYPE_PEM, _PassphraseHelper, PKey, X509Name, X509, X509Store) -_unspecified = object() - try: _memoryview = memoryview except NameError: class _memoryview(object): pass +try: + _buffer = buffer +except NameError: + class _buffer(object): + pass + OPENSSL_VERSION_NUMBER = _lib.OPENSSL_VERSION_NUMBER SSLEAY_VERSION = _lib.SSLEAY_VERSION SSLEAY_CFLAGS = _lib.SSLEAY_CFLAGS @@ -81,7 +91,10 @@ except AttributeError: OP_NO_QUERY_MTU = _lib.SSL_OP_NO_QUERY_MTU OP_COOKIE_EXCHANGE = _lib.SSL_OP_COOKIE_EXCHANGE -OP_NO_TICKET = _lib.SSL_OP_NO_TICKET +try: + OP_NO_TICKET = _lib.SSL_OP_NO_TICKET +except AttributeError: + pass OP_ALL = _lib.SSL_OP_ALL @@ -121,7 +134,6 @@ SSL_CB_CONNECT_EXIT = _lib.SSL_CB_CONNECT_EXIT SSL_CB_HANDSHAKE_START = _lib.SSL_CB_HANDSHAKE_START SSL_CB_HANDSHAKE_DONE = _lib.SSL_CB_HANDSHAKE_DONE - class Error(Exception): """ An error occurred in an `OpenSSL.SSL` API. @@ -156,11 +168,42 @@ class SysCallError(Error): pass +class _CallbackExceptionHelper(object): + """ + A base class for wrapper classes that allow for intelligent exception + handling in OpenSSL callbacks. -class _VerifyHelper(object): - def __init__(self, connection, callback): + :ivar list _problems: Any exceptions that occurred while executing in a + context where they could not be raised in the normal way. 
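Because every warning class above derives from CryptoWarning, callers can act on the whole family with one filter instead of naming each subclass. An illustrative sketch (the choice of actions is an assumption, not part of the vendored file):

import warnings
from Crypto.pct_warnings import CryptoWarning, RandomPool_DeprecationWarning

# Escalate any PyCrypto warning to an error, e.g. in a test suite...
warnings.simplefilter("error", category=CryptoWarning)

# ...or silence only the RandomPool deprecation while old code is migrated.
warnings.simplefilter("ignore", category=RandomPool_DeprecationWarning)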
Typically + this is because OpenSSL has called into some Python code and requires a + return value. The exceptions are saved to be raised later when it is + possible to do so. + """ + def __init__(self): self._problems = [] + + def raise_if_problem(self): + """ + Raise an exception from the OpenSSL error queue or that was previously + captured whe running a callback. + """ + if self._problems: + try: + _raise_current_error() + except Error: + pass + raise self._problems.pop(0) + + +class _VerifyHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as a certificate verification + callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + @wraps(callback) def wrapper(ok, store_ctx): cert = X509.__new__(X509) @@ -168,6 +211,10 @@ class _VerifyHelper(object): error_number = _lib.X509_STORE_CTX_get_error(store_ctx) error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() + ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) + connection = Connection._reverse_mapping[ssl] + try: result = callback(connection, cert, error_number, error_depth, ok) except Exception as e: @@ -184,14 +231,142 @@ class _VerifyHelper(object): "int (*)(int, X509_STORE_CTX *)", wrapper) - def raise_if_problem(self): - if self._problems: - try: - _raise_current_error() - except Error: - pass - raise self._problems.pop(0) +class _NpnAdvertiseHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN advertisement callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + @wraps(callback) + def wrapper(ssl, out, outlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + protos = callback(conn) + + # Join the protocols into a Python bytestring, length-prefixing + # each element. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. + conn._npn_advertise_callback_args = [ + _ffi.new("unsigned int *", len(protostr)), + _ffi.new("unsigned char[]", protostr), + ] + outlen[0] = conn._npn_advertise_callback_args[0][0] + out[0] = conn._npn_advertise_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, const unsigned char **, unsigned int *, void *)", + wrapper + ) + + +class _NpnSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN selection callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is actually made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + l = indexbytes(instr, 0) + proto = instr[1:l+1] + protolist.append(proto) + instr = instr[l+1:] + + # Call the callback + outstr = callback(conn, protolist) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. 
+ conn._npn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._npn_select_callback_args[0][0] + out[0] = conn._npn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)", + wrapper + ) + + +class _ALPNSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an ALPN selection callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + encoded_len = indexbytes(instr, 0) + proto = instr[1:encoded_len + 1] + protolist.append(proto) + instr = instr[encoded_len + 1:] + + # Call the callback + outstr = callback(conn, protolist) + + if not isinstance(outstr, _binary_type): + raise TypeError("ALPN callback must return a bytestring.") + + # Save our callback arguments on the connection object to make + # sure that they don't get freed before OpenSSL can use them. + # Then, return them in the appropriate output parameters. + conn._alpn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._alpn_select_callback_args[0][0] + out[0] = conn._alpn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)", + wrapper + ) def _asFileDescriptor(obj): @@ -223,6 +398,37 @@ def SSLeay_version(type): return _ffi.string(_lib.SSLeay_version(type)) +def _requires_npn(func): + """ + Wraps any function that requires NPN support in OpenSSL, ensuring that + NotImplementedError is raised if NPN is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_NEXTPROTONEG: + raise NotImplementedError("NPN not available.") + + return func(*args, **kwargs) + + return wrapper + + + +def _requires_alpn(func): + """ + Wraps any function that requires ALPN support in OpenSSL, ensuring that + NotImplementedError is raised if ALPN support is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_ALPN: + raise NotImplementedError("ALPN not available.") + + return func(*args, **kwargs) + + return wrapper + + class Session(object): pass @@ -235,6 +441,7 @@ class Context(object): new SSL connections. 
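# A hedged client-side sketch of typical Context configuration (the CA bundle
# path and the callback below are illustrative assumptions, not part of this
# change set):
from OpenSSL import SSL

def _verify_cb(conn, cert, errnum, depth, ok):
    # Defer to OpenSSL's own chain-verification result.
    return ok

ctx = SSL.Context(SSL.TLSv1_METHOD)
ctx.set_verify(SSL.VERIFY_PEER, _verify_cb)
ctx.load_verify_locations(b"/etc/ssl/certs/ca-certificates.crt")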
""" _methods = { + SSLv2_METHOD: "SSLv2_method", SSLv3_METHOD: "SSLv3_method", SSLv23_METHOD: "SSLv23_method", TLSv1_METHOD: "TLSv1_method", @@ -280,6 +487,12 @@ class Context(object): self._info_callback = None self._tlsext_servername_callback = None self._app_data = None + self._npn_advertise_helper = None + self._npn_advertise_callback = None + self._npn_select_helper = None + self._npn_select_callback = None + self._alpn_select_helper = None + self._alpn_select_callback = None # SSL_CTX_set_app_data(self->ctx, self); # SSL_CTX_set_mode(self->ctx, SSL_MODE_ENABLE_PARTIAL_WRITE | @@ -293,19 +506,22 @@ class Context(object): Let SSL know where we can find trusted certificates for the certificate chain - :param cafile: In which file we can find the certificates + :param cafile: In which file we can find the certificates (``bytes`` or + ``unicode``). :param capath: In which directory we can find the certificates + (``bytes`` or ``unicode``). + :return: None """ if cafile is None: cafile = _ffi.NULL - elif not isinstance(cafile, bytes): - raise TypeError("cafile must be None or a byte string") + else: + cafile = _path_string(cafile) if capath is None: capath = _ffi.NULL - elif not isinstance(capath, bytes): - raise TypeError("capath must be None or a byte string") + else: + capath = _path_string(capath) load_result = _lib.SSL_CTX_load_verify_locations(self._context, cafile, capath) if not load_result: @@ -355,15 +571,12 @@ class Context(object): """ Load a certificate chain from a file - :param certfile: The name of the certificate chain file + :param certfile: The name of the certificate chain file (``bytes`` or + ``unicode``). + :return: None """ - if isinstance(certfile, _text_type): - # Perhaps sys.getfilesystemencoding() could be better? - certfile = certfile.encode("utf-8") - - if not isinstance(certfile, bytes): - raise TypeError("certfile must be bytes or unicode") + certfile = _path_string(certfile) result = _lib.SSL_CTX_use_certificate_chain_file(self._context, certfile) if not result: @@ -374,15 +587,13 @@ class Context(object): """ Load a certificate from a file - :param certfile: The name of the certificate file + :param certfile: The name of the certificate file (``bytes`` or + ``unicode``). :param filetype: (optional) The encoding of the file, default is PEM + :return: None """ - if isinstance(certfile, _text_type): - # Perhaps sys.getfilesystemencoding() could be better? - certfile = certfile.encode("utf-8") - if not isinstance(certfile, bytes): - raise TypeError("certfile must be bytes or unicode") + certfile = _path_string(certfile) if not isinstance(filetype, integer_types): raise TypeError("filetype must be an integer") @@ -432,22 +643,18 @@ class Context(object): raise exception - def use_privatekey_file(self, keyfile, filetype=_unspecified): + def use_privatekey_file(self, keyfile, filetype=_UNSPECIFIED): """ Load a private key from a file - :param keyfile: The name of the key file + :param keyfile: The name of the key file (``bytes`` or ``unicode``) :param filetype: (optional) The encoding of the file, default is PEM + :return: None """ - if isinstance(keyfile, _text_type): - # Perhaps sys.getfilesystemencoding() could be better? 
- keyfile = keyfile.encode("utf-8") + keyfile = _path_string(keyfile) - if not isinstance(keyfile, bytes): - raise TypeError("keyfile must be a byte string") - - if filetype is _unspecified: + if filetype is _UNSPECIFIED: filetype = FILETYPE_PEM elif not isinstance(filetype, integer_types): raise TypeError("filetype must be an integer") @@ -479,6 +686,9 @@ class Context(object): :return: None (raises an exception if something's wrong) """ + if not _lib.SSL_CTX_check_private_key(self._context): + _raise_current_error() + def load_client_ca(self, cafile): """ @@ -538,7 +748,7 @@ class Context(object): if not callable(callback): raise TypeError("callback must be callable") - self._verify_helper = _VerifyHelper(self, callback) + self._verify_helper = _VerifyHelper(callback) self._verify_callback = self._verify_helper.callback _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) @@ -578,11 +788,12 @@ class Context(object): """ Load parameters for Ephemeral Diffie-Hellman - :param dhfile: The file to load EDH parameters from + :param dhfile: The file to load EDH parameters from (``bytes`` or + ``unicode``). + :return: None """ - if not isinstance(dhfile, bytes): - raise TypeError("dhfile must be a byte string") + dhfile = _path_string(dhfile) bio = _lib.BIO_new_file(dhfile, b"r") if bio == _ffi.NULL: @@ -594,6 +805,19 @@ class Context(object): _lib.SSL_CTX_set_tmp_dh(self._context, dh) + def set_tmp_ecdh(self, curve): + """ + Select a curve to use for ECDHE key exchange. + + :param curve: A curve object to use as returned by either + :py:meth:`OpenSSL.crypto.get_elliptic_curve` or + :py:meth:`OpenSSL.crypto.get_elliptic_curves`. + + :return: None + """ + _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY()) + + def set_cipher_list(self, cipher_list): """ Change the cipher list @@ -783,6 +1007,79 @@ class Context(object): _lib.SSL_CTX_set_tlsext_servername_callback( self._context, self._tlsext_servername_callback) + + @_requires_npn + def set_npn_advertise_callback(self, callback): + """ + Specify a callback function that will be called when offering `Next + Protocol Negotiation + `_ as a server. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. It should return a list of + bytestrings representing the advertised protocols, like + ``[b'http/1.1', b'spdy/2']``. + """ + self._npn_advertise_helper = _NpnAdvertiseHelper(callback) + self._npn_advertise_callback = self._npn_advertise_helper.callback + _lib.SSL_CTX_set_next_protos_advertised_cb( + self._context, self._npn_advertise_callback, _ffi.NULL) + + + @_requires_npn + def set_npn_select_callback(self, callback): + """ + Specify a callback function that will be called when a server offers + Next Protocol Negotiation options. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g. ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + """ + self._npn_select_helper = _NpnSelectHelper(callback) + self._npn_select_callback = self._npn_select_helper.callback + _lib.SSL_CTX_set_next_proto_select_cb( + self._context, self._npn_select_callback, _ffi.NULL) + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the clients ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. 
+ This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_CTX_set_alpn_protos(self._context, input_str, input_str_len) + + @_requires_alpn + def set_alpn_select_callback(self, callback): + """ + Set the callback to handle ALPN protocol choice. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + """ + self._alpn_select_helper = _ALPNSelectHelper(callback) + self._alpn_select_callback = self._alpn_select_helper.callback + _lib.SSL_CTX_set_alpn_select_cb( + self._context, self._alpn_select_callback, _ffi.NULL) + ContextType = Context @@ -807,6 +1104,19 @@ class Connection(object): self._ssl = _ffi.gc(ssl, _lib.SSL_free) self._context = context + # References to strings used for Next Protocol Negotiation. OpenSSL's + # header files suggest that these might get copied at some point, but + # doesn't specify when, so we store them here to make sure they don't + # get freed before OpenSSL uses them. + self._npn_advertise_callback_args = None + self._npn_select_callback_args = None + + # References to strings used for Application Layer Protocol + # Negotiation. These strings get copied at some point but it's well + # after the callback returns, so we have to hang them somewhere to + # avoid them getting freed. + self._alpn_select_callback_args = None + self._reverse_mapping[self._ssl] = self if socket is None: @@ -841,6 +1151,12 @@ class Connection(object): def _raise_ssl_error(self, ssl, result): if self._context._verify_helper is not None: self._context._verify_helper.raise_if_problem() + if self._context._npn_advertise_helper is not None: + self._context._npn_advertise_helper.raise_if_problem() + if self._context._npn_select_helper is not None: + self._context._npn_select_helper.raise_if_problem() + if self._context._alpn_select_helper is not None: + self._context._alpn_select_helper.raise_if_problem() error = _lib.SSL_get_error(ssl, result) if error == _lib.SSL_ERROR_WANT_READ: @@ -859,7 +1175,7 @@ class Connection(object): errno = _ffi.getwinerror()[0] else: errno = _ffi.errno - raise SysCallError(errno, errorcode[errno]) + raise SysCallError(errno, errorcode.get(errno)) else: raise SysCallError(-1, "Unexpected EOF") else: @@ -936,15 +1252,20 @@ class Connection(object): WantWrite or WantX509Lookup exceptions on this, you have to call the method again with the SAME buffer. 
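# A hedged sketch of the retry pattern described above for a non-blocking
# connection (`conn` and `data` are assumed to exist; a real caller would
# normally wait on select() before retrying):
from OpenSSL.SSL import WantReadError, WantWriteError

while True:
    try:
        sent = conn.send(data)
        break
    except (WantReadError, WantWriteError):
        # Not ready yet; retry later with the SAME buffer.
        pass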
- :param buf: The string to send + :param buf: The string, buffer or memoryview to send :param flags: (optional) Included for compatibility with the socket API, the value is ignored :return: The number of bytes written """ + # Backward compatibility + buf = _text_to_bytes_and_warn("buf", buf) + if isinstance(buf, _memoryview): buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) if not isinstance(buf, bytes): - raise TypeError("data must be a byte string") + raise TypeError("data must be a memoryview, buffer or byte string") result = _lib.SSL_write(self._ssl, buf, len(buf)) self._raise_ssl_error(self._ssl, result) @@ -958,15 +1279,19 @@ class Connection(object): all data is sent. If an error occurs, it's impossible to tell how much data has been sent. - :param buf: The string to send + :param buf: The string, buffer or memoryview to send :param flags: (optional) Included for compatibility with the socket API, the value is ignored :return: The number of bytes written """ + buf = _text_to_bytes_and_warn("buf", buf) + if isinstance(buf, _memoryview): buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) if not isinstance(buf, bytes): - raise TypeError("buf must be a byte string") + raise TypeError("buf must be a memoryview, buffer or byte string") left_to_send = len(buf) total_sent = 0 @@ -997,6 +1322,45 @@ class Connection(object): read = recv + def recv_into(self, buffer, nbytes=None, flags=None): + """ + Receive data on the connection and store the data into a buffer rather + than creating a new string. + + :param buffer: The buffer to copy into. + :param nbytes: (optional) The maximum number of bytes to read into the + buffer. If not present, defaults to the size of the buffer. If + larger than the size of the buffer, is reduced to the size of the + buffer. + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored. + :return: The number of bytes read into the buffer. + """ + if nbytes is None: + nbytes = len(buffer) + else: + nbytes = min(nbytes, len(buffer)) + + # We need to create a temporary buffer. This is annoying, it would be + # better if we could pass memoryviews straight into the SSL_read call, + # but right now we can't. Revisit this if CFFI gets that ability. + buf = _ffi.new("char[]", nbytes) + result = _lib.SSL_read(self._ssl, buf, nbytes) + self._raise_ssl_error(self._ssl, result) + + # This strange line is all to avoid a memory copy. The buffer protocol + # should allow us to assign a CFFI buffer to the LHS of this line, but + # on CPython 3.3+ that segfaults. As a workaround, we can temporarily + # wrap it in a memoryview, except on Python 2.6 which doesn't have a + # memoryview type. + try: + buffer[:result] = memoryview(_ffi.buffer(buf, result)) + except NameError: + buffer[:result] = _ffi.buffer(buf, result) + + return result + + def _handle_bio_errors(self, bio, result): if _lib.BIO_should_retry(bio): if _lib.BIO_should_read(bio): @@ -1046,6 +1410,8 @@ class Connection(object): :param buf: The string to put into the memory BIO. :return: The number of bytes written """ + buf = _text_to_bytes_and_warn("buf", buf) + if self._into_ssl is None: raise TypeError("Connection sock was not None") @@ -1153,8 +1519,7 @@ class Connection(object): """ result = _lib.SSL_shutdown(self._ssl) if result < 0: - # TODO: This is untested. 
- _raise_current_error() + self._raise_ssl_error(self._ssl, result) elif result > 0: return True else: @@ -1210,7 +1575,7 @@ class Connection(object): The makefile() method is not implemented, since there is no dup semantics for SSL connections - :raise NotImplementedError + :raise: NotImplementedError """ raise NotImplementedError("Cannot make file object of OpenSSL.SSL.Connection") @@ -1416,6 +1781,166 @@ class Connection(object): if not result: _raise_current_error() + + def _get_finished_message(self, function): + """ + Helper to implement :py:meth:`get_finished` and + :py:meth:`get_peer_finished`. + + :param function: Either :py:data:`SSL_get_finished`: or + :py:data:`SSL_get_peer_finished`. + + :return: :py:data:`None` if the desired message has not yet been + received, otherwise the contents of the message. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + # The OpenSSL documentation says nothing about what might happen if the + # count argument given is zero. Specifically, it doesn't say whether + # the output buffer may be NULL in that case or not. Inspection of the + # implementation reveals that it calls memcpy() unconditionally. + # Section 7.1.4, paragraph 1 of the C standard suggests that + # memcpy(NULL, source, 0) is not guaranteed to produce defined (let + # alone desirable) behavior (though it probably does on just about + # every implementation...) + # + # Allocate a tiny buffer to pass in (instead of just passing NULL as + # one might expect) for the initial call so as to be safe against this + # potentially undefined behavior. + empty = _ffi.new("char[]", 0) + size = function(self._ssl, empty, 0) + if size == 0: + # No Finished message so far. + return None + + buf = _ffi.new("char[]", size) + function(self._ssl, buf, size) + return _ffi.buffer(buf, size)[:] + + + def get_finished(self): + """ + Obtain the latest `handshake finished` message sent to the peer. + + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_finished) + + + def get_peer_finished(self): + """ + Obtain the latest `handshake finished` message received from the peer. + + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_peer_finished) + + + def get_cipher_name(self): + """ + Obtain the name of the currently used cipher. + + :returns: The name of the currently used cipher or :py:obj:`None` + if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) + return name.decode("utf-8") + + + def get_cipher_bits(self): + """ + Obtain the number of secret bits of the currently used cipher. + + :returns: The number of secret bits of the currently used cipher + or :py:obj:`None` if no connection has been established. + :rtype: :py:class:`int` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) + + + def get_cipher_version(self): + """ + Obtain the protocol version of the currently used cipher. 
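# Illustrative introspection sketch (assumes `conn` is a Connection whose
# handshake has completed); the three new accessors describe the negotiated
# cipher:
name = conn.get_cipher_name()        # e.g. u'AES128-SHA'
bits = conn.get_cipher_bits()        # e.g. 128
version = conn.get_cipher_version()  # e.g. u'TLSv1/SSLv3'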
+ + :returns: The protocol name of the currently used cipher + or :py:obj:`None` if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + version =_ffi.string(_lib.SSL_CIPHER_get_version(cipher)) + return version.decode("utf-8") + + + @_requires_npn + def get_next_proto_negotiated(self): + """ + Get the protocol that was negotiated by NPN. + """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_next_proto_negotiated(self._ssl, data, data_len) + + return _ffi.buffer(data[0], data_len[0])[:] + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the client's ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_set_alpn_protos(self._ssl, input_str, input_str_len) + + + def get_alpn_proto_negotiated(self): + """ + Get the protocol that was negotiated by ALPN. + """ + if not _lib.Cryptography_HAS_ALPN: + raise NotImplementedError("ALPN not available") + + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) + + if not data_len: + return b'' + + return _ffi.buffer(data[0], data_len[0])[:] + + + ConnectionType = Connection # This is similar to the initialization calls at the end of OpenSSL/crypto.py diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/__init__.py b/Darwin/lib/python3.5/site-packages/OpenSSL/__init__.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/OpenSSL/__init__.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/__init__.py diff --git a/Darwin/lib/python3.5/site-packages/OpenSSL/_util.py b/Darwin/lib/python3.5/site-packages/OpenSSL/_util.py new file mode 100644 index 0000000..0cc34d8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/_util.py @@ -0,0 +1,127 @@ +from warnings import warn +import sys + +from six import PY3, binary_type, text_type + +from cryptography.hazmat.bindings.openssl.binding import Binding +binding = Binding() +ffi = binding.ffi +lib = binding.lib + + + +def text(charp): + """ + Get a native string type representing of the given CFFI ``char*`` object. + + :param charp: A C-style string represented using CFFI. + + :return: :class:`str` + """ + if not charp: + return "" + return native(ffi.string(charp)) + + + +def exception_from_error_queue(exception_type): + """ + Convert an OpenSSL library failure into a Python exception. + + When a call to the native OpenSSL library fails, this is usually signalled + by the return value, and an error code is stored in an error queue + associated with the current thread. The err library provides functions to + obtain these error codes and textual error messages. 
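# Sketch of the intended calling convention (it mirrors the partial(...)
# pattern the bindings use elsewhere in this change, e.g. in crypto.py):
# each module binds its own exception type once and calls the result
# whenever a native call fails.
from functools import partial

class Error(Exception):
    """Stand-in module-level error type (illustrative)."""

_raise_current_error = partial(exception_from_error_queue, Error)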
+ """ + + errors = [] + + while True: + error = lib.ERR_get_error() + if error == 0: + break + errors.append(( + text(lib.ERR_lib_error_string(error)), + text(lib.ERR_func_error_string(error)), + text(lib.ERR_reason_error_string(error)))) + + raise exception_type(errors) + + + +def native(s): + """ + Convert :py:class:`bytes` or :py:class:`unicode` to the native + :py:class:`str` type, using UTF-8 encoding if conversion is necessary. + + :raise UnicodeError: The input string is not UTF-8 decodeable. + + :raise TypeError: The input is neither :py:class:`bytes` nor + :py:class:`unicode`. + """ + if not isinstance(s, (binary_type, text_type)): + raise TypeError("%r is neither bytes nor unicode" % s) + if PY3: + if isinstance(s, binary_type): + return s.decode("utf-8") + else: + if isinstance(s, text_type): + return s.encode("utf-8") + return s + + + +def path_string(s): + """ + Convert a Python string to a :py:class:`bytes` string identifying the same + path and which can be passed into an OpenSSL API accepting a filename. + + :param s: An instance of :py:class:`bytes` or :py:class:`unicode`. + + :return: An instance of :py:class:`bytes`. + """ + if isinstance(s, binary_type): + return s + elif isinstance(s, text_type): + return s.encode(sys.getfilesystemencoding()) + else: + raise TypeError("Path must be represented as bytes or unicode string") + + +if PY3: + def byte_string(s): + return s.encode("charmap") +else: + def byte_string(s): + return s + + +# A marker object to observe whether some optional arguments are passed any +# value or not. +UNSPECIFIED = object() + +_TEXT_WARNING = ( + text_type.__name__ + " for {0} is no longer accepted, use bytes" +) + +def text_to_bytes_and_warn(label, obj): + """ + If ``obj`` is text, emit a warning that it should be bytes instead and try + to convert it to bytes automatically. + + :param str label: The name of the parameter from which ``obj`` was taken + (so a developer can easily find the source of the problem and correct + it). + + :return: If ``obj`` is the text string type, a ``bytes`` object giving the + UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is + returned. 
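# Behavioural sketch (mirrors how the test suite later in this change
# exercises the same deprecation path through the public APIs):
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    assert text_to_bytes_and_warn("passphrase", u"secret") == b"secret"
    assert caught[-1].category is DeprecationWarning
# Bytes pass through unchanged and emit no warning.
assert text_to_bytes_and_warn("passphrase", b"secret") == b"secret"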
+ """ + if isinstance(obj, text_type): + warn( + _TEXT_WARNING.format(label), + category=DeprecationWarning, + stacklevel=3 + ) + return obj.encode('utf-8') + return obj diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/crypto.py b/Darwin/lib/python3.5/site-packages/OpenSSL/crypto.py similarity index 86% rename from Darwin/lib/python3.4/site-packages/OpenSSL/crypto.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/crypto.py index d0026bd..50ff74f 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/crypto.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/crypto.py @@ -2,17 +2,22 @@ from time import time from base64 import b16encode from functools import partial from operator import __eq__, __ne__, __lt__, __le__, __gt__, __ge__ +from warnings import warn as _warn from six import ( integer_types as _integer_types, - text_type as _text_type) + text_type as _text_type, + PY3 as _PY3) from OpenSSL._util import ( ffi as _ffi, lib as _lib, exception_from_error_queue as _exception_from_error_queue, byte_string as _byte_string, - native as _native) + native as _native, + UNSPECIFIED as _UNSPECIFIED, + text_to_bytes_and_warn as _text_to_bytes_and_warn, +) FILETYPE_PEM = _lib.SSL_FILETYPE_PEM FILETYPE_ASN1 = _lib.SSL_FILETYPE_ASN1 @@ -24,6 +29,7 @@ TYPE_RSA = _lib.EVP_PKEY_RSA TYPE_DSA = _lib.EVP_PKEY_DSA + class Error(Exception): """ An error occurred in an `OpenSSL.crypto` API. @@ -32,6 +38,8 @@ class Error(Exception): _raise_current_error = partial(_exception_from_error_queue, Error) + + def _untested_error(where): """ An OpenSSL API failed somehow. Additionally, the failure which was @@ -263,6 +271,156 @@ PKeyType = PKey +class _EllipticCurve(object): + """ + A representation of a supported elliptic curve. + + @cvar _curves: :py:obj:`None` until an attempt is made to load the curves. + Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve` + instances each of which represents one curve supported by the system. + @type _curves: :py:type:`NoneType` or :py:type:`set` + """ + _curves = None + + if _PY3: + # This only necessary on Python 3. Morever, it is broken on Python 2. + def __ne__(self, other): + """ + Implement cooperation with the right-hand side argument of ``!=``. + + Python 3 seems to have dropped this cooperation in this very narrow + circumstance. + """ + if isinstance(other, _EllipticCurve): + return super(_EllipticCurve, self).__ne__(other) + return NotImplemented + + + @classmethod + def _load_elliptic_curves(cls, lib): + """ + Get the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if lib.Cryptography_HAS_EC: + num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) + builtin_curves = _ffi.new('EC_builtin_curve[]', num_curves) + # The return value on this call should be num_curves again. We could + # check it to make sure but if it *isn't* then.. what could we do? + # Abort the whole process, I suppose...? -exarkun + lib.EC_get_builtin_curves(builtin_curves, num_curves) + return set( + cls.from_nid(lib, c.nid) + for c in builtin_curves) + return set() + + + @classmethod + def _get_elliptic_curves(cls, lib): + """ + Get, cache, and return the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. 
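# Hedged sketch (assumes an OpenSSL build with elliptic-curve support and
# uses the standard short name for NIST P-256): pick a supported curve and
# hand it to a Context for ECDHE key exchange via the new set_tmp_ecdh().
from OpenSSL.SSL import Context, TLSv1_METHOD
from OpenSSL.crypto import get_elliptic_curve

ctx = Context(TLSv1_METHOD)
ctx.set_tmp_ecdh(get_elliptic_curve(u'prime256v1'))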
+ """ + if cls._curves is None: + cls._curves = cls._load_elliptic_curves(lib) + return cls._curves + + + @classmethod + def from_nid(cls, lib, nid): + """ + Instantiate a new :py:class:`_EllipticCurve` associated with the given + OpenSSL NID. + + :param lib: The OpenSSL library binding object. + + :param nid: The OpenSSL NID the resulting curve object will represent. + This must be a curve NID (and not, for example, a hash NID) or + subsequent operations will fail in unpredictable ways. + :type nid: :py:class:`int` + + :return: The curve object. + """ + return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii")) + + + def __init__(self, lib, nid, name): + """ + :param _lib: The :py:mod:`cryptography` binding instance used to + interface with OpenSSL. + + :param _nid: The OpenSSL NID identifying the curve this object + represents. + :type _nid: :py:class:`int` + + :param name: The OpenSSL short name identifying the curve this object + represents. + :type name: :py:class:`unicode` + """ + self._lib = lib + self._nid = nid + self.name = name + + + def __repr__(self): + return "" % (self.name,) + + + def _to_EC_KEY(self): + """ + Create a new OpenSSL EC_KEY structure initialized to use this curve. + + The structure is automatically garbage collected when the Python object + is garbage collected. + """ + key = self._lib.EC_KEY_new_by_curve_name(self._nid) + return _ffi.gc(key, _lib.EC_KEY_free) + + + +def get_elliptic_curves(): + """ + Return a set of objects representing the elliptic curves supported in the + OpenSSL build in use. + + The curve objects have a :py:class:`unicode` ``name`` attribute by which + they identify themselves. + + The curve objects are useful as values for the argument accepted by + :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be + used for ECDHE key exchange. + """ + return _EllipticCurve._get_elliptic_curves(_lib) + + + +def get_elliptic_curve(name): + """ + Return a single curve object selected by name. + + See :py:func:`get_elliptic_curves` for information about curve objects. + + :param name: The OpenSSL short name identifying the curve object to + retrieve. + :type name: :py:class:`unicode` + + If the named curve is not supported then :py:class:`ValueError` is raised. + """ + for curve in get_elliptic_curves(): + if curve.name == name: + return curve + raise ValueError("unknown curve name", name) + + + class X509Name(object): def __init__(self, name): """ @@ -697,6 +855,21 @@ class X509Req(object): _raise_current_error() + def get_extensions(self): + """ + Get extensions to the request. + + :return: A :py:class:`list` of :py:class:`X509Extension` objects. + """ + exts = [] + native_exts_obj = _lib.X509_REQ_get_extensions(self._req) + for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)): + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.sk_X509_EXTENSION_value(native_exts_obj, i) + exts.append(ext) + return exts + + def sign(self, pkey, digest): """ Sign the certificate request using the supplied key and digest @@ -1190,6 +1363,125 @@ class X509Store(object): X509StoreType = X509Store +class X509StoreContextError(Exception): + """ + An error occurred while verifying a certificate using + `OpenSSL.X509StoreContext.verify_certificate`. + + :ivar certificate: The certificate which caused verificate failure. 
+ :type cert: :class:`X509` + + """ + def __init__(self, message, certificate): + super(X509StoreContextError, self).__init__(message) + self.certificate = certificate + + +class X509StoreContext(object): + """ + An X.509 store context. + + An :py:class:`X509StoreContext` is used to define some of the criteria for + certificate verification. The information encapsulated in this object + includes, but is not limited to, a set of trusted certificates, + verification parameters, and revoked certificates. + + Of these, only the set of trusted certificates is currently exposed. + + :ivar _store_ctx: The underlying X509_STORE_CTX structure used by this + instance. It is dynamically allocated and automatically garbage + collected. + + :ivar _store: See the ``store`` ``__init__`` parameter. + + :ivar _cert: See the ``certificate`` ``__init__`` parameter. + """ + + def __init__(self, store, certificate): + """ + :param X509Store store: The certificates which will be trusted for the + purposes of any verifications. + + :param X509 certificate: The certificate to be verified. + """ + store_ctx = _lib.X509_STORE_CTX_new() + self._store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free) + self._store = store + self._cert = certificate + # Make the store context available for use after instantiating this + # class by initializing it now. Per testing, subsequent calls to + # :py:meth:`_init` have no adverse affect. + self._init() + + + def _init(self): + """ + Set up the store context for a subsequent verification operation. + """ + ret = _lib.X509_STORE_CTX_init(self._store_ctx, self._store._store, self._cert._x509, _ffi.NULL) + if ret <= 0: + _raise_current_error() + + + def _cleanup(self): + """ + Internally cleans up the store context. + + The store context can then be reused with a new call to + :py:meth:`_init`. + """ + _lib.X509_STORE_CTX_cleanup(self._store_ctx) + + + def _exception_from_context(self): + """ + Convert an OpenSSL native context error failure into a Python + exception. + + When a call to native OpenSSL X509_verify_cert fails, additonal information + about the failure can be obtained from the store context. + """ + errors = [ + _lib.X509_STORE_CTX_get_error(self._store_ctx), + _lib.X509_STORE_CTX_get_error_depth(self._store_ctx), + _native(_ffi.string(_lib.X509_verify_cert_error_string( + _lib.X509_STORE_CTX_get_error(self._store_ctx)))), + ] + # A context error should always be associated with a certificate, so we + # expect this call to never return :class:`None`. + _x509 = _lib.X509_STORE_CTX_get_current_cert(self._store_ctx) + _cert = _lib.X509_dup(_x509) + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(_cert, _lib.X509_free) + return X509StoreContextError(errors, pycert) + + + def set_store(self, store): + """ + Set the context's trust store. + + :param X509Store store: The certificates which will be trusted for the + purposes of any *future* verifications. + """ + self._store = store + + + def verify_certificate(self): + """ + Verify a certificate in a context. + + :param store_ctx: The :py:class:`X509StoreContext` to verify. + :raises: Error + """ + # Always re-initialize the store context in case + # :py:meth:`verify_certificate` is called multiple times. 
+ self._init() + ret = _lib.X509_verify_cert(self._store_ctx) + self._cleanup() + if ret <= 0: + raise self._exception_from_context() + + def load_certificate(type, buffer): """ @@ -1308,9 +1600,11 @@ def _X509_REVOKED_dup(original): _raise_current_error() if original.serialNumber != _ffi.NULL: + _lib.ASN1_INTEGER_free(copy.serialNumber) copy.serialNumber = _lib.ASN1_INTEGER_dup(original.serialNumber) if original.revocationDate != _ffi.NULL: + _lib.ASN1_TIME_free(copy.revocationDate) copy.revocationDate = _lib.M_ASN1_TIME_dup(original.revocationDate) if original.extensions != _ffi.NULL: @@ -1539,7 +1833,8 @@ class CRL(object): _raise_current_error() - def export(self, cert, key, type=FILETYPE_PEM, days=100): + def export(self, cert, key, type=FILETYPE_PEM, days=100, + digest=_UNSPECIFIED): """ export a CRL as a string @@ -1549,12 +1844,15 @@ class CRL(object): :param key: Used to sign CRL. :type key: :class:`PKey` - :param type: The export format, either :py:data:`FILETYPE_PEM`, :py:data:`FILETYPE_ASN1`, or :py:data:`FILETYPE_TEXT`. + :param type: The export format, either :py:data:`FILETYPE_PEM`, + :py:data:`FILETYPE_ASN1`, or :py:data:`FILETYPE_TEXT`. - :param days: The number of days until the next update of this CRL. - :type days: :py:data:`int` + :param int days: The number of days until the next update of this CRL. - :return: :py:data:`str` + :param bytes digest: The name of the message digest to use (eg + ``b"sha1"``). + + :return: :py:data:`bytes` """ if not isinstance(cert, X509): raise TypeError("cert must be an X509 instance") @@ -1563,6 +1861,19 @@ class CRL(object): if not isinstance(type, int): raise TypeError("type must be an integer") + if digest is _UNSPECIFIED: + _warn( + "The default message digest (md5) is deprecated. " + "Pass the name of a message digest explicitly.", + category=DeprecationWarning, + stacklevel=2, + ) + digest = b"md5" + + digest_obj = _lib.EVP_get_digestbyname(digest) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + bio = _lib.BIO_new(_lib.BIO_s_mem()) if bio == _ffi.NULL: # TODO: This is untested. @@ -1582,7 +1893,7 @@ class CRL(object): _lib.X509_CRL_set_issuer_name(self._crl, _lib.X509_get_subject_name(cert._x509)) - sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, _lib.EVP_md5()) + sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj) if not sign_result: _raise_current_error() @@ -1729,7 +2040,7 @@ class PKCS12(object): def set_ca_certificates(self, cacerts): """ - Replace or set the CA certificates withing the PKCS12 object. + Replace or set the CA certificates within the PKCS12 object. :param cacerts: The new CA certificates. 
:type cacerts: :py:data:`None` or an iterable of :py:class:`X509` @@ -1784,6 +2095,8 @@ class PKCS12(object): :return: The string containing the PKCS12 """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + if self._cacerts is None: cacerts = _ffi.NULL else: @@ -2081,6 +2394,8 @@ def sign(pkey, data, digest): :param digest: message digest to use :return: signature """ + data = _text_to_bytes_and_warn("data", data) + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) if digest_obj == _ffi.NULL: raise ValueError("No such digest method") @@ -2115,6 +2430,8 @@ def verify(cert, signature, data, digest): :param digest: message digest to use :return: None if the signature is correct, raise exception otherwise """ + data = _text_to_bytes_and_warn("data", data) + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) if digest_obj == _ffi.NULL: raise ValueError("No such digest method") @@ -2136,7 +2453,6 @@ def verify(cert, signature, data, digest): _raise_current_error() - def load_crl(type, buffer): """ Load a certificate revocation list from a buffer @@ -2183,7 +2499,7 @@ def load_pkcs7_data(type, buffer): if type == FILETYPE_PEM: pkcs7 = _lib.PEM_read_bio_PKCS7(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) elif type == FILETYPE_ASN1: - pass + pkcs7 = _lib.d2i_PKCS7_bio(bio, _ffi.NULL) else: # TODO: This is untested. _raise_current_error() @@ -2198,7 +2514,7 @@ def load_pkcs7_data(type, buffer): -def load_pkcs12(buffer, passphrase): +def load_pkcs12(buffer, passphrase=None): """ Load a PKCS12 object from a buffer @@ -2206,11 +2522,20 @@ def load_pkcs12(buffer, passphrase): :param passphrase: (Optional) The password to decrypt the PKCS12 lump :returns: The PKCS12 object """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + if isinstance(buffer, _text_type): buffer = buffer.encode("ascii") bio = _new_mem_buf(buffer) + # Use null passphrase if passphrase is None or empty string. With PKCS#12 + # password based encryption no password and a zero length password are two + # different things, but OpenSSL implementation will try both to figure out + # which one works. + if not passphrase: + passphrase = _ffi.NULL + p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL) if p12 == _ffi.NULL: _raise_current_error() diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/rand.py b/Darwin/lib/python3.5/site-packages/OpenSSL/rand.py similarity index 90% rename from Darwin/lib/python3.4/site-packages/OpenSSL/rand.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/rand.py index e754378..3adf693 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/rand.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/rand.py @@ -11,7 +11,8 @@ from six import integer_types as _integer_types from OpenSSL._util import ( ffi as _ffi, lib as _lib, - exception_from_error_queue as _exception_from_error_queue) + exception_from_error_queue as _exception_from_error_queue, + path_string as _path_string) class Error(Exception): @@ -131,13 +132,13 @@ def load_file(filename, maxbytes=_unspecified): """ Seed the PRNG with data from a file - :param filename: The file to read data from - :param maxbytes: (optional) The number of bytes to read, default is - to read the entire file + :param filename: The file to read data from (``bytes`` or ``unicode``). 
+ :param maxbytes: (optional) The number of bytes to read, default is to read + the entire file + :return: The number of bytes read """ - if not isinstance(filename, _builtin_bytes): - raise TypeError("filename must be a string") + filename = _path_string(filename) if maxbytes is _unspecified: maxbytes = -1 @@ -152,12 +153,11 @@ def write_file(filename): """ Save PRNG state to a file - :param filename: The file to write data to + :param filename: The file to write data to (``bytes`` or ``unicode``). + :return: The number of bytes written """ - if not isinstance(filename, _builtin_bytes): - raise TypeError("filename must be a string") - + filename = _path_string(filename) return _lib.RAND_write_file(filename) diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/test/__init__.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/__init__.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/OpenSSL/test/__init__.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/test/__init__.py diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_crypto.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_crypto.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/OpenSSL/test/test_crypto.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/test/test_crypto.py index 4e42f70..f6f0751 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_crypto.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_crypto.py @@ -6,16 +6,22 @@ Unit tests for :py:mod:`OpenSSL.crypto`. """ from unittest import main +from warnings import catch_warnings, simplefilter -import os, re +import base64 +import os +import re from subprocess import PIPE, Popen from datetime import datetime, timedelta -from six import binary_type +from six import u, b, binary_type, PY3 +from warnings import simplefilter +from warnings import catch_warnings from OpenSSL.crypto import TYPE_RSA, TYPE_DSA, Error, PKey, PKeyType from OpenSSL.crypto import X509, X509Type, X509Name, X509NameType -from OpenSSL.crypto import X509Store, X509StoreType, X509Req, X509ReqType +from OpenSSL.crypto import X509Store, X509StoreType, X509StoreContext, X509StoreContextError +from OpenSSL.crypto import X509Req, X509ReqType from OpenSSL.crypto import X509Extension, X509ExtensionType from OpenSSL.crypto import load_certificate, load_privatekey from OpenSSL.crypto import FILETYPE_PEM, FILETYPE_ASN1, FILETYPE_TEXT @@ -25,9 +31,12 @@ from OpenSSL.crypto import PKCS7Type, load_pkcs7_data from OpenSSL.crypto import PKCS12, PKCS12Type, load_pkcs12 from OpenSSL.crypto import CRL, Revoked, load_crl from OpenSSL.crypto import NetscapeSPKI, NetscapeSPKIType -from OpenSSL.crypto import sign, verify -from OpenSSL.test.util import TestCase, b -from OpenSSL._util import native +from OpenSSL.crypto import ( + sign, verify, get_elliptic_curve, get_elliptic_curves) +from OpenSSL.test.util import ( + EqualityTestsMixin, TestCase, WARNING_TYPE_EXPECTED +) +from OpenSSL._util import native, lib def normalize_certificate_pem(pem): return dump_certificate(FILETYPE_PEM, load_certificate(FILETYPE_PEM, pem)) @@ -80,6 +89,40 @@ cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq -----END RSA PRIVATE KEY----- """) +intermediate_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICVzCCAcCgAwIBAgIRAMPzhm6//0Y/g2pmnHR2C4cwDQYJKoZIhvcNAQENBQAw +WDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAw +DgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwHhcNMTQw +ODI4MDIwNDA4WhcNMjQwODI1MDIwNDA4WjBmMRUwEwYDVQQDEwxpbnRlcm1lZGlh 
+dGUxDDAKBgNVBAoTA29yZzERMA8GA1UECxMIb3JnLXVuaXQxCzAJBgNVBAYTAlVT +MQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU2FuIERpZWdvMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDYcEQw5lfbEQRjr5Yy4yxAHGV0b9Al+Lmu7wLHMkZ/ZMmK +FGIbljbviiD1Nz97Oh2cpB91YwOXOTN2vXHq26S+A5xe8z/QJbBsyghMur88CjdT +21H2qwMa+r5dCQwEhuGIiZ3KbzB/n4DTMYI5zy4IYPv0pjxShZn4aZTCCK2IUwID +AQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDQUAA4GBAPIWSkLX +QRMApOjjyC+tMxumT5e2pMqChHmxobQK4NMdrf2VCx+cRT6EmY8sK3/Xl/X8UBQ+ +9n5zXb1ZwhW/sTWgUvmOceJ4/XVs9FkdWOOn1J0XBch9ZIiFe/s5ASIgG7fUdcUF +9mAWS6FK2ca3xIh5kIupCXOFa0dPvlw/YUFT +-----END CERTIFICATE----- +""") + +intermediate_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQDYcEQw5lfbEQRjr5Yy4yxAHGV0b9Al+Lmu7wLHMkZ/ZMmKFGIb +ljbviiD1Nz97Oh2cpB91YwOXOTN2vXHq26S+A5xe8z/QJbBsyghMur88CjdT21H2 +qwMa+r5dCQwEhuGIiZ3KbzB/n4DTMYI5zy4IYPv0pjxShZn4aZTCCK2IUwIDAQAB +AoGAfSZVV80pSeOKHTYfbGdNY/jHdU9eFUa/33YWriXU+77EhpIItJjkRRgivIfo +rhFJpBSGmDLblaqepm8emsXMeH4+2QzOYIf0QGGP6E6scjTt1PLqdqKfVJ1a2REN +147cujNcmFJb/5VQHHMpaPTgttEjlzuww4+BCDPsVRABWrkCQQD3loH36nLoQTtf ++kQq0T6Bs9/UWkTAGo0ND81ALj0F8Ie1oeZg6RNT96RxZ3aVuFTESTv6/TbjWywO +wdzlmV1vAkEA38rTJ6PTwaJlw5OttdDzAXGPB9tDmzh9oSi7cHwQQXizYd8MBYx4 +sjHUKD3dCQnb1dxJFhd3BT5HsnkRMbVZXQJAbXduH17ZTzcIOXc9jHDXYiFVZV5D +52vV0WCbLzVCZc3jMrtSUKa8lPN5EWrdU3UchWybyG0MR5mX8S5lrF4SoQJAIyUD +DBKaSqpqONCUUx1BTFS9FYrFjzbL4+c1qHCTTPTblt8kUCrDOZjBrKAqeiTmNSum +/qUot9YUBF8m6BuGsQJATHHmdFy/fG1VLkyBp49CAa8tN3Z5r/CgTznI4DfMTf4C +NbRHn2UmYlwQBa+L5lg9phewNe8aEwpPyPLoV85U8Q== +-----END RSA PRIVATE KEY----- +""") + server_cert_pem = b("""-----BEGIN CERTIFICATE----- MIICKDCCAZGgAwIBAgIJAJn/HpR21r/8MA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH @@ -113,6 +156,40 @@ r50+LF74iLXFwqysVCebPKMOpDWp/qQ1BbJQIPs7/A== -----END RSA PRIVATE KEY----- """)) +intermediate_server_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICWDCCAcGgAwIBAgIRAPQFY9jfskSihdiNSNdt6GswDQYJKoZIhvcNAQENBQAw +ZjEVMBMGA1UEAxMMaW50ZXJtZWRpYXRlMQwwCgYDVQQKEwNvcmcxETAPBgNVBAsT +CG9yZy11bml0MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVNh +biBEaWVnbzAeFw0xNDA4MjgwMjEwNDhaFw0yNDA4MjUwMjEwNDhaMG4xHTAbBgNV +BAMTFGludGVybWVkaWF0ZS1zZXJ2aWNlMQwwCgYDVQQKEwNvcmcxETAPBgNVBAsT +CG9yZy11bml0MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVNh +biBEaWVnbzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAqpJZygd+w1faLOr1 +iOAmbBhx5SZWcTCZ/ZjHQTJM7GuPT624QkqsixFghRKdDROwpwnAP7gMRukLqiy4 ++kRuGT5OfyGggL95i2xqA+zehjj08lSTlvGHpePJgCyTavIy5+Ljsj4DKnKyuhxm +biXTRrH83NDgixVkObTEmh/OVK0CAwEAATANBgkqhkiG9w0BAQ0FAAOBgQBa0Npw +UkzjaYEo1OUE1sTI6Mm4riTIHMak4/nswKh9hYup//WVOlr/RBSBtZ7Q/BwbjobN +3bfAtV7eSAqBsfxYXyof7G1ALANQERkq3+oyLP1iVt08W1WOUlIMPhdCF/QuCwy6 +x9MJLhUCGLJPM+O2rAPWVD9wCmvq10ALsiH3yA== +-----END CERTIFICATE----- +""") + +intermediate_server_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQCqklnKB37DV9os6vWI4CZsGHHlJlZxMJn9mMdBMkzsa49PrbhC +SqyLEWCFEp0NE7CnCcA/uAxG6QuqLLj6RG4ZPk5/IaCAv3mLbGoD7N6GOPTyVJOW +8Yel48mALJNq8jLn4uOyPgMqcrK6HGZuJdNGsfzc0OCLFWQ5tMSaH85UrQIDAQAB +AoGAIQ594j5zna3/9WaPsTgnmhlesVctt4AAx/n827DA4ayyuHFlXUuVhtoWR5Pk +5ezj9mtYW8DyeCegABnsu2vZni/CdvU6uiS1Hv6qM1GyYDm9KWgovIP9rQCDSGaz +d57IWVGxx7ODFkm3gN5nxnSBOFVHytuW1J7FBRnEsehRroECQQDXHFOv82JuXDcz +z3+4c74IEURdOHcbycxlppmK9kFqm5lsUdydnnGW+mvwDk0APOB7Wg7vyFyr393e +dpmBDCzNAkEAyv6tVbTKUYhSjW+QhabJo896/EqQEYUmtMXxk4cQnKeR/Ao84Rkf +EqD5IykMUfUI0jJU4DGX+gWZ10a7kNbHYQJAVFCuHNFxS4Cpwo0aqtnzKoZaHY/8 +X9ABZfafSHCtw3Op92M+7ikkrOELXdS9KdKyyqbKJAKNEHF3LbOfB44WIQJAA2N4 +9UNNVUsXRbElEnYUS529CdUczo4QdVgQjkvk5RiPAUwSdBd9Q0xYnFOlFwEmIowg 
+ipWJWe0aAlP18ZcEQQJBAL+5lekZ/GUdQoZ4HAsN5a9syrzavJ9VvU1KOOPorPZK +nMRZbbQgP+aSB7yl6K0gaLaZ8XaK0pjxNBh6ASqg9f4= +-----END RSA PRIVATE KEY----- +""") + client_cert_pem = b("""-----BEGIN CERTIFICATE----- MIICJjCCAY+gAwIBAgIJAKxpFI5lODkjMA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH @@ -247,6 +324,27 @@ Ho4EzbYCOaEAMQA= -----END PKCS7----- """) +pkcs7DataASN1 = base64.b64decode(b""" +MIIDNwYJKoZIhvcNAQcCoIIDKDCCAyQCAQExADALBgkqhkiG9w0BBwGgggMKMIID +BjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzERMA8G +A1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtN +MkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNA +cG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzELMAkG +A1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhvc3Qx +HTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEBBQAD +SwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh5kwI +zOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQCMAAw +LAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0G +A1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7hyNp +65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoTCE0y +Q3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlwdG8g +Q2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3QxLmNv +bYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6BoJu +VwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++7QGG +/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JEWUQ9 +Ho4EzbYCOaEAMQA= +""") + crlData = b("""\ -----BEGIN X509 CRL----- MIIBWzCBxTANBgkqhkiG9w0BAQQFADBYMQswCQYDVQQGEwJVUzELMAkGA1UECBMC @@ -512,7 +610,7 @@ class X509ExtTests(TestCase): def test_issuer(self): """ - If an extension requires a issuer, the :py:data:`issuer` parameter to + If an extension requires an issuer, the :py:data:`issuer` parameter to :py:class:`X509Extension` provides its value. """ ext2 = X509Extension( @@ -1108,7 +1206,32 @@ class X509ReqTests(TestCase, _PKeyInteractionTestsMixin): request = X509Req() request.add_extensions([ X509Extension(b('basicConstraints'), True, b('CA:false'))]) - # XXX Add get_extensions so the rest of this unit test can be written. + exts = request.get_extensions() + self.assertEqual(len(exts), 1) + self.assertEqual(exts[0].get_short_name(), b('basicConstraints')) + self.assertEqual(exts[0].get_critical(), 1) + self.assertEqual(exts[0].get_data(), b('0\x00')) + + + def test_get_extensions(self): + """ + :py:obj:`X509Req.get_extensions` returns a :py:obj:`list` of + extensions added to this X509 request. 
+ """ + request = X509Req() + exts = request.get_extensions() + self.assertEqual(exts, []) + request.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:true')), + X509Extension(b('keyUsage'), False, b('digitalSignature'))]) + exts = request.get_extensions() + self.assertEqual(len(exts), 2) + self.assertEqual(exts[0].get_short_name(), b('basicConstraints')) + self.assertEqual(exts[0].get_critical(), 1) + self.assertEqual(exts[0].get_data(), b('0\x03\x01\x01\xff')) + self.assertEqual(exts[1].get_short_name(), b('keyUsage')) + self.assertEqual(exts[1].get_critical(), 0) + self.assertEqual(exts[1].get_data(), b('\x03\x02\x07\x80')) def test_add_extensions_wrong_args(self): @@ -1163,7 +1286,7 @@ class X509ReqTests(TestCase, _PKeyInteractionTestsMixin): def test_verify_success(self): """ :py:obj:`X509Req.verify` returns :py:obj:`True` if called with a - :py:obj:`OpenSSL.crypto.PKey` which represents the public part ofthe key + :py:obj:`OpenSSL.crypto.PKey` which represents the public part of the key which signed the request. """ request = X509Req() @@ -1915,6 +2038,21 @@ class PKCS12Tests(TestCase): self.assertEqual(recovered_cert[-len(ca):], ca) + def verify_pkcs12_container(self, p12): + """ + Verify that the PKCS#12 container contains the correct client + certificate and private key. + + :param p12: The PKCS12 instance to verify. + :type p12: :py:class:`PKCS12` + """ + cert_pem = dump_certificate(FILETYPE_PEM, p12.get_certificate()) + key_pem = dump_privatekey(FILETYPE_PEM, p12.get_privatekey()) + self.assertEqual( + (client_cert_pem, client_key_pem, None), + (cert_pem, key_pem, p12.get_ca_certificates())) + + def test_load_pkcs12(self): """ A PKCS12 string generated using the openssl command line can be loaded @@ -1924,14 +2062,95 @@ class PKCS12Tests(TestCase): pem = client_key_pem + client_cert_pem p12_str = _runopenssl( pem, b"pkcs12", b"-export", b"-clcerts", b"-passout", b"pass:" + passwd) - p12 = load_pkcs12(p12_str, passwd) - # verify - self.assertTrue(isinstance(p12, PKCS12)) - cert_pem = dump_certificate(FILETYPE_PEM, p12.get_certificate()) - self.assertEqual(cert_pem, client_cert_pem) - key_pem = dump_privatekey(FILETYPE_PEM, p12.get_privatekey()) - self.assertEqual(key_pem, client_key_pem) - self.assertEqual(None, p12.get_ca_certificates()) + p12 = load_pkcs12(p12_str, passphrase=passwd) + self.verify_pkcs12_container(p12) + + + def test_load_pkcs12_text_passphrase(self): + """ + A PKCS12 string generated using the openssl command line can be loaded + with :py:obj:`load_pkcs12` and its components extracted and examined. + Using text as passphrase instead of bytes. DeprecationWarning expected. + """ + pem = client_key_pem + client_cert_pem + passwd = b"whatever" + p12_str = _runopenssl(pem, b"pkcs12", b"-export", b"-clcerts", + b"-passout", b"pass:" + passwd) + with catch_warnings(record=True) as w: + simplefilter("always") + p12 = load_pkcs12(p12_str, passphrase=b"whatever".decode("ascii")) + + self.assertEqual( + "{0} for passphrase is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + self.verify_pkcs12_container(p12) + + + def test_load_pkcs12_no_passphrase(self): + """ + A PKCS12 string generated using openssl command line can be loaded with + :py:obj:`load_pkcs12` without a passphrase and its components extracted + and examined. 
+ """ + pem = client_key_pem + client_cert_pem + p12_str = _runopenssl( + pem, b"pkcs12", b"-export", b"-clcerts", b"-passout", b"pass:") + p12 = load_pkcs12(p12_str) + self.verify_pkcs12_container(p12) + + + def _dump_and_load(self, dump_passphrase, load_passphrase): + """ + A helper method to dump and load a PKCS12 object. + """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem) + dumped_p12 = p12.export(passphrase=dump_passphrase, iter=2, maciter=3) + return load_pkcs12(dumped_p12, passphrase=load_passphrase) + + + def test_load_pkcs12_null_passphrase_load_empty(self): + """ + A PKCS12 string can be dumped with a null passphrase, loaded with an + empty passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=None, load_passphrase=b'')) + + + def test_load_pkcs12_null_passphrase_load_null(self): + """ + A PKCS12 string can be dumped with a null passphrase, loaded with a + null passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=None, load_passphrase=None)) + + + def test_load_pkcs12_empty_passphrase_load_empty(self): + """ + A PKCS12 string can be dumped with an empty passphrase, loaded with an + empty passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=b'', load_passphrase=b'')) + + + def test_load_pkcs12_empty_passphrase_load_null(self): + """ + A PKCS12 string can be dumped with an empty passphrase, loaded with a + null passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=b'', load_passphrase=None)) def test_load_pkcs12_garbage(self): @@ -2073,6 +2292,26 @@ class PKCS12Tests(TestCase): dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd=b"") + def test_export_without_bytes(self): + """ + Test :py:obj:`PKCS12.export` with text not bytes as passphrase + """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + + with catch_warnings(record=True) as w: + simplefilter("always") + dumped_p12 = p12.export(passphrase=b"randomtext".decode("ascii")) + self.assertEqual( + "{0} for passphrase is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd=b"randomtext") + + def test_key_cert_mismatch(self): """ :py:obj:`PKCS12.export` raises an exception when a key and certificate @@ -2463,7 +2702,7 @@ class FunctionTests(TestCase): dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, cb) - def test_load_pkcs7_data(self): + def test_load_pkcs7_data_pem(self): """ :py:obj:`load_pkcs7_data` accepts a PKCS#7 string and returns an instance of :py:obj:`PKCS7Type`. @@ -2472,6 +2711,15 @@ class FunctionTests(TestCase): self.assertTrue(isinstance(pkcs7, PKCS7Type)) + def test_load_pkcs7_data_asn1(self): + """ + :py:obj:`load_pkcs7_data` accepts a bytes containing ASN1 data + representing PKCS#7 and returns an instance of :py:obj`PKCS7Type`. 
+ """ + pkcs7 = load_pkcs7_data(FILETYPE_ASN1, pkcs7DataASN1) + self.assertTrue(isinstance(pkcs7, PKCS7Type)) + + def test_load_pkcs7_data_invalid(self): """ If the data passed to :py:obj:`load_pkcs7_data` is invalid, @@ -2796,11 +3044,9 @@ class CRLTests(TestCase): self.assertRaises(TypeError, CRL, None) - def test_export(self): + def _get_crl(self): """ - Use python to create a simple CRL with a revocation, and export - the CRL in formats of PEM, DER and text. Those outputs are verified - with the openssl program. + Get a new ``CRL`` with a revocation. """ crl = CRL() revoked = Revoked() @@ -2809,26 +3055,110 @@ class CRLTests(TestCase): revoked.set_serial(b('3ab')) revoked.set_reason(b('sUpErSeDEd')) crl.add_revoked(revoked) + return crl + + def test_export_pem(self): + """ + If not passed a format, ``CRL.export`` returns a "PEM" format string + representing a serial number, a revoked reason, and certificate issuer + information. + """ + crl = self._get_crl() # PEM format dumped_crl = crl.export(self.cert, self.pkey, days=20) text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + + # These magic values are based on the way the CRL above was constructed + # and with what certificate it was exported. text.index(b('Serial Number: 03AB')) text.index(b('Superseded')) - text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA')) + text.index( + b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA') + ) + + + def test_export_der(self): + """ + If passed ``FILETYPE_ASN1`` for the format, ``CRL.export`` returns a + "DER" format string representing a serial number, a revoked reason, and + certificate issuer information. + """ + crl = self._get_crl() # DER format dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1) - text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER") + text = _runopenssl( + dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER" + ) text.index(b('Serial Number: 03AB')) text.index(b('Superseded')) - text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA')) + text.index( + b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA') + ) + + + def test_export_text(self): + """ + If passed ``FILETYPE_TEXT`` for the format, ``CRL.export`` returns a + text format string like the one produced by the openssl command line + tool. + """ + crl = self._get_crl() + + dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1) + text = _runopenssl( + dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER" + ) # text format dumped_text = crl.export(self.cert, self.pkey, type=FILETYPE_TEXT) self.assertEqual(text, dumped_text) + def test_export_custom_digest(self): + """ + If passed the name of a digest function, ``CRL.export`` uses a + signature algorithm based on that digest function. + """ + crl = self._get_crl() + dumped_crl = crl.export(self.cert, self.pkey, digest=b"sha1") + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: sha1')) + + + def test_export_md5_digest(self): + """ + If passed md5 as the digest function, ``CRL.export`` uses md5 and does + not emit a deprecation warning. 
+ """ + crl = self._get_crl() + with catch_warnings(record=True) as catcher: + simplefilter("always") + self.assertEqual(0, len(catcher)) + dumped_crl = crl.export(self.cert, self.pkey, digest=b"md5") + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: md5')) + + + def test_export_default_digest(self): + """ + If not passed the name of a digest function, ``CRL.export`` uses a + signature algorithm based on MD5 and emits a deprecation warning. + """ + crl = self._get_crl() + with catch_warnings(record=True) as catcher: + simplefilter("always") + dumped_crl = crl.export(self.cert, self.pkey) + self.assertEqual( + "The default message digest (md5) is deprecated. " + "Pass the name of a message digest explicitly.", + str(catcher[0].message), + ) + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: md5')) + + def test_export_invalid(self): """ If :py:obj:`CRL.export` is used with an uninitialized :py:obj:`X509` @@ -2859,7 +3189,7 @@ class CRLTests(TestCase): crl = CRL() self.assertRaises(TypeError, crl.export) self.assertRaises(TypeError, crl.export, self.cert) - self.assertRaises(TypeError, crl.export, self.cert, self.pkey, FILETYPE_PEM, 10, "foo") + self.assertRaises(TypeError, crl.export, self.cert, self.pkey, FILETYPE_PEM, 10, "md5", "foo") self.assertRaises(TypeError, crl.export, None, self.pkey, FILETYPE_PEM, 10) self.assertRaises(TypeError, crl.export, self.cert, None, FILETYPE_PEM, 10) @@ -2877,6 +3207,19 @@ class CRLTests(TestCase): self.assertRaises(ValueError, crl.export, self.cert, self.pkey, 100, 10) + def test_export_unknown_digest(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with a unsupported digest results + in a :py:obj:`ValueError` being raised. + """ + crl = CRL() + self.assertRaises( + ValueError, + crl.export, + self.cert, self.pkey, FILETYPE_PEM, 10, b"strange-digest" + ) + + def test_get_revoked(self): """ Use python to create a simple CRL with two revocations. @@ -2977,6 +3320,107 @@ class CRLTests(TestCase): +class X509StoreContextTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.X509StoreContext`. + """ + root_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + intermediate_cert = load_certificate(FILETYPE_PEM, intermediate_cert_pem) + intermediate_server_cert = load_certificate(FILETYPE_PEM, intermediate_server_cert_pem) + + def test_valid(self): + """ + :py:obj:`verify_certificate` returns ``None`` when called with a certificate + and valid chain. + """ + store = X509Store() + store.add_cert(self.root_cert) + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_reuse(self): + """ + :py:obj:`verify_certificate` can be called multiple times with the same + ``X509StoreContext`` instance to produce the same result. + """ + store = X509Store() + store.add_cert(self.root_cert) + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_trusted_self_signed(self): + """ + :py:obj:`verify_certificate` returns ``None`` when called with a self-signed + certificate and itself in the chain. 
+ """ + store = X509Store() + store.add_cert(self.root_cert) + store_ctx = X509StoreContext(store, self.root_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_untrusted_self_signed(self): + """ + :py:obj:`verify_certificate` raises error when a self-signed certificate is + verified without itself in the chain. + """ + store = X509Store() + store_ctx = X509StoreContext(store, self.root_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'self signed certificate') + self.assertEqual(e.certificate.get_subject().CN, 'Testing Root CA') + + + def test_invalid_chain_no_root(self): + """ + :py:obj:`verify_certificate` raises error when a root certificate is missing + from the chain. + """ + store = X509Store() + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate') + + + def test_invalid_chain_no_intermediate(self): + """ + :py:obj:`verify_certificate` raises error when an intermediate certificate is + missing from the chain. + """ + store = X509Store() + store.add_cert(self.root_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get local issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate-service') + + + def test_modification_pre_verify(self): + """ + :py:obj:`verify_certificate` can use a store context modified after + instantiation. + """ + store_bad = X509Store() + store_bad.add_cert(self.intermediate_cert) + store_good = X509Store() + store_good.add_cert(self.root_cert) + store_good.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store_bad, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate') + store_ctx.set_store(store_good) + self.assertEqual(store_ctx.verify_certificate(), None) + + + class SignVerifyTests(TestCase): """ Tests for :py:obj:`OpenSSL.crypto.sign` and :py:obj:`OpenSSL.crypto.verify`. @@ -3022,6 +3466,47 @@ class SignVerifyTests(TestCase): ValueError, verify, good_cert, sig, content, "strange-digest") + def test_sign_verify_with_text(self): + """ + :py:obj:`sign` generates a cryptographic signature which :py:obj:`verify` can check. + Deprecation warnings raised because using text instead of bytes as content + """ + content = ( + b"It was a bright cold day in April, and the clocks were striking " + b"thirteen. Winston Smith, his chin nuzzled into his breast in an " + b"effort to escape the vile wind, slipped quickly through the " + b"glass doors of Victory Mansions, though not quickly enough to " + b"prevent a swirl of gritty dust from entering along with him." 
+ ).decode("ascii") + + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + cert = load_certificate(FILETYPE_PEM, root_cert_pem) + for digest in ['md5', 'sha1']: + with catch_warnings(record=True) as w: + simplefilter("always") + sig = sign(priv_key, content, digest) + + self.assertEqual( + "{0} for data is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + with catch_warnings(record=True) as w: + simplefilter("always") + verify(cert, sig, content, digest) + + self.assertEqual( + "{0} for data is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + def test_sign_nulls(self): """ :py:obj:`sign` produces a signature for a string with embedded nulls. @@ -3033,5 +3518,154 @@ class SignVerifyTests(TestCase): verify(good_cert, sig, content, "sha1") + +class EllipticCurveTests(TestCase): + """ + Tests for :py:class:`_EllipticCurve`, :py:obj:`get_elliptic_curve`, and + :py:obj:`get_elliptic_curves`. + """ + def test_set(self): + """ + :py:obj:`get_elliptic_curves` returns a :py:obj:`set`. + """ + self.assertIsInstance(get_elliptic_curves(), set) + + + def test_some_curves(self): + """ + If :py:mod:`cryptography` has elliptic curve support then the set + returned by :py:obj:`get_elliptic_curves` has some elliptic curves in + it. + + There could be an OpenSSL that violates this assumption. If so, this + test will fail and we'll find out. + """ + curves = get_elliptic_curves() + if lib.Cryptography_HAS_EC: + self.assertTrue(curves) + else: + self.assertFalse(curves) + + + def test_a_curve(self): + """ + :py:obj:`get_elliptic_curve` can be used to retrieve a particular + supported curve. + """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + self.assertEqual(curve.name, get_elliptic_curve(curve.name).name) + else: + self.assertRaises(ValueError, get_elliptic_curve, u("prime256v1")) + + + def test_not_a_curve(self): + """ + :py:obj:`get_elliptic_curve` raises :py:class:`ValueError` if called + with a name which does not identify a supported curve. + """ + self.assertRaises( + ValueError, get_elliptic_curve, u("this curve was just invented")) + + + def test_repr(self): + """ + The string representation of a curve object includes simply states the + object is a curve and what its name is. + """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + self.assertEqual("" % (curve.name,), repr(curve)) + + + def test_to_EC_KEY(self): + """ + The curve object can export a version of itself as an EC_KEY* via the + private :py:meth:`_EllipticCurve._to_EC_KEY`. + """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + # It's not easy to assert anything about this object. However, see + # leakcheck/crypto.py for a test that demonstrates it at least does + # not leak memory. + curve._to_EC_KEY() + + + +class EllipticCurveFactory(object): + """ + A helper to get the names of two curves. + """ + def __init__(self): + curves = iter(get_elliptic_curves()) + try: + self.curve_name = next(curves).name + self.another_curve_name = next(curves).name + except StopIteration: + self.curve_name = self.another_curve_name = None + + + +class EllipticCurveEqualityTests(TestCase, EqualityTestsMixin): + """ + Tests :py:type:`_EllipticCurve`\ 's implementation of ``==`` and ``!=``. 
+ """ + curve_factory = EllipticCurveFactory() + + if curve_factory.curve_name is None: + skip = "There are no curves available there can be no curve objects." + + + def anInstance(self): + """ + Get the curve object for an arbitrary curve supported by the system. + """ + return get_elliptic_curve(self.curve_factory.curve_name) + + + def anotherInstance(self): + """ + Get the curve object for an arbitrary curve supported by the system - + but not the one returned by C{anInstance}. + """ + return get_elliptic_curve(self.curve_factory.another_curve_name) + + + +class EllipticCurveHashTests(TestCase): + """ + Tests for :py:type:`_EllipticCurve`\ 's implementation of hashing (thus use + as an item in a :py:type:`dict` or :py:type:`set`). + """ + curve_factory = EllipticCurveFactory() + + if curve_factory.curve_name is None: + skip = "There are no curves available there can be no curve objects." + + + def test_contains(self): + """ + The ``in`` operator reports that a :py:type:`set` containing a curve + does contain that curve. + """ + curve = get_elliptic_curve(self.curve_factory.curve_name) + curves = set([curve]) + self.assertIn(curve, curves) + + + def test_does_not_contain(self): + """ + The ``in`` operator reports that a :py:type:`set` not containing a + curve does not contain that curve. + """ + curve = get_elliptic_curve(self.curve_factory.curve_name) + curves = set([get_elliptic_curve(self.curve_factory.another_curve_name)]) + self.assertNotIn(curve, curves) + + + if __name__ == '__main__': main() diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_rand.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_rand.py similarity index 85% rename from Darwin/lib/python3.4/site-packages/OpenSSL/test/test_rand.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/test/test_rand.py index c52cb6b..3d5c290 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_rand.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_rand.py @@ -10,7 +10,7 @@ import os import stat import sys -from OpenSSL.test.util import TestCase, b +from OpenSSL.test.util import NON_ASCII, TestCase, b from OpenSSL import rand @@ -176,27 +176,47 @@ class RandTests(TestCase): self.assertRaises(TypeError, rand.write_file, None) self.assertRaises(TypeError, rand.write_file, "foo", None) + def _read_write_test(self, path): + """ + Verify that ``rand.write_file`` and ``rand.load_file`` can be used. + """ + # Create the file so cleanup is more straightforward + with open(path, "w"): + pass - def test_files(self): - """ - Test reading and writing of files via rand functions. - """ - # Write random bytes to a file - tmpfile = self.mktemp() - # Make sure it exists (so cleanup definitely succeeds) - fObj = open(tmpfile, 'w') - fObj.close() try: - rand.write_file(tmpfile) + # Write random bytes to a file + rand.write_file(path) + # Verify length of written file - size = os.stat(tmpfile)[stat.ST_SIZE] + size = os.stat(path)[stat.ST_SIZE] self.assertEqual(1024, size) + # Read random bytes from file - rand.load_file(tmpfile) - rand.load_file(tmpfile, 4) # specify a length + rand.load_file(path) + rand.load_file(path, 4) # specify a length finally: # Cleanup - os.unlink(tmpfile) + os.unlink(path) + + + def test_bytes_paths(self): + """ + Random data can be saved and loaded to files with paths specified as + bytes. 
+ """ + path = self.mktemp() + path += NON_ASCII.encode(sys.getfilesystemencoding()) + self._read_write_test(path) + + + def test_unicode_paths(self): + """ + Random data can be saved and loaded to files with paths specified as + unicode. + """ + path = self.mktemp().decode('utf-8') + NON_ASCII + self._read_write_test(path) if __name__ == '__main__': diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_ssl.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_ssl.py similarity index 71% rename from Darwin/lib/python3.4/site-packages/OpenSSL/test/test_ssl.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/test/test_ssl.py index a6f0127..bb1c9ae 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/test/test_ssl.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_ssl.py @@ -7,19 +7,21 @@ Unit tests for :py:obj:`OpenSSL.SSL`. from gc import collect, get_referrers from errno import ECONNREFUSED, EINPROGRESS, EWOULDBLOCK, EPIPE, ESHUTDOWN -from sys import platform, version_info +from sys import platform, getfilesystemencoding from socket import SHUT_RDWR, error, socket from os import makedirs from os.path import join from unittest import main from weakref import ref +from warnings import catch_warnings, simplefilter -from six import PY3, u +from six import PY3, text_type, u from OpenSSL.crypto import TYPE_RSA, FILETYPE_PEM from OpenSSL.crypto import PKey, X509, X509Extension, X509Store from OpenSSL.crypto import dump_privatekey, load_privatekey from OpenSSL.crypto import dump_certificate, load_certificate +from OpenSSL.crypto import get_elliptic_curves from OpenSSL.SSL import OPENSSL_VERSION_NUMBER, SSLEAY_VERSION, SSLEAY_CFLAGS from OpenSSL.SSL import SSLEAY_PLATFORM, SSLEAY_DIR, SSLEAY_BUILT_ON @@ -41,10 +43,11 @@ from OpenSSL.SSL import ( from OpenSSL.SSL import ( Context, ContextType, Session, Connection, ConnectionType, SSLeay_version) -from OpenSSL.test.util import TestCase, b -from OpenSSL.test.test_crypto import ( - cleartextCertificatePEM, cleartextPrivateKeyPEM) +from OpenSSL._util import lib as _lib + +from OpenSSL.test.util import WARNING_TYPE_EXPECTED, NON_ASCII, TestCase, b from OpenSSL.test.test_crypto import ( + cleartextCertificatePEM, cleartextPrivateKeyPEM, client_cert_pem, client_key_pem, server_cert_pem, server_key_pem, root_cert_pem) @@ -93,6 +96,23 @@ MBYCEQCobsg29c9WZP/54oAPcwiDAgEC """ +def join_bytes_or_unicode(prefix, suffix): + """ + Join two path components of either ``bytes`` or ``unicode``. + + The return type is the same as the type of ``prefix``. + """ + # If the types are the same, nothing special is necessary. + if type(prefix) == type(suffix): + return join(prefix, suffix) + + # Otherwise, coerce suffix to the type of prefix. + if isinstance(prefix, text_type): + return join(prefix, suffix.decode(getfilesystemencoding())) + else: + return join(prefix, suffix.encode(getfilesystemencoding())) + + def verify_cb(conn, cert, errnum, depth, ok): return ok @@ -277,6 +297,23 @@ class _LoopbackMixin: write.bio_write(dirty) + def _handshakeInMemory(self, client_conn, server_conn): + """ + Perform the TLS handshake between two :py:class:`Connection` instances + connected to each other via memory BIOs. 
+ """ + client_conn.set_connect_state() + server_conn.set_accept_state() + + for conn in [client_conn, server_conn]: + try: + conn.do_handshake() + except WantReadError: + pass + + self._interactInMemory(client_conn, server_conn) + + class VersionTests(TestCase): """ @@ -376,23 +413,52 @@ class ContextTests(TestCase, _LoopbackMixin): self.assertRaises(Error, ctx.use_privatekey_file, self.mktemp()) + def _use_privatekey_file_test(self, pemfile, filetype): + """ + Verify that calling ``Context.use_privatekey_file`` with the given + arguments does not raise an exception. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + + with open(pemfile, "wt") as pem: + pem.write( + dump_privatekey(FILETYPE_PEM, key).decode("ascii") + ) + + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey_file(pemfile, filetype) + + + def test_use_privatekey_file_bytes(self): + """ + A private key can be specified from a file by passing a ``bytes`` + instance giving the file name to ``Context.use_privatekey_file``. + """ + self._use_privatekey_file_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()), + FILETYPE_PEM, + ) + + + def test_use_privatekey_file_unicode(self): + """ + A private key can be specified from a file by passing a ``unicode`` + instance giving the file name to ``Context.use_privatekey_file``. + """ + self._use_privatekey_file_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII, + FILETYPE_PEM, + ) + + if not PY3: def test_use_privatekey_file_long(self): """ On Python 2 :py:obj:`Context.use_privatekey_file` accepts a filetype of type :py:obj:`long` as well as :py:obj:`int`. """ - pemfile = self.mktemp() - - key = PKey() - key.generate_key(TYPE_RSA, 128) - - with open(pemfile, "wt") as pem: - pem.write( - dump_privatekey(FILETYPE_PEM, key).decode("ascii")) - - ctx = Context(TLSv1_METHOD) - ctx.use_privatekey_file(pemfile, long(FILETYPE_PEM)) + self._use_privatekey_file_test(self.mktemp(), long(FILETYPE_PEM)) def test_use_certificate_wrong_args(self): @@ -457,21 +523,40 @@ class ContextTests(TestCase, _LoopbackMixin): self.assertRaises(Error, ctx.use_certificate_file, self.mktemp()) - def test_use_certificate_file(self): + def _use_certificate_file_test(self, certificate_file): """ - :py:obj:`Context.use_certificate` sets the certificate which will be - used to identify connections created using the context. + Verify that calling ``Context.use_certificate_file`` with the given + filename doesn't raise an exception. """ # TODO # Hard to assert anything. But we could set a privatekey then ask # OpenSSL if the cert and key agree using check_privatekey. Then as # long as check_privatekey works right we're good... - pem_filename = self.mktemp() - with open(pem_filename, "wb") as pem_file: + with open(certificate_file, "wb") as pem_file: pem_file.write(cleartextCertificatePEM) ctx = Context(TLSv1_METHOD) - ctx.use_certificate_file(pem_filename) + ctx.use_certificate_file(certificate_file) + + + def test_use_certificate_file_bytes(self): + """ + :py:obj:`Context.use_certificate_file` sets the certificate (given as a + ``bytes`` filename) which will be used to identify connections created + using the context. + """ + filename = self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + self._use_certificate_file_test(filename) + + + def test_use_certificate_file_unicode(self): + """ + :py:obj:`Context.use_certificate_file` sets the certificate (given as a + ``bytes`` filename) which will be used to identify connections created + using the context. 
+ """ + filename = self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + self._use_certificate_file_test(filename) if not PY3: @@ -488,6 +573,43 @@ class ContextTests(TestCase, _LoopbackMixin): ctx.use_certificate_file(pem_filename, long(FILETYPE_PEM)) + def test_check_privatekey_valid(self): + """ + :py:obj:`Context.check_privatekey` returns :py:obj:`None` if the + :py:obj:`Context` instance has been configured to use a matched key and + certificate pair. + """ + key = load_privatekey(FILETYPE_PEM, client_key_pem) + cert = load_certificate(FILETYPE_PEM, client_cert_pem) + context = Context(TLSv1_METHOD) + context.use_privatekey(key) + context.use_certificate(cert) + self.assertIs(None, context.check_privatekey()) + + + def test_check_privatekey_invalid(self): + """ + :py:obj:`Context.check_privatekey` raises :py:obj:`Error` if the + :py:obj:`Context` instance has been configured to use a key and + certificate pair which don't relate to each other. + """ + key = load_privatekey(FILETYPE_PEM, client_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + context = Context(TLSv1_METHOD) + context.use_privatekey(key) + context.use_certificate(cert) + self.assertRaises(Error, context.check_privatekey) + + + def test_check_privatekey_wrong_args(self): + """ + :py:obj:`Context.check_privatekey` raises :py:obj:`TypeError` if called + with other than no arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.check_privatekey, object()) + + def test_set_app_data_wrong_args(self): """ :py:obj:`Context.set_app_data` raises :py:obj:`TypeError` if called with other than @@ -848,12 +970,13 @@ class ContextTests(TestCase, _LoopbackMixin): self.assertEqual(cert.get_subject().CN, 'Testing Root CA') - def test_load_verify_file(self): + def _load_verify_cafile(self, cafile): """ - :py:obj:`Context.load_verify_locations` accepts a file name and uses the - certificates within for verification purposes. + Verify that if path to a file containing a certificate is passed to + ``Context.load_verify_locations`` for the ``cafile`` parameter, that + certificate is used as a trust root for the purposes of verifying + connections created using that ``Context``. """ - cafile = self.mktemp() fObj = open(cafile, 'w') fObj.write(cleartextCertificatePEM.decode('ascii')) fObj.close() @@ -861,6 +984,27 @@ class ContextTests(TestCase, _LoopbackMixin): self._load_verify_locations_test(cafile) + def test_load_verify_bytes_cafile(self): + """ + :py:obj:`Context.load_verify_locations` accepts a file name as a + ``bytes`` instance and uses the certificates within for verification + purposes. + """ + cafile = self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + self._load_verify_cafile(cafile) + + + def test_load_verify_unicode_cafile(self): + """ + :py:obj:`Context.load_verify_locations` accepts a file name as a + ``unicode`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_cafile( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + def test_load_verify_invalid_file(self): """ :py:obj:`Context.load_verify_locations` raises :py:obj:`Error` when passed a @@ -871,25 +1015,47 @@ class ContextTests(TestCase, _LoopbackMixin): Error, clientContext.load_verify_locations, self.mktemp()) - def test_load_verify_directory(self): + def _load_verify_directory_locations_capath(self, capath): """ - :py:obj:`Context.load_verify_locations` accepts a directory name and uses - the certificates within for verification purposes. 
+ Verify that if path to a directory containing certificate files is + passed to ``Context.load_verify_locations`` for the ``capath`` + parameter, those certificates are used as trust roots for the purposes + of verifying connections created using that ``Context``. """ - capath = self.mktemp() makedirs(capath) # Hash values computed manually with c_rehash to avoid depending on # c_rehash in the test suite. One is from OpenSSL 0.9.8, the other # from OpenSSL 1.0.0. for name in [b'c7adac82.0', b'c3705638.0']: - cafile = join(capath, name) - fObj = open(cafile, 'w') - fObj.write(cleartextCertificatePEM.decode('ascii')) - fObj.close() + cafile = join_bytes_or_unicode(capath, name) + with open(cafile, 'w') as fObj: + fObj.write(cleartextCertificatePEM.decode('ascii')) self._load_verify_locations_test(None, capath) + def test_load_verify_directory_bytes_capath(self): + """ + :py:obj:`Context.load_verify_locations` accepts a directory name as a + ``bytes`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_directory_locations_capath( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + ) + + + def test_load_verify_directory_unicode_capath(self): + """ + :py:obj:`Context.load_verify_locations` accepts a directory name as a + ``unicode`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_directory_locations_capath( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + def test_load_verify_locations_wrong_args(self): """ :py:obj:`Context.load_verify_locations` raises :py:obj:`TypeError` if called with @@ -919,8 +1085,8 @@ class ContextTests(TestCase, _LoopbackMixin): # in a unit test is bad, but it's the only way I can think of to # really test this. -exarkun - # Arg, verisign.com doesn't speak TLSv1 - context = Context(SSLv3_METHOD) + # Arg, verisign.com doesn't speak anything newer than TLS 1.0 + context = Context(TLSv1_METHOD) context.set_default_verify_paths() context.set_verify( VERIFY_PEER, @@ -981,6 +1147,34 @@ class ContextTests(TestCase, _LoopbackMixin): pass + def test_set_verify_callback_connection_argument(self): + """ + The first argument passed to the verify callback is the + :py:class:`Connection` instance for which verification is taking place. + """ + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + serverConnection = Connection(serverContext, None) + + class VerifyCallback(object): + def callback(self, connection, *args): + self.connection = connection + return 1 + + verify = VerifyCallback() + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify(VERIFY_PEER, verify.callback) + clientConnection = Connection(clientContext, None) + clientConnection.set_connect_state() + + self._handshakeInMemory(clientConnection, serverConnection) + + self.assertIdentical(verify.connection, clientConnection) + + def test_set_verify_callback_exception(self): """ If the verify callback passed to :py:obj:`Context.set_verify` raises an @@ -1047,43 +1241,67 @@ class ContextTests(TestCase, _LoopbackMixin): self._handshake_test(serverContext, clientContext) - def test_use_certificate_chain_file(self): + def _use_certificate_chain_file_test(self, certdir): """ - :py:obj:`Context.use_certificate_chain_file` reads a certificate chain from - the specified file. 
+ Verify that :py:obj:`Context.use_certificate_chain_file` reads a + certificate chain from a specified file. - The chain is tested by starting a server with scert and connecting - to it with a client which trusts cacert and requires verification to + The chain is tested by starting a server with scert and connecting to + it with a client which trusts cacert and requires verification to succeed. """ chain = _create_certificate_chain() [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + makedirs(certdir) + + chainFile = join_bytes_or_unicode(certdir, "chain.pem") + caFile = join_bytes_or_unicode(certdir, "ca.pem") + # Write out the chain file. - chainFile = self.mktemp() - fObj = open(chainFile, 'wb') - # Most specific to least general. - fObj.write(dump_certificate(FILETYPE_PEM, scert)) - fObj.write(dump_certificate(FILETYPE_PEM, icert)) - fObj.write(dump_certificate(FILETYPE_PEM, cacert)) - fObj.close() + with open(chainFile, 'wb') as fObj: + # Most specific to least general. + fObj.write(dump_certificate(FILETYPE_PEM, scert)) + fObj.write(dump_certificate(FILETYPE_PEM, icert)) + fObj.write(dump_certificate(FILETYPE_PEM, cacert)) + + with open(caFile, 'w') as fObj: + fObj.write(dump_certificate(FILETYPE_PEM, cacert).decode('ascii')) serverContext = Context(TLSv1_METHOD) serverContext.use_certificate_chain_file(chainFile) serverContext.use_privatekey(skey) - fObj = open('ca.pem', 'w') - fObj.write(dump_certificate(FILETYPE_PEM, cacert).decode('ascii')) - fObj.close() - clientContext = Context(TLSv1_METHOD) clientContext.set_verify( VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) - clientContext.load_verify_locations(b"ca.pem") + clientContext.load_verify_locations(caFile) self._handshake_test(serverContext, clientContext) + def test_use_certificate_chain_file_bytes(self): + """ + ``Context.use_certificate_chain_file`` accepts the name of a file (as + an instance of ``bytes``) to specify additional certificates to use to + construct and verify a trust chain. + """ + self._use_certificate_chain_file_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + ) + + + def test_use_certificate_chain_file_unicode(self): + """ + ``Context.use_certificate_chain_file`` accepts the name of a file (as + an instance of ``unicode``) to specify additional certificates to use + to construct and verify a trust chain. + """ + self._use_certificate_chain_file_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + def test_use_certificate_chain_file_wrong_args(self): """ :py:obj:`Context.use_certificate_chain_file` raises :py:obj:`TypeError` @@ -1158,20 +1376,51 @@ class ContextTests(TestCase, _LoopbackMixin): self.assertRaises(Error, context.load_tmp_dh, b"hello") - def test_load_tmp_dh(self): + def _load_tmp_dh_test(self, dhfilename): """ - :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the - specified file. + Verify that calling ``Context.load_tmp_dh`` with the given filename + does not raise an exception. """ context = Context(TLSv1_METHOD) - dhfilename = self.mktemp() - dhfile = open(dhfilename, "w") - dhfile.write(dhparam) - dhfile.close() + with open(dhfilename, "w") as dhfile: + dhfile.write(dhparam) + context.load_tmp_dh(dhfilename) # XXX What should I assert here? -exarkun + def test_load_tmp_dh_bytes(self): + """ + :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the + specified file (given as ``bytes``). 
+ """ + self._load_tmp_dh_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()), + ) + + + def test_load_tmp_dh_unicode(self): + """ + :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the + specified file (given as ``unicode``). + """ + self._load_tmp_dh_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII, + ) + + + def test_set_tmp_ecdh(self): + """ + :py:obj:`Context.set_tmp_ecdh` sets the elliptic curve for + Diffie-Hellman to the specified curve. + """ + context = Context(TLSv1_METHOD) + for curve in get_elliptic_curves(): + # The only easily "assertable" thing is that it does not raise an + # exception. + context.set_tmp_ecdh(curve) + + def test_set_cipher_list_bytes(self): """ :py:obj:`Context.set_cipher_list` accepts a :py:obj:`bytes` naming the @@ -1375,6 +1624,396 @@ class ServerNameCallbackTests(TestCase, _LoopbackMixin): self.assertEqual([(server, b("foo1.example.com"))], args) +class NextProtoNegotiationTests(TestCase, _LoopbackMixin): + """ + Test for Next Protocol Negotiation in PyOpenSSL. + """ + if _lib.Cryptography_HAS_NEXTPROTONEG: + def test_npn_success(self): + """ + Tests that clients and servers that agree on the negotiated next + protocol can correct establish a connection, and that the agreed + protocol is reported by the connections. + """ + advertise_args = [] + select_args = [] + def advertise(conn): + advertise_args.append((conn,)) + return [b'http/1.1', b'spdy/2'] + def select(conn, options): + select_args.append((conn, options)) + return b'spdy/2' + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server,)], advertise_args) + self.assertEqual([(client, [b'http/1.1', b'spdy/2'])], select_args) + + self.assertEqual(server.get_next_proto_negotiated(), b'spdy/2') + self.assertEqual(client.get_next_proto_negotiated(), b'spdy/2') + + + def test_npn_client_fail(self): + """ + Tests that when clients and servers cannot agree on what protocol + to use next that the TLS connection does not get established. + """ + advertise_args = [] + select_args = [] + def advertise(conn): + advertise_args.append((conn,)) + return [b'http/1.1', b'spdy/2'] + def select(conn, options): + select_args.append((conn, options)) + return b'' + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. 
+ self.assertRaises(Error, self._interactInMemory, server, client) + + self.assertEqual([(server,)], advertise_args) + self.assertEqual([(client, [b'http/1.1', b'spdy/2'])], select_args) + + + def test_npn_select_error(self): + """ + Test that we can handle exceptions in the select callback. If + select fails it should be fatal to the connection. + """ + advertise_args = [] + def advertise(conn): + advertise_args.append((conn,)) + return [b'http/1.1', b'spdy/2'] + def select(conn, options): + raise TypeError + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the callback throws an exception it should be raised here. + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([(server,)], advertise_args) + + + def test_npn_advertise_error(self): + """ + Test that we can handle exceptions in the advertise callback. If + advertise fails no NPN is advertised to the client. + """ + select_args = [] + def advertise(conn): + raise TypeError + def select(conn, options): + select_args.append((conn, options)) + return b'' + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([], select_args) + + else: + # No NPN. + def test_npn_not_implemented(self): + # Test the context methods first. + context = Context(TLSv1_METHOD) + fail_methods = [ + context.set_npn_advertise_callback, + context.set_npn_select_callback, + ] + for method in fail_methods: + self.assertRaises( + NotImplementedError, method, None + ) + + # Now test a connection. + conn = Connection(context) + fail_methods = [ + conn.get_next_proto_negotiated, + ] + for method in fail_methods: + self.assertRaises(NotImplementedError, method) + + + +class ApplicationLayerProtoNegotiationTests(TestCase, _LoopbackMixin): + """ + Tests for ALPN in PyOpenSSL. + """ + # Skip tests on versions that don't support ALPN. + if _lib.Cryptography_HAS_ALPN: + + def test_alpn_success(self): + """ + Clients and servers that agree on the negotiated ALPN protocol can + correct establish a connection, and the agreed protocol is reported + by the connections. 
+ """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'spdy/2' + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + self.assertEqual(server.get_alpn_proto_negotiated(), b'spdy/2') + self.assertEqual(client.get_alpn_proto_negotiated(), b'spdy/2') + + + def test_alpn_set_on_connection(self): + """ + The same as test_alpn_success, but setting the ALPN protocols on + the connection rather than the context. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'spdy/2' + + # Setup the client context but don't set any ALPN protocols. + client_context = Context(TLSv1_METHOD) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + # Set the ALPN protocols on the client connection. + client = Connection(client_context, None) + client.set_alpn_protos([b'http/1.1', b'spdy/2']) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + self.assertEqual(server.get_alpn_proto_negotiated(), b'spdy/2') + self.assertEqual(client.get_alpn_proto_negotiated(), b'spdy/2') + + + def test_alpn_server_fail(self): + """ + When clients and servers cannot agree on what protocol to use next + the TLS connection does not get established. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'' + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. + self.assertRaises(Error, self._interactInMemory, server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + + def test_alpn_no_server(self): + """ + When clients and servers cannot agree on what protocol to use next + because the server doesn't offer ALPN, no protocol is negotiated. 
+ """ + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # Do the dance. + self._interactInMemory(server, client) + + self.assertEqual(client.get_alpn_proto_negotiated(), b'') + + + def test_alpn_callback_exception(self): + """ + We can handle exceptions in the ALPN select callback. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + raise TypeError() + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + else: + # No ALPN. + def test_alpn_not_implemented(self): + """ + If ALPN is not in OpenSSL, we should raise NotImplementedError. + """ + # Test the context methods first. + context = Context(TLSv1_METHOD) + self.assertRaises( + NotImplementedError, context.set_alpn_protos, None + ) + self.assertRaises( + NotImplementedError, context.set_alpn_select_callback, None + ) + + # Now test a connection. + conn = Connection(context) + self.assertRaises( + NotImplementedError, context.set_alpn_protos, None + ) + + class SessionTests(TestCase): """ @@ -1496,7 +2135,7 @@ class ConnectionTests(TestCase, _LoopbackMixin): self.assertRaises( TypeError, conn.set_tlsext_host_name, b("with\0null")) - if version_info >= (3,): + if PY3: # On Python 3.x, don't accidentally implicitly convert from text. self.assertRaises( TypeError, @@ -1650,6 +2289,40 @@ class ConnectionTests(TestCase, _LoopbackMixin): self.assertEquals(server.get_shutdown(), SENT_SHUTDOWN|RECEIVED_SHUTDOWN) + def test_shutdown_closed(self): + """ + If the underlying socket is closed, :py:obj:`Connection.shutdown` propagates the + write error from the low level write call. + """ + server, client = self._loopback() + server.sock_shutdown(2) + exc = self.assertRaises(SysCallError, server.shutdown) + if platform == "win32": + self.assertEqual(exc.args[0], ESHUTDOWN) + else: + self.assertEqual(exc.args[0], EPIPE) + + + def test_shutdown_truncated(self): + """ + If the underlying connection is truncated, :obj:`Connection.shutdown` + raises an :obj:`Error`. 
+ """ + server_ctx = Context(TLSv1_METHOD) + client_ctx = Context(TLSv1_METHOD) + server_ctx.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_ctx.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(server_ctx, None) + client = Connection(client_ctx, None) + self._handshakeInMemory(client, server) + self.assertEqual(server.shutdown(), False) + self.assertRaises(WantReadError, server.shutdown) + server.bio_shutdown() + self.assertRaises(Error, server.shutdown) + + def test_set_shutdown(self): """ :py:obj:`Connection.set_shutdown` sets the state of the SSL connection shutdown @@ -1911,9 +2584,13 @@ class ConnectionTests(TestCase, _LoopbackMixin): """ client_socket, server_socket = socket_pair() # Fill up the client's send buffer so Connection won't be able to write - # anything. - msg = b"x" * 512 - for i in range(2048): + # anything. Only write a single byte at a time so we can be sure we + # completely fill the buffer. Even though the socket API is allowed to + # signal a short write via its return value it seems this doesn't + # always happen on all platforms (FreeBSD and OS X particular) for the + # very last bit of available buffer space. + msg = b"x" + for i in range(1024 * 1024 * 4): try: client_socket.send(msg) except error as e: @@ -1932,6 +2609,139 @@ class ConnectionTests(TestCase, _LoopbackMixin): # XXX want_read + def test_get_finished_before_connect(self): + """ + :py:obj:`Connection.get_finished` returns :py:obj:`None` before TLS + handshake is completed. + """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertEqual(connection.get_finished(), None) + + + def test_get_peer_finished_before_connect(self): + """ + :py:obj:`Connection.get_peer_finished` returns :py:obj:`None` before + TLS handshake is completed. + """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertEqual(connection.get_peer_finished(), None) + + + def test_get_finished(self): + """ + :py:obj:`Connection.get_finished` method returns the TLS Finished + message send from client, or server. Finished messages are send during + TLS handshake. + """ + + server, client = self._loopback() + + self.assertNotEqual(server.get_finished(), None) + self.assertTrue(len(server.get_finished()) > 0) + + + def test_get_peer_finished(self): + """ + :py:obj:`Connection.get_peer_finished` method returns the TLS Finished + message received from client, or server. Finished messages are send + during TLS handshake. + """ + server, client = self._loopback() + + self.assertNotEqual(server.get_peer_finished(), None) + self.assertTrue(len(server.get_peer_finished()) > 0) + + + def test_tls_finished_message_symmetry(self): + """ + The TLS Finished message send by server must be the TLS Finished message + received by client. + + The TLS Finished message send by client must be the TLS Finished message + received by server. + """ + server, client = self._loopback() + + self.assertEqual(server.get_finished(), client.get_peer_finished()) + self.assertEqual(client.get_finished(), server.get_peer_finished()) + + + def test_get_cipher_name_before_connect(self): + """ + :py:obj:`Connection.get_cipher_name` returns :py:obj:`None` if no + connection has been established. 
+ """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_name(), None) + + + def test_get_cipher_name(self): + """ + :py:obj:`Connection.get_cipher_name` returns a :py:class:`unicode` + string giving the name of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_name, client_cipher_name = \ + server.get_cipher_name(), client.get_cipher_name() + + self.assertIsInstance(server_cipher_name, text_type) + self.assertIsInstance(client_cipher_name, text_type) + + self.assertEqual(server_cipher_name, client_cipher_name) + + + def test_get_cipher_version_before_connect(self): + """ + :py:obj:`Connection.get_cipher_version` returns :py:obj:`None` if no + connection has been established. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_version(), None) + + + def test_get_cipher_version(self): + """ + :py:obj:`Connection.get_cipher_version` returns a :py:class:`unicode` + string giving the protocol name of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_version, client_cipher_version = \ + server.get_cipher_version(), client.get_cipher_version() + + self.assertIsInstance(server_cipher_version, text_type) + self.assertIsInstance(client_cipher_version, text_type) + + self.assertEqual(server_cipher_version, client_cipher_version) + + + def test_get_cipher_bits_before_connect(self): + """ + :py:obj:`Connection.get_cipher_bits` returns :py:obj:`None` if no + connection has been established. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_bits(), None) + + + def test_get_cipher_bits(self): + """ + :py:obj:`Connection.get_cipher_bits` returns the number of secret bits + of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_bits, client_cipher_bits = \ + server.get_cipher_bits(), client.get_cipher_bits() + + self.assertIsInstance(server_cipher_bits, int) + self.assertIsInstance(client_cipher_bits, int) + + self.assertEqual(server_cipher_bits, client_cipher_bits) + class ConnectionGetCipherListTests(TestCase): @@ -1986,6 +2796,26 @@ class ConnectionSendTests(TestCase, _LoopbackMixin): self.assertEquals(count, 2) self.assertEquals(client.recv(2), b('xy')) + + def test_text(self): + """ + When passed a text, :py:obj:`Connection.send` transmits all of it and + returns the number of bytes sent. It also raises a DeprecationWarning. + """ + server, client = self._loopback() + with catch_warnings(record=True) as w: + simplefilter("always") + count = server.send(b"xy".decode("ascii")) + self.assertEqual( + "{0} for buf is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b"xy") + try: memoryview except NameError: @@ -2003,6 +2833,181 @@ class ConnectionSendTests(TestCase, _LoopbackMixin): self.assertEquals(client.recv(2), b('xy')) + try: + buffer + except NameError: + "cannot test sending buffer without buffer" + else: + def test_short_buffer(self): + """ + When passed a buffer containing a small number of bytes, + :py:obj:`Connection.send` transmits all of them and returns the number of + bytes sent. 
+ """ + server, client = self._loopback() + count = server.send(buffer(b('xy'))) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + + +def _make_memoryview(size): + """ + Create a new ``memoryview`` wrapped around a ``bytearray`` of the given + size. + """ + return memoryview(bytearray(size)) + + + +class ConnectionRecvIntoTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.recv_into` + """ + def _no_length_test(self, factory): + """ + Assert that when the given buffer is passed to + ``Connection.recv_into``, whatever bytes are available to be received + that fit into that buffer are written into that buffer. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('xy')) + + self.assertEqual(client.recv_into(output_buffer), 2) + self.assertEqual(output_buffer, bytearray(b('xy\x00\x00\x00'))) + + + def test_bytearray_no_length(self): + """ + :py:obj:`Connection.recv_into` can be passed a ``bytearray`` instance + and data in the receive buffer is written to it. + """ + self._no_length_test(bytearray) + + + def _respects_length_test(self, factory): + """ + Assert that when the given buffer is passed to ``Connection.recv_into`` + along with a value for ``nbytes`` that is less than the size of that + buffer, only ``nbytes`` bytes are written into the buffer. + """ + output_buffer = factory(10) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer, 5), 5) + self.assertEqual( + output_buffer, bytearray(b('abcde\x00\x00\x00\x00\x00')) + ) + + + def test_bytearray_respects_length(self): + """ + When called with a ``bytearray`` instance, + :py:obj:`Connection.recv_into` respects the ``nbytes`` parameter and + doesn't copy in more than that number of bytes. + """ + self._respects_length_test(bytearray) + + + def _doesnt_overfill_test(self, factory): + """ + Assert that if there are more bytes available to be read from the + receive buffer than would fit into the buffer passed to + :py:obj:`Connection.recv_into`, only as many as fit are written into + it. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer), 5) + self.assertEqual(output_buffer, bytearray(b('abcde'))) + rest = client.recv(5) + self.assertEqual(b('fghij'), rest) + + + def test_bytearray_doesnt_overfill(self): + """ + When called with a ``bytearray`` instance, + :py:obj:`Connection.recv_into` respects the size of the array and + doesn't write more bytes into it than will fit. + """ + self._doesnt_overfill_test(bytearray) + + + def _really_doesnt_overfill_test(self, factory): + """ + Assert that if the value given by ``nbytes`` is greater than the actual + size of the output buffer passed to :py:obj:`Connection.recv_into`, the + behavior is as if no value was given for ``nbytes`` at all. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer, 50), 5) + self.assertEqual(output_buffer, bytearray(b('abcde'))) + rest = client.recv(5) + self.assertEqual(b('fghij'), rest) + + + def test_bytearray_really_doesnt_overfill(self): + """ + When called with a ``bytearray`` instance and an ``nbytes`` value that + is too large, :py:obj:`Connection.recv_into` respects the size of the + array and not the ``nbytes`` value and doesn't write more bytes into + the buffer than will fit. 
+ """ + self._doesnt_overfill_test(bytearray) + + + try: + memoryview + except NameError: + "cannot test recv_into memoryview without memoryview" + else: + def test_memoryview_no_length(self): + """ + :py:obj:`Connection.recv_into` can be passed a ``memoryview`` + instance and data in the receive buffer is written to it. + """ + self._no_length_test(_make_memoryview) + + + def test_memoryview_respects_length(self): + """ + When called with a ``memoryview`` instance, + :py:obj:`Connection.recv_into` respects the ``nbytes`` parameter + and doesn't copy more than that number of bytes in. + """ + self._respects_length_test(_make_memoryview) + + + def test_memoryview_doesnt_overfill(self): + """ + When called with a ``memoryview`` instance, + :py:obj:`Connection.recv_into` respects the size of the array and + doesn't write more bytes into it than will fit. + """ + self._doesnt_overfill_test(_make_memoryview) + + + def test_memoryview_really_doesnt_overfill(self): + """ + When called with a ``memoryview`` instance and an ``nbytes`` value + that is too large, :py:obj:`Connection.recv_into` respects the size + of the array and not the ``nbytes`` value and doesn't write more + bytes into the buffer than will fit. + """ + self._doesnt_overfill_test(_make_memoryview) + + class ConnectionSendallTests(TestCase, _LoopbackMixin): """ @@ -2031,6 +3036,25 @@ class ConnectionSendallTests(TestCase, _LoopbackMixin): self.assertEquals(client.recv(1), b('x')) + def test_text(self): + """ + :py:obj:`Connection.sendall` transmits all the content in the string + passed to it raising a DeprecationWarning in case of this being a text. + """ + server, client = self._loopback() + with catch_warnings(record=True) as w: + simplefilter("always") + server.sendall(b"x".decode("ascii")) + self.assertEqual( + "{0} for buf is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.assertEquals(client.recv(1), b"x") + + try: memoryview except NameError: @@ -2046,6 +3070,21 @@ class ConnectionSendallTests(TestCase, _LoopbackMixin): self.assertEquals(client.recv(1), b('x')) + try: + buffer + except NameError: + "cannot test sending buffers without buffers" + else: + def test_short_buffers(self): + """ + When passed a buffer containing a small number of bytes, + :py:obj:`Connection.sendall` transmits all of them. + """ + server, client = self._loopback() + server.sendall(buffer(b('x'))) + self.assertEquals(client.recv(1), b('x')) + + def test_long(self): """ :py:obj:`Connection.sendall` transmits all of the bytes in the string passed to diff --git a/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_tsafe.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_tsafe.py new file mode 100644 index 0000000..0456957 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_tsafe.py @@ -0,0 +1,24 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.tsafe`. +""" + +from OpenSSL.SSL import TLSv1_METHOD, Context +from OpenSSL.tsafe import Connection +from OpenSSL.test.util import TestCase + + +class ConnectionTest(TestCase): + """ + Tests for :py:obj:`OpenSSL.tsafe.Connection`. + """ + def test_instantiation(self): + """ + :py:obj:`OpenSSL.tsafe.Connection` can be instantiated. + """ + # The following line should not throw an error. This isn't an ideal + # test. 
It would be great to refactor the other Connection tests so + # they could automatically be applied to this class too. + Connection(Context(TLSv1_METHOD), None) diff --git a/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_util.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_util.py new file mode 100644 index 0000000..8d92a3c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/test_util.py @@ -0,0 +1,17 @@ +from OpenSSL._util import exception_from_error_queue, lib +from OpenSSL.test.util import TestCase + + + +class ErrorTests(TestCase): + """ + Tests for handling of certain OpenSSL error cases. + """ + def test_exception_from_error_queue_nonexistent_reason(self): + """ + :py:func:`exception_from_error_queue` raises ``ValueError`` when it + encounters an OpenSSL error code which does not have a reason string. + """ + lib.ERR_put_error(lib.ERR_LIB_EVP, 0, 1112, b"", 10) + exc = self.assertRaises(ValueError, exception_from_error_queue, ValueError) + self.assertEqual(exc.args[0][0][2], "") diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/test/util.py b/Darwin/lib/python3.5/site-packages/OpenSSL/test/util.py similarity index 71% rename from Darwin/lib/python3.4/site-packages/OpenSSL/test/util.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/test/util.py index 4e4d812..b8be91d 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/test/util.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/test/util.py @@ -14,6 +14,8 @@ from tempfile import mktemp from unittest import TestCase import sys +from six import PY3 + from OpenSSL._util import exception_from_error_queue from OpenSSL.crypto import Error @@ -25,6 +27,11 @@ except Exception: from OpenSSL._util import ffi, lib, byte_string as b + +# This is the UTF-8 encoding of the SNOWMAN unicode code point. +NON_ASCII = b("\xe2\x98\x83").decode("utf-8") + + class TestCase(TestCase): """ :py:class:`TestCase` adds useful testing functionality beyond what is available @@ -210,7 +217,24 @@ class TestCase(TestCase): return containee assertIn = failUnlessIn - def failUnlessIdentical(self, first, second, msg=None): + def assertNotIn(self, containee, container, msg=None): + """ + Fail the test if C{containee} is found in C{container}. + + @param containee: the value that should not be in C{container} + @param container: a sequence type, or in the case of a mapping type, + will follow semantics of 'if key in dict.keys()' + @param msg: if msg is None, then the failure message will be + '%r in %r' % (first, second) + """ + if containee in container: + raise self.failureException(msg or "%r in %r" + % (containee, container)) + return containee + failIfIn = assertNotIn + + + def assertIs(self, first, second, msg=None): """ Fail the test if :py:data:`first` is not :py:data:`second`. This is an obect-identity-equality test, not an object equality @@ -222,10 +246,10 @@ class TestCase(TestCase): if first is not second: raise self.failureException(msg or '%r is not %r' % (first, second)) return first - assertIdentical = failUnlessIdentical + assertIdentical = failUnlessIdentical = assertIs - def failIfIdentical(self, first, second, msg=None): + def assertIsNot(self, first, second, msg=None): """ Fail the test if :py:data:`first` is :py:data:`second`. 
This is an obect-identity-equality test, not an object equality @@ -237,7 +261,7 @@ class TestCase(TestCase): if first is second: raise self.failureException(msg or '%r is %r' % (first, second)) return first - assertNotIdentical = failIfIdentical + assertNotIdentical = failIfIdentical = assertIsNot def failUnlessRaises(self, exception, f, *args, **kwargs): @@ -300,3 +324,140 @@ class TestCase(TestCase): self.assertTrue(isinstance(theType, type)) instance = theType(*constructionArgs) self.assertIdentical(type(instance), theType) + + + +class EqualityTestsMixin(object): + """ + A mixin defining tests for the standard implementation of C{==} and C{!=}. + """ + def anInstance(self): + """ + Return an instance of the class under test. Each call to this method + must return a different object. All objects returned must be equal to + each other. + """ + raise NotImplementedError() + + + def anotherInstance(self): + """ + Return an instance of the class under test. Each call to this method + must return a different object. The objects must not be equal to the + objects returned by C{anInstance}. They may or may not be equal to + each other (they will not be compared against each other). + """ + raise NotImplementedError() + + + def test_identicalEq(self): + """ + An object compares equal to itself using the C{==} operator. + """ + o = self.anInstance() + self.assertTrue(o == o) + + + def test_identicalNe(self): + """ + An object doesn't compare not equal to itself using the C{!=} operator. + """ + o = self.anInstance() + self.assertFalse(o != o) + + + def test_sameEq(self): + """ + Two objects that are equal to each other compare equal to each other + using the C{==} operator. + """ + a = self.anInstance() + b = self.anInstance() + self.assertTrue(a == b) + + + def test_sameNe(self): + """ + Two objects that are equal to each other do not compare not equal to + each other using the C{!=} operator. + """ + a = self.anInstance() + b = self.anInstance() + self.assertFalse(a != b) + + + def test_differentEq(self): + """ + Two objects that are not equal to each other do not compare equal to + each other using the C{==} operator. + """ + a = self.anInstance() + b = self.anotherInstance() + self.assertFalse(a == b) + + + def test_differentNe(self): + """ + Two objects that are not equal to each other compare not equal to each + other using the C{!=} operator. + """ + a = self.anInstance() + b = self.anotherInstance() + self.assertTrue(a != b) + + + def test_anotherTypeEq(self): + """ + The object does not compare equal to an object of an unrelated type + (which does not implement the comparison) using the C{==} operator. + """ + a = self.anInstance() + b = object() + self.assertFalse(a == b) + + + def test_anotherTypeNe(self): + """ + The object compares not equal to an object of an unrelated type (which + does not implement the comparison) using the C{!=} operator. + """ + a = self.anInstance() + b = object() + self.assertTrue(a != b) + + + def test_delegatedEq(self): + """ + The result of comparison using C{==} is delegated to the right-hand + operand if it is of an unrelated type. + """ + class Delegate(object): + def __eq__(self, other): + # Do something crazy and obvious. + return [self] + + a = self.anInstance() + b = Delegate() + self.assertEqual(a == b, [b]) + + + def test_delegateNe(self): + """ + The result of comparison using C{!=} is delegated to the right-hand + operand if it is of an unrelated type. 
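EqualityTestsMixin is meant to be combined with a concrete TestCase that supplies anInstance()/anotherInstance(); every test method comes from the mixin. A hypothetical example (the Colour class is invented for illustration, and the import path assumes this util module):

# Hypothetical usage sketch for EqualityTestsMixin; Colour is made up.
from OpenSSL.test.util import TestCase, EqualityTestsMixin


class Colour(object):
    def __init__(self, name):
        self.name = name

    def __eq__(self, other):
        if isinstance(other, Colour):
            return self.name == other.name
        return NotImplemented

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result


class ColourEqualityTests(TestCase, EqualityTestsMixin):
    def anInstance(self):
        return Colour("red")       # a fresh object each call, all equal

    def anotherInstance(self):
        return Colour("blue")      # never equal to anInstance()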
+ """ + class Delegate(object): + def __ne__(self, other): + # Do something crazy and obvious. + return [self] + + a = self.anInstance() + b = Delegate() + self.assertEqual(a != b, [b]) + + +# The type name expected in warnings about using the wrong string type. +if PY3: + WARNING_TYPE_EXPECTED = "str" +else: + WARNING_TYPE_EXPECTED = "unicode" diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/tsafe.py b/Darwin/lib/python3.5/site-packages/OpenSSL/tsafe.py similarity index 94% rename from Darwin/lib/python3.4/site-packages/OpenSSL/tsafe.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/tsafe.py index 9d7ad2f..3a9c710 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/tsafe.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/tsafe.py @@ -8,7 +8,7 @@ del threading class Connection: def __init__(self, *args): - self._ssl_conn = apply(_ssl.Connection, args) + self._ssl_conn = _ssl.Connection(*args) self._lock = _RLock() for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read', diff --git a/Darwin/lib/python3.4/site-packages/OpenSSL/version.py b/Darwin/lib/python3.5/site-packages/OpenSSL/version.py similarity index 87% rename from Darwin/lib/python3.4/site-packages/OpenSSL/version.py rename to Darwin/lib/python3.5/site-packages/OpenSSL/version.py index 307dba0..eb3b736 100644 --- a/Darwin/lib/python3.4/site-packages/OpenSSL/version.py +++ b/Darwin/lib/python3.5/site-packages/OpenSSL/version.py @@ -6,4 +6,4 @@ pyOpenSSL - A simple wrapper around the OpenSSL library """ -__version__ = '0.14' +__version__ = '0.15.1' diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libfreetype.6.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libfreetype.6.dylib new file mode 100755 index 0000000..f3c5847 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libfreetype.6.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libjpeg.9.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libjpeg.9.dylib new file mode 100755 index 0000000..0f6141e Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libjpeg.9.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/liblcms2.2.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/liblcms2.2.dylib new file mode 100755 index 0000000..81c171f Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/liblcms2.2.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libpng16.16.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libpng16.16.dylib new file mode 100755 index 0000000..d89c718 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libpng16.16.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libtiff.5.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libtiff.5.dylib new file mode 100755 index 0000000..7b89182 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libtiff.5.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebp.5.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebp.5.dylib new file mode 100755 index 0000000..4cf14a1 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebp.5.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpdemux.1.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpdemux.1.dylib new file mode 100755 index 0000000..55e3f09 Binary files /dev/null and 
b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpdemux.1.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpmux.1.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpmux.1.dylib new file mode 100755 index 0000000..3785e05 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libwebpmux.1.dylib differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libz.1.2.8.dylib b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libz.1.2.8.dylib new file mode 100755 index 0000000..c068d34 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/.dylibs/libz.1.2.8.dylib differ diff --git a/Darwin/lib/python3.4/site-packages/PIL/BdfFontFile.py b/Darwin/lib/python3.5/site-packages/PIL/BdfFontFile.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/PIL/BdfFontFile.py rename to Darwin/lib/python3.5/site-packages/PIL/BdfFontFile.py index 3a41848..0c1614e 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/BdfFontFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/BdfFontFile.py @@ -26,12 +26,12 @@ from PIL import FontFile # -------------------------------------------------------------------- bdf_slant = { - "R": "Roman", - "I": "Italic", - "O": "Oblique", - "RI": "Reverse Italic", - "RO": "Reverse Oblique", - "OT": "Other" + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other" } bdf_spacing = { @@ -40,8 +40,8 @@ bdf_spacing = { "C": "Cell" } -def bdf_char(f): +def bdf_char(f): # skip to STARTCHAR while True: s = f.readline() @@ -69,8 +69,8 @@ def bdf_char(f): bitmap.append(s[:-1]) bitmap = b"".join(bitmap) - [x, y, l, d] = [int(s) for s in props["BBX"].split()] - [dx, dy] = [int(s) for s in props["DWIDTH"].split()] + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) @@ -82,6 +82,7 @@ def bdf_char(f): return id, int(props["ENCODING"]), bbox, im + ## # Font file plugin for the X11 BDF format. @@ -113,10 +114,10 @@ class BdfFontFile(FontFile.FontFile): font[4] = bdf_slant[font[4].upper()] font[11] = bdf_spacing[font[11].upper()] - ascent = int(props["FONT_ASCENT"]) - descent = int(props["FONT_DESCENT"]) + # ascent = int(props["FONT_ASCENT"]) + # descent = int(props["FONT_DESCENT"]) - fontname = ";".join(font[1:]) + # fontname = ";".join(font[1:]) # print "#", fontname # for i in comments: diff --git a/Darwin/lib/python3.5/site-packages/PIL/BmpImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/BmpImagePlugin.py new file mode 100644 index 0000000..d9aaf19 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/BmpImagePlugin.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# BMP file handler +# +# Windows (and OS/2) native bitmap storage format. +# +# history: +# 1995-09-01 fl Created +# 1996-04-30 fl Added save +# 1997-08-27 fl Fixed save of 1-bit images +# 1998-03-06 fl Load P images as L where possible +# 1998-07-03 fl Load P images as 1 where possible +# 1998-12-29 fl Handle small palettes +# 2002-12-30 fl Fixed load of 1-bit palette images +# 2003-04-21 fl Fixed load of 1-bit monochrome images +# 2003-04-23 fl Added limited support for BI_BITFIELDS compression +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + + +from PIL import Image, ImageFile, ImagePalette, _binary +import math + +__version__ = "0.7" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +# +# -------------------------------------------------------------------- +# Read BMP file + +BIT2MODE = { + # bits => mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +# ============================================================================== +# Image plugin for the Windows BMP format. +# ============================================================================== +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # -------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + # --------------------------------------------------- BMP Compression values + COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5} + RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5 + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = dict() + file_info['header_size'] = i32(read(4)) # read bmp header size @offset 14 (this is part of the header size) + file_info['direction'] = -1 + # --------------------- If requested, read header at a specific position + header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4) # read the rest of the bmp header, without its size + # --------------------------------------------------- IBM OS/2 Bitmap v1 + # ------ This format has different offsets because of width/height types + if file_info['header_size'] == 12: + file_info['width'] = i16(header_data[0:2]) + file_info['height'] = i16(header_data[2:4]) + file_info['planes'] = i16(header_data[4:6]) + file_info['bits'] = i16(header_data[6:8]) + file_info['compression'] = self.RAW + file_info['palette_padding'] = 3 + # ---------------------------------------------- Windows Bitmap v2 to v5 + elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5 + if file_info['header_size'] >= 40: # v3 and OS/2 + file_info['y_flip'] = i8(header_data[7]) == 0xff + file_info['direction'] = 1 if file_info['y_flip'] else -1 + file_info['width'] = i32(header_data[0:4]) + file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8]) + file_info['planes'] = i16(header_data[8:10]) + file_info['bits'] = i16(header_data[10:12]) + file_info['compression'] = i32(header_data[12:16]) + file_info['data_size'] = i32(header_data[16:20]) # byte size of pixel data + file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28])) + file_info['colors'] = i32(header_data[28:32]) + file_info['palette_padding'] = 4 + self.info["dpi"] = tuple( + map(lambda x: int(math.ceil(x / 39.3701)), + file_info['pixels_per_meter'])) + if file_info['compression'] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']): + file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) + else: + for mask in ['r_mask', 'g_mask', 'b_mask', 'a_mask']: + file_info[mask] = i32(read(4)) + file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask']) + 
file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask']) + else: + raise IOError("Unsupported BMP header type (%d)" % file_info['header_size']) + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self.size = file_info['width'], file_info['height'] + # -------- If color count was not found in the header, compute from bits + file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) + # -------------------------------- Check abnormal values for DOS attacks + if file_info['width'] * file_info['height'] > 2**31: + raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) + # ----------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + if self.mode is None: + raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits']) + # ----------------- Process BMP with Bitfields compression (not palette) + if file_info['compression'] == self.BITFIELDS: + SUPPORTED = { + 32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0)], + 24: [(0xff0000, 0xff00, 0xff)], + 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)]} + MASK_MODES = { + (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xff0000, 0xff00, 0xff)): "BGR", + (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15"} + if file_info['bits'] in SUPPORTED: + if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] + self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode + elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])] + else: + raise IOError("Unsupported BMP bitfields layout") + else: + raise IOError("Unsupported BMP bitfields layout") + elif file_info['compression'] == self.RAW: + if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise IOError("Unsupported BMP compression (%d)" % file_info['compression']) + # ---------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # ----------------------------------------------------- 1-bit images + if not (0 < file_info['colors'] <= 65536): + raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors']) + else: + padding = file_info['palette_padding'] + palette = read(padding * file_info['colors']) + greyscale = True + indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors'])) + # ------------------ Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind*padding:ind*padding + 3] + if rgb != o8(val) * 3: + greyscale = False + # -------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info['colors'] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette) + + # ----------------------------- Finally set the tile data for the plugin + self.info['compression'] = file_info['compression'] + self.tile = [('raw', (0, 0, file_info['width'], 
file_info['height']), offset or self.fp.tell(), + (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction']) + )] + + def _open(self): + """ Open file, check magic number and read header """ + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if head_data[0:2] != b"BM": + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data[10:14]) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# ============================================================================== +# Image plugin for the DIB format (BMP alias) +# ============================================================================== +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + +# +# -------------------------------------------------------------------- +# Write BMP file + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _save(im, fp, filename, check=0): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as BMP" % im.mode) + + if check: + return check + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + + stride = ((im.size[0]*bits+7)//8+3) & (~3) + header = 40 # or 64 for OS/2 version 2 + offset = 14 + header + colors * 4 + image = stride * im.size[1] + + # bitmap header + fp.write(b"BM" + # file type (magic) + o32(offset+image) + # file size + o32(0) + # reserved + o32(offset)) # image data offset + + # bitmap info header + fp.write(o32(header) + # info header size + o32(im.size[0]) + # width + o32(im.size[1]) + # height + o16(1) + # planes + o16(bits) + # depth + o32(0) + # compression (0=uncompressed) + o32(image) + # size of bitmap + o32(ppm[0]) + o32(ppm[1]) + # resolution + o32(colors) + # colors used + o32(colors)) # colors important + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, + (rawmode, stride, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") diff --git a/Darwin/lib/python3.4/site-packages/PIL/BufrStubImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/BufrStubImagePlugin.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/BufrStubImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/BufrStubImagePlugin.py index a55ae56..45ee547 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/BufrStubImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/BufrStubImagePlugin.py @@ -13,6 +13,7 @@ from PIL import Image, ImageFile _handler = None + ## # Install application-specific BUFR image handler. 
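The arithmetic in _save() above pads every BMP row to a multiple of four bytes; shown standalone with made-up dimensions:

# Illustrative stride/offset calculation matching the BMP writer above.
width, height, bits, colors = 101, 40, 24, 0

stride = ((width * bits + 7) // 8 + 3) & (~3)  # 303 raw bytes -> padded to 304
header = 40                                    # BITMAPINFOHEADER size
offset = 14 + header + colors * 4              # file header + info header + palette
image = stride * height                        # pixel data size

print(stride)                                  # 304
print(offset + image)                          # total size written into the BM header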
# @@ -22,12 +23,14 @@ def register_handler(handler): global _handler _handler = handler + # -------------------------------------------------------------------- # Image adapter def _accept(prefix): return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + class BufrStubImageFile(ImageFile.StubImageFile): format = "BUFR" @@ -53,6 +56,7 @@ class BufrStubImageFile(ImageFile.StubImageFile): def _load(self): return _handler + def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): raise IOError("BUFR save handler not installed") diff --git a/Darwin/lib/python3.4/site-packages/PIL/ContainerIO.py b/Darwin/lib/python3.5/site-packages/PIL/ContainerIO.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/PIL/ContainerIO.py rename to Darwin/lib/python3.5/site-packages/PIL/ContainerIO.py index f4a15b8..262f2af 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ContainerIO.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ContainerIO.py @@ -18,7 +18,8 @@ # A file object that provides read access to a part of an existing # file (for example a TAR file). -class ContainerIO: + +class ContainerIO(object): ## # Create file object. @@ -48,7 +49,7 @@ class ContainerIO: # for current offset, and 2 for end of region. You cannot move # the pointer outside the defined region. - def seek(self, offset, mode = 0): + def seek(self, offset, mode=0): if mode == 1: self.pos = self.pos + offset elif mode == 2: @@ -75,12 +76,12 @@ class ContainerIO: # read until end of region. # @return An 8-bit string. - def read(self, n = 0): + def read(self, n=0): if n: n = min(n, self.length - self.pos) else: n = self.length - self.pos - if not n: # EOF + if not n: # EOF return "" self.pos = self.pos + n return self.fh.read(n) diff --git a/Darwin/lib/python3.4/site-packages/PIL/CurImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/CurImagePlugin.py similarity index 72% rename from Darwin/lib/python3.4/site-packages/PIL/CurImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/CurImagePlugin.py index 4cf2882..3825e09 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/CurImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/CurImagePlugin.py @@ -17,10 +17,9 @@ # -__version__ = "0.1" - from PIL import Image, BmpImagePlugin, _binary +__version__ = "0.1" # # -------------------------------------------------------------------- @@ -33,6 +32,7 @@ i32 = _binary.i32le def _accept(prefix): return prefix[:4] == b"\0\0\2\0" + ## # Image plugin for Windows Cursor files. 
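ContainerIO wraps a byte range of an already-open file, which is how PIL reads one member of an uncompressed container (for example a TAR segment). A small usage sketch; the file name and offsets are invented:

# Hypothetical usage of PIL.ContainerIO; "archive.bin" and the offsets are
# placeholders.
from PIL import ContainerIO

fh = open("archive.bin", "rb")
member = ContainerIO.ContainerIO(fh, 512, 1024)   # file, offset, length
member.seek(0)                                    # positions are region-relative
data = member.read()                              # at most 1024 bytes from offset 512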
@@ -48,7 +48,7 @@ class CurImageFile(BmpImagePlugin.BmpImageFile): # check magic s = self.fp.read(6) if not _accept(s): - raise SyntaxError("not an CUR file") + raise SyntaxError("not a CUR file") # pick the largest cursor in the file m = b"" @@ -58,14 +58,14 @@ class CurImageFile(BmpImagePlugin.BmpImageFile): m = s elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]): m = s - #print "width", i8(s[0]) - #print "height", i8(s[1]) - #print "colors", i8(s[2]) - #print "reserved", i8(s[3]) - #print "hotspot x", i16(s[4:]) - #print "hotspot y", i16(s[6:]) - #print "bytes", i32(s[8:]) - #print "offset", i32(s[12:]) + # print "width", i8(s[0]) + # print "height", i8(s[1]) + # print "colors", i8(s[2]) + # print "reserved", i8(s[3]) + # print "hotspot x", i16(s[4:]) + # print "hotspot y", i16(s[6:]) + # print "bytes", i32(s[8:]) + # print "offset", i32(s[12:]) # load as bitmap self._bitmap(i32(m[12:]) + offset) @@ -73,7 +73,7 @@ class CurImageFile(BmpImagePlugin.BmpImageFile): # patch up the bitmap height self.size = self.size[0], self.size[1]//2 d, e, o, a = self.tile[0] - self.tile[0] = d, (0,0)+self.size, o, a + self.tile[0] = d, (0, 0)+self.size, o, a return @@ -81,6 +81,6 @@ class CurImageFile(BmpImagePlugin.BmpImageFile): # # -------------------------------------------------------------------- -Image.register_open("CUR", CurImageFile, _accept) +Image.register_open(CurImageFile.format, CurImageFile, _accept) -Image.register_extension("CUR", ".cur") +Image.register_extension(CurImageFile.format, ".cur") diff --git a/Darwin/lib/python3.4/site-packages/PIL/DcxImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/DcxImagePlugin.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/DcxImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/DcxImagePlugin.py index 631875e..f9034d1 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/DcxImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/DcxImagePlugin.py @@ -21,18 +21,19 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.2" - from PIL import Image, _binary - from PIL.PcxImagePlugin import PcxImageFile -MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? +__version__ = "0.2" + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? i32 = _binary.i32le + def _accept(prefix): - return i32(prefix) == MAGIC + return len(prefix) >= 4 and i32(prefix) == MAGIC + ## # Image plugin for the Intel DCX format. 
@@ -60,6 +61,14 @@ class DcxImageFile(PcxImageFile): self.__fp = self.fp self.seek(0) + @property + def n_frames(self): + return len(self._offset) + + @property + def is_animated(self): + return len(self._offset) > 1 + def seek(self, frame): if frame >= len(self._offset): raise EOFError("attempt to seek outside DCX directory") @@ -72,6 +81,6 @@ class DcxImageFile(PcxImageFile): return self.frame -Image.register_open("DCX", DcxImageFile, _accept) +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) -Image.register_extension("DCX", ".dcx") +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/Darwin/lib/python3.4/site-packages/PIL/EpsImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/EpsImagePlugin.py similarity index 58% rename from Darwin/lib/python3.4/site-packages/PIL/EpsImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/EpsImagePlugin.py index 9f963f7..fb5bf7f 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/EpsImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/EpsImagePlugin.py @@ -11,7 +11,8 @@ # 1996-08-23 fl Handle files from Macintosh (0.3) # 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) # 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5) -# 2014-05-07 e Handling of EPS with binary preview and fixed resolution resizing +# 2014-05-07 e Handling of EPS with binary preview and fixed resolution +# resizing # # Copyright (c) 1997-2003 by Secret Labs AB. # Copyright (c) 1995-2003 by Fredrik Lundh @@ -19,12 +20,13 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.5" - import re import io +import sys from PIL import Image, ImageFile, _binary +__version__ = "0.5" + # # -------------------------------------------------------------------- @@ -35,7 +37,6 @@ split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$") field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$") gs_windows_binary = None -import sys if sys.platform.startswith('win'): import shutil if hasattr(shutil, 'which'): @@ -51,20 +52,21 @@ if sys.platform.startswith('win'): else: gs_windows_binary = False + def has_ghostscript(): if gs_windows_binary: return True if not sys.platform.startswith('win'): import subprocess try: - gs = subprocess.Popen(['gs','--version'], stdout=subprocess.PIPE) + gs = subprocess.Popen(['gs', '--version'], stdout=subprocess.PIPE) gs.stdout.read() return True except OSError: # no ghostscript pass return False - + def Ghostscript(tile, size, fp, scale=1): """Render an image using Ghostscript""" @@ -72,54 +74,64 @@ def Ghostscript(tile, size, fp, scale=1): # Unpack decoder tile decoder, tile, offset, data = tile[0] length, bbox = data - - #Hack to support hi-res rendering - scale = int(scale) or 1 - orig_size = size - orig_bbox = bbox - size = (size[0] * scale, size[1] * scale) - # resolution is dependend on bbox and size - res = ( float((72.0 * size[0]) / (bbox[2]-bbox[0])), float((72.0 * size[1]) / (bbox[3]-bbox[1])) ) - #print("Ghostscript", scale, size, orig_size, bbox, orig_bbox, res) - import tempfile, os, subprocess + # Hack to support hi-res rendering + scale = int(scale) or 1 + # orig_size = size + # orig_bbox = bbox + size = (size[0] * scale, size[1] * scale) + # resolution is dependent on bbox and size + res = (float((72.0 * size[0]) / (bbox[2]-bbox[0])), + float((72.0 * size[1]) / (bbox[3]-bbox[1]))) + # print("Ghostscript", scale, size, orig_size, bbox, orig_bbox, res) + + import os + import subprocess + import tempfile out_fd, outfile = tempfile.mkstemp() os.close(out_fd) - 
in_fd, infile = tempfile.mkstemp() - os.close(in_fd) - - # ignore length and offset! - # ghostscript can read it - # copy whole file to read in ghostscript - with open(infile, 'wb') as f: - # fetch length of fp - fp.seek(0, 2) - fsize = fp.tell() - # ensure start position - # go back - fp.seek(0) - lengthfile = fsize - while lengthfile > 0: - s = fp.read(min(lengthfile, 100*1024)) - if not s: - break - length -= len(s) - f.write(s) + + infile_temp = None + if hasattr(fp, 'name') and os.path.exists(fp.name): + infile = fp.name + else: + in_fd, infile_temp = tempfile.mkstemp() + os.close(in_fd) + infile = infile_temp + + # ignore length and offset! + # ghostscript can read it + # copy whole file to read in ghostscript + with open(infile_temp, 'wb') as f: + # fetch length of fp + fp.seek(0, 2) + fsize = fp.tell() + # ensure start position + # go back + fp.seek(0) + lengthfile = fsize + while lengthfile > 0: + s = fp.read(min(lengthfile, 100*1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) # Build ghostscript command command = ["gs", - "-q", # quiet mode - "-g%dx%d" % size, # set output geometry (pixels) - "-r%fx%f" % res, # set input DPI (dots per inch) - "-dNOPAUSE -dSAFER", # don't pause between pages, safe mode - "-sDEVICE=ppmraw", # ppm driver - "-sOutputFile=%s" % outfile, # output file + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dNOPAUSE -dSAFER", # don't pause between pages, + # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file "-c", "%d %d translate" % (-bbox[0], -bbox[1]), - # adjust for image origin - "-f", infile, # input file - ] - + # adjust for image origin + "-f", infile, # input file + ] + if gs_windows_binary is not None: if not gs_windows_binary: raise WindowsError('Unable to locate Ghostscript on paths') @@ -127,7 +139,8 @@ def Ghostscript(tile, size, fp, scale=1): # push data through ghostscript try: - gs = subprocess.Popen(command, stdin=subprocess.PIPE, stdout=subprocess.PIPE) + gs = subprocess.Popen(command, stdin=subprocess.PIPE, + stdout=subprocess.PIPE) gs.stdin.close() status = gs.wait() if status: @@ -136,113 +149,93 @@ def Ghostscript(tile, size, fp, scale=1): finally: try: os.unlink(outfile) - os.unlink(infile) - except: pass - + if infile_temp: + os.unlink(infile_temp) + except: + pass + return im -class PSFile: - """Wrapper that treats either CR or LF as end of line.""" +class PSFile(object): + """ + Wrapper for bytesio object that treats either CR or LF as end of line. 
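For reference, the Ghostscript invocation assembled above boils down to a plain subprocess call. This standalone sketch uses invented paths, splits -dNOPAUSE/-dSAFER into separate arguments, and assumes gs is on PATH:

# Illustrative only: render an EPS to ppmraw the way EpsImagePlugin does.
import subprocess

size = (400, 300)                        # output size in pixels
bbox = (0, 0, 200, 150)                  # EPS BoundingBox in points
res = (72.0 * size[0] / (bbox[2] - bbox[0]),
       72.0 * size[1] / (bbox[3] - bbox[1]))

command = [
    "gs", "-q",
    "-g%dx%d" % size,                    # output geometry (pixels)
    "-r%fx%f" % res,                     # input resolution (dpi)
    "-dNOPAUSE", "-dSAFER",
    "-sDEVICE=ppmraw",
    "-sOutputFile=/tmp/out.ppm",
    "-c", "%d %d translate" % (-bbox[0], -bbox[1]),
    "-f", "/tmp/input.eps",
]
subprocess.check_call(command)           # /tmp/out.ppm can then be opened with PIL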
+ """ def __init__(self, fp): self.fp = fp self.char = None - def __getattr__(self, id): - v = getattr(self.fp, id) - setattr(self, id, v) - return v + def seek(self, offset, whence=0): self.char = None self.fp.seek(offset, whence) - def read(self, count): - return self.fp.read(count).decode('latin-1') - def readbinary(self, count): - return self.fp.read(count) - def tell(self): - pos = self.fp.tell() - if self.char: - pos -= 1 - return pos + def readline(self): - s = b"" - if self.char: - c = self.char - self.char = None - else: - c = self.fp.read(1) + s = self.char or b"" + self.char = None + + c = self.fp.read(1) while c not in b"\r\n": s = s + c c = self.fp.read(1) - if c == b"\r": - self.char = self.fp.read(1) - if self.char == b"\n": - self.char = None - return s.decode('latin-1') + "\n" + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode('latin-1') def _accept(prefix): - return prefix[:4] == b"%!PS" or i32(prefix) == 0xC6D3D0C5 + return prefix[:4] == b"%!PS" or \ + (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) ## # Image plugin for Encapsulated Postscript. This plugin supports only # a few variants of this format. + class EpsImageFile(ImageFile.ImageFile): """EPS File Parser for the Python Imaging Library""" format = "EPS" format_description = "Encapsulated Postscript" + mode_map = {1: "L", 2: "LAB", 3: "RGB"} + def _open(self): + (length, offset) = self._find_offset(self.fp) - fp = PSFile(self.fp) + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + try: + if bytes is str: + # Python2, no encoding conversion necessary + fp = open(self.fp.name, "Ur") + else: + # Python3, can use bare open command. + fp = open(self.fp.name, "Ur", encoding='latin-1') + except: + # Expect this for bytesio/stringio + fp = PSFile(self.fp) - # FIX for: Some EPS file not handled correctly / issue #302 - # EPS can contain binary data - # or start directly with latin coding - # read header in both ways to handle both - # file types - # more info see http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf - - # for HEAD without binary preview - s = fp.read(4) - # for HEAD with binary preview - fp.seek(0) - sb = fp.readbinary(160) - - if s[:4] == "%!PS": - fp.seek(0, 2) - length = fp.tell() - offset = 0 - elif i32(sb[0:4]) == 0xC6D3D0C5: - offset = i32(sb[4:8]) - length = i32(sb[8:12]) - else: - raise SyntaxError("not an EPS file") - - # go to offset - start of "%!PS" + # go to offset - start of "%!PS" fp.seek(offset) - + box = None self.mode = "RGB" - self.size = 1, 1 # FIXME: huh? + self.size = 1, 1 # FIXME: huh? # # Load EPS header - s = fp.readline() - - while s: + s = fp.readline().strip('\r\n') + while s: if len(s) > 255: raise SyntaxError("not an EPS file") - if s[-2:] == '\r\n': - s = s[:-2] - elif s[-1:] == '\n': - s = s[:-1] - try: m = split.match(s) except re.error as v: @@ -256,17 +249,15 @@ class EpsImageFile(ImageFile.ImageFile): # Note: The DSC spec says that BoundingBox # fields should be integers, but some drivers # put floating point values there anyway. 
- box = [int(float(s)) for s in v.split()] + box = [int(float(i)) for i in v.split()] self.size = box[2] - box[0], box[3] - box[1] - self.tile = [("eps", (0,0) + self.size, offset, + self.tile = [("eps", (0, 0) + self.size, offset, (length, box))] except: pass else: - m = field.match(s) - if m: k = m.group(1) @@ -276,84 +267,69 @@ class EpsImageFile(ImageFile.ImageFile): self.info[k[:8]] = k[9:] else: self.info[k] = "" - elif s[0:1] == '%': + elif s[0] == '%': # handle non-DSC Postscript comments that some # tools mistakenly put in the Comments section pass else: raise IOError("bad EPS header") - s = fp.readline() + s = fp.readline().strip('\r\n') if s[:1] != "%": break - # # Scan for an "ImageData" descriptor - while s[0] == "%": + while s[:1] == "%": if len(s) > 255: raise SyntaxError("not an EPS file") - if s[-2:] == '\r\n': - s = s[:-2] - elif s[-1:] == '\n': - s = s[:-1] - if s[:11] == "%ImageData:": + # Encoded bitmapped image. + x, y, bi, mo = s[11:].split(None, 7)[:4] - [x, y, bi, mo, z3, z4, en, id] =\ - s[11:].split(None, 7) - - x = int(x); y = int(y) - - bi = int(bi) - mo = int(mo) - - en = int(en) - - if en == 1: - decoder = "eps_binary" - elif en == 2: - decoder = "eps_hex" - else: + if int(bi) != 8: break - if bi != 8: - break - if mo == 1: - self.mode = "L" - elif mo == 2: - self.mode = "LAB" - elif mo == 3: - self.mode = "RGB" - else: + try: + self.mode = self.mode_map[int(mo)] + except: break - if id[:1] == id[-1:] == '"': - id = id[1:-1] + self.size = int(x), int(y) + return - # Scan forward to the actual image data - while True: - s = fp.readline() - if not s: - break - if s[:len(id)] == id: - self.size = x, y - self.tile2 = [(decoder, - (0, 0, x, y), - fp.tell(), - 0)] - return - - s = fp.readline() + s = fp.readline().strip('\r\n') if not s: break if not box: raise IOError("cannot determine EPS bounding box") + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, 2) + length = fp.tell() + offset = 0 + elif i32(s[0:4]) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s[4:8]) + length = i32(s[8:12]) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + def load(self, scale=1): # Load EPS via Ghostscript if not self.tile: @@ -363,11 +339,12 @@ class EpsImageFile(ImageFile.ImageFile): self.size = self.im.size self.tile = [] - def load_seek(self,*args,**kwargs): + def load_seek(self, *args, **kwargs): # we can't incrementally load, so force ImageFile.parser to - # use our custom load method by defining this method. + # use our custom load method by defining this method. 
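The header probe that _find_offset() performs can be summarised in a few lines: a plain EPS starts with %!PS, while a DOS EPS binary file starts with the little-endian magic C5 D0 D3 C6 followed by the offset and length of the embedded PostScript. A standalone sketch (not the plugin's own method):

# Illustrative re-implementation of the EPS header probe.
import struct

def find_ps_section(fp):
    head = fp.read(12)
    if head[:4] == b"%!PS":
        fp.seek(0, 2)                    # plain EPS: PostScript runs to EOF
        return 0, fp.tell()
    if len(head) == 12:
        magic, offset, length = struct.unpack("<III", head)
        if magic == 0xC6D3D0C5:          # DOS EPS binary header
            return offset, length
    raise SyntaxError("not an EPS file")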
pass + # # -------------------------------------------------------------------- @@ -389,25 +366,28 @@ def _save(im, fp, filename, eps=1): else: raise ValueError("image mode is not supported") - class NoCloseStream: + class NoCloseStream(object): def __init__(self, fp): self.fp = fp + def __getattr__(self, name): return getattr(self.fp, name) + def close(self): pass base_fp = fp - fp = NoCloseStream(fp) - if sys.version_info[0] > 2: - fp = io.TextIOWrapper(fp, encoding='latin-1') + if fp != sys.stdout: + fp = NoCloseStream(fp) + if sys.version_info[0] > 2: + fp = io.TextIOWrapper(fp, encoding='latin-1') if eps: # # write EPS header fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") fp.write("%%Creator: PIL 0.1 EpsEncode\n") - #fp.write("%%CreationDate: %s"...) + # fp.write("%%CreationDate: %s"...) fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) fp.write("%%Pages: 1\n") fp.write("%%EndComments\n") @@ -421,17 +401,19 @@ def _save(im, fp, filename, eps=1): fp.write("10 dict begin\n") fp.write("/buf %d string def\n" % (im.size[0] * operator[1])) fp.write("%d %d scale\n" % im.size) - fp.write("%d %d 8\n" % im.size) # <= bits + fp.write("%d %d 8\n" % im.size) # <= bits fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) fp.write("{ currentfile buf readhexstring pop } bind\n") fp.write(operator[2] + "\n") - fp.flush() + if hasattr(fp, "flush"): + fp.flush() - ImageFile._save(im, base_fp, [("eps", (0,0)+im.size, 0, None)]) + ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) fp.write("\n%%%%EndBinary\n") fp.write("grestore end\n") - fp.flush() + if hasattr(fp, "flush"): + fp.flush() # # -------------------------------------------------------------------- diff --git a/Darwin/lib/python3.4/site-packages/PIL/ExifTags.py b/Darwin/lib/python3.5/site-packages/PIL/ExifTags.py similarity index 96% rename from Darwin/lib/python3.4/site-packages/PIL/ExifTags.py rename to Darwin/lib/python3.5/site-packages/PIL/ExifTags.py index 25cd080..52e145f 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ExifTags.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ExifTags.py @@ -67,8 +67,8 @@ TAGS = { 0x0213: "YCbCrPositioning", 0x0214: "ReferenceBlackWhite", 0x1000: "RelatedImageFileFormat", - 0x1001: "RelatedImageLength", # FIXME / Dictionary contains duplicate keys - 0x1001: "RelatedImageWidth", # FIXME \ Dictionary contains duplicate keys + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", 0x828d: "CFARepeatPatternDim", 0x828e: "CFAPattern", 0x828f: "BatteryLevel", diff --git a/Darwin/lib/python3.4/site-packages/PIL/FitsStubImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/FitsStubImagePlugin.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/FitsStubImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/FitsStubImagePlugin.py index 0b851ae..7aefff2 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/FitsStubImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/FitsStubImagePlugin.py @@ -18,6 +18,7 @@ _handler = None # # @param handler Handler object. 
+ def register_handler(handler): global _handler _handler = handler @@ -25,9 +26,11 @@ def register_handler(handler): # -------------------------------------------------------------------- # Image adapter + def _accept(prefix): return prefix[:6] == b"SIMPLE" + class FITSStubImageFile(ImageFile.StubImageFile): format = "FITS" diff --git a/Darwin/lib/python3.4/site-packages/PIL/FliImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/FliImagePlugin.py similarity index 59% rename from Darwin/lib/python3.4/site-packages/PIL/FliImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/FliImagePlugin.py index c9a2905..a07dc29 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/FliImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/FliImagePlugin.py @@ -16,20 +16,22 @@ # -__version__ = "0.2" - from PIL import Image, ImageFile, ImagePalette, _binary +__version__ = "0.2" + i8 = _binary.i8 i16 = _binary.i16le i32 = _binary.i32le o8 = _binary.o8 + # # decoder def _accept(prefix): - return i16(prefix[4:6]) in [0xAF11, 0xAF12] + return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] + ## # Image plugin for the FLI/FLC animation format. Use the seek @@ -47,7 +49,7 @@ class FliImageFile(ImageFile.ImageFile): magic = i16(s[4:6]) if not (magic in [0xAF11, 0xAF12] and i16(s[14:16]) in [0, 3] and # flags - s[20:22] == b"\x00\x00"): # reserved + s[20:22] == b"\x00\x00"): # reserved raise SyntaxError("not an FLI/FLC file") # image characteristics @@ -61,7 +63,7 @@ class FliImageFile(ImageFile.ImageFile): self.info["duration"] = duration # look for palette - palette = [(a,a,a) for a in range(256)] + palette = [(a, a, a) for a in range(256)] s = self.fp.read(16) @@ -80,13 +82,15 @@ class FliImageFile(ImageFile.ImageFile): elif i16(s[4:6]) == 4: self._palette(palette, 0) - palette = [o8(r)+o8(g)+o8(b) for (r,g,b) in palette] + palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] self.palette = ImagePalette.raw("RGB", b"".join(palette)) # set things up to decode first frame - self.frame = -1 + self.__frame = -1 self.__fp = self.fp - + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None self.seek(0) def _palette(self, palette, shift): @@ -107,11 +111,55 @@ class FliImageFile(ImageFile.ImageFile): palette[i] = (r, g, b) i += 1 - def seek(self, frame): + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames - if frame != self.frame + 1: + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in FLI file") + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + + if frame != self.__frame + 1: raise ValueError("cannot seek to frame %d" % frame) - self.frame = frame + self.__frame = frame # move to next frame self.fp = self.__fp @@ -124,18 +172,17 @@ class FliImageFile(ImageFile.ImageFile): framesize = i32(s) self.decodermaxblock = framesize - self.tile = [("fli", 
(0,0)+self.size, self.__offset, None)] + self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] - self.__offset = self.__offset + framesize + self.__offset += framesize def tell(self): - - return self.frame + return self.__frame # # registry -Image.register_open("FLI", FliImageFile, _accept) +Image.register_open(FliImageFile.format, FliImageFile, _accept) -Image.register_extension("FLI", ".fli") -Image.register_extension("FLI", ".flc") +Image.register_extension(FliImageFile.format, ".fli") +Image.register_extension(FliImageFile.format, ".flc") diff --git a/Darwin/lib/python3.4/site-packages/PIL/FontFile.py b/Darwin/lib/python3.5/site-packages/PIL/FontFile.py similarity index 75% rename from Darwin/lib/python3.4/site-packages/PIL/FontFile.py rename to Darwin/lib/python3.5/site-packages/PIL/FontFile.py index 7c5704c..db8e6be 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/FontFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/FontFile.py @@ -17,15 +17,9 @@ import os from PIL import Image, _binary -import marshal - -try: - import zlib -except ImportError: - zlib = None - WIDTH = 800 + def puti16(fp, values): # write network order (big-endian) 16-bit sequence for v in values: @@ -33,10 +27,11 @@ def puti16(fp, values): v += 65536 fp.write(_binary.o16be(v)) + ## # Base class for raster font file handlers. -class FontFile: +class FontFile(object): bitmap = None @@ -83,7 +78,8 @@ class FontFile: glyph = self[i] if glyph: d, dst, src, im = glyph - xx, yy = src[2] - src[0], src[3] - src[1] + xx = src[2] - src[0] + # yy = src[3] - src[1] x0, y0 = x, y x = x + xx if x > WIDTH: @@ -95,9 +91,8 @@ class FontFile: # print chr(i), dst, s self.metrics[i] = d, dst, s - - def save1(self, filename): - "Save font in version 1 format" + def save(self, filename): + "Save font" self.compile() @@ -107,7 +102,7 @@ class FontFile: # font metrics fp = open(os.path.splitext(filename)[0] + ".pil", "wb") fp.write(b"PILfont\n") - fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! 
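The n_frames/is_animated properties added for FLI (and for DCX above) rely on one idiom: seek forward until the plugin raises EOFError, then restore the position. A generic sketch, with a hypothetical file name:

# Illustrative frame counting; "animation.fli" is a placeholder path.
from PIL import Image

def count_frames(im):
    im.seek(0)
    frames = 1
    try:
        while True:
            im.seek(im.tell() + 1)
            frames += 1
    except EOFError:
        pass
    im.seek(0)
    return frames

im = Image.open("animation.fli")
print(count_frames(im))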
fp.write(b"DATA\n") for id in range(256): m = self.metrics[id] @@ -117,30 +112,4 @@ class FontFile: puti16(fp, m[0] + m[1] + m[2]) fp.close() - - def save2(self, filename): - "Save font in version 2 format" - - # THIS IS WORK IN PROGRESS - - self.compile() - - data = marshal.dumps((self.metrics, self.info)) - - if zlib: - data = b"z" + zlib.compress(data, 9) - else: - data = b"u" + data - - fp = open(os.path.splitext(filename)[0] + ".pil", "wb") - - fp.write(b"PILfont2\n" + self.name + "\n" + "DATA\n") - - fp.write(data) - - self.bitmap.save(fp, "PNG") - - fp.close() - - - save = save1 # for now +# End of file diff --git a/Darwin/lib/python3.4/site-packages/PIL/FpxImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/FpxImagePlugin.py similarity index 82% rename from Darwin/lib/python3.4/site-packages/PIL/FpxImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/FpxImagePlugin.py index 64c7b15..d369e05 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/FpxImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/FpxImagePlugin.py @@ -16,11 +16,10 @@ # -__version__ = "0.1" - - from PIL import Image, ImageFile -from PIL.OleFileIO import * +from PIL.OleFileIO import i8, i32, MAGIC, OleFileIO + +__version__ = "0.1" # we map from colour field tuples to (mode, rawmode) descriptors @@ -34,16 +33,18 @@ MODES = { (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), # standard RGB (NIFRGB) - (0x00030000, 0x00030001, 0x00030002): ("RGB","RGB"), - (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA","RGBA"), + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), } + # # -------------------------------------------------------------------- def _accept(prefix): return prefix[:8] == MAGIC + ## # Image plugin for the FlashPix images. 
@@ -67,7 +68,7 @@ class FpxImageFile(ImageFile.ImageFile): self._open_index(1) - def _open_index(self, index = 1): + def _open_index(self, index=1): # # get the Image Contents Property Set @@ -95,7 +96,7 @@ class FpxImageFile(ImageFile.ImageFile): id = self.maxid << 16 - s = prop[0x2000002|id] + s = prop[0x2000002 | id] colors = [] for i in range(i32(s, 4)): @@ -107,7 +108,7 @@ class FpxImageFile(ImageFile.ImageFile): # load JPEG tables, if any self.jpeg = {} for i in range(256): - id = 0x3000001|(i << 16) + id = 0x3000001 | (i << 16) if id in prop: self.jpeg[i] = prop[id] @@ -115,7 +116,7 @@ class FpxImageFile(ImageFile.ImageFile): self._open_subimage(1, self.maxid) - def _open_subimage(self, index = 1, subimage = 0): + def _open_subimage(self, index=1, subimage=0): # # setup tile descriptors for a given subimage @@ -128,15 +129,15 @@ class FpxImageFile(ImageFile.ImageFile): fp = self.ole.openstream(stream) # skip prefix - p = fp.read(28) + fp.read(28) # header stream s = fp.read(36) size = i32(s, 4), i32(s, 8) - tilecount = i32(s, 12) + # tilecount = i32(s, 12) tilesize = i32(s, 16), i32(s, 20) - channels = i32(s, 24) + # channels = i32(s, 24) offset = i32(s, 28) length = i32(s, 32) @@ -159,14 +160,14 @@ class FpxImageFile(ImageFile.ImageFile): compression = i32(s, i+8) if compression == 0: - self.tile.append(("raw", (x,y,x+xtile,y+ytile), - i32(s, i) + 28, (self.rawmode))) + self.tile.append(("raw", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode))) elif compression == 1: # FIXME: the fill decoder is not implemented - self.tile.append(("fill", (x,y,x+xtile,y+ytile), - i32(s, i) + 28, (self.rawmode, s[12:16]))) + self.tile.append(("fill", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode, s[12:16]))) elif compression == 2: @@ -182,14 +183,14 @@ class FpxImageFile(ImageFile.ImageFile): # this problem : jpegmode, rawmode = "YCbCrK", "CMYK" else: - jpegmode = None # let the decoder decide + jpegmode = None # let the decoder decide else: # The image is stored as defined by rawmode jpegmode = rawmode - self.tile.append(("jpeg", (x,y,x+xtile,y+ytile), - i32(s, i) + 28, (rawmode, jpegmode))) + self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (rawmode, jpegmode))) # FIXME: jpeg tables are tile dependent; the prefix # data must be placed in the tile descriptor itself! @@ -204,7 +205,7 @@ class FpxImageFile(ImageFile.ImageFile): if x >= xsize: x, y = 0, y + ytile if y >= ysize: - break # isn't really required + break # isn't really required self.stream = stream self.fp = None @@ -212,7 +213,8 @@ class FpxImageFile(ImageFile.ImageFile): def load(self): if not self.fp: - self.fp = self.ole.openstream(self.stream[:2] + ["Subimage 0000 Data"]) + self.fp = self.ole.openstream(self.stream[:2] + + ["Subimage 0000 Data"]) ImageFile.ImageFile.load(self) diff --git a/Darwin/lib/python3.4/site-packages/PIL/GbrImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/GbrImagePlugin.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/GbrImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/GbrImagePlugin.py index e2a5d0c..15282ec 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/GbrImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/GbrImagePlugin.py @@ -17,8 +17,10 @@ from PIL import Image, ImageFile, _binary i32 = _binary.i32be + def _accept(prefix): - return i32(prefix) >= 20 and i32(prefix[4:8]) == 1 + return len(prefix) >= 8 and i32(prefix) >= 20 and i32(prefix[4:8]) == 1 + ## # Image plugin for the GIMP brush format. 
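Two conventions recur throughout these plugin diffs: _accept() now guards against sniff prefixes shorter than the bytes it decodes, and registration goes through the class-level format attribute instead of repeating a string literal. The sketch below is schematic only; FooImageFile, its magic bytes and its extension are hypothetical and not part of the patch.

from PIL import Image, ImageFile, _binary

i32 = _binary.i32be


def _accept(prefix):
    # the length guard runs first, so i32() never unpacks from a buffer
    # shorter than four bytes (this mirrors the FLI and GBR fixes above)
    return len(prefix) >= 8 and prefix[:4] == b"FOO\0" and i32(prefix[4:8]) == 1


class FooImageFile(ImageFile.ImageFile):
    # skeleton only; no decoder is implemented here
    format = "FOO"
    format_description = "hypothetical example format"

    def _open(self):
        raise SyntaxError("not a real FOO file")


# register through the class attribute so the format name lives in one place
Image.register_open(FooImageFile.format, FooImageFile, _accept)
Image.register_extension(FooImageFile.format, ".foo")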
@@ -37,8 +39,8 @@ class GbrImageFile(ImageFile.ImageFile): width = i32(self.fp.read(4)) height = i32(self.fp.read(4)) - bytes = i32(self.fp.read(4)) - if width <= 0 or height <= 0 or bytes != 1: + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0 or color_depth != 1: raise SyntaxError("not a GIMP brush") comment = self.fp.read(header_size - 20)[:-1] @@ -64,6 +66,6 @@ class GbrImageFile(ImageFile.ImageFile): # # registry -Image.register_open("GBR", GbrImageFile, _accept) +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) -Image.register_extension("GBR", ".gbr") +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/Darwin/lib/python3.4/site-packages/PIL/GdImageFile.py b/Darwin/lib/python3.5/site-packages/PIL/GdImageFile.py similarity index 93% rename from Darwin/lib/python3.4/site-packages/PIL/GdImageFile.py rename to Darwin/lib/python3.5/site-packages/PIL/GdImageFile.py index f1dbc7c..ae3500f 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/GdImageFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/GdImageFile.py @@ -23,11 +23,11 @@ # purposes only. -__version__ = "0.1" - from PIL import ImageFile, ImagePalette, _binary from PIL._util import isPath +__version__ = "0.1" + try: import builtins except ImportError: @@ -36,6 +36,7 @@ except ImportError: i16 = _binary.i16be + ## # Image plugin for the GD uncompressed format. Note that this format # is not supported by the standard Image.open function. To use @@ -52,7 +53,7 @@ class GdImageFile(ImageFile.ImageFile): # Header s = self.fp.read(775) - self.mode = "L" # FIXME: "P" + self.mode = "L" # FIXME: "P" self.size = i16(s[0:2]), i16(s[2:4]) # transparency index @@ -62,7 +63,8 @@ class GdImageFile(ImageFile.ImageFile): self.palette = ImagePalette.raw("RGB", s[7:]) - self.tile = [("raw", (0,0)+self.size, 775, ("L", 0, -1))] + self.tile = [("raw", (0, 0)+self.size, 775, ("L", 0, -1))] + ## # Load texture from a GD image file. @@ -73,7 +75,7 @@ class GdImageFile(ImageFile.ImageFile): # @return An image instance. # @exception IOError If the image could not be read. -def open(fp, mode = "r"): +def open(fp, mode="r"): if mode != "r": raise ValueError("bad mode") diff --git a/Darwin/lib/python3.5/site-packages/PIL/GifImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000..83169bf --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,687 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image, ImageFile, ImagePalette, \ + ImageChops, ImageSequence, _binary + +__version__ = "0.9" + + +# -------------------------------------------------------------------- +# Helpers + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 +o16 = _binary.o16le + + +# -------------------------------------------------------------------- +# Identify/read GIF files + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. + +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and i8(s): + return self.fp.read(i8(s)) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if s[:6] not in [b"GIF87a", b"GIF89a"]: + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self.size = i16(s[6:]), i16(s[8:]) + self.tile = [] + flags = i8(s[10]) + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = i8(s[11]) + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in GIF file") + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + self.palette = copy(self.global_palette) + + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if i8(s) == 249: + # + # graphic control extension + # + flags = i8(block[0]) + if flags & 1: + self.info["transparency"] = i8(block[3]) + self.info["duration"] = i16(block[1:3]) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + 
if dispose_bits: + # only set the dispose if it is not + # unspecified. I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif i8(s) == 255: + # + # application extension + # + self.info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and i8(block[0]) == 1: + self.info["loop"] = i16(block[1:3]) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s[0:]), i16(s[2:]) + x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + self.dispose_extent = x0, y0, x1, y1 + flags = i8(s[8]) + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette =\ + ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = i8(self.fp.read(1)) + self.__offset = self.fp.tell() + self.tile = [("gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace))] + break + + else: + pass + # raise IOError, "illegal GIF tag `%x`" % i8(s) + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + self.dispose = Image.core.fill("P", self.size, + self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self.dispose.crop(self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self.disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self.im.crop(self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, + updated.convert('RGBA')) + self.im = self._prev_im + self._prev_im = self.im.copy() + +# -------------------------------------------------------------------- +# Write GIF files + +try: + import _imaging_gif +except ImportError: + _imaging_gif = None + +RAWMODE = { + "1": "L", + "L": "L", + "P": "P", +} + + +def _convert_mode(im, initial_call=False): + # convert on the fly (EXPERIMENTAL -- I'm not sure PIL + # should automatically convert images on save...) 
+ if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=1, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + + im.encoderinfo.update(im.info) + if _imaging_gif: + # call external driver + try: + _imaging_gif.save(im, fp, filename) + return + except IOError: + pass # write uncompressed file + + if im.mode in RAWMODE: + im_out = im.copy() + else: + im_out = _convert_mode(im, True) + + # header + try: + palette = im.encoderinfo["palette"] + except KeyError: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if save_all: + previous = None + + first_frame = None + for im_frame in ImageSequence.Iterator(im): + im_frame = _convert_mode(im_frame) + + # To specify duration, add the time in milliseconds to getdata(), + # e.g. getdata(im_frame, duration=1000) + if not previous: + # global header + first_frame = getheader(im_frame, palette, im.encoderinfo)[0] + first_frame += getdata(im_frame, (0, 0), **im.encoderinfo) + else: + if first_frame: + for s in first_frame: + fp.write(s) + first_frame = None + + # delta frame + delta = ImageChops.subtract_modulo(im_frame, previous.copy()) + bbox = delta.getbbox() + + if bbox: + # compress difference + for s in getdata(im_frame.crop(bbox), + bbox[:2], **im.encoderinfo): + fp.write(s) + else: + # FIXME: what should we do in this case? + pass + previous = im_frame + if first_frame: + save_all = False + if not save_all: + header = getheader(im_out, palette, im.encoderinfo)[0] + for s in header: + fp.write(s) + + flags = 0 + + if get_interlace(im): + flags = flags | 64 + + # local image header + _get_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, + RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + fp.write(b";") # end of file + + try: + fp.flush() + except: + pass + + +def get_interlace(im): + try: + interlace = im.encoderinfo["interlace"] + except KeyError: + interlace = 1 + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _get_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + if _get_optimize(im, im.encoderinfo): + used_palette_colors = _get_used_palette_colors(im) + + # adjust the transparency index after optimize + if len(used_palette_colors) < 256: + for i in range(len(used_palette_colors)): + if used_palette_colors[i] == transparency: + transparency = i + transparent_color_exists = True + break + else: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + if transparent_color_exists or duration != 0: + transparency_flag = 1 if transparent_color_exists else 0 + if not transparent_color_exists: + transparency = 0 + + fp.write(b"!" 
+ + o8(249) + # extension intro + o8(4) + # length + o8(transparency_flag) + # transparency info present + o16(duration) + # duration + o8(transparency) + # transparency index + o8(0)) + + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write(b"!" + + o8(255) + # extension intro + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) + # number of loops + o8(0)) + fp.write(b"," + + o16(offset[0]) + # offset + o16(offset[1]) + + o16(im.size[0]) + # size + o16(im.size[1]) + + o8(flags) + # flags + o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + + import os + from subprocess import Popen, check_call, PIPE, CalledProcessError + import tempfile + file = im._dump() + + if im.mode != "RGB": + with open(filename, 'wb') as f: + stderr = tempfile.TemporaryFile() + check_call(["ppmtogif", file], stdout=f, stderr=stderr) + else: + with open(filename, 'wb') as f: + + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) + quant_cmd = ["ppmquant", "256", file] + togif_cmd = ["ppmtogif"] + stderr = tempfile.TemporaryFile() + quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr) + stderr = tempfile.TemporaryFile() + togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f, + stderr=stderr) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise CalledProcessError(retcode, togif_cmd) + + try: + os.unlink(file) + except: + pass + + +# -------------------------------------------------------------------- +# GIF utilities + +def _get_optimize(im, info): + return im.mode in ("P", "L") and info and info.get("optimize", 0) + + +def _get_used_palette_colors(im): + used_palette_colors = [] + + # check which colors are used + i = 0 + for count in im.histogram(): + if count: + used_palette_colors.append(i) + i += 1 + + return used_palette_colors + + +def getheader(im, palette=None, info=None): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop"]: + if info and extensionKey in info and \ + not (extensionKey == "duration" and info[extensionKey] == 0): + version = b"89a" + break + else: + if im.info.get("version") == "89a": + version = b"89a" + + header = [ + b"GIF"+version + # signature + version + o16(im.size[0]) + # canvas width + o16(im.size[1]) # canvas height + ] + + if im.mode == "P": + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = bytearray([i//3 for i in range(768)]) + + used_palette_colors = palette_bytes = None + + if _get_optimize(im, info): + used_palette_colors = _get_used_palette_colors(im) + + # create the new palette if not every color is used + if len(used_palette_colors) < 256: + palette_bytes = b"" + new_positions = {} + + i = 0 + # pick only the used colors from the palette + for oldPosition in used_palette_colors: + palette_bytes += 
source_palette[oldPosition*3:oldPosition*3+3] + new_positions[oldPosition] = i + i += 1 + + # replace the palette color id of all pixel with the new id + image_bytes = bytearray(im.tobytes()) + for i in range(len(image_bytes)): + image_bytes[i] = new_positions[image_bytes[i]] + im.frombytes(bytes(image_bytes)) + new_palette_bytes = (palette_bytes + + (768 - len(palette_bytes)) * b'\x00') + im.putpalette(new_palette_bytes) + im.palette = ImagePalette.ImagePalette("RGB", + palette=palette_bytes, + size=len(palette_bytes)) + + if not palette_bytes: + palette_bytes = source_palette + + # Logical Screen Descriptor + # calculate the palette size for the header + import math + color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1 + if color_table_size < 0: + color_table_size = 0 + # size of global color table + global color table flag + header.append(o8(color_table_size + 128)) + # background + reserved/aspect + if info and "background" in info: + background = info["background"] + elif "background" in im.info: + # This elif is redundant within GifImagePlugin + # since im.info parameters are bundled into the info dictionary + # However, external scripts may call getheader directly + # So this maintains earlier behaviour + background = im.info["background"] + else: + background = 0 + header.append(o8(background) + o8(0)) + # end of Logical Screen Descriptor + + # add the missing amount of bytes + # the palette has to be 2< 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + + # Header + Logical Screen Descriptor + Global Color Table + header.append(palette_bytes) + return header, used_palette_colors + + +def getdata(im, offset=(0, 0), **params): + """Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data.""" + + class Collector(object): + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + try: + im.encoderinfo = params + + # local image header + _get_local_header(fp, im, offset, 0) + + ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])]) + + fp.write(b"\0") # end of image data + + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/Darwin/lib/python3.4/site-packages/PIL/GimpGradientFile.py b/Darwin/lib/python3.5/site-packages/PIL/GimpGradientFile.py similarity index 85% rename from Darwin/lib/python3.4/site-packages/PIL/GimpGradientFile.py rename to Darwin/lib/python3.5/site-packages/PIL/GimpGradientFile.py index 7c88add..45af573 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/GimpGradientFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/GimpGradientFile.py @@ -24,6 +24,7 @@ from PIL._binary import o8 EPSILON = 1e-10 + def linear(middle, pos): if pos <= middle: if middle < EPSILON: @@ -38,25 +39,30 @@ def linear(middle, pos): else: return 0.5 + 0.5 * pos / middle + def curved(middle, pos): return pos ** (log(0.5) / log(max(middle, 
EPSILON))) + def sine(middle, pos): return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + def sphere_increasing(middle, pos): return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + def sphere_decreasing(middle, pos): return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) -SEGMENTS = [ linear, curved, sine, sphere_increasing, sphere_decreasing ] +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] -class GradientFile: + +class GradientFile(object): gradient = None - def getpalette(self, entries = 256): + def getpalette(self, entries=256): palette = [] @@ -89,6 +95,7 @@ class GradientFile: return b"".join(palette), "RGBA" + ## # File handler for GIMP's gradient format. @@ -99,7 +106,13 @@ class GimpGradientFile(GradientFile): if fp.readline()[:13] != b"GIMP Gradient": raise SyntaxError("not a GIMP gradient file") - count = int(fp.readline()) + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) gradient = [] @@ -108,13 +121,13 @@ class GimpGradientFile(GradientFile): s = fp.readline().split() w = [float(x) for x in s[:11]] - x0, x1 = w[0], w[2] - xm = w[1] - rgb0 = w[3:7] - rgb1 = w[7:11] + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] segment = SEGMENTS[int(s[11])] - cspace = int(s[12]) + cspace = int(s[12]) if cspace != 0: raise IOError("cannot handle HSV colour space") diff --git a/Darwin/lib/python3.4/site-packages/PIL/GimpPaletteFile.py b/Darwin/lib/python3.5/site-packages/PIL/GimpPaletteFile.py similarity index 97% rename from Darwin/lib/python3.4/site-packages/PIL/GimpPaletteFile.py rename to Darwin/lib/python3.5/site-packages/PIL/GimpPaletteFile.py index 6f71ec6..4bf3ca3 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/GimpPaletteFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/GimpPaletteFile.py @@ -17,10 +17,11 @@ import re from PIL._binary import o8 + ## # File handler for GIMP's palette format. -class GimpPaletteFile: +class GimpPaletteFile(object): rawmode = "RGB" @@ -56,7 +57,6 @@ class GimpPaletteFile: self.palette = b"".join(self.palette) - def getpalette(self): return self.palette, self.rawmode diff --git a/Darwin/lib/python3.4/site-packages/PIL/GribStubImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/GribStubImagePlugin.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/GribStubImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/GribStubImagePlugin.py index d76585c..8ffad81 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/GribStubImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/GribStubImagePlugin.py @@ -13,6 +13,7 @@ from PIL import Image, ImageFile _handler = None + ## # Install application-specific GRIB image handler. 
# @@ -22,12 +23,14 @@ def register_handler(handler): global _handler _handler = handler + # -------------------------------------------------------------------- # Image adapter def _accept(prefix): return prefix[0:4] == b"GRIB" and prefix[7] == b'\x01' + class GribStubImageFile(ImageFile.StubImageFile): format = "GRIB" @@ -53,6 +56,7 @@ class GribStubImageFile(ImageFile.StubImageFile): def _load(self): return _handler + def _save(im, fp, filename): if _handler is None or not hasattr("_handler", "save"): raise IOError("GRIB save handler not installed") diff --git a/Darwin/lib/python3.4/site-packages/PIL/Hdf5StubImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/Hdf5StubImagePlugin.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/Hdf5StubImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/Hdf5StubImagePlugin.py index eb888d8..f7945be 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/Hdf5StubImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/Hdf5StubImagePlugin.py @@ -13,6 +13,7 @@ from PIL import Image, ImageFile _handler = None + ## # Install application-specific HDF5 image handler. # @@ -22,12 +23,14 @@ def register_handler(handler): global _handler _handler = handler + # -------------------------------------------------------------------- # Image adapter def _accept(prefix): return prefix[:8] == b"\x89HDF\r\n\x1a\n" + class HDF5StubImageFile(ImageFile.StubImageFile): format = "HDF5" diff --git a/Darwin/lib/python3.4/site-packages/PIL/IcnsImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/IcnsImagePlugin.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/IcnsImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/IcnsImagePlugin.py index ca7a149..a4366e9 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/IcnsImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/IcnsImagePlugin.py @@ -16,7 +16,12 @@ # from PIL import Image, ImageFile, PngImagePlugin, _binary -import struct, io +import io +import os +import shutil +import struct +import sys +import tempfile enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') if enable_jpeg2k: @@ -26,9 +31,11 @@ i8 = _binary.i8 HEADERSIZE = 8 + def nextheader(fobj): return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + def read_32t(fobj, start_length, size): # The 128x128 icon seems to have an extra header for some reason. (start, length) = start_length @@ -38,6 +45,7 @@ def read_32t(fobj, start_length, size): raise SyntaxError('Unknown signature, expecting 0x00000000') return read_32(fobj, (start + 4, length - 4), size) + def read_32(fobj, start_length, size): """ Read a 32bit RGB icon resource. 
Seems to be either uncompressed or @@ -83,9 +91,10 @@ def read_32(fobj, start_length, size): im.im.putband(band.im, band_ix) return {"RGB": im} + def read_mk(fobj, start_length, size): # Alpha masks seem to be uncompressed - (start, length) = start_length + start = start_length[0] fobj.seek(start) pixel_size = (size[0] * size[2], size[1] * size[2]) sizesq = pixel_size[0] * pixel_size[1] @@ -94,6 +103,7 @@ def read_mk(fobj, start_length, size): ) return {"A": band} + def read_png_or_jpeg2000(fobj, start_length, size): (start, length) = start_length fobj.seek(start) @@ -103,10 +113,11 @@ def read_png_or_jpeg2000(fobj, start_length, size): im = PngImagePlugin.PngImageFile(fobj) return {"RGBA": im} elif sig[:4] == b'\xff\x4f\xff\x51' \ - or sig[:4] == b'\x0d\x0a\x87\x0a' \ - or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + or sig[:4] == b'\x0d\x0a\x87\x0a' \ + or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': if not enable_jpeg2k: - raise ValueError('Unsupported icon subimage format (rebuild PIL with JPEG 2000 support to fix this)') + raise ValueError('Unsupported icon subimage format (rebuild PIL ' + 'with JPEG 2000 support to fix this)') # j2k, jpc or j2c fobj.seek(start) jp2kstream = fobj.read(length) @@ -118,7 +129,8 @@ def read_png_or_jpeg2000(fobj, start_length, size): else: raise ValueError('Unsupported icon subimage format') -class IcnsFile: + +class IcnsFile(object): SIZES = { (512, 512, 2): [ @@ -225,7 +237,7 @@ class IcnsFile: im = channels.get('RGBA', None) if im: return im - + im = channels.get("RGB").copy() try: im.putalpha(channels["A"]) @@ -233,12 +245,13 @@ class IcnsFile: pass return im + ## # Image plugin for Mac OS icons. class IcnsImageFile(ImageFile.ImageFile): """ - PIL read-only image support for Mac OS .icns files. + PIL image support for Mac OS .icns files. Chooses the best resolution, but will possibly load a different size image if you mutate the size attribute before calling 'load'. @@ -275,7 +288,7 @@ class IcnsImageFile(ImageFile.ImageFile): # If this is a PNG or JPEG 2000, it won't be loaded yet im.load() - + self.im = im.im self.mode = im.mode self.size = im.size @@ -284,11 +297,63 @@ class IcnsImageFile(ImageFile.ImageFile): self.tile = () self.load_end() -Image.register_open("ICNS", IcnsImageFile, lambda x: x[:4] == b'icns') -Image.register_extension("ICNS", '.icns') + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the OS X command line utility 'iconutil'. + + OS X only. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + iconset = tempfile.mkdtemp('.iconset') + last_w = None + last_im = None + for w in [16, 32, 128, 256, 512]: + prefix = 'icon_{}x{}'.format(w, w) + + if last_w == w: + im_scaled = last_im + else: + im_scaled = im.resize((w, w), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'.png')) + + im_scaled = im.resize((w*2, w*2), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'@2x.png')) + last_im = im_scaled + + # iconutil -c icns -o {} {} + from subprocess import Popen, PIPE, CalledProcessError + + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + stderr = tempfile.TemporaryFile() + convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=stderr) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + # remove the temporary files + shutil.rmtree(iconset) + + if retcode: + raise CalledProcessError(retcode, convert_cmd) + +Image.register_open(IcnsImageFile.format, IcnsImageFile, + lambda x: x[:4] == b'icns') +Image.register_extension(IcnsImageFile.format, '.icns') + +if sys.platform == 'darwin': + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + if __name__ == '__main__': - import os, sys imf = IcnsImageFile(open(sys.argv[1], 'rb')) for size in imf.info['sizes']: imf.size = size diff --git a/Darwin/lib/python3.4/site-packages/PIL/IcoImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/IcoImagePlugin.py similarity index 71% rename from Darwin/lib/python3.4/site-packages/PIL/IcoImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/IcoImagePlugin.py index 268e93d..0b8f469 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/IcoImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/IcoImagePlugin.py @@ -13,7 +13,8 @@ # See the README file for information on usage and redistribution. # -# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis . +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . # https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin # # Icon format references: @@ -21,11 +22,14 @@ # * http://msdn.microsoft.com/en-us/library/ms997538.aspx -__version__ = "0.1" +import struct +from io import BytesIO from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary from math import log, ceil +__version__ = "0.1" + # # -------------------------------------------------------------------- @@ -35,11 +39,47 @@ i32 = _binary.i32le _MAGIC = b"\0\0\1\0" + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get("sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (255, 255)]) + width, height = im.size + filter(lambda x: False if (x[0] > width or x[1] > height or + x[0] > 255 or x[1] > 255) else True, sizes) + fp.write(struct.pack("=8bpp) + 'nb_color': i8(s[2]), # No. of colors in image (0 if >=8bpp) 'reserved': i8(s[3]), 'planes': i16(s[4:]), 'bpp': i16(s[6:]), @@ -78,10 +118,14 @@ class IcoFile: # See Wikipedia notes about color depth. 
# We need this just to differ images with equal sizes - icon_header['color_depth'] = (icon_header['bpp'] or (icon_header['nb_color'] != 0 and ceil(log(icon_header['nb_color'],2))) or 256) + icon_header['color_depth'] = (icon_header['bpp'] or + (icon_header['nb_color'] != 0 and + ceil(log(icon_header['nb_color'], + 2))) or 256) icon_header['dim'] = (icon_header['width'], icon_header['height']) - icon_header['square'] = icon_header['width'] * icon_header['height'] + icon_header['square'] = (icon_header['width'] * + icon_header['height']) self.entry.append(icon_header) @@ -102,7 +146,7 @@ class IcoFile: Get an image from the icon """ for (i, h) in enumerate(self.entry): - if size == h['dim'] and (bpp == False or bpp == h['color_depth']): + if size == h['dim'] and (bpp is False or bpp == h['color_depth']): return self.frame(i) return self.frame(0) @@ -127,7 +171,7 @@ class IcoFile: # change tile dimension to only encompass XOR image im.size = (im.size[0], int(im.size[1] / 2)) d, e, o, a = im.tile[0] - im.tile[0] = d, (0,0) + im.size, o, a + im.tile[0] = d, (0, 0) + im.size, o, a # figure out where AND mask image starts mode = a[0] @@ -139,8 +183,9 @@ class IcoFile: if 32 == bpp: # 32-bit color depth icon image allows semitransparent areas - # PIL's DIB format ignores transparency bits, recover them - # The DIB is packed in BGRX byte order where X is the alpha channel + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. # Back up to start of bmp data self.buf.seek(o) @@ -162,9 +207,11 @@ class IcoFile: # bitmap row data is aligned to word boundaries w += 32 - (im.size[0] % 32) - # the total mask data is padded row size * height / bits per char + # the total mask data is + # padded row size * height / bits per char - and_mask_offset = o + int(im.size[0] * im.size[1] * (bpp / 8.0)) + and_mask_offset = o + int(im.size[0] * im.size[1] * + (bpp / 8.0)) total_bytes = int((w * im.size[1]) / 8) self.buf.seek(and_mask_offset) @@ -187,6 +234,7 @@ class IcoFile: return im + ## # Image plugin for Windows Icon files. @@ -194,15 +242,16 @@ class IcoImageFile(ImageFile.ImageFile): """ PIL read-only image support for Microsoft Windows .ico files. - By default the largest resolution image in the file will be loaded. This can - be changed by altering the 'size' attribute before calling 'load'. + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. The info dictionary has a key 'sizes' that is a list of the sizes available in the icon file. Handles classic, XP and Vista icon formats. - This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis . + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin """ format = "ICO" @@ -222,12 +271,13 @@ class IcoImageFile(ImageFile.ImageFile): self.mode = im.mode self.size = im.size - def load_seek(self): - # Flage the ImageFile.Parser so that it just does all the decode at the end. + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. 
pass # # -------------------------------------------------------------------- -Image.register_open("ICO", IcoImageFile, _accept) -Image.register_extension("ICO", ".ico") +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/ImImagePlugin.py similarity index 84% rename from Darwin/lib/python3.4/site-packages/PIL/ImImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/ImImagePlugin.py index a5eeef7..dd4f829 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImImagePlugin.py @@ -26,11 +26,11 @@ # -__version__ = "0.7" - import re from PIL import Image, ImageFile, ImagePalette -from PIL._binary import i8, o8 +from PIL._binary import i8 + +__version__ = "0.7" # -------------------------------------------------------------------- @@ -46,8 +46,8 @@ SCALE = "Scale (x,y)" SIZE = "Image size (x*y)" MODE = "Image type" -TAGS = { COMMENT:0, DATE:0, EQUIPMENT:0, FRAMES:0, LUT:0, NAME:0, - SCALE:0, SIZE:0, MODE:0 } +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, + SCALE: 0, SIZE: 0, MODE: 0} OPEN = { # ifunc93/p3cfunc formats @@ -94,12 +94,14 @@ for i in range(2, 33): split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + def number(s): try: return int(s) except ValueError: return float(s) + ## # Image plugin for the IFUNC IM file format. @@ -113,7 +115,7 @@ class ImImageFile(ImageFile.ImageFile): # Quick rejection: if there's not an LF among the first # 100 bytes, this is (probably) not a text header. - if not b"\n" in self.fp.read(100): + if b"\n" not in self.fp.read(100): raise SyntaxError("not an IM file") self.fp.seek(0) @@ -155,10 +157,10 @@ class ImImageFile(ImageFile.ImageFile): if m: - k, v = m.group(1,2) + k, v = m.group(1, 2) - # Don't know if this is the correct encoding, but a decent guess - # (I guess) + # Don't know if this is the correct encoding, + # but a decent guess (I guess) k = k.decode('latin-1', 'replace') v = v.decode('latin-1', 'replace') @@ -186,7 +188,8 @@ class ImImageFile(ImageFile.ImageFile): else: - raise SyntaxError("Syntax error in IM header: " + s.decode('ascii', 'replace')) + raise SyntaxError("Syntax error in IM header: " + + s.decode('ascii', 'replace')) if not n: raise SyntaxError("Not an IM file") @@ -204,8 +207,8 @@ class ImImageFile(ImageFile.ImageFile): if LUT in self.info: # convert lookup table to palette or lut attribute palette = self.fp.read(768) - greyscale = 1 # greyscale palette - linear = 1 # linear greyscale palette + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette for i in range(256): if palette[i] == palette[i+256] == palette[i+512]: if i8(palette[i]) != i: @@ -230,7 +233,7 @@ class ImImageFile(ImageFile.ImageFile): self.__offset = offs = self.fp.tell() - self.__fp = self.fp # FIXME: hack + self.__fp = self.fp # FIXME: hack if self.rawmode[:2] == "F;": @@ -239,7 +242,7 @@ class ImImageFile(ImageFile.ImageFile): # use bit decoder (if necessary) bits = int(self.rawmode[2:]) if bits not in [8, 16, 32]: - self.tile = [("bit", (0,0)+self.size, offs, + self.tile = [("bit", (0, 0)+self.size, offs, (bits, 8, 3, 0, -1))] return except ValueError: @@ -249,12 +252,21 @@ class ImImageFile(ImageFile.ImageFile): # Old LabEye/3PC files. 
Would be very surprised if anyone # ever stumbled upon such a file ;-) size = self.size[0] * self.size[1] - self.tile = [("raw", (0,0)+self.size, offs, ("G", 0, -1)), - ("raw", (0,0)+self.size, offs+size, ("R", 0, -1)), - ("raw", (0,0)+self.size, offs+2*size, ("B", 0, -1))] + self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), + ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), + ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] else: # LabEye/IFUNC files - self.tile = [("raw", (0,0)+self.size, offs, (self.rawmode, 0, -1))] + self.tile = [("raw", (0, 0)+self.size, offs, + (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 def seek(self, frame): @@ -276,7 +288,7 @@ class ImImageFile(ImageFile.ImageFile): self.fp = self.__fp - self.tile = [("raw", (0,0)+self.size, offs, (self.rawmode, 0, -1))] + self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] def tell(self): @@ -305,10 +317,11 @@ SAVE = { "YCbCr": ("YCC", "YCbCr;L") } + def _save(im, fp, filename, check=0): try: - type, rawmode = SAVE[im.mode] + image_type, rawmode = SAVE[im.mode] except KeyError: raise ValueError("Cannot save %s images as IM" % im.mode) @@ -320,7 +333,7 @@ def _save(im, fp, filename, check=0): if check: return check - fp.write(("Image type: %s image\r\n" % type).encode('ascii')) + fp.write(("Image type: %s image\r\n" % image_type).encode('ascii')) if filename: fp.write(("Name: %s\r\n" % filename).encode('ascii')) fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) @@ -329,14 +342,14 @@ def _save(im, fp, filename, check=0): fp.write(b"Lut: 1\r\n") fp.write(b"\000" * (511-fp.tell()) + b"\032") if im.mode == "P": - fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, 0, -1))]) + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) # # -------------------------------------------------------------------- # Registry -Image.register_open("IM", ImImageFile) -Image.register_save("IM", _save) +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) -Image.register_extension("IM", ".im") +Image.register_extension(ImImageFile.format, ".im") diff --git a/Darwin/lib/python3.4/site-packages/PIL/Image.py b/Darwin/lib/python3.5/site-packages/PIL/Image.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/PIL/Image.py rename to Darwin/lib/python3.5/site-packages/PIL/Image.py index 787e602..06bf7ce 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/Image.py +++ b/Darwin/lib/python3.5/site-packages/PIL/Image.py @@ -28,13 +28,17 @@ from __future__ import print_function from PIL import VERSION, PILLOW_VERSION, _plugins +import logging import warnings +logger = logging.getLogger(__name__) + class DecompressionBombWarning(RuntimeWarning): pass -class _imaging_not_installed: + +class _imaging_not_installed(object): # module placeholder def __getattr__(self, id): raise ImportError("The _imaging C module is not installed") @@ -54,10 +58,11 @@ except ImportError: pass try: - # If the _imaging C module is not present, you can still use - # the "open" function to identify files, but you cannot load - # them. Note that other modules should not refer to _imaging - # directly; import Image and use the Image.core variable instead. + # If the _imaging C module is not present, Pillow will not load. 
+ # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. from PIL import _imaging as core if PILLOW_VERSION != getattr(core, 'PILLOW_VERSION', None): raise ImportError("The _imaging extension was built for another " @@ -90,6 +95,7 @@ except ImportError as v: RuntimeWarning ) # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting-pil-to-pillow.rst raise try: @@ -106,6 +112,8 @@ from PIL._util import deferred_error import os import sys +import io +import struct # type stuff import collections @@ -116,7 +124,7 @@ USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') try: import cffi HAS_CFFI = True -except: +except ImportError: HAS_CFFI = False @@ -133,11 +141,6 @@ def isImageType(t): """ return hasattr(t, "im") -# -# Debug level - -DEBUG = 0 - # # Constants (also defined in _imagingmodule.c!) @@ -149,6 +152,7 @@ FLIP_TOP_BOTTOM = 1 ROTATE_90 = 2 ROTATE_180 = 3 ROTATE_270 = 4 +TRANSPOSE = 5 # transforms AFFINE = 0 @@ -158,11 +162,10 @@ QUAD = 3 MESH = 4 # resampling filters -NONE = 0 -NEAREST = 0 -ANTIALIAS = 1 # 3-lobed lanczos -LINEAR = BILINEAR = 2 -CUBIC = BICUBIC = 3 +NEAREST = NONE = 0 +LANCZOS = ANTIALIAS = 1 +BILINEAR = LINEAR = 2 +BICUBIC = CUBIC = 3 # dithers NONE = 0 @@ -199,6 +202,7 @@ ID = [] OPEN = {} MIME = {} SAVE = {} +SAVE_ALL = {} EXTENSION = {} # -------------------------------------------------------------------- @@ -220,6 +224,7 @@ _MODEINFO = { "CMYK": ("RGB", "L", ("C", "M", "Y", "K")), "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), "LAB": ("RGB", "L", ("L", "A", "B")), + "HSV": ("RGB", "L", ("H", "S", "V")), # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and # BGR;24. Use these modes only if you know exactly what you're @@ -380,13 +385,10 @@ def init(): for plugin in _plugins: try: - if DEBUG: - print ("Importing %s" % plugin) + logger.debug("Importing %s", plugin) __import__("PIL.%s" % plugin, globals(), locals(), []) - except ImportError: - if DEBUG: - print("Image: failed to import", end=' ') - print(plugin, ":", sys.exc_info()[1]) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) if OPEN or SAVE: _initialized = 2 @@ -437,7 +439,7 @@ def coerce_e(value): return value if isinstance(value, _E) else _E(value) -class _E: +class _E(object): def __init__(self, data): self.data = data @@ -472,7 +474,7 @@ def _getscaleoffset(expr): # -------------------------------------------------------------------- # Implementation wrapper -class Image: +class Image(object): """ This class represents an image object. To create :py:class:`~PIL.Image.Image` objects, use the appropriate factory @@ -498,12 +500,21 @@ class Image: self.readonly = 0 self.pyaccess = None + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + def _new(self, im): new = Image() new.im = im new.mode = im.mode new.size = im.size - new.palette = self.palette + if self.palette: + new.palette = self.palette.copy() if im.mode == "P" and not new.palette: from PIL import ImagePalette new.palette = ImagePalette.ImagePalette() @@ -529,7 +540,7 @@ class Image: """ Closes the file pointer, if possible. - This operation will destroy the image core and release it's memory. + This operation will destroy the image core and release its memory. The image data will be unusable afterward. 
This function is only required to close images that have not @@ -539,8 +550,7 @@ class Image: try: self.fp.close() except Exception as msg: - if DEBUG: - print ("Error closing: %s" % msg) + logger.debug("Error closing: %s" % msg) # Instead of simply setting to None, we're setting up a # deferred error that will better explain that the core image @@ -554,7 +564,6 @@ class Image: self.readonly = 0 def _dump(self, file=None, format=None): - import os import tempfile suffix = '' if format: @@ -573,6 +582,8 @@ class Image: return file def __eq__(self, other): + if self.__class__.__name__ != other.__class__.__name__: + return False a = (self.mode == other.mode) b = (self.size == other.size) c = (self.getpalette() == other.getpalette()) @@ -593,6 +604,16 @@ class Image: id(self) ) + def _repr_png_(self): + """ iPython display hook support + + :returns: png version of the image as bytes + """ + from io import BytesIO + b = BytesIO() + self.save(b, 'PNG') + return b.getvalue() + def __getattr__(self, name): if name == "__array_interface__": # numpy array interface support @@ -620,7 +641,7 @@ class Image: self.mode = mode self.size = size self.im = core.new(mode, size) - if mode in ("L", "P"): + if mode in ("L", "P") and palette: self.putpalette(palette) self.frombytes(data) @@ -660,14 +681,9 @@ class Image: return b"".join(data) - # Declare tostring as alias to tobytes def tostring(self, *args, **kw): - warnings.warn( - 'tostring() is deprecated. Please call tobytes() instead.', - DeprecationWarning, - stacklevel=2, - ) - return self.tobytes(*args, **kw) + raise Exception("tostring() has been removed. " + + "Please call tobytes() instead.") def tobitmap(self, name="image"): """ @@ -717,14 +733,8 @@ class Image: raise ValueError("cannot decode image data") def fromstring(self, *args, **kw): - """Deprecated alias to frombytes. - - .. deprecated:: 2.0 - """ - warnings.warn( - 'fromstring() is deprecated. Please call frombytes() instead.', - DeprecationWarning) - return self.frombytes(*args, **kw) + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") def load(self): """ @@ -735,6 +745,7 @@ class Image: associated with the image. :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` """ if self.im and self.palette and self.palette.dirty: # realize palette @@ -794,9 +805,9 @@ class Image: use other thresholds, use the :py:meth:`~PIL.Image.Image.point` method. - :param mode: The requested mode. + :param mode: The requested mode. See: :ref:`concept-modes`. :param matrix: An optional conversion matrix. If given, this - should be 4- or 16-tuple containing floating point values. + should be 4- or 12-tuple containing floating point values. :param dither: Dithering method, used when converting from mode "RGB" to "P" or from "RGB" or "L" to "1". Available methods are NONE or FLOYDSTEINBERG (default). @@ -845,8 +856,9 @@ class Image: t = self.info['transparency'] if isinstance(t, bytes): # Dragons. This can't be represented by a single color - warnings.warn('Palette images with Transparency expressed ' + - ' in bytes should be converted to RGBA images') + warnings.warn('Palette images with Transparency ' + + ' expressed in bytes should be converted ' + + 'to RGBA images') delete_trns = True else: # get the new transparency color. @@ -862,11 +874,20 @@ class Image: # can't just retrieve the palette number, got to do it # after quantization. 
trns_im = trns_im.convert('RGB') - trns = trns_im.getpixel((0,0)) + trns = trns_im.getpixel((0, 0)) elif self.mode == 'P' and mode == 'RGBA': + t = self.info['transparency'] delete_trns = True + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should" + + " be bytes or int") + if mode == "P" and palette == ADAPTIVE: im = self.im.quantize(colors) new = self._new(im) @@ -918,14 +939,19 @@ class Image: return new_im def quantize(self, colors=256, method=None, kmeans=0, palette=None): + """ + Convert the image to 'P' mode with the specified number + of colors. - # methods: - # 0 = median cut - # 1 = maximum coverage - # 2 = fast octree + :param colors: The desired number of colors, <= 256 + :param method: 0 = median cut + 1 = maximum coverage + 2 = fast octree + :param kmeans: Integer + :param palette: Quantize to the :py:class:`PIL.ImagingPalette` palette. + :returns: A new image - # NOTE: this functionality will be moved to the extended - # quantizer interface in a later version of PIL. + """ self.load() @@ -992,8 +1018,6 @@ class Image: def draft(self, mode, size): """ - NYI - Configures the image file loader so it returns a version of the image that as closely as possible matches the given mode and size. For example, you can use this method to convert a color @@ -1209,27 +1233,8 @@ class Image: return self.im.histogram() def offset(self, xoffset, yoffset=None): - """ - .. deprecated:: 2.0 - - .. note:: New code should use :py:func:`PIL.ImageChops.offset`. - - Returns a copy of the image where the data has been offset by the given - distances. Data wraps around the edges. If **yoffset** is omitted, it - is assumed to be equal to **xoffset**. - - :param xoffset: The horizontal distance. - :param yoffset: The vertical distance. If omitted, both - distances are set to the same value. - :returns: An :py:class:`~PIL.Image.Image` object. - """ - if warnings: - warnings.warn( - "'offset' is deprecated; use 'ImageChops.offset' instead", - DeprecationWarning, stacklevel=2 - ) - from PIL import ImageChops - return ImageChops.offset(self, xoffset, yoffset) + raise Exception("offset() has been removed. " + + "Please call ImageChops.offset() instead.") def paste(self, im, box=None, mask=None): """ @@ -1253,11 +1258,11 @@ class Image: images (in the latter case, the alpha band is used as mask). Where the mask is 255, the given image is copied as is. Where the mask is 0, the current value is preserved. Intermediate - values can be used for transparency effects. + values will mix the two images together, including their alpha + channels if they have them. - Note that if you paste an "RGBA" image, the alpha band is - ignored. You can work around this by using the same image as - both source image and mask. + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. :param im: Source image or pixel value (integer or tuple). :param box: An optional 4-tuple giving the region to paste into. @@ -1498,36 +1503,30 @@ class Image: (width, height). :param resample: An optional resampling filter. This can be one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), - :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 - environment), :py:attr:`PIL.Image.BICUBIC` (cubic spline - interpolation in a 4x4 environment), or - :py:attr:`PIL.Image.ANTIALIAS` (a high-quality downsampling filter). 
+ :py:attr:`PIL.Image.BILINEAR` (linear interpolation), + :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation), or + :py:attr:`PIL.Image.LANCZOS` (a high-quality downsampling filter). If omitted, or if the image has mode "1" or "P", it is set :py:attr:`PIL.Image.NEAREST`. :returns: An :py:class:`~PIL.Image.Image` object. """ - if resample not in (NEAREST, BILINEAR, BICUBIC, ANTIALIAS): + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS): raise ValueError("unknown resampling filter") self.load() + size = tuple(size) + if self.size == size: + return self._new(self.im) + if self.mode in ("1", "P"): resample = NEAREST if self.mode == 'RGBA': return self.convert('RGBa').resize(size, resample).convert('RGBA') - if resample == ANTIALIAS: - # requires stretch support (imToolkit & PIL 1.1.3) - try: - im = self.im.stretch(size, resample) - except AttributeError: - raise ValueError("unsupported resampling filter") - else: - im = self.im.resize(size, resample) - - return self._new(im) + return self._new(self.im.resize(size, resample)) def rotate(self, angle, resample=NEAREST, expand=0): """ @@ -1588,7 +1587,7 @@ class Image: if self.mode in ("1", "P"): resample = NEAREST - return self._new(self.im.rotate(angle, resample)) + return self._new(self.im.rotate(angle, resample, expand)) def save(self, fp, format=None, **params): """ @@ -1598,15 +1597,16 @@ class Image: Keyword options can be used to provide additional instructions to the writer. If a writer doesn't recognise an option, it is - silently ignored. The available options are described later in - this handbook. + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. You can use a file object instead of a filename. In this case, you must always specify the format. The file object must - implement the **seek**, **tell**, and **write** + implement the ``seek``, ``tell``, and ``write`` methods, and be opened in binary mode. - :param file: File name or file object. + :param fp: A filename (string), pathlib.Path object or file object. :param format: Optional format override. If omitted, the format to use is determined from the filename extension. If a file object was used instead of a filename, this @@ -1619,17 +1619,23 @@ class Image: may have been created, and may contain partial data. """ + filename = "" if isPath(fp): filename = fp - else: - if hasattr(fp, "name") and isPath(fp.name): - filename = fp.name - else: - filename = "" + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + elif hasattr(fp, "name") and isPath(fp.name): + filename = fp.name # may mutate self! 
self.load() + save_all = False + if 'save_all' in params: + save_all = params['save_all'] + del params['save_all'] self.encoderinfo = params self.encoderconfig = () @@ -1638,23 +1644,19 @@ class Image: ext = os.path.splitext(filename)[1].lower() if not format: - try: - format = EXTENSION[ext] - except KeyError: + if ext not in EXTENSION: init() - try: - format = EXTENSION[ext] - except KeyError: - raise KeyError(ext) # unknown extension + format = EXTENSION[ext] - try: - save_handler = SAVE[format.upper()] - except KeyError: + if format.upper() not in SAVE: init() - save_handler = SAVE[format.upper()] # unknown format + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] - if isPath(fp): - fp = builtins.open(fp, "wb") + if filename: + fp = builtins.open(filename, "wb") close = 1 else: close = 0 @@ -1733,7 +1735,7 @@ class Image: """ return 0 - def thumbnail(self, size, resample=ANTIALIAS): + def thumbnail(self, size, resample=BICUBIC): """ Make this image into a thumbnail. This method modifies the image to contain a thumbnail version of itself, no larger than @@ -1742,12 +1744,7 @@ class Image: :py:meth:`~PIL.Image.Image.draft` method to configure the file reader (where applicable), and finally resizes the image. - Note that the bilinear and bicubic filters in the current - version of PIL are not well-suited for thumbnail generation. - You should use :py:attr:`PIL.Image.ANTIALIAS` unless speed is much more - important than quality. - - Also note that this function modifies the :py:class:`~PIL.Image.Image` + Note that this function modifies the :py:class:`~PIL.Image.Image` object in place. If you need to use the full resolution image as well, apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original image. @@ -1755,10 +1752,9 @@ class Image: :param size: Requested size. :param resample: Optional resampling filter. This can be one of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, - :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.ANTIALIAS` - (best quality). If omitted, it defaults to - :py:attr:`PIL.Image.ANTIALIAS`. (was :py:attr:`PIL.Image.NEAREST` - prior to version 2.5.0) + :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) :returns: None """ @@ -1777,14 +1773,7 @@ class Image: self.draft(None, size) - self.load() - - try: - im = self.resize(size, resample) - except ValueError: - if resample != ANTIALIAS: - raise - im = self.resize(size, NEAREST) # fallback + im = self.resize(size, resample) self.im = im.im self.mode = im.mode @@ -1793,7 +1782,7 @@ class Image: self.readonly = 0 self.pyaccess = None - # FIXME: the different tranform methods need further explanation + # FIXME: the different transform methods need further explanation # instead of bloating the method docs, add a separate chapter. def transform(self, size, method, data=None, resample=NEAREST, fill=1): """ @@ -1900,14 +1889,38 @@ class Image: :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`, :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`, - :py:attr:`PIL.Image.ROTATE_180`, or :py:attr:`PIL.Image.ROTATE_270`. + :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270` or + :py:attr:`PIL.Image.TRANSPOSE`. :returns: Returns a flipped or rotated copy of this image. 
""" self.load() - im = self.im.transpose(method) + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + im = self.im.effect_spread(distance) return self._new(im) + def toqimage(self): + """Returns a QImage copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + # -------------------------------------------------------------------- # Lazy operations @@ -1948,12 +1961,12 @@ class _ImageCrop(Image): # -------------------------------------------------------------------- # Abstract handlers. -class ImagePointHandler: +class ImagePointHandler(object): # used as a mixin by point transforms (for use with im.point) pass -class ImageTransformHandler: +class ImageTransformHandler(object): # used as a mixin by geometry transforms (for use with im.transform) pass @@ -1974,7 +1987,8 @@ def new(mode, size, color=0): """ Creates a new image with the given mode and size. - :param mode: The mode to use for the new image. + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. :param size: A 2-tuple, containing (width, height) in pixels. :param color: What color to use for the image. Default is black. If given, this should be a single integer or floating point value @@ -2007,14 +2021,14 @@ def frombytes(mode, size, data, decoder_name="raw", *args): You can also use any pixel decoder supported by PIL. For more information on available decoders, see the section - **Writing Your Own File Decoder**. + :ref:`Writing Your Own File Decoder `. Note that this function decodes pixel data only, not entire images. If you have an entire image in a string, wrap it in a :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load it. - :param mode: The image mode. + :param mode: The image mode. See: :ref:`concept-modes`. :param size: The image size. :param data: A byte buffer containing raw data for the given mode. :param decoder_name: What decoder to use. @@ -2035,16 +2049,8 @@ def frombytes(mode, size, data, decoder_name="raw", *args): def fromstring(*args, **kw): - """Deprecated alias to frombytes. - - .. deprecated:: 2.0 - """ - warnings.warn( - 'fromstring() is deprecated. Please call frombytes() instead.', - DeprecationWarning, - stacklevel=2 - ) - return frombytes(*args, **kw) + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") def frombuffer(mode, size, data, decoder_name="raw", *args): @@ -2066,7 +2072,7 @@ def frombuffer(mode, size, data, decoder_name="raw", *args): issues a warning if you do this; to disable the warning, you should provide the full set of parameters. See below for details. - :param mode: The image mode. + :param mode: The image mode. See: :ref:`concept-modes`. :param size: The image size. :param data: A bytes or other buffer object containing raw data for the given mode. @@ -2117,7 +2123,8 @@ def fromarray(obj, mode=None): :param obj: Object with array interface :param mode: Mode to use (will be determined from type if None) - :returns: An image memory. + See: :ref:`concept-modes`. + :returns: An image object. .. 
versionadded:: 1.1.6 """ @@ -2155,6 +2162,22 @@ def fromarray(obj, mode=None): return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + _fromarray_typemap = { # (shape, typestr) => mode, rawmode # first two members of shape are set to one @@ -2202,9 +2225,10 @@ def open(fp, mode="r"): :py:meth:`~PIL.Image.Image.load` method). See :py:func:`~PIL.Image.new`. - :param file: A filename (string) or a file object. The file object - must implement :py:meth:`~file.read`, :py:meth:`~file.seek`, and - :py:meth:`~file.tell` methods, and be opened in binary mode. + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement :py:meth:`~file.read`, + :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods, + and be opened in binary mode. :param mode: The mode. If given, this argument must be "r". :returns: An :py:class:`~PIL.Image.Image` object. :exception IOError: If the file cannot be found, or the image cannot be @@ -2214,31 +2238,26 @@ def open(fp, mode="r"): if mode != "r": raise ValueError("bad mode %r" % mode) + filename = "" if isPath(fp): filename = fp - fp = builtins.open(fp, "rb") - else: - filename = "" + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + if filename: + fp = builtins.open(filename, "rb") + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) prefix = fp.read(16) preinit() - for i in ID: - try: - factory, accept = OPEN[i] - if not accept or accept(prefix): - fp.seek(0) - im = factory(fp, filename) - _decompression_bomb_check(im.size) - return im - except (SyntaxError, IndexError, TypeError): - # import traceback - # traceback.print_exc() - pass - - if init(): - + def _open_core(fp, filename, prefix): for i in ID: try: factory, accept = OPEN[i] @@ -2247,18 +2266,29 @@ def open(fp, mode="r"): im = factory(fp, filename) _decompression_bomb_check(im.size) return im - except (SyntaxError, IndexError, TypeError): - # import traceback - # traceback.print_exc() - pass + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. + #logger.debug("", exc_info=True) + continue + return None + + im = _open_core(fp, filename, prefix) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix) + + if im: + return im raise IOError("cannot identify image file %r" % (filename if filename else fp)) - # # Image processing. + def alpha_composite(im1, im2): """ Alpha composite im2 over im1. @@ -2304,7 +2334,7 @@ def composite(image1, image2, mask): :param image1: The first image. :param image2: The second image. Must have the same mode and size as the first image. - :param mask: A mask image. This image can can have mode + :param mask: A mask image. This image can have mode "1", "L", or "RGBA", and must have the same size as the other two images. """ @@ -2334,7 +2364,8 @@ def merge(mode, bands): """ Merge a set of single band images into a new multiband image. 
- :param mode: The mode to use for the output image. + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. :param bands: A sequence containing one single-band image for each band in the output image. All bands must have the same size. @@ -2395,6 +2426,18 @@ def register_save(id, driver): SAVE[id.upper()] = driver +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + def register_extension(id, extension): """ Registers an image extension. This function should not be @@ -2417,3 +2460,32 @@ def _show(image, **options): def _showxv(image, title=None, **options): from PIL import ImageShow ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + +# End of file diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageChops.py b/Darwin/lib/python3.5/site-packages/PIL/ImageChops.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/PIL/ImageChops.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageChops.py diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageCms.py b/Darwin/lib/python3.5/site-packages/PIL/ImageCms.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/PIL/ImageCms.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageCms.py index fc17695..ebf127d 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageCms.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageCms.py @@ -1,19 +1,19 @@ -""" -The Python Imaging Library. -$Id$ +# The Python Imaging Library. +# $Id$ -Optional color managment support, based on Kevin Cazabon's PyCMS -library. +# Optional color managment support, based on Kevin Cazabon's PyCMS +# library. -History: -2009-03-08 fl Added to PIL. +# History: -Copyright (C) 2002-2003 Kevin Cazabon -Copyright (c) 2009 by Fredrik Lundh +# 2009-03-08 fl Added to PIL. -See the README file for information on usage and redistribution. See -below for the original description. -""" +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. from __future__ import print_function @@ -64,7 +64,7 @@ pyCMS 0.0.2 alpha Jan 6, 2002 - Added try/except statements arount type() checks of + Added try/except statements around type() checks of potential CObjects... Python won't let you use type() on them, and raises a TypeError (stupid, if you ask me!) @@ -90,8 +90,8 @@ try: except ImportError as ex: # Allow error import for doc purposes, but error out when accessing # anything in core. 
- from _util import import_err - _imagingcms = import_err(ex) + from _util import deferred_error + _imagingcms = deferred_error(ex) from PIL._util import isStringType core = _imagingcms @@ -123,8 +123,8 @@ FLAGS = { "NOTCACHE": 64, # Inhibit 1-pixel cache "NOTPRECALC": 256, "NULLTRANSFORM": 512, # Don't transform anyway - "HIGHRESPRECALC": 1024, # Use more memory to give better accurancy - "LOWRESPRECALC": 2048, # Use less memory to minimize resouces + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources "WHITEBLACKCOMPENSATION": 8192, "BLACKPOINTCOMPENSATION": 8192, "GAMUTCHECK": 4096, # Out of Gamut alarm @@ -147,11 +147,16 @@ for flag in FLAGS.values(): ## # Profile. -class ImageCmsProfile: +class ImageCmsProfile(object): def __init__(self, profile): - # accepts a string (filename), a file-like object, or a low-level - # profile object + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + if isStringType(profile): self._set(core.profile_open(profile), profile) elif hasattr(profile, "read"): @@ -169,12 +174,23 @@ class ImageCmsProfile: self.product_name = None self.product_info = None + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + class ImageCmsTransform(Image.ImagePointHandler): - """Transform. This can be used with the procedural API, or with the - standard Image.point() method. - """ + # Transform. This can be used with the procedural API, or with the + # standard Image.point() method. + # + # Will return the output profile in the output.info['icc_profile']. def __init__(self, input, output, input_mode, output_mode, intent=INTENT_PERCEPTUAL, proof=None, @@ -197,6 +213,8 @@ class ImageCmsTransform(Image.ImagePointHandler): self.input_mode = self.inputMode = input_mode self.output_mode = self.outputMode = output_mode + self.output_profile = output + def point(self, im): return self.apply(im) @@ -205,6 +223,7 @@ class ImageCmsTransform(Image.ImagePointHandler): if imOut is None: imOut = Image.new(self.output_mode, im.size, None) self.transform.apply(im.im.id, imOut.im.id) + imOut.info['icc_profile'] = self.output_profile.tobytes() return imOut def apply_in_place(self, im): @@ -212,6 +231,7 @@ class ImageCmsTransform(Image.ImagePointHandler): if im.mode != self.output_mode: raise ValueError("mode mismatch") # wrong output mode self.transform.apply(im.im.id, im.im.id) + im.info['icc_profile'] = self.output_profile.tobytes() return im @@ -553,7 +573,7 @@ def applyTransform(im, transform, inPlace=0): This function applies a pre-calculated transform (from ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) to an image. The transform can be used for multiple images, saving - considerable calcuation time if doing the same conversion multiple times. + considerable calculation time if doing the same conversion multiple times. If you want to modify im in-place instead of receiving a new image as the return value, set inPlace to TRUE. This can only be done if @@ -570,7 +590,7 @@ def applyTransform(im, transform, inPlace=0): with the transform applied is returned (and im is not changed). The default is False. :returns: Either None, or a new PIL Image object, depending on the value of - inPlace + inPlace. 
The profile will be returned in the image's info['icc_profile']. :exception PyCMSError: """ @@ -637,7 +657,7 @@ def getProfileName(profile): (pyCMS) Gets the internal product name for the given profile. - If profile isn't a valid CmsProfile object or filename to a profile, + If profile isn't a valid CmsProfile object or filename to a profile, a PyCMSError is raised If an error occurs while trying to obtain the name tag, a PyCMSError is raised. @@ -838,7 +858,7 @@ def getDefaultIntent(profile): If an error occurs while trying to obtain the default intent, a PyCMSError is raised. - Use this function to determine the default (and usually best optomized) + Use this function to determine the default (and usually best optimized) rendering intent for this profile. Most profiles support multiple rendering intents, but are intended mostly for one type of conversion. If you wish to use a different intent than returned, use @@ -876,7 +896,7 @@ def isIntentSupported(profile, intent, direction): input/output/proof profile as you desire. Some profiles are created specifically for one "direction", can cannot - be used for others. Some profiles can only be used for certain + be used for others. Some profiles can only be used for certain rendering intents... so it's best to either verify this before trying to create a transform with them (using this function), or catch the potential PyCMSError that will occur if they don't support the modes @@ -894,7 +914,7 @@ def isIntentSupported(profile, intent, direction): see the pyCMS documentation for details on rendering intents and what they do. - :param direction: Integer specifing if the profile is to be used for input, + :param direction: Integer specifying if the profile is to be used for input, output, or proof INPUT = 0 (or use ImageCms.DIRECTION_INPUT) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageColor.py b/Darwin/lib/python3.5/site-packages/PIL/ImageColor.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/ImageColor.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageColor.py index 98a241b..fc95e6d 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageColor.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageColor.py @@ -20,6 +20,7 @@ from PIL import Image import re + def getrgb(color): """ Convert a color string to an RGB tuple. If the string cannot be parsed, @@ -86,7 +87,8 @@ def getrgb(color): int(rgb[1] * 255 + 0.5), int(rgb[2] * 255 + 0.5) ) - m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", + color) if m: return ( int(m.group(1)), @@ -96,6 +98,7 @@ def getrgb(color): ) raise ValueError("unknown color specifier: %r" % color) + def getcolor(color, mode): """ Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageDraw.py b/Darwin/lib/python3.5/site-packages/PIL/ImageDraw.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/ImageDraw.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageDraw.py index a03d260..9e154f2 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageDraw.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageDraw.py @@ -40,13 +40,14 @@ try: except ImportError: warnings = None + ## # A simple 2D drawing interface for PIL images. #

# Application code should use the Draw factory, instead of # directly. -class ImageDraw: +class ImageDraw(object): ## # Create a drawing instance. @@ -61,7 +62,7 @@ class ImageDraw: def __init__(self, im, mode=None): im.load() if im.readonly: - im._copy() # make it writable + im._copy() # make it writeable blend = 0 if mode is None: mode = im.mode @@ -85,42 +86,22 @@ class ImageDraw: # FIXME: fix Fill2 to properly support matte for I+F images self.fontmode = "1" else: - self.fontmode = "L" # aliasing is okay for other modes + self.fontmode = "L" # aliasing is okay for other modes self.fill = 0 self.font = None - ## - # Set the default pen color. - def setink(self, ink): - # compatibility - if warnings: - warnings.warn( - "'setink' is deprecated; use keyword arguments instead", - DeprecationWarning, stacklevel=2 - ) - if isStringType(ink): - ink = ImageColor.getcolor(ink, self.mode) - if self.palette and not isinstance(ink, numbers.Number): - ink = self.palette.getcolor(ink) - self.ink = self.draw.draw_ink(ink, self.mode) - - ## - # Set the default background color. + raise Exception("setink() has been removed. " + + "Please use keyword arguments instead.") def setfill(self, onoff): - # compatibility - if warnings: - warnings.warn( - "'setfill' is deprecated; use keyword arguments instead", - DeprecationWarning, stacklevel=2 - ) - self.fill = onoff - - ## - # Set the default font. + raise Exception("setfill() has been removed. " + + "Please use keyword arguments instead.") def setfont(self, font): + if warnings: + warnings.warn("setfont() is deprecated. " + + "Please set the attribute directly instead.") # compatibility self.font = font @@ -255,7 +236,20 @@ class ImageDraw: ## # Draw text. + def _multiline_check(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return text.split(split_character) + def text(self, xy, text, fill=None, font=None, anchor=None): + if self._multiline_check(text): + return self.multiline_text(xy, text, fill, font, anchor) + ink, fill = self._getink(fill) if font is None: font = self.getfont() @@ -272,14 +266,52 @@ class ImageDraw: mask = font.getmask(text) self.draw.draw_bitmap(xy, mask, ink) + def multiline_text(self, xy, text, fill=None, font=None, anchor=None, + spacing=0, align="left"): + widths, heights = [], [] + max_width = 0 + lines = self._multiline_split(text) + for line in lines: + line_width, line_height = self.textsize(line, font) + widths.append(line_width) + max_width = max(max_width, line_width) + heights.append(line_height) + left, top = xy + for idx, line in enumerate(lines): + if align == "left": + pass # left = x + elif align == "center": + left += (max_width - widths[idx]) / 2.0 + elif align == "right": + left += (max_width - widths[idx]) + else: + assert False, 'align must be "left", "center" or "right"' + self.text((left, top), line, fill, font, anchor) + top += heights[idx] + spacing + left = xy[0] + ## # Get the size of a given string, in pixels. 
def textsize(self, text, font=None): + if self._multiline_check(text): + return self.multiline_textsize(text, font) + if font is None: font = self.getfont() return font.getsize(text) + def multiline_textsize(self, text, font=None, spacing=0): + max_width = 0 + height = 0 + lines = self._multiline_split(text) + for line in lines: + line_width, line_height = self.textsize(line, font) + height += line_height + spacing + max_width = max(max_width, line_width) + return max_width, height + + ## # A simple 2D drawing interface for PIL images. # @@ -299,9 +331,10 @@ def Draw(im, mode=None): # experimental access to the outline API try: Outline = Image.core.outline -except: +except AttributeError: Outline = None + ## # (Experimental) A more advanced 2D drawing interface for PIL images, # based on the WCK interface. @@ -325,6 +358,7 @@ def getdraw(im=None, hints=None): im = handler.Draw(im) return im, handler + ## # (experimental) Fills a bounded region with a given color. # @@ -344,10 +378,10 @@ def floodfill(image, xy, value, border=None): try: background = pixel[x, y] if background == value: - return # seed point already has fill color + return # seed point already has fill color pixel[x, y] = value except IndexError: - return # seed point outside image + return # seed point outside image edge = [(x, y)] if border is None: while edge: diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageDraw2.py b/Darwin/lib/python3.5/site-packages/PIL/ImageDraw2.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/PIL/ImageDraw2.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageDraw2.py index 146cc8b..62ee116 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageDraw2.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageDraw2.py @@ -18,22 +18,26 @@ from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePath -class Pen: + +class Pen(object): def __init__(self, color, width=1, opacity=255): self.color = ImageColor.getrgb(color) self.width = width -class Brush: + +class Brush(object): def __init__(self, color, opacity=255): self.color = ImageColor.getrgb(color) -class Font: + +class Font(object): def __init__(self, color, file, size=12): # FIXME: add support for bitmap fonts self.color = ImageColor.getrgb(color) self.font = ImageFont.truetype(file, size) -class Draw: + +class Draw(object): def __init__(self, image, size=None, color=None): if not hasattr(image, "im"): @@ -47,7 +51,8 @@ class Draw: def render(self, op, xy, pen, brush=None): # handle color arguments - outline = fill = None; width = 1 + outline = fill = None + width = 1 if isinstance(pen, Pen): outline = pen.color width = pen.width diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageEnhance.py b/Darwin/lib/python3.5/site-packages/PIL/ImageEnhance.py similarity index 82% rename from Darwin/lib/python3.4/site-packages/PIL/ImageEnhance.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageEnhance.py index f802dc1..56b5c01 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageEnhance.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageEnhance.py @@ -21,7 +21,7 @@ from PIL import Image, ImageFilter, ImageStat -class _Enhance: +class _Enhance(object): def enhance(self, factor): """ @@ -47,7 +47,11 @@ class Color(_Enhance): """ def __init__(self, image): self.image = image - self.degenerate = image.convert("L").convert(image.mode) + self.intermediate_mode = 'L' + if 'A' in image.getbands(): + self.intermediate_mode = 'LA' + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) class 
Contrast(_Enhance): @@ -62,11 +66,14 @@ class Contrast(_Enhance): mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + class Brightness(_Enhance): """Adjust image brightness. - This class can be used to control the brighntess of an image. An + This class can be used to control the brightness of an image. An enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the original image. """ @@ -74,6 +81,9 @@ class Brightness(_Enhance): self.image = image self.degenerate = Image.new(image.mode, image.size, 0) + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + class Sharpness(_Enhance): """Adjust image sharpness. @@ -85,3 +95,6 @@ class Sharpness(_Enhance): def __init__(self, image): self.image = image self.degenerate = image.filter(ImageFilter.SMOOTH) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageFile.py b/Darwin/lib/python3.5/site-packages/PIL/ImageFile.py similarity index 87% rename from Darwin/lib/python3.4/site-packages/PIL/ImageFile.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageFile.py index adb27f2..f26a726 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageFile.py @@ -29,8 +29,13 @@ from PIL import Image from PIL._util import isPath -import traceback, os, sys import io +import logging +import os +import sys +import struct + +logger = logging.getLogger(__name__) MAXBLOCK = 65536 @@ -46,6 +51,7 @@ ERRORS = { -9: "out of memory error" } + def raise_ioerror(error): try: message = Image.core.getcodecstatus(error) @@ -55,6 +61,7 @@ def raise_ioerror(error): message = "decoder error %d" % error raise IOError(message + " when reading image file") + # # -------------------------------------------------------------------- # Helpers @@ -63,6 +70,7 @@ def _tilesort(t): # sort on offset return t[2] + # # -------------------------------------------------------------------- # ImageFile base class @@ -74,7 +82,7 @@ class ImageFile(Image.Image): Image.Image.__init__(self) self.tile = None - self.readonly = 1 # until we know better + self.readonly = 1 # until we know better self.decoderconfig = () self.decodermaxblock = MAXBLOCK @@ -90,21 +98,12 @@ class ImageFile(Image.Image): try: self._open() - except IndexError as v: # end of data - if Image.DEBUG > 1: - traceback.print_exc() - raise SyntaxError(v) - except TypeError as v: # end of data (ord) - if Image.DEBUG > 1: - traceback.print_exc() - raise SyntaxError(v) - except KeyError as v: # unsupported mode - if Image.DEBUG > 1: - traceback.print_exc() - raise SyntaxError(v) - except EOFError as v: # got header but not the first frame - if Image.DEBUG > 1: - traceback.print_exc() + except (IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error) as v: + logger.exception("%s") raise SyntaxError(v) if not self.mode or self.size[0] <= 0: @@ -133,11 +132,27 @@ class ImageFile(Image.Image): return pixel self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. 
+ use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') readonly = 0 - if self.filename and len(self.tile) == 1 and not hasattr(sys, 'pypy_version_info'): - # As of pypy 2.1.0, memory mapping was failing here. + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: # try memory mapping d, e, o, a = self.tile[0] if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES: @@ -152,10 +167,10 @@ class ImageFile(Image.Image): else: # use mmap, if possible import mmap - file = open(self.filename, "r+") + fp = open(self.filename, "r+") size = os.path.getsize(self.filename) # FIXME: on Unix, use PROT_READ etc - self.map = mmap.mmap(file.fileno(), size) + self.map = mmap.mmap(fp.fileno(), size) self.im = Image.core.map_buffer( self.map, self.size, d, e, o, a ) @@ -165,19 +180,7 @@ class ImageFile(Image.Image): self.load_prepare() - # look for read/seek overrides - try: - read = self.load_read - except AttributeError: - read = self.fp.read - - try: - seek = self.load_seek - except AttributeError: - seek = self.fp.seek - if not self.map: - # sort tiles in file order self.tile.sort(key=_tilesort) @@ -195,41 +198,43 @@ class ImageFile(Image.Image): except ValueError: continue b = prefix - t = len(b) while True: try: s = read(self.decodermaxblock) - except IndexError as ie: # truncated png/gif + except (IndexError, struct.error): # truncated png/gif if LOAD_TRUNCATED_IMAGES: break else: - raise IndexError(ie) + raise IOError("image file is truncated") - if not s and not d.handles_eof: # truncated jpeg + if not s and not d.handles_eof: # truncated jpeg self.tile = [] # JpegDecode needs to clean things up here either way - # If we don't destroy the decompressor, we have a memory leak. + # If we don't destroy the decompressor, + # we have a memory leak. d.cleanup() if LOAD_TRUNCATED_IMAGES: break else: - raise IOError("image file is truncated (%d bytes not processed)" % len(b)) + raise IOError("image file is truncated " + "(%d bytes not processed)" % len(b)) b = b + s n, e = d.decode(b) if n < 0: break b = b[n:] - t = t + n + # Need to cleanup here to prevent leaks in PyPy + d.cleanup() self.tile = [] self.readonly = readonly - self.fp = None # might be shared + self.fp = None # might be shared - if not self.map and (not LOAD_TRUNCATED_IMAGES or t == 0) and e < 0: + if not self.map and not LOAD_TRUNCATED_IMAGES and e < 0: # still raised if decoder fails to return anything raise_ioerror(e) @@ -295,7 +300,7 @@ class StubImageFile(ImageFile): ) -class Parser: +class Parser(object): """ Incremental image parser. This class implements the standard feed/close consumer interface. 
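# A minimal usage sketch of the truncated-image handling in the ImageFile.load()
# hunk above. It assumes only the module-level LOAD_TRUNCATED_IMAGES flag that the
# hunk itself references; the file path below is hypothetical, not from this patch.
#
#     from PIL import Image, ImageFile
#
#     ImageFile.LOAD_TRUNCATED_IMAGES = True   # opt in: decoding stops at the
#                                               # truncation point instead of raising
#                                               # IOError("image file is truncated ...")
#     im = Image.open("partial_download.jpg")   # hypothetical path
#     im.load()                                 # loads whatever data is present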
@@ -306,6 +311,7 @@ class Parser: image = None data = None decoder = None + offset = 0 finished = 0 def reset(self): @@ -374,10 +380,10 @@ class Parser: fp = io.BytesIO(self.data) im = Image.open(fp) finally: - fp.close() # explicitly close the virtual file + fp.close() # explicitly close the virtual file except IOError: # traceback.print_exc() - pass # not enough data + pass # not enough data else: flag = hasattr(im, "load_seek") or hasattr(im, "load_read") if flag or len(im.tile) != 1: @@ -427,9 +433,10 @@ class Parser: self.image = Image.open(fp) finally: self.image.load() - fp.close() # explicitly close the virtual file + fp.close() # explicitly close the virtual file return self.image + # -------------------------------------------------------------------- def _save(im, fp, tile, bufsize=0): @@ -446,10 +453,13 @@ def _save(im, fp, tile, bufsize=0): im.encoderconfig = () tile.sort(key=_tilesort) # FIXME: make MAXBLOCK a configuration parameter - # It would be great if we could have the encoder specifiy what it needs + # It would be great if we could have the encoder specify what it needs # But, it would need at least the image size in most cases. RawEncode is # a tricky case. - bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return try: fh = fp.fileno() fp.flush() @@ -467,6 +477,7 @@ def _save(im, fp, tile, bufsize=0): break if s < 0: raise IOError("encoder error %d when writing image file" % s) + e.cleanup() else: # slight speedup: compress to real file object for e, b, o, a in tile: @@ -477,9 +488,9 @@ def _save(im, fp, tile, bufsize=0): s = e.encode_to_file(fh, bufsize) if s < 0: raise IOError("encoder error %d when writing image file" % s) - try: + e.cleanup() + if hasattr(fp, "flush"): fp.flush() - except: pass def _safe_read(fp, size): diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageFilter.py b/Darwin/lib/python3.5/site-packages/PIL/ImageFilter.py similarity index 96% rename from Darwin/lib/python3.4/site-packages/PIL/ImageFilter.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageFilter.py index ac8fe9f..baa168a 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageFilter.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageFilter.py @@ -15,7 +15,7 @@ # See the README file for information on usage and redistribution. # -from functools import reduce +import functools class Filter(object): @@ -43,7 +43,7 @@ class Kernel(Filter): def __init__(self, size, kernel, scale=None, offset=0): if scale is None: # default scale is sum of kernel - scale = reduce(lambda a,b: a+b, kernel) + scale = functools.reduce(lambda a, b: a+b, kernel) if size[0] * size[1] != len(kernel): raise ValueError("not enough coefficients in kernel") self.filterargs = size, scale, offset, kernel @@ -162,7 +162,13 @@ class UnsharpMask(Filter): See Wikipedia's entry on `digital unsharp masking`_ for an explanation of the parameters. + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + """ name = "UnsharpMask" diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageFont.py b/Darwin/lib/python3.5/site-packages/PIL/ImageFont.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/ImageFont.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageFont.py index 18d09b8..c3ec579 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageFont.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageFont.py @@ -25,18 +25,18 @@ # See the README file for information on usage and redistribution. # -from __future__ import print_function - from PIL import Image from PIL._util import isDirectory, isPath -import os, sys +import os +import sys try: import warnings except ImportError: warnings = None -class _imagingft_not_installed: + +class _imagingft_not_installed(object): # module placeholder def __getattr__(self, id): raise ImportError("The _imagingft C module is not installed") @@ -62,12 +62,12 @@ except ImportError: # -------------------------------------------------------------------- -class ImageFont: +class ImageFont(object): "PIL font wrapper" def _load_pilfont(self, filename): - file = open(filename, "rb") + fp = open(filename, "rb") for ext in (".png", ".gif", ".pbm"): try: @@ -83,15 +83,15 @@ class ImageFont: self.file = fullname - return self._load_pilfont_data(file, image) + return self._load_pilfont_data(fp, image) def _load_pilfont_data(self, file, image): # read PILfont header if file.readline() != b"PILfont\n": raise SyntaxError("Not a PILfont file") - d = file.readline().split(b";") - self.info = [] # FIXME: should be a dictionary + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary while True: s = file.readline() if not s or s == b"DATA\n": @@ -113,25 +113,28 @@ class ImageFont: self.getsize = self.font.getsize self.getmask = self.font.getmask + ## # Wrapper for FreeType fonts. Application code should use the # truetype factory function to create font objects. 
-class FreeTypeFont: +class FreeTypeFont(object): "FreeType font wrapper (requires _imagingft service)" - def __init__(self, font=None, size=10, index=0, encoding="", file=None): + def __init__(self, font=None, size=10, index=0, encoding=""): # FIXME: use service provider instead - if file: - if warnings: - warnings.warn('file parameter deprecated, please use font parameter instead.', DeprecationWarning) - font = file + + self.path = font + self.size = size + self.index = index + self.encoding = encoding if isPath(font): self.font = core.getfont(font, size, index, encoding) else: self.font_bytes = font.read() - self.font = core.getfont("", size, index, encoding, self.font_bytes) + self.font = core.getfont( + "", size, index, encoding, self.font_bytes) def getname(self): return self.font.family, self.font.style @@ -140,7 +143,8 @@ class FreeTypeFont: return self.font.ascent, self.font.descent def getsize(self, text): - return self.font.getsize(text)[0] + size, offset = self.font.getsize(text) + return (size[0] + offset[0], size[1] + offset[1]) def getoffset(self, text): return self.font.getsize(text)[1] @@ -151,9 +155,25 @@ class FreeTypeFont: def getmask2(self, text, mode="", fill=Image.core.fill): size, offset = self.font.getsize(text) im = fill("L", size, 0) - self.font.render(text, im.id, mode=="1") + self.font.render(text, im.id, mode == "1") return im, offset + def font_variant(self, font=None, size=None, index=None, encoding=None): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont(font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else + encoding) + ## # Wrapper that creates a transposed font from any existing font # object. @@ -163,12 +183,13 @@ class FreeTypeFont: # be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, # Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. -class TransposedFont: + +class TransposedFont(object): "Wrapper for writing rotated or mirrored text" def __init__(self, font, orientation=None): self.font = font - self.orientation = orientation # any 'transpose' argument, or None + self.orientation = orientation # any 'transpose' argument, or None def getsize(self, text): w, h = self.font.getsize(text) @@ -197,7 +218,7 @@ def load(filename): return f -def truetype(font=None, size=10, index=0, encoding="", filename=None): +def truetype(font=None, size=10, index=0, encoding=""): """ Load a TrueType or OpenType font file, and create a font object. This function loads a font object from the given file, and creates @@ -205,7 +226,7 @@ def truetype(font=None, size=10, index=0, encoding="", filename=None): This function requires the _imagingft service. - :param filename: A truetype font file. Under Windows, if the file + :param font: A truetype font file. Under Windows, if the file is not found in this filename, the loader also looks in Windows :file:`fonts/` directory. :param size: The requested size, in points. @@ -219,22 +240,48 @@ def truetype(font=None, size=10, index=0, encoding="", filename=None): :exception IOError: If the file could not be read. 
""" - if filename: - if warnings: - warnings.warn('filename parameter deprecated, please use font parameter instead.', DeprecationWarning) - font = filename - try: return FreeTypeFont(font, size, index, encoding) except IOError: + ttf_filename = os.path.basename(font) + + dirs = [] if sys.platform == "win32": # check the windows font repository # NOTE: must use uppercase WINDIR, to work around bugs in # 1.5.2's os.environ.get() windir = os.environ.get("WINDIR") if windir: - filename = os.path.join(windir, "fonts", font) - return FreeTypeFont(filename, size, index, encoding) + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ('linux', 'linux2'): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = '/usr/share' + dirs += [os.path.join(lindir, "fonts") + for lindir in lindirs.split(":")] + elif sys.platform == 'darwin': + dirs += ['/Library/Fonts', '/System/Library/Fonts', + os.path.expanduser('~/Library/Fonts')] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + return FreeTypeFont(fontpath, size, index, encoding) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == '.ttf': + return FreeTypeFont(fontpath, size, index, encoding) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return FreeTypeFont(first_font_with_a_different_extension, size, + index, encoding) raise @@ -247,15 +294,15 @@ def load_path(filename): :return: A font object. :exception IOError: If the file could not be read. 
""" - for dir in sys.path: - if isDirectory(dir): + for directory in sys.path: + if isDirectory(directory): if not isinstance(filename, str): if bytes is str: filename = filename.encode("utf-8") else: filename = filename.decode("utf-8") try: - return load(os.path.join(dir, filename)) + return load(os.path.join(directory, filename)) except IOError: pass raise IOError("cannot find font file") @@ -272,8 +319,8 @@ def load_default(): import base64 f = ImageFont() f._load_pilfont_data( - # courB08 - BytesIO(base64.decodestring(b''' + # courB08 + BytesIO(base64.decodestring(b''' UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA @@ -392,15 +439,4 @@ w7IkEbzhVQAAAABJRU5ErkJggg== ''')))) return f - -if __name__ == "__main__": - # create font data chunk for embedding - import base64, os, sys - font = "../Tests/images/courB08" - print(" f._load_pilfont_data(") - print(" # %s" % os.path.basename(font)) - print(" BytesIO(base64.decodestring(b'''") - base64.encode(open(font + ".pil", "rb"), sys.stdout) - print("''')), Image.open(BytesIO(base64.decodestring(b'''") - base64.encode(open(font + ".pbm", "rb"), sys.stdout) - print("'''))))") +# End of file diff --git a/Darwin/lib/python3.5/site-packages/PIL/ImageGrab.py b/Darwin/lib/python3.5/site-packages/PIL/ImageGrab.py new file mode 100644 index 0000000..c521891 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageGrab.py @@ -0,0 +1,67 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber (windows only) +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is OS X and Windows only") + +if sys.platform == "win32": + try: + # built-in driver (1.1.3 and later) + grabber = Image.core.grabscreen + except AttributeError: + # stand-alone driver (pil plus) + import _grabscreen + grabber = _grabscreen.grab +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + f, file = tempfile.mkstemp('.png') + os.close(f) + subprocess.call(['screencapture', '-x', file]) + im = Image.open(file) + im.load() + os.unlink(file) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origo in lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + raise NotImplementedError("Method is not implemented on OS X") + debug = 0 # temporary interface + data = Image.core.grabclipboard(debug) + if isinstance(data, bytes): + from PIL import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageMath.py b/Darwin/lib/python3.5/site-packages/PIL/ImageMath.py similarity index 94% rename from Darwin/lib/python3.4/site-packages/PIL/ImageMath.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageMath.py index adfcc4f..f92d500 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageMath.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageMath.py @@ -26,10 +26,12 @@ except ImportError: VERBOSE = 0 + def _isconstant(v): return isinstance(v, int) or isinstance(v, float) -class _Operand: + +class _Operand(object): # wraps an image operand, providing standard operators def __init__(self, im): @@ -68,20 +70,25 @@ class _Operand: im2 = self.__fixup(im2) if im1.mode != im2.mode: # convert both arguments to floating point - if im1.mode != "F": im1 = im1.convert("F") - if im2.mode != "F": im2 = im2.convert("F") + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") if im1.mode != im2.mode: raise ValueError("mode mismatch") if im1.size != im2.size: # crop both arguments to a common size size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1])) - if im1.size != size: im1 = im1.crop((0, 0) + size) - if im2.size != size: im2 = im2.crop((0, 0) + size) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) out = Image.new(mode or im1.mode, size, None) else: out = Image.new(mode or im1.mode, im1.size, None) - im1.load(); im2.load() + im1.load() + im2.load() try: op = getattr(_imagingmath, op+"_"+im1.mode) except AttributeError: @@ -101,34 +108,47 @@ class _Operand: def __abs__(self): return self.apply("abs", self) + def __pos__(self): return self + def __neg__(self): return self.apply("neg", self) # binary operators def __add__(self, other): return self.apply("add", self, other) + def __radd__(self, other): return self.apply("add", other, self) + def __sub__(self, other): return self.apply("sub", self, other) + def __rsub__(self, other): return self.apply("sub", other, self) + def __mul__(self, other): return self.apply("mul", self, other) + def __rmul__(self, other): return self.apply("mul", other, self) + def __truediv__(self, other): return self.apply("div", self, other) + def __rtruediv__(self, other): return self.apply("div", other, 
self) + def __mod__(self, other): return self.apply("mod", self, other) + def __rmod__(self, other): return self.apply("mod", other, self) + def __pow__(self, other): return self.apply("pow", self, other) + def __rpow__(self, other): return self.apply("pow", other, self) @@ -142,54 +162,77 @@ class _Operand: # bitwise def __invert__(self): return self.apply("invert", self) + def __and__(self, other): return self.apply("and", self, other) + def __rand__(self, other): return self.apply("and", other, self) + def __or__(self, other): return self.apply("or", self, other) + def __ror__(self, other): return self.apply("or", other, self) + def __xor__(self, other): return self.apply("xor", self, other) + def __rxor__(self, other): return self.apply("xor", other, self) + def __lshift__(self, other): return self.apply("lshift", self, other) + def __rshift__(self, other): return self.apply("rshift", self, other) # logical def __eq__(self, other): return self.apply("eq", self, other) + def __ne__(self, other): return self.apply("ne", self, other) + def __lt__(self, other): return self.apply("lt", self, other) + def __le__(self, other): return self.apply("le", self, other) + def __gt__(self, other): return self.apply("gt", self, other) + def __ge__(self, other): return self.apply("ge", self, other) + # conversions def imagemath_int(self): return _Operand(self.im.convert("I")) + + def imagemath_float(self): return _Operand(self.im.convert("F")) + # logical def imagemath_equal(self, other): return self.apply("eq", self, other, mode="I") + + def imagemath_notequal(self, other): return self.apply("ne", self, other, mode="I") + def imagemath_min(self, other): return self.apply("min", self, other) + + def imagemath_max(self, other): return self.apply("max", self, other) + def imagemath_convert(self, mode): return _Operand(self.im.convert(mode)) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageMode.py b/Darwin/lib/python3.5/site-packages/PIL/ImageMode.py similarity index 97% rename from Darwin/lib/python3.4/site-packages/PIL/ImageMode.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageMode.py index c3931b5..d896001 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageMode.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageMode.py @@ -16,10 +16,11 @@ # mode descriptor cache _modes = {} + ## # Wrapper for mode strings. -class ModeDescriptor: +class ModeDescriptor(object): def __init__(self, mode, bands, basemode, basetype): self.mode = mode @@ -30,6 +31,7 @@ class ModeDescriptor: def __str__(self): return self.mode + ## # Gets a mode descriptor for the given mode. diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageMorph.py b/Darwin/lib/python3.5/site-packages/PIL/ImageMorph.py similarity index 90% rename from Darwin/lib/python3.4/site-packages/PIL/ImageMorph.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageMorph.py index b24dd13..44a7e8c 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageMorph.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageMorph.py @@ -12,22 +12,22 @@ import re LUT_SIZE = 1 << 9 -class LutBuilder: +class LutBuilder(object): """A class for building a MorphLut from a descriptive language - The input patterns is a list of a strings sequences like these: + The input patterns is a list of a strings sequences like these:: - 4:(... - .1. - 111)->1 + 4:(... + .1. + 111)->1 (whitespaces including linebreaks are ignored). The option 4 describes a series of symmetry operations (in this case a 4-rotation), the pattern is described by: - . 
or X - Ignore - 1 - Pixel is on - 0 - Pixel is off + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off The result of the operation is described after "->" string. @@ -35,15 +35,16 @@ class LutBuilder: returned if no other match is found. Operations: - 4 - 4 way rotation - N - Negate - 1 - Dummy op for no other operation (an op must always be given) - M - Mirroring - Example: + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring - lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) - lut = lb.build_lut() + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() """ def __init__(self, patterns=None, op_name=None): @@ -175,7 +176,7 @@ class LutBuilder: return self.lut -class MorphOp: +class MorphOp(object): """A class for binary morphological operators""" def __init__(self, @@ -197,6 +198,9 @@ class MorphOp: if self.lut is None: raise Exception('No operator loaded') + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return outimage = Image.new(image.mode, image.size, None) count = _imagingmorph.apply( bytes(self.lut), image.im.id, outimage.im.id) @@ -211,6 +215,9 @@ class MorphOp: if self.lut is None: raise Exception('No operator loaded') + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return return _imagingmorph.match(bytes(self.lut), image.im.id) def get_on_pixels(self, image): @@ -219,6 +226,9 @@ class MorphOp: Returns a list of tuples of (x,y) coordinates of all matching pixels.""" + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return return _imagingmorph.get_on_pixels(image.im.id) def load_lut(self, filename): diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageOps.py b/Darwin/lib/python3.5/site-packages/PIL/ImageOps.py similarity index 93% rename from Darwin/lib/python3.4/site-packages/PIL/ImageOps.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageOps.py index 64c35cc..f317645 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageOps.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageOps.py @@ -20,7 +20,8 @@ from PIL import Image from PIL._util import isStringType import operator -from functools import reduce +import functools + # # helpers @@ -35,12 +36,14 @@ def _border(border): left = top = right = bottom = border return left, top, right, bottom + def _color(color, mode): if isStringType(color): from PIL import ImageColor color = ImageColor.getcolor(color, mode) return color + def _lut(image, lut): if image.mode == "P": # FIXME: apply to lookup table, not image data @@ -147,7 +150,9 @@ def colorize(image, black, white): assert image.mode == "L" black = _color(black, "RGB") white = _color(white, "RGB") - red = []; green = []; blue = [] + red = [] + green = [] + blue = [] for i in range(256): red.append(black[0]+i*(white[0]-black[0])//255) green.append(black[1]+i*(white[1]-black[1])//255) @@ -208,7 +213,7 @@ def equalize(image, mask=None): if len(histo) <= 1: lut.extend(list(range(256))) else: - step = (reduce(operator.add, histo) - histo[-1]) // 255 + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 if not step: lut.extend(list(range(256))) else: @@ -228,7 +233,6 @@ def expand(image, border=0, fill=0): :param fill: Pixel fill value (a color value). Default is 0 (black). :return: An image. 
""" - "Add border to image" left, top, right, bottom = _border(border) width = left + image.size[0] + right height = top + image.size[1] + bottom @@ -273,7 +277,7 @@ def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): centering = [centering[0], centering[1]] if centering[0] > 1.0 or centering[0] < 0.0: - centering [0] = 0.50 + centering[0] = 0.50 if centering[1] > 1.0 or centering[1] < 0.0: centering[1] = 0.50 @@ -392,7 +396,7 @@ def solarize(image, threshold=128): """ Invert all pixel values above a threshold. - :param image: The image to posterize. + :param image: The image to solarize. :param threshold: All pixels above this greyscale level are inverted. :return: An image. """ @@ -404,6 +408,7 @@ def solarize(image, threshold=128): lut.append(255-i) return _lut(image, lut) + # -------------------------------------------------------------------- # PIL USM components, from Kevin Cazabon. @@ -419,6 +424,7 @@ def gaussian_blur(im, radius=None): gblur = gaussian_blur + def unsharp_mask(im, radius=None, percent=None, threshold=None): """ PIL_usm.usm(im, [radius, percent, threshold])""" @@ -434,3 +440,22 @@ def unsharp_mask(im, radius=None, percent=None, threshold=None): return im.im.unsharp_mask(radius, percent, threshold) usm = unsharp_mask + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImagePalette.py b/Darwin/lib/python3.5/site-packages/PIL/ImagePalette.py similarity index 77% rename from Darwin/lib/python3.4/site-packages/PIL/ImagePalette.py rename to Darwin/lib/python3.5/site-packages/PIL/ImagePalette.py index 5988682..fdc5a46 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImagePalette.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImagePalette.py @@ -17,22 +17,46 @@ # import array -from PIL import Image, ImageColor +from PIL import ImageColor -class ImagePalette: - "Color palette for palette mapped images" +class ImagePalette(object): + """ + Color palette for palette mapped images - def __init__(self, mode = "RGB", palette = None, size = 0): + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. 
+ """ + + def __init__(self, mode="RGB", palette=None, size=0): self.mode = mode - self.rawmode = None # if set, palette contains raw data + self.rawmode = None # if set, palette contains raw data self.palette = palette or list(range(256))*len(self.mode) self.colors = {} self.dirty = None - if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or (size != 0 and size != len(self.palette))): raise ValueError("wrong palette size") + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + def getdata(self): """ Get palette contents in format suitable # for the low-level @@ -55,7 +79,6 @@ class ImagePalette: return self.palette arr = array.array("B", self.palette) if hasattr(arr, 'tobytes'): - #py3k has a tobytes, tostring is deprecated. return arr.tobytes() return arr.tostring() @@ -109,6 +132,7 @@ class ImagePalette: fp.write("\n") fp.close() + # -------------------------------------------------------------------- # Internal @@ -119,32 +143,33 @@ def raw(rawmode, data): palette.dirty = 1 return palette + # -------------------------------------------------------------------- # Factories -def _make_linear_lut(black, white): +def make_linear_lut(black, white): lut = [] if black == 0: for i in range(256): lut.append(white*i//255) else: - raise NotImplementedError # FIXME + raise NotImplementedError # FIXME return lut -def _make_gamma_lut(exp, mode="RGB"): + +def make_gamma_lut(exp): lut = [] for i in range(256): lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) return lut -def new(mode, data): - return Image.core.new_palette(mode, data) def negative(mode="RGB"): palette = list(range(256)) palette.reverse() return ImagePalette(mode, palette * len(mode)) + def random(mode="RGB"): from random import randint palette = [] @@ -152,16 +177,19 @@ def random(mode="RGB"): palette.append(randint(0, 255)) return ImagePalette(mode, palette) + def sepia(white="#fff0c0"): r, g, b = ImageColor.getrgb(white) - r = _make_linear_lut(0, r) - g = _make_linear_lut(0, g) - b = _make_linear_lut(0, b) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) return ImagePalette("RGB", r + g + b) + def wedge(mode="RGB"): return ImagePalette(mode, list(range(256)) * len(mode)) + def load(filename): # FIXME: supports GIMP gradients only @@ -177,8 +205,8 @@ def load(filename): p = GimpPaletteFile.GimpPaletteFile(fp) lut = p.getpalette() except (SyntaxError, ValueError): - #import traceback - #traceback.print_exc() + # import traceback + # traceback.print_exc() pass if not lut: @@ -188,8 +216,8 @@ def load(filename): p = GimpGradientFile.GimpGradientFile(fp) lut = p.getpalette() except (SyntaxError, ValueError): - #import traceback - #traceback.print_exc() + # import traceback + # traceback.print_exc() pass if not lut: @@ -199,11 +227,11 @@ def load(filename): p = PaletteFile.PaletteFile(fp) lut = p.getpalette() except (SyntaxError, ValueError): - import traceback - traceback.print_exc() + # import traceback + # traceback.print_exc() pass if not lut: raise IOError("cannot load palette") - return lut # data, rawmode + return lut # data, rawmode diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImagePath.py b/Darwin/lib/python3.5/site-packages/PIL/ImagePath.py similarity index 98% rename from Darwin/lib/python3.4/site-packages/PIL/ImagePath.py rename to 
Darwin/lib/python3.5/site-packages/PIL/ImagePath.py index 656d5ce..f23d014 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImagePath.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImagePath.py @@ -20,7 +20,7 @@ from PIL import Image # the Python class below is overridden by the C implementation. -class Path: +class Path(object): def __init__(self, xy): pass diff --git a/Darwin/lib/python3.5/site-packages/PIL/ImageQt.py b/Darwin/lib/python3.5/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000..b37177a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageQt.py @@ -0,0 +1,196 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import PIL +from PIL._util import isPath +from io import BytesIO + +qt_is_installed = True +qt_version = None +try: + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + qt_version = '5' +except ImportError: + try: + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + qt_version = '4' + except ImportError: + try: + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + qt_version = 'side' + except ImportError: + qt_is_installed = False + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. + return (qRgba(r, g, b, a) & 0xffffffff) + + +# :param im A PIL Image object, or a file name +# (given either as Python string or a PyQt string object) + +def fromqimage(im): + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return PIL.Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? 
+ # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return PIL.Image.open(bytes_io) + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding) + + return b''.join(new_data) + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + if str is bytes: + im = unicode(im.toUtf8(), "utf-8") + else: + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = PIL.Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = PIL.Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + # must keep a reference, or Qt will crash! + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + +## +# An PIL image wrapper for Qt. This is a subclass of PyQt's QImage +# class. +# +# @param im A PIL Image object, or a file name (given either as Python +# string or a PyQt string object). + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + im_data = _toqclass_helper(im) + QImage.__init__(self, + im_data['data'], im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. For now using a dumb approach. 
+ # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageSequence.py b/Darwin/lib/python3.5/site-packages/PIL/ImageSequence.py similarity index 92% rename from Darwin/lib/python3.4/site-packages/PIL/ImageSequence.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageSequence.py index 513c924..256bcbe 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageSequence.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageSequence.py @@ -15,7 +15,8 @@ ## -class Iterator: + +class Iterator(object): """ This class implements an iterator object that can be used to loop over an image sequence. @@ -38,4 +39,4 @@ class Iterator: self.im.seek(ix) return self.im except EOFError: - raise IndexError # end of sequence + raise IndexError # end of sequence diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageShow.py b/Darwin/lib/python3.5/site-packages/PIL/ImageShow.py similarity index 94% rename from Darwin/lib/python3.4/site-packages/PIL/ImageShow.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageShow.py index 40fe629..51417c3 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageShow.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageShow.py @@ -15,7 +15,8 @@ from __future__ import print_function from PIL import Image -import os, sys +import os +import sys if sys.version_info >= (3, 3): from shlex import quote @@ -24,17 +25,19 @@ else: _viewers = [] + def register(viewer, order=1): try: if issubclass(viewer, Viewer): viewer = viewer() except TypeError: - pass # raised if viewer wasn't a class + pass # raised if viewer wasn't a class if order > 0: _viewers.append(viewer) elif order < 0: _viewers.insert(0, viewer) + ## # Displays a given image. # @@ -49,10 +52,11 @@ def show(image, title=None, **options): return 1 return 0 + ## # Base class for viewers. 
-class Viewer: +class Viewer(object): # main api @@ -102,6 +106,7 @@ if sys.platform == "win32": class WindowsViewer(Viewer): format = "BMP" + def get_command(self, file, **options): return ('start "Pillow" /WAIT "%s" ' '&& ping -n 2 127.0.0.1 >NUL ' @@ -113,11 +118,13 @@ elif sys.platform == "darwin": class MacViewer(Viewer): format = "BMP" + def get_command(self, file, **options): # on darwin open returns immediately resulting in the temp # file removal while app is opening command = "open -a /Applications/Preview.app" - command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), quote(file)) + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) return command register(MacViewer) @@ -140,7 +147,8 @@ else: class UnixViewer(Viewer): def show_file(self, file, **options): command, executable = self.get_command_ex(file, **options) - command = "(%s %s; rm -f %s)&" % (command, quote(file), quote(file)) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) os.system(command) return 1 diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageStat.py b/Darwin/lib/python3.5/site-packages/PIL/ImageStat.py similarity index 87% rename from Darwin/lib/python3.4/site-packages/PIL/ImageStat.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageStat.py index d84e2cb..f3c138b 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageStat.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageStat.py @@ -21,20 +21,21 @@ # See the README file for information on usage and redistribution. # -import operator, math -from functools import reduce +import math +import operator +import functools -class Stat: +class Stat(object): - def __init__(self, image_or_list, mask = None): + def __init__(self, image_or_list, mask=None): try: if mask: self.h = image_or_list.histogram(mask) else: self.h = image_or_list.histogram() except AttributeError: - self.h = image_or_list # assume it to be a histogram list + self.h = image_or_list # assume it to be a histogram list if not isinstance(self.h, list): raise TypeError("first argument must be image or list") self.bands = list(range(len(self.h) // 256)) @@ -58,7 +59,7 @@ class Stat: if histogram[i]: n = min(n, i) x = max(x, i) - return n, x # returns (255, 0) if there's no data in the histogram + return n, x # returns (255, 0) if there's no data in the histogram v = [] for i in range(0, len(self.h), 256): @@ -70,7 +71,7 @@ class Stat: v = [] for i in range(0, len(self.h), 256): - v.append(reduce(operator.add, self.h[i:i+256])) + v.append(functools.reduce(operator.add, self.h[i:i+256])) return v def _getsum(self): @@ -78,10 +79,10 @@ class Stat: v = [] for i in range(0, len(self.h), 256): - sum = 0.0 + layerSum = 0.0 for j in range(256): - sum += j * self.h[i + j] - v.append(sum) + layerSum += j * self.h[i + j] + v.append(layerSum) return v def _getsum2(self): @@ -126,7 +127,6 @@ class Stat: v.append(math.sqrt(self.sum2[i] / self.count[i])) return v - def _getvar(self): "Get variance for each layer" @@ -144,4 +144,4 @@ class Stat: v.append(math.sqrt(self.var[i])) return v -Global = Stat # compatibility +Global = Stat # compatibility diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageTk.py b/Darwin/lib/python3.5/site-packages/PIL/ImageTk.py similarity index 94% rename from Darwin/lib/python3.4/site-packages/PIL/ImageTk.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageTk.py index 1e81d24..68d388e 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageTk.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageTk.py @@ 
-40,21 +40,23 @@ from PIL import Image _pilbitmap_ok = None + def _pilbitmap_check(): global _pilbitmap_ok if _pilbitmap_ok is None: try: - im = Image.new("1", (1,1)) + im = Image.new("1", (1, 1)) tkinter.BitmapImage(data="PIL:%d" % im.im.id) _pilbitmap_ok = 1 except tkinter.TclError: _pilbitmap_ok = 0 return _pilbitmap_ok + # -------------------------------------------------------------------- # PhotoImage -class PhotoImage: +class PhotoImage(object): """ A Tkinter-compatible photo image. This can be used everywhere Tkinter expects an image object. If the image is an RGBA @@ -95,7 +97,7 @@ class PhotoImage: try: mode = image.palette.mode except AttributeError: - mode = "RGB" # default + mode = "RGB" # default size = image.size kw["width"], kw["height"] = size else: @@ -118,8 +120,7 @@ class PhotoImage: try: self.__photo.tk.call("image", "delete", name) except: - pass # ignore internal errors - + pass # ignore internal errors def __str__(self): """ @@ -131,7 +132,6 @@ class PhotoImage: """ return str(self.__photo) - def width(self): """ Get the width of the image. @@ -140,7 +140,6 @@ class PhotoImage: """ return self.__size[0] - def height(self): """ Get the height of the image. @@ -149,7 +148,6 @@ class PhotoImage: """ return self.__size[1] - def paste(self, im, box=None): """ Paste a PIL image into the photo image. Note that this can @@ -170,13 +168,13 @@ class PhotoImage: block = image else: block = image.new_block(self.__mode, im.size) - image.convert2(block, image) # convert directly between buffers + image.convert2(block, image) # convert directly between buffers tk = self.__photo.tk try: tk.call("PyImagingPhoto", self.__photo, block.id) - except tkinter.TclError as v: + except tkinter.TclError: # activate Tkinter hook try: from PIL import _imagingtk @@ -186,13 +184,13 @@ class PhotoImage: _imagingtk.tkinit(id(tk), 0) tk.call("PyImagingPhoto", self.__photo, block.id) except (ImportError, AttributeError, tkinter.TclError): - raise # configuration problem; cannot attach to Tkinter + raise # configuration problem; cannot attach to Tkinter # -------------------------------------------------------------------- # BitmapImage -class BitmapImage: +class BitmapImage(object): """ A Tkinter-compatible bitmap image. This can be used everywhere Tkinter @@ -226,7 +224,7 @@ class BitmapImage: # fast way (requires the pilbitmap booster patch) image.load() kw["data"] = "PIL:%d" % image.im.id - self.__im = image # must keep a reference + self.__im = image # must keep a reference else: # slow but safe way kw["data"] = image.tobitmap() @@ -238,8 +236,7 @@ class BitmapImage: try: self.__photo.tk.call("image", "delete", name) except: - pass # ignore internal errors - + pass # ignore internal errors def width(self): """ @@ -249,7 +246,6 @@ class BitmapImage: """ return self.__size[0] - def height(self): """ Get the height of the image. @@ -258,7 +254,6 @@ class BitmapImage: """ return self.__size[1] - def __str__(self): """ Get the Tkinter bitmap image identifier. This method is automatically @@ -274,6 +269,7 @@ def getimage(photo): """Copies the contents of a PhotoImage to a PIL image memory.""" photo.tk.call("PyImagingPhotoGet", photo) + # -------------------------------------------------------------------- # Helper for the Image.show method. 
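The PhotoImage class patched in this hunk is the usual bridge between PIL images and Tkinter widgets. A minimal usage sketch, assuming an ordinary Tkinter application (the file name is illustrative)::

    from PIL import Image, ImageTk
    import tkinter

    root = tkinter.Tk()
    im = Image.open("example.png")            # illustrative file name
    photo = ImageTk.PhotoImage(im)            # Tkinter-compatible wrapper around the PIL image
    label = tkinter.Label(root, image=photo)
    label.image = photo                       # keep a reference so Tkinter does not discard the image
    label.pack()
    root.mainloop()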
@@ -286,7 +282,7 @@ def _show(image, title): else: self.image = PhotoImage(im, master=master) tkinter.Label.__init__(self, master, image=self.image, - bg="black", bd=0) + bg="black", bd=0) if not tkinter._default_root: raise IOError("tkinter not initialized") diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageTransform.py b/Darwin/lib/python3.5/site-packages/PIL/ImageTransform.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/ImageTransform.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageTransform.py index 5a8f9e9..81f9050 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageTransform.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageTransform.py @@ -15,16 +15,20 @@ from PIL import Image + class Transform(Image.ImageTransformHandler): def __init__(self, data): self.data = data + def getdata(self): return self.method, self.data + def transform(self, size, image, **options): # can be overridden method, data = self.getdata() return image.transform(size, method, data, **options) + ## # Define an affine image transform. #

@@ -43,9 +47,11 @@ class Transform(Image.ImageTransformHandler): # the first two rows from an affine transform matrix. # @see Image#Image.transform + class AffineTransform(Transform): method = Image.AFFINE + ## # Define a transform to extract a subregion from an image. #
@@ -68,6 +74,7 @@ class AffineTransform(Transform): class ExtentTransform(Transform): method = Image.EXTENT + ## # Define an quad image transform. #
@@ -83,6 +90,7 @@ class ExtentTransform(Transform): class QuadTransform(Transform): method = Image.QUAD + ## # Define an mesh image transform. A mesh transform consists of one # or more individual quad transforms. diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImageWin.py b/Darwin/lib/python3.5/site-packages/PIL/ImageWin.py similarity index 89% rename from Darwin/lib/python3.4/site-packages/PIL/ImageWin.py rename to Darwin/lib/python3.5/site-packages/PIL/ImageWin.py index aa90b88..58894d6 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImageWin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImageWin.py @@ -17,34 +17,36 @@ # See the README file for information on usage and redistribution. # -import warnings from PIL import Image -class HDC: +class HDC(object): """ - Wraps a HDC integer. The resulting object can be passed to the + Wraps an HDC integer. The resulting object can be passed to the :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` methods. """ def __init__(self, dc): self.dc = dc + def __int__(self): return self.dc -class HWND: + +class HWND(object): """ - Wraps a HWND integer. The resulting object can be passed to the + Wraps an HWND integer. The resulting object can be passed to the :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` methods, instead of a DC. """ def __init__(self, wnd): self.wnd = wnd + def __int__(self): return self.wnd -class Dib: +class Dib(object): """ A Windows bitmap with the given mode and size. The mode can be one of "1", "L", "P", or "RGB". @@ -79,13 +81,12 @@ class Dib: if image: self.paste(image) - def expose(self, handle): """ Copy the bitmap contents to a device context. - :param handle: Device context (HDC), cast to a Python integer, or a HDC - or HWND instance. In PythonWin, you can use the + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. """ if isinstance(handle, HWND): @@ -109,7 +110,7 @@ class Dib: necessary. """ if not src: - src = (0,0) + self.size + src = (0, 0) + self.size if isinstance(handle, HWND): dc = self.image.getdc(handle) try: @@ -120,7 +121,6 @@ class Dib: result = self.image.draw(handle, dst, src) return result - def query_palette(self, handle): """ Installs the palette associated with the image in the given device @@ -146,7 +146,6 @@ class Dib: result = self.image.query_palette(handle) return result - def paste(self, im, box=None): """ Paste a PIL image into the bitmap image. @@ -166,7 +165,6 @@ class Dib: else: self.image.paste(im.im) - def frombytes(self, buffer): """ Load display memory contents from byte data. @@ -176,7 +174,6 @@ class Dib: """ return self.image.frombytes(buffer) - def tobytes(self): """ Copy display memory contents to bytes object. @@ -185,29 +182,19 @@ class Dib: """ return self.image.tobytes() - ## - # Deprecated aliases to frombytes & tobytes. - def fromstring(self, *args, **kw): - warnings.warn( - 'fromstring() is deprecated. Please call frombytes() instead.', - DeprecationWarning, - stacklevel=2 - ) - return self.frombytes(*args, **kw) + raise Exception("fromstring() has been removed. " + + "Please use frombytes() instead.") + + def tostring(self, *args, **kw): + raise Exception("tostring() has been removed. " + + "Please use tobytes() instead.") - def tostring(self): - warnings.warn( - 'tostring() is deprecated. 
Please call tobytes() instead.', - DeprecationWarning, - stacklevel=2 - ) - return self.tobytes() ## # Create a Window with the given title size. -class Window: +class Window(object): def __init__(self, title="PIL", width=None, height=None): self.hwnd = Image.core.createwindow( @@ -235,6 +222,7 @@ class Window: def mainloop(self): Image.core.eventloop() + ## # Create an image window which displays the given image. diff --git a/Darwin/lib/python3.4/site-packages/PIL/ImtImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/ImtImagePlugin.py similarity index 92% rename from Darwin/lib/python3.4/site-packages/PIL/ImtImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/ImtImagePlugin.py index e68b003..1ca2c25 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/ImtImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/ImtImagePlugin.py @@ -15,17 +15,19 @@ # -__version__ = "0.2" - import re from PIL import Image, ImageFile +__version__ = "0.2" + + # # -------------------------------------------------------------------- field = re.compile(br"([a-z]*) ([^ \r\n]*)") + ## # Image plugin for IM Tools images. @@ -39,7 +41,7 @@ class ImtImageFile(ImageFile.ImageFile): # Quick rejection: if there's not a LF among the first # 100 bytes, this is (probably) not a text header. - if not b"\n" in self.fp.read(100): + if b"\n" not in self.fp.read(100): raise SyntaxError("not an IM file") self.fp.seek(0) @@ -54,7 +56,7 @@ class ImtImageFile(ImageFile.ImageFile): if s == b'\x0C': # image data begins - self.tile = [("raw", (0,0)+self.size, + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), (self.mode, 0, 1))] @@ -68,12 +70,12 @@ class ImtImageFile(ImageFile.ImageFile): if len(s) == 1 or len(s) > 100: break if s[0] == b"*": - continue # comment + continue # comment m = field.match(s) if not m: break - k, v = m.group(1,2) + k, v = m.group(1, 2) if k == "width": xsize = int(v) self.size = xsize, ysize diff --git a/Darwin/lib/python3.4/site-packages/PIL/IptcImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/IptcImagePlugin.py similarity index 77% rename from Darwin/lib/python3.4/site-packages/PIL/IptcImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/IptcImagePlugin.py index 8557561..b5aa84b 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/IptcImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/IptcImagePlugin.py @@ -17,11 +17,11 @@ from __future__ import print_function -__version__ = "0.3" - - from PIL import Image, ImageFile, _binary -import os, tempfile +import os +import tempfile + +__version__ = "0.3" i8 = _binary.i8 i16 = _binary.i16be @@ -35,17 +35,20 @@ COMPRESSION = { PAD = o8(0) * 4 + # # Helpers def i(c): return i32((PAD + c)[-4:]) + def dump(c): for i in c: print("%02x" % i8(i), end=' ') print() + ## # Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields # from TIFF and JPEG files, use the getiptcinfo function. @@ -84,35 +87,13 @@ class IptcImageFile(ImageFile.ImageFile): return tag, size - def _is_raw(self, offset, size): - # - # check if the file can be mapped - - # DISABLED: the following only slows things down... 
- return 0 - - self.fp.seek(offset) - t, sz = self.field() - if sz != size[0]: - return 0 - y = 1 - while True: - self.fp.seek(sz, 1) - t, s = self.field() - if t != (8, 10): - break - if s != sz: - return 0 - y += 1 - return y == size[1] - def _open(self): # load descriptive fields while True: offset = self.fp.tell() tag, size = self.field() - if not tag or tag == (8,10): + if not tag or tag == (8, 10): break if size: tagdata = self.fp.read(size) @@ -129,10 +110,10 @@ class IptcImageFile(ImageFile.ImageFile): # print tag, self.info[tag] # mode - layers = i8(self.info[(3,60)][0]) - component = i8(self.info[(3,60)][1]) - if (3,65) in self.info: - id = i8(self.info[(3,65)][0])-1 + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 else: id = 0 if layers == 1 and not component: @@ -143,22 +124,18 @@ class IptcImageFile(ImageFile.ImageFile): self.mode = "CMYK"[id] # size - self.size = self.getint((3,20)), self.getint((3,30)) + self.size = self.getint((3, 20)), self.getint((3, 30)) # compression try: - compression = COMPRESSION[self.getint((3,120))] + compression = COMPRESSION[self.getint((3, 120))] except KeyError: raise IOError("Unknown IPTC image compression") # tile - if tag == (8,10): - if compression == "raw" and self._is_raw(offset, self.size): - self.tile = [(compression, (offset, size + 5, -1), - (0, 0, self.size[0], self.size[1]))] - else: - self.tile = [("iptc", (compression, offset), - (0, 0, self.size[0], self.size[1]))] + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] def load(self): @@ -200,13 +177,16 @@ class IptcImageFile(ImageFile.ImageFile): im.load() self.im = im.im finally: - try: os.unlink(outfile) - except: pass + try: + os.unlink(outfile) + except: + pass -Image.register_open("IPTC", IptcImageFile) +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") -Image.register_extension("IPTC", ".iim") ## # Get IPTC information from TIFF, JPEG, or IPTC file. 
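The getiptcinfo() helper reworked in the next hunk is normally called on an already-opened JPEG or TIFF; it returns a dictionary keyed by (record, dataset) tuples, or None when no IPTC data is present. A minimal sketch (the file name and the 2:120 caption key are illustrative)::

    from PIL import Image, IptcImagePlugin

    im = Image.open("photo.jpg")              # illustrative file name
    iptc = IptcImagePlugin.getiptcinfo(im)    # dict keyed by (record, dataset) tuples, or None
    if iptc:
        caption = iptc.get((2, 120))          # IPTC record 2, dataset 120: caption/abstract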
@@ -230,23 +210,23 @@ def getiptcinfo(im): # extract the IPTC/NAA resource try: app = im.app["APP13"] - if app[:14] == "Photoshop 3.0\x00": + if app[:14] == b"Photoshop 3.0\x00": app = app[14:] # parse the image resource block offset = 0 - while app[offset:offset+4] == "8BIM": + while app[offset:offset+4] == b"8BIM": offset += 4 # resource code - code = JpegImagePlugin.i16(app, offset) + code = i16(app, offset) offset += 2 # resource name (usually empty) name_len = i8(app[offset]) - name = app[offset+1:offset+1+name_len] + # name = app[offset+1:offset+1+name_len] offset = 1 + offset + name_len if offset & 1: offset += 1 # resource data block - size = JpegImagePlugin.i32(app, offset) + size = i32(app, offset) offset += 4 if code == 0x0404: # 0x0404 contains IPTC/NAA data @@ -267,10 +247,10 @@ def getiptcinfo(im): pass if data is None: - return None # no properties + return None # no properties # create an IptcImagePlugin object without initializing it - class FakeImage: + class FakeImage(object): pass im = FakeImage() im.__class__ = IptcImageFile @@ -282,6 +262,6 @@ def getiptcinfo(im): try: im._open() except (IndexError, KeyError): - pass # expected failure + pass # expected failure return im.info diff --git a/Darwin/lib/python3.4/site-packages/PIL/Jpeg2KImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/Jpeg2KImagePlugin.py similarity index 91% rename from Darwin/lib/python3.4/site-packages/PIL/Jpeg2KImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/Jpeg2KImagePlugin.py index 53b10ca..b82acdd 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/Jpeg2KImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/Jpeg2KImagePlugin.py @@ -12,14 +12,13 @@ # # See the README file for information on usage and redistribution. # - -__version__ = "0.1" - from PIL import Image, ImageFile import struct import os import io +__version__ = "0.1" + def _parse_codestream(fp): """Parse the JPEG 2000 codestream to extract the size and component @@ -72,7 +71,7 @@ def _parse_jp2_header(fp): if lbox < hlen: raise SyntaxError('Invalid JP2 header length') - + if tbox == b'jp2h': header = fp.read(lbox - hlen) break @@ -208,8 +207,8 @@ class Jpeg2KImageFile(ImageFile.ImageFile): def _accept(prefix): - return (prefix[:4] == b'\xff\x4f\xff\x51' - or prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') # ------------------------------------------------------------ @@ -263,15 +262,15 @@ def _save(im, fp, filename): # ------------------------------------------------------------ # Registry stuff -Image.register_open('JPEG2000', Jpeg2KImageFile, _accept) -Image.register_save('JPEG2000', _save) +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) -Image.register_extension('JPEG2000', '.jp2') -Image.register_extension('JPEG2000', '.j2k') -Image.register_extension('JPEG2000', '.jpc') -Image.register_extension('JPEG2000', '.jpf') -Image.register_extension('JPEG2000', '.jpx') -Image.register_extension('JPEG2000', '.j2c') +Image.register_extension(Jpeg2KImageFile.format, '.jp2') +Image.register_extension(Jpeg2KImageFile.format, '.j2k') +Image.register_extension(Jpeg2KImageFile.format, '.jpc') +Image.register_extension(Jpeg2KImageFile.format, '.jpf') +Image.register_extension(Jpeg2KImageFile.format, '.jpx') +Image.register_extension(Jpeg2KImageFile.format, '.j2c') -Image.register_mime('JPEG2000', 'image/jp2') 
-Image.register_mime('JPEG2000', 'image/jpx') +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/Darwin/lib/python3.4/site-packages/PIL/JpegImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/JpegImagePlugin.py similarity index 73% rename from Darwin/lib/python3.4/site-packages/PIL/JpegImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/JpegImagePlugin.py index a434c55..5779519 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/JpegImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/JpegImagePlugin.py @@ -4,7 +4,7 @@ # # JPEG (JFIF) file handling # -# See "Digital Compression and Coding of Continous-Tone Still Images, +# See "Digital Compression and Coding of Continuous-Tone Still Images, # Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) # # History: @@ -32,11 +32,12 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.6" - import array import struct -from PIL import Image, ImageFile, _binary +import io +import warnings +from struct import unpack_from +from PIL import Image, ImageFile, TiffImagePlugin, _binary from PIL.JpegPresets import presets from PIL._util import isStringType @@ -45,6 +46,8 @@ o8 = _binary.o8 i16 = _binary.i16be i32 = _binary.i32be +__version__ = "0.6" + # # Parser @@ -110,6 +113,12 @@ def APP(self, marker): pass else: self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 def COM(self, marker): @@ -279,7 +288,7 @@ class JpegImageFile(ImageFile.ImageFile): s = self.fp.read(1) - if i8(s[0]) != 255: + if i8(s) != 255: raise SyntaxError("not a JPEG file") # Create attributes @@ -302,7 +311,7 @@ class JpegImageFile(ImageFile.ImageFile): i = i16(s) else: # Skip non-0xFF junk - s = b"\xff" + s = self.fp.read(1) continue if i in MARKER: @@ -314,7 +323,8 @@ class JpegImageFile(ImageFile.ImageFile): rawmode = self.mode if self.mode == "CMYK": rawmode = "CMYK;I" # assume adobe conventions - self.tile = [("jpeg", (0, 0) + self.size, 0, (rawmode, ""))] + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] # self.__offset = self.fp.tell() break s = self.fp.read(1) @@ -346,7 +356,7 @@ class JpegImageFile(ImageFile.ImageFile): scale = s self.tile = [(d, e, o, a)] - self.decoderconfig = (scale, 1) + self.decoderconfig = (scale, 0) return self @@ -380,18 +390,15 @@ class JpegImageFile(ImageFile.ImageFile): def _getexif(self): return _getexif(self) + def _getmp(self): + return _getmp(self) + def _getexif(self): # Extract EXIF information. This method is highly experimental, # and is likely to be replaced with something better in a future # version. - from PIL import TiffImagePlugin - import io - def fixup(value): - if len(value) == 1: - return value[0] - return value # The EXIF record consists of a TIFF file embedded in a JPEG # application marker (!). try: @@ -400,35 +407,107 @@ def _getexif(self): return None file = io.BytesIO(data[6:]) head = file.read(8) - exif = {} # process dictionary - info = TiffImagePlugin.ImageFileDirectory(head) + info = TiffImagePlugin.ImageFileDirectory_v2(head) info.load(file) - for key, value in info.items(): - exif[key] = fixup(value) + exif = dict(info) # get exif extension try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. 
+ # It should be a long, but may be corrupted. file.seek(exif[0x8769]) - except KeyError: + except (KeyError, TypeError): pass else: - info = TiffImagePlugin.ImageFileDirectory(head) + info = TiffImagePlugin.ImageFileDirectory_v2(head) info.load(file) - for key, value in info.items(): - exif[key] = fixup(value) + exif.update(info) # get gpsinfo extension try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. file.seek(exif[0x8825]) - except KeyError: + except (KeyError, TypeError): pass else: - info = TiffImagePlugin.ImageFileDirectory(head) + info = TiffImagePlugin.ImageFileDirectory_v2(head) info.load(file) - exif[0x8825] = gps = {} - for key, value in info.items(): - gps[key] = fixup(value) + exif[0x8825] = dict(info) return exif + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = unpack_from( + '{0}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
+ return mp + + # -------------------------------------------------------------------- # stuff to save JPEG files @@ -442,20 +521,19 @@ RAWMODE = { "YCbCr": "YCbCr", } -zigzag_index = ( 0, 1, 5, 6, 14, 15, 27, 28, - 2, 4, 7, 13, 16, 26, 29, 42, - 3, 8, 12, 17, 25, 30, 41, 43, - 9, 11, 18, 24, 31, 40, 44, 53, - 10, 19, 23, 32, 39, 45, 52, 54, - 20, 22, 33, 38, 46, 51, 55, 60, - 21, 34, 37, 47, 50, 56, 59, 61, - 35, 36, 48, 49, 57, 58, 62, 63) +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) -samplings = { - (1, 1, 1, 1, 1, 1): 0, +samplings = {(1, 1, 1, 1, 1, 1): 0, (2, 1, 1, 1, 1, 1): 1, (2, 2, 1, 1, 1, 1): 2, - } + } def convert_dict_qtables(qtables): @@ -466,6 +544,15 @@ def convert_dict_qtables(qtables): def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] return samplings.get(sampling, -1) @@ -510,7 +597,8 @@ def _save(im, fp, filename): subsampling = 2 elif subsampling == "keep": if im.format != "JPEG": - raise ValueError("Cannot use 'keep' when original image is not a JPEG") + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") subsampling = get_sampling(im) def validate_qtables(qtables): @@ -544,7 +632,8 @@ def _save(im, fp, filename): if qtables == "keep": if im.format != "JPEG": - raise ValueError("Cannot use 'keep' when original image is not a JPEG") + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") qtables = getattr(im, "quantization", None) qtables = validate_qtables(qtables) @@ -562,7 +651,8 @@ def _save(im, fp, filename): i = 1 for marker in markers: size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) - extra += b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + o8(len(markers)) + marker + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) i += 1 # get keyword arguments @@ -585,10 +675,11 @@ def _save(im, fp, filename): # if we optimize, libjpeg needs a buffer big enough to hold the whole image # in a shot. Guessing on the size, at im.size bytes. (raw pizel size is # channels*size, this is a value that's been used in a django patch. - # https://github.com/jdriscoll/django-imagekit/issues/50 + # https://github.com/matthewwithanm/django-imagekit/issues/50 bufsize = 0 if "optimize" in info or "progressive" in info or "progression" in info: - if quality >= 95: + # keep sets quality to 0, but the actual value may be high. 
+ if quality >= 95 or quality == 0: bufsize = 2 * im.size[0] * im.size[1] else: bufsize = im.size[0] * im.size[1] @@ -607,19 +698,40 @@ def _save_cjpeg(im, fp, filename): tempfile = im._dump() subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) try: - os.unlink(file) + os.unlink(tempfile) except: pass + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + pass + return im + + # -------------------------------------------------------------------q- # Registry stuff -Image.register_open("JPEG", JpegImageFile, _accept) -Image.register_save("JPEG", _save) +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) -Image.register_extension("JPEG", ".jfif") -Image.register_extension("JPEG", ".jpe") -Image.register_extension("JPEG", ".jpg") -Image.register_extension("JPEG", ".jpeg") +Image.register_extension(JpegImageFile.format, ".jfif") +Image.register_extension(JpegImageFile.format, ".jpe") +Image.register_extension(JpegImageFile.format, ".jpg") +Image.register_extension(JpegImageFile.format, ".jpeg") -Image.register_mime("JPEG", "image/jpeg") +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/Darwin/lib/python3.4/site-packages/PIL/JpegPresets.py b/Darwin/lib/python3.5/site-packages/PIL/JpegPresets.py similarity index 84% rename from Darwin/lib/python3.4/site-packages/PIL/JpegPresets.py rename to Darwin/lib/python3.5/site-packages/PIL/JpegPresets.py index e7bec14..66cbde1 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/JpegPresets.py +++ b/Darwin/lib/python3.5/site-packages/PIL/JpegPresets.py @@ -48,8 +48,8 @@ You can get the quantization tables of a JPEG with:: im.quantization -This will return a dict with a number of arrays. You can pass this dict directly -as the qtables argument when saving a JPEG. +This will return a dict with a number of arrays. You can pass this dict +directly as the qtables argument when saving a JPEG. 
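As the docstring above notes, the quantization tables read from one JPEG can be passed straight back to save() via the qtables argument. A minimal round-trip sketch, assuming the source file really is a JPEG (file names are illustrative)::

    from PIL import Image

    im = Image.open("original.jpg")           # illustrative file name; must be a JPEG for "keep"
    tables = im.quantization                  # dict of quantization arrays read from the source file
    im.save("copy.jpg", qtables=tables, subsampling="keep")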
The tables format between im.quantization and quantization in presets differ in 3 ways: @@ -67,8 +67,8 @@ Libjpeg ref.: http://www.jpegcameras.com/libjpeg/libjpeg-3.html """ presets = { - 'web_low': {'subsampling': 2, # "4:1:1" - 'quantization': [ + 'web_low': {'subsampling': 2, # "4:1:1" + 'quantization': [ [20, 16, 25, 39, 50, 46, 62, 68, 16, 18, 23, 38, 38, 53, 65, 68, 25, 23, 31, 38, 53, 65, 68, 68, @@ -85,9 +85,9 @@ presets = { 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68] - ]}, - 'web_medium': {'subsampling': 2, # "4:1:1" - 'quantization': [ + ]}, + 'web_medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ [16, 11, 11, 16, 23, 27, 31, 30, 11, 12, 12, 15, 20, 23, 23, 30, 11, 12, 13, 16, 23, 26, 35, 47, @@ -104,10 +104,10 @@ presets = { 26, 26, 30, 39, 48, 63, 64, 64, 38, 35, 46, 53, 64, 64, 64, 64, 48, 43, 53, 64, 64, 64, 64, 64] - ]}, - 'web_high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [ 6, 4, 4, 6, 9, 11, 12, 16, + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, 4, 5, 5, 6, 8, 10, 12, 12, 4, 5, 5, 6, 10, 12, 14, 19, 6, 6, 6, 11, 12, 15, 19, 28, @@ -115,7 +115,7 @@ presets = { 11, 10, 12, 15, 20, 27, 31, 31, 12, 12, 14, 19, 27, 31, 31, 31, 16, 12, 19, 28, 31, 31, 31, 31], - [ 7, 7, 13, 24, 26, 31, 31, 31, + [7, 7, 13, 24, 26, 31, 31, 31, 7, 12, 16, 21, 31, 31, 31, 31, 13, 16, 17, 31, 31, 31, 31, 31, 24, 21, 31, 31, 31, 31, 31, 31, @@ -123,10 +123,10 @@ presets = { 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31] - ]}, - 'web_very_high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [ 2, 2, 2, 2, 3, 4, 5, 6, + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, 2, 2, 2, 2, 3, 4, 5, 6, 2, 2, 2, 2, 4, 5, 7, 9, 2, 2, 2, 4, 5, 7, 9, 12, @@ -134,7 +134,7 @@ presets = { 4, 4, 5, 7, 10, 12, 12, 12, 5, 5, 7, 9, 12, 12, 12, 12, 6, 6, 9, 12, 12, 12, 12, 12], - [ 3, 3, 5, 9, 13, 15, 15, 15, + [3, 3, 5, 9, 13, 15, 15, 15, 3, 4, 6, 11, 14, 12, 12, 12, 5, 6, 9, 14, 12, 12, 12, 12, 9, 11, 14, 12, 12, 12, 12, 12, @@ -142,10 +142,10 @@ presets = { 15, 12, 12, 12, 12, 12, 12, 12, 15, 12, 12, 12, 12, 12, 12, 12, 15, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'web_maximum': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [ 1, 1, 1, 1, 1, 1, 1, 1, + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 2, @@ -153,7 +153,7 @@ presets = { 1, 1, 1, 1, 2, 2, 3, 3, 1, 1, 1, 2, 2, 3, 3, 3, 1, 1, 2, 2, 3, 3, 3, 3], - [ 1, 1, 1, 2, 2, 3, 3, 3, + [1, 1, 1, 2, 2, 3, 3, 3, 1, 1, 1, 2, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, @@ -161,9 +161,9 @@ presets = { 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3] - ]}, - 'low': {'subsampling': 2, # "4:1:1" - 'quantization': [ + ]}, + 'low': {'subsampling': 2, # "4:1:1" + 'quantization': [ [18, 14, 14, 21, 30, 35, 34, 17, 14, 16, 16, 19, 26, 23, 12, 12, 14, 16, 17, 21, 23, 12, 12, 12, @@ -180,9 +180,9 @@ presets = { 20, 12, 12, 12, 12, 12, 12, 12, 17, 12, 12, 12, 12, 12, 12, 12, 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'medium': {'subsampling': 2, # "4:1:1" - 'quantization': [ + ]}, + 'medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ [12, 8, 8, 12, 17, 21, 24, 17, 8, 9, 9, 11, 15, 19, 12, 12, 8, 9, 10, 12, 19, 12, 12, 12, @@ -199,10 +199,10 @@ presets = { 20, 12, 12, 12, 12, 12, 12, 12, 17, 12, 12, 12, 12, 
12, 12, 12, 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'high': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [ 6, 4, 4, 6, 9, 11, 12, 16, + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, 4, 5, 5, 6, 8, 10, 12, 12, 4, 5, 5, 6, 10, 12, 12, 12, 6, 6, 6, 11, 12, 12, 12, 12, @@ -210,7 +210,7 @@ presets = { 11, 10, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 16, 12, 12, 12, 12, 12, 12, 12], - [ 7, 7, 13, 24, 20, 20, 17, 17, + [7, 7, 13, 24, 20, 20, 17, 17, 7, 12, 16, 14, 14, 12, 12, 12, 13, 16, 14, 14, 12, 12, 12, 12, 24, 14, 14, 12, 12, 12, 12, 12, @@ -218,10 +218,10 @@ presets = { 20, 12, 12, 12, 12, 12, 12, 12, 17, 12, 12, 12, 12, 12, 12, 12, 17, 12, 12, 12, 12, 12, 12, 12] - ]}, - 'maximum': {'subsampling': 0, # "4:4:4" - 'quantization': [ - [ 2, 2, 2, 2, 3, 4, 5, 6, + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, 2, 2, 2, 2, 3, 4, 5, 6, 2, 2, 2, 2, 4, 5, 7, 9, 2, 2, 2, 4, 5, 7, 9, 12, @@ -229,7 +229,7 @@ presets = { 4, 4, 5, 7, 10, 12, 12, 12, 5, 5, 7, 9, 12, 12, 12, 12, 6, 6, 9, 12, 12, 12, 12, 12], - [ 3, 3, 5, 9, 13, 15, 15, 15, + [3, 3, 5, 9, 13, 15, 15, 15, 3, 4, 6, 10, 14, 12, 12, 12, 5, 6, 9, 14, 12, 12, 12, 12, 9, 10, 14, 12, 12, 12, 12, 12, @@ -237,5 +237,5 @@ presets = { 15, 12, 12, 12, 12, 12, 12, 12, 15, 12, 12, 12, 12, 12, 12, 12, 15, 12, 12, 12, 12, 12, 12, 12] - ]}, -} \ No newline at end of file + ]}, +} diff --git a/Darwin/lib/python3.4/site-packages/PIL/McIdasImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/McIdasImagePlugin.py similarity index 93% rename from Darwin/lib/python3.4/site-packages/PIL/McIdasImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/McIdasImagePlugin.py index 3aef10b..705fa57 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/McIdasImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/McIdasImagePlugin.py @@ -16,14 +16,16 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.2" - import struct from PIL import Image, ImageFile +__version__ = "0.2" + + def _accept(s): return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + ## # Image plugin for McIdas area images. @@ -47,10 +49,12 @@ class McIdasImageFile(ImageFile.ImageFile): mode = rawmode = "L" elif w[11] == 2: # FIXME: add memory map support - mode = "I"; rawmode = "I;16B" + mode = "I" + rawmode = "I;16B" elif w[11] == 4: # FIXME: add memory map support - mode = "I"; rawmode = "I;32B" + mode = "I" + rawmode = "I;32B" else: raise SyntaxError("unsupported McIdas format") diff --git a/Darwin/lib/python3.4/site-packages/PIL/MicImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/MicImagePlugin.py similarity index 80% rename from Darwin/lib/python3.4/site-packages/PIL/MicImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/MicImagePlugin.py index 84e9628..3c91244 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/MicImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/MicImagePlugin.py @@ -17,11 +17,10 @@ # -__version__ = "0.1" - - from PIL import Image, TiffImagePlugin -from PIL.OleFileIO import * +from PIL.OleFileIO import MAGIC, OleFileIO + +__version__ = "0.1" # @@ -31,6 +30,7 @@ from PIL.OleFileIO import * def _accept(prefix): return prefix[:8] == MAGIC + ## # Image plugin for Microsoft's Image Composer file format. @@ -53,9 +53,9 @@ class MicImageFile(TiffImagePlugin.TiffImageFile): # best way to identify MIC files, but what the... 
;-) self.images = [] - for file in self.ole.listdir(): - if file[1:] and file[0][-4:] == ".ACI" and file[1] == "Image": - self.images.append(file) + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) # if we didn't find any images, this is probably not # an MIC file. @@ -70,6 +70,14 @@ class MicImageFile(TiffImagePlugin.TiffImageFile): self.seek(0) + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + def seek(self, frame): try: @@ -90,6 +98,6 @@ class MicImageFile(TiffImagePlugin.TiffImageFile): # # -------------------------------------------------------------------- -Image.register_open("MIC", MicImageFile, _accept) +Image.register_open(MicImageFile.format, MicImageFile, _accept) -Image.register_extension("MIC", ".mic") +Image.register_extension(MicImageFile.format, ".mic") diff --git a/Darwin/lib/python3.4/site-packages/PIL/MpegImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/MpegImagePlugin.py similarity index 86% rename from Darwin/lib/python3.4/site-packages/PIL/MpegImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/MpegImagePlugin.py index 02e6adc..6671b86 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/MpegImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/MpegImagePlugin.py @@ -13,15 +13,17 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.1" from PIL import Image, ImageFile from PIL._binary import i8 +__version__ = "0.1" + + # # Bitstream parser -class BitStream: +class BitStream(object): def __init__(self, fp): self.fp = fp @@ -52,6 +54,7 @@ class BitStream: self.bits = self.bits - bits return v + ## # Image plugin for MPEG streams. This plugin can identify a stream, # but it cannot read it. @@ -75,9 +78,9 @@ class MpegImageFile(ImageFile.ImageFile): # -------------------------------------------------------------------- # Registry stuff -Image.register_open("MPEG", MpegImageFile) +Image.register_open(MpegImageFile.format, MpegImageFile) -Image.register_extension("MPEG", ".mpg") -Image.register_extension("MPEG", ".mpeg") +Image.register_extension(MpegImageFile.format, ".mpg") +Image.register_extension(MpegImageFile.format, ".mpeg") -Image.register_mime("MPEG", "video/mpeg") +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/Darwin/lib/python3.5/site-packages/PIL/MpoImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..1d26021 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. +# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image, JpegImagePlugin + +__version__ = "0.1" + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self.mpinfo = self._getmp() + self.__framecount = self.mpinfo[0xB001] + self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] + for mpent in self.mpinfo[0xB002]] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. + assert self.__framecount == len(self.__mpoffsets) + del self.info['mpoffset'] # no longer needed + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if frame < 0 or frame >= self.__framecount: + raise EOFError("no more images in MPO file") + else: + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + self.tile = [ + ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) + ] + self.__frame = frame + + def tell(self): + return self.__frame + + +# -------------------------------------------------------------------q- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/Darwin/lib/python3.4/site-packages/PIL/MspImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/MspImagePlugin.py similarity index 71% rename from Darwin/lib/python3.4/site-packages/PIL/MspImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/MspImagePlugin.py index 743ebe1..85f8e76 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/MspImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/MspImagePlugin.py @@ -17,19 +17,21 @@ # -__version__ = "0.1" - from PIL import Image, ImageFile, _binary +__version__ = "0.1" + # # read MSP files i16 = _binary.i16le + def _accept(prefix): return prefix[:4] in [b"DanM", b"LinS"] + ## # Image plugin for Windows MSP images. This plugin supports both # uncompressed (Windows 1.0). 
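The MicImagePlugin and MpoImagePlugin diffs above add a frame-enumeration API (the n_frames and is_animated properties) next to the existing seek/tell methods. A minimal usage sketch, assuming a Pillow build that ships these plugins; "example.mpo" is a placeholder file name:

    from PIL import Image

    im = Image.open("example.mpo")   # MPO files open through the shared JPEG factory
    print(im.format, im.n_frames, im.is_animated)
    for frame in range(im.n_frames):
        im.seek(frame)               # seek() raises EOFError past the last frame
        print(im.tell(), im.size, im.mode)
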
@@ -47,25 +49,26 @@ class MspImageFile(ImageFile.ImageFile): raise SyntaxError("not an MSP file") # Header checksum - sum = 0 + checksum = 0 for i in range(0, 32, 2): - sum = sum ^ i16(s[i:i+2]) - if sum != 0: + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: raise SyntaxError("bad MSP checksum") self.mode = "1" self.size = i16(s[4:]), i16(s[6:]) if s[:4] == b"DanM": - self.tile = [("raw", (0,0)+self.size, 32, ("1", 0, 1))] + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] else: - self.tile = [("msp", (0,0)+self.size, 32+2*self.size[1], None)] + self.tile = [("msp", (0, 0)+self.size, 32+2*self.size[1], None)] # # write MSP files (uncompressed only) o16 = _binary.o16le + def _save(im, fp, filename): if im.mode != "1": @@ -74,28 +77,28 @@ def _save(im, fp, filename): # create MSP header header = [0] * 16 - header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 header[2], header[3] = im.size header[4], header[5] = 1, 1 header[6], header[7] = 1, 1 header[8], header[9] = im.size - sum = 0 + checksum = 0 for h in header: - sum = sum ^ h - header[12] = sum # FIXME: is this the right field? + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? # header for h in header: fp.write(o16(h)) # image body - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 32, ("1", 0, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) # # registry -Image.register_open("MSP", MspImageFile, _accept) -Image.register_save("MSP", _save) +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) -Image.register_extension("MSP", ".msp") +Image.register_extension(MspImageFile.format, ".msp") diff --git a/Darwin/lib/python3.4/site-packages/PIL/OleFileIO-README.md b/Darwin/lib/python3.5/site-packages/PIL/OleFileIO-README.md similarity index 100% rename from Darwin/lib/python3.4/site-packages/PIL/OleFileIO-README.md rename to Darwin/lib/python3.5/site-packages/PIL/OleFileIO-README.md diff --git a/Darwin/lib/python3.4/site-packages/PIL/OleFileIO.py b/Darwin/lib/python3.5/site-packages/PIL/OleFileIO.py similarity index 62% rename from Darwin/lib/python3.4/site-packages/PIL/OleFileIO.py rename to Darwin/lib/python3.5/site-packages/PIL/OleFileIO.py index 8a3c77b..4cf106d 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/OleFileIO.py +++ b/Darwin/lib/python3.5/site-packages/PIL/OleFileIO.py @@ -1,47 +1,70 @@ -#!/usr/local/bin/python -# -*- coding: latin-1 -*- -""" -OleFileIO_PL: -Module to read Microsoft OLE2 files (also called Structured Storage or -Microsoft Compound Document File Format), such as Microsoft Office -documents, Image Composer and FlashPix files, Outlook messages, ... -This version is compatible with Python 2.6+ and 3.x +#!/usr/bin/env python -version 0.30 2014-02-04 Philippe Lagadec - http://www.decalage.info +# olefile (formerly OleFileIO_PL) version 0.42 2015-01-25 +# +# Module to read/write Microsoft OLE2 files (also called Structured Storage or +# Microsoft Compound Document File Format), such as Microsoft Office 97-2003 +# documents, Image Composer and FlashPix files, Outlook messages, ... 
+# This version is compatible with Python 2.6+ and 3.x +# +# Project website: http://www.decalage.info/olefile +# +# olefile is copyright (c) 2005-2015 Philippe Lagadec (http://www.decalage.info) +# +# olefile is based on the OleFileIO module from the PIL library v1.1.6 +# See: http://www.pythonware.com/products/pil/index.htm +# +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# See source code and LICENSE.txt for information on usage and redistribution. -Project website: http://www.decalage.info/python/olefileio -Improved version of the OleFileIO module from PIL library v1.1.6 -See: http://www.pythonware.com/products/pil/index.htm - -The Python Imaging Library (PIL) is - Copyright (c) 1997-2005 by Secret Labs AB - Copyright (c) 1995-2005 by Fredrik Lundh -OleFileIO_PL changes are Copyright (c) 2005-2014 by Philippe Lagadec - -See source code and LICENSE.txt for information on usage and redistribution. - -WARNING: THIS IS (STILL) WORK IN PROGRESS. -""" - -# Starting with OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported +# Since OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported # This import enables print() as a function rather than a keyword # (main requirement to be compatible with Python 3.x) # The comment on the line below should be printed on Python 2.5 or older: -from __future__ import print_function # This version of OleFileIO_PL requires Python 2.6+ or 3.x. +from __future__ import print_function # This version of olefile requires Python 2.6+ or 3.x. -__author__ = "Philippe Lagadec, Fredrik Lundh (Secret Labs AB)" -__date__ = "2014-02-04" -__version__ = '0.30' +__author__ = "Philippe Lagadec" +__date__ = "2015-01-25" +__version__ = '0.42b' #--- LICENSE ------------------------------------------------------------------ -# OleFileIO_PL is an improved version of the OleFileIO module from the -# Python Imaging Library (PIL). - -# OleFileIO_PL changes are Copyright (c) 2005-2014 by Philippe Lagadec +# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +# (http://www.decalage.info) # +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
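The header above describes the scope of olefile: reading OLE2 / Structured Storage containers such as Office 97-2003 documents, Image Composer and FlashPix files, and Outlook messages. For orientation, here is a minimal sketch of the public entry points that the docstrings further down reference (isOleFile, OleFileIO, listdir, openstream); the document path is a placeholder and error handling is omitted:

    from PIL import OleFileIO as olefile   # the copy of olefile vendored with Pillow

    path = "document.doc"                  # placeholder OLE2 file
    if olefile.isOleFile(path):
        ole = olefile.OleFileIO(path)
        try:
            for stream in ole.listdir(streams=True, storages=False):
                data = ole.openstream(stream).read()
                print("/".join(stream), len(data), "bytes")
        finally:
            ole.close()
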
+ +# ---------- +# PIL License: +# +# olefile is based on source code from the OleFileIO module of the Python +# Imaging Library (PIL) published by Fredrik Lundh under the following license: + # The Python Imaging Library (PIL) is # Copyright (c) 1997-2005 by Secret Labs AB # Copyright (c) 1995-2005 by Fredrik Lundh @@ -67,7 +90,7 @@ __version__ = '0.30' # PERFORMANCE OF THIS SOFTWARE. #----------------------------------------------------------------------------- -# CHANGELOG: (only OleFileIO_PL changes compared to PIL 1.1.6) +# CHANGELOG: (only olefile/OleFileIO_PL changes compared to PIL 1.1.6) # 2005-05-11 v0.10 PL: - a few fixes for Python 2.4 compatibility # (all changes flagged with [PL]) # 2006-02-22 v0.11 PL: - a few fixes for some Office 2003 documents which raise @@ -142,10 +165,29 @@ __version__ = '0.30' # 2014-02-04 v0.30 PL: - upgraded code to support Python 3.x by Martin Panter # - several fixes for Python 2.6 (xrange, MAGIC) # - reused i32 from Pillow's _binary +# 2014-07-18 v0.31 - preliminary support for 4K sectors +# 2014-07-27 v0.31 PL: - a few improvements in OleFileIO.open (header parsing) +# - Fixed loadfat for large files with 4K sectors (issue #3) +# 2014-07-30 v0.32 PL: - added write_sect to write sectors to disk +# - added write_mode option to OleFileIO.__init__ and open +# 2014-07-31 PL: - fixed padding in write_sect for Python 3, added checks +# - added write_stream to write a stream to disk +# 2014-09-26 v0.40 PL: - renamed OleFileIO_PL to olefile +# 2014-11-09 NE: - added support for Jython (Niko Ehrenfeuchter) +# 2014-11-13 v0.41 PL: - improved isOleFile and OleFileIO.open to support OLE +# data in a string buffer and file-like objects. +# 2014-11-21 PL: - updated comments according to Pillow's commits +# 2015-01-24 v0.42 PL: - changed the default path name encoding from Latin-1 +# to UTF-8 on Python 2.x (Unicode on Python 3.x) +# - added path_encoding option to override the default +# - fixed a bug in _list when a storage is empty #----------------------------------------------------------------------------- # TODO (for version 1.0): -# + isOleFile should accept file-like objects like open +# + get rid of print statements, to simplify Python 2.x and 3.x support +# + add is_stream and is_storage +# + remove leading and trailing slashes where a path is used +# + add functions path_list2str and path_str2list # + fix how all the methods handle unicode str and/or bytes as arguments # + add path attrib to _OleDirEntry, set it once and for all in init or # append_kids (then listdir/_list can be simplified) @@ -177,30 +219,16 @@ __version__ = '0.30' # - move all debug code (and maybe dump methods) to a separate module, with # a class which inherits OleFileIO ? # - fix docstrings to follow epydoc format -# - add support for 4K sectors ? # - add support for big endian byte order ? # - create a simple OLE explorer with wxPython # FUTURE EVOLUTIONS to add write support: -# 1) add ability to write a stream back on disk from BytesIO (same size, no -# change in FAT/MiniFAT). 
-# 2) rename a stream/storage if it doesn't change the RB tree -# 3) use rbtree module to update the red-black tree + any rename -# 4) remove a stream/storage: free sectors in FAT/MiniFAT -# 5) allocate new sectors in FAT/MiniFAT -# 6) create new storage/stream -#----------------------------------------------------------------------------- +# see issue #6 on Bitbucket: +# https://bitbucket.org/decalage/olefileio_pl/issue/6/improve-olefileio_pl-to-write-ole-files + +#----------------------------------------------------------------------------- +# NOTES from PIL 1.1.6: -# -# THIS IS WORK IN PROGRESS -# -# The Python Imaging Library -# $Id$ -# -# stuff to deal with OLE2 Structured Storage files. this module is -# used by PIL to read Image Composer and FlashPix files, but can also -# be used to read other files of this type. -# # History: # 1997-01-20 fl Created # 1997-01-22 fl Fixed 64-bit portability quirk @@ -222,22 +250,22 @@ __version__ = '0.30' # "If this document and functionality of the Software conflict, # the actual functionality of the Software represents the correct # functionality" -- Microsoft, in the OLE format specification -# -# Copyright (c) Secret Labs AB 1997. -# Copyright (c) Fredrik Lundh 1997. -# -# See the README file for information on usage and redistribution. -# #------------------------------------------------------------------------------ import io import sys -import struct, array, os.path, datetime +import struct +import array +import os.path +import datetime -#[PL] Define explicitly the public API to avoid private objects in pydoc: -__all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] +#=== COMPATIBILITY WORKAROUNDS ================================================ + +# [PL] Define explicitly the public API to avoid private objects in pydoc: +#TODO: add more +# __all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] # For Python 3.x, need to redefine long as int: if str is not bytes: @@ -251,48 +279,73 @@ except: # no xrange, for Python 3 it was renamed as range: iterrange = range -#[PL] workaround to fix an issue with array item size on 64 bits systems: +# [PL] workaround to fix an issue with array item size on 64 bits systems: if array.array('L').itemsize == 4: # on 32 bits platforms, long integers in an array are 32 bits: UINT32 = 'L' elif array.array('I').itemsize == 4: # on 64 bits platforms, integers in an array are 32 bits: UINT32 = 'I' +elif array.array('i').itemsize == 4: + # On 64 bit Jython, signed integers ('i') are the only way to store our 32 + # bit values in an array in a *somewhat* reasonable way, as the otherwise + # perfectly suited 'H' (unsigned int, 32 bits) results in a completely + # unusable behaviour. This is most likely caused by the fact that Java + # doesn't have unsigned values, and thus Jython's "array" implementation, + # which is based on "jarray", doesn't have them either. + # NOTE: to trick Jython into converting the values it would normally + # interpret as "signed" into "unsigned", a binary-and operation with + # 0xFFFFFFFF can be used. This way it is possible to use the same comparing + # operations on all platforms / implementations. The corresponding code + # lines are flagged with a 'JYTHON-WORKAROUND' tag below. 
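    # For example, ENDOFCHAIN (0xFFFFFFFE) read back from a signed 'i' array is -2,
    # and -2 & 0xFFFFFFFF == 0xFFFFFFFE restores the unsigned sector ID before any
    # comparison.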
+ UINT32 = 'i' else: raise ValueError('Need to fix a bug with 32 bit arrays, please contact author...') -#[PL] These workarounds were inspired from the Path module +# [PL] These workarounds were inspired from the Path module # (see http://www.jorendorff.com/articles/python/path/) -#TODO: test with old Python versions - -# Pre-2.3 workaround for basestring. try: basestring except NameError: - try: - # is Unicode supported (Python >2.0 or >1.6 ?) - basestring = (str, unicode) - except NameError: - basestring = str + basestring = str -#[PL] Experimental setting: if True, OLE filenames will be kept in Unicode +# [PL] Experimental setting: if True, OLE filenames will be kept in Unicode # if False (default PIL behaviour), all filenames are converted to Latin-1. -KEEP_UNICODE_NAMES = False +KEEP_UNICODE_NAMES = True -#[PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on +if sys.version_info[0] < 3: + # On Python 2.x, the default encoding for path names is UTF-8: + DEFAULT_PATH_ENCODING = 'utf-8' +else: + # On Python 3.x, the default encoding for path names is Unicode (None): + DEFAULT_PATH_ENCODING = None + + +#=== DEBUGGING =============================================================== + +#TODO: replace this by proper logging + +# [PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on # command line to change it. DEBUG_MODE = False + + def debug_print(msg): print(msg) + + def debug_pass(msg): pass + + debug = debug_pass + def set_debug_mode(debug_mode): """ Set debug mode on or off, to control display of debugging messages. - mode: True or False + :param mode: True or False """ global DEBUG_MODE, debug DEBUG_MODE = debug_mode @@ -301,41 +354,45 @@ def set_debug_mode(debug_mode): else: debug = debug_pass + +#=== CONSTANTS =============================================================== + +# magic bytes that should be at the beginning of every OLE file: MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' -#[PL]: added constants for Sector IDs (from AAF specifications) -MAXREGSECT = 0xFFFFFFFA; # maximum SECT -DIFSECT = 0xFFFFFFFC; # (-4) denotes a DIFAT sector in a FAT -FATSECT = 0xFFFFFFFD; # (-3) denotes a FAT sector in a FAT -ENDOFCHAIN = 0xFFFFFFFE; # (-2) end of a virtual stream chain -FREESECT = 0xFFFFFFFF; # (-1) unallocated sector +# [PL]: added constants for Sector IDs (from AAF specifications) +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector -#[PL]: added constants for Directory Entry IDs (from AAF specifications) -MAXREGSID = 0xFFFFFFFA; # maximum directory entry ID -NOSTREAM = 0xFFFFFFFF; # (-1) unallocated directory entry +# [PL]: added constants for Directory Entry IDs (from AAF specifications) +MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry -#[PL] object types in storage (from AAF specifications) -STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) -STGTY_STORAGE = 1 # element is a storage object -STGTY_STREAM = 2 # element is a stream object -STGTY_LOCKBYTES = 3 # element is an ILockBytes object -STGTY_PROPERTY = 4 # element is an IPropertyStorage object -STGTY_ROOT = 5 # element is a root storage +# [PL] object types in storage (from AAF specifications) +STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) 
+STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage # # -------------------------------------------------------------------- # property types -VT_EMPTY=0; VT_NULL=1; VT_I2=2; VT_I4=3; VT_R4=4; VT_R8=5; VT_CY=6; -VT_DATE=7; VT_BSTR=8; VT_DISPATCH=9; VT_ERROR=10; VT_BOOL=11; -VT_VARIANT=12; VT_UNKNOWN=13; VT_DECIMAL=14; VT_I1=16; VT_UI1=17; -VT_UI2=18; VT_UI4=19; VT_I8=20; VT_UI8=21; VT_INT=22; VT_UINT=23; -VT_VOID=24; VT_HRESULT=25; VT_PTR=26; VT_SAFEARRAY=27; VT_CARRAY=28; -VT_USERDEFINED=29; VT_LPSTR=30; VT_LPWSTR=31; VT_FILETIME=64; -VT_BLOB=65; VT_STREAM=66; VT_STORAGE=67; VT_STREAMED_OBJECT=68; -VT_STORED_OBJECT=69; VT_BLOB_OBJECT=70; VT_CF=71; VT_CLSID=72; -VT_VECTOR=0x1000; +VT_EMPTY = 0; VT_NULL = 1; VT_I2 = 2; VT_I4 = 3; VT_R4 = 4; VT_R8 = 5; VT_CY = 6; +VT_DATE = 7; VT_BSTR = 8; VT_DISPATCH = 9; VT_ERROR = 10; VT_BOOL = 11; +VT_VARIANT = 12; VT_UNKNOWN = 13; VT_DECIMAL = 14; VT_I1 = 16; VT_UI1 = 17; +VT_UI2 = 18; VT_UI4 = 19; VT_I8 = 20; VT_UI8 = 21; VT_INT = 22; VT_UINT = 23; +VT_VOID = 24; VT_HRESULT = 25; VT_PTR = 26; VT_SAFEARRAY = 27; VT_CARRAY = 28; +VT_USERDEFINED = 29; VT_LPSTR = 30; VT_LPWSTR = 31; VT_FILETIME = 64; +VT_BLOB = 65; VT_STREAM = 66; VT_STORAGE = 67; VT_STREAMED_OBJECT = 68; +VT_STORED_OBJECT = 69; VT_BLOB_OBJECT = 70; VT_CF = 71; VT_CLSID = 72; +VT_VECTOR = 0x1000; # map property id to name (for debugging purposes) @@ -351,30 +408,53 @@ for keyword, var in list(vars().items()): WORD_CLSID = "00020900-0000-0000-C000-000000000046" #TODO: check Excel, PPT, ... -#[PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() -DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect -DEFECT_POTENTIAL = 20 # a potential defect -DEFECT_INCORRECT = 30 # an error according to specifications, but parsing - # can go on -DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is - # impossible +# [PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() +DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect +DEFECT_POTENTIAL = 20 # a potential defect +DEFECT_INCORRECT = 30 # an error according to specifications, but parsing + # can go on +DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is + # impossible -#[PL] add useful constants to __all__: -for key in list(vars().keys()): - if key.startswith('STGTY_') or key.startswith('DEFECT_'): - __all__.append(key) +# Minimal size of an empty OLE file, with 512-bytes sectors = 1536 bytes +# (this is used in isOleFile and OleFile.open) +MINIMAL_OLEFILE_SIZE = 1536 + +# [PL] add useful constants to __all__: +# for key in list(vars().keys()): +# if key.startswith('STGTY_') or key.startswith('DEFECT_'): +# __all__.append(key) -#--- FUNCTIONS ---------------------------------------------------------------- +#=== FUNCTIONS =============================================================== -def isOleFile (filename): +def isOleFile(filename): """ - Test if file is an OLE container (according to its header). - filename: file name or path (str, unicode) - return: True if OLE, False otherwise. + Test if a file is an OLE container (according to the magic bytes in its header). + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. 
(bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read and seek methods), + it is parsed as-is. + + :returns: True if OLE, False otherwise. """ - f = open(filename, 'rb') - header = f.read(len(MAGIC)) + # check if filename is a string-like or file-like object: + if hasattr(filename, 'read'): + # file-like object: use it directly + header = filename.read(len(MAGIC)) + # just in case, seek back to start of file: + filename.seek(0) + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + header = filename[:len(MAGIC)] + else: + # string-like object: filename of file on disk + header = open(filename, 'rb').read(len(MAGIC)) if header == MAGIC: return True else: @@ -400,7 +480,7 @@ def i16(c, o = 0): c: string containing bytes to convert o: offset of bytes to convert in string """ - return i8(c[o]) | (i8(c[o+1])<<8) + return struct.unpack("=len(fat): + if sect < 0 or sect >= len(fat): debug('sect=%d (%X) / len(fat)=%d' % (sect, sect, len(fat))) - debug('i=%d / nb_sectors=%d' %(i, nb_sectors)) + debug('i=%d / nb_sectors=%d' % (i, nb_sectors)) ## tmp_data = b"".join(data) ## f = open('test_debug.bin', 'wb') ## f.write(tmp_data) @@ -721,7 +771,7 @@ class _OleStream(io.BytesIO): # Note: if sector is the last of the file, sometimes it is not a # complete sector (of 512 or 4K), so we may read less than # sectorsize. - if len(sector_data)!=sectorsize and sect!=(len(fat)-1): + if len(sector_data) != sectorsize and sect != (len(fat)-1): debug('sect=%d / len(fat)=%d, seek=%d / filesize=%d, len read=%d' % (sect, len(fat), offset+sectorsize*sect, filesize, len(sector_data))) debug('seek+len(read)=%d' % (offset+sectorsize*sect+len(sector_data))) @@ -729,11 +779,11 @@ class _OleStream(io.BytesIO): data.append(sector_data) # jump to next sector in the FAT: try: - sect = fat[sect] + sect = fat[sect] & 0xFFFFFFFF # JYTHON-WORKAROUND except IndexError: # [PL] if pointer is out of the FAT an exception is raised raise IOError('incorrect OLE FAT, sector index out of range') - #[PL] Last sector should be a "end of chain" marker: + # [PL] Last sector should be a "end of chain" marker: if sect != ENDOFCHAIN: raise IOError('incorrect last sector index in OLE stream') data = b"".join(data) @@ -757,12 +807,12 @@ class _OleStream(io.BytesIO): #--- _OleDirectoryEntry ------------------------------------------------------- -class _OleDirectoryEntry: +class _OleDirectoryEntry(object): """ OLE2 Directory Entry """ - #[PL] parsing code moved from OleFileIO.loaddirectory + # [PL] parsing code moved from OleFileIO.loaddirectory # struct to parse directory entries: # <: little-endian byte order, standard sizes @@ -787,15 +837,14 @@ class _OleDirectoryEntry: DIRENTRY_SIZE = 128 assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE - def __init__(self, entry, sid, olefile): """ Constructor for an _OleDirectoryEntry object. Parses a 128-bytes entry from the OLE Directory stream. 
- entry : string (must be 128 bytes long) - sid : index of this directory entry in the OLE file directory - olefile: OleFileIO containing this directory entry + :param entry : string (must be 128 bytes long) + :param sid : index of this directory entry in the OLE file directory + :param olefile: OleFileIO containing this directory entry """ self.sid = sid # ref to olefile is stored for future use @@ -828,23 +877,26 @@ class _OleDirectoryEntry: sizeHigh ) = struct.unpack(_OleDirectoryEntry.STRUCT_DIRENTRY, entry) if self.entry_type not in [STGTY_ROOT, STGTY_STORAGE, STGTY_STREAM, STGTY_EMPTY]: - olefile._raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') + olefile.raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') # only first directory entry can (and should) be root: if self.entry_type == STGTY_ROOT and sid != 0: - olefile._raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') + olefile.raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') if sid == 0 and self.entry_type != STGTY_ROOT: - olefile._raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') #debug (struct.unpack(fmt_entry, entry[:len_entry])) # name should be at most 31 unicode characters + null character, # so 64 bytes in total (31*2 + 2): - if namelength>64: - olefile._raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') + if namelength > 64: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') # if exception not raised, namelength is set to the maximum value: namelength = 64 # only characters without ending null char are kept: name = name[:(namelength-2)] - # name is converted from unicode to Latin-1: - self.name = _unicode(name) + #TODO: check if the name is actually followed by a null unicode character ([MS-CFB] 2.6.1) + #TODO: check if the name does not contain forbidden characters: + # [MS-CFB] 2.6.1: "The following characters are illegal and MUST NOT be part of the name: '/', '\', ':', '!'." 
+ # name is converted from UTF-16LE to the path encoding specified in the OleFileIO: + self.name = olefile._decode_utf16_str(name) debug('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) debug(' - type: %d' % self.entry_type) @@ -859,29 +911,27 @@ class _OleDirectoryEntry: if sizeHigh != 0 and sizeHigh != 0xFFFFFFFF: debug('sectorsize=%d, sizeLow=%d, sizeHigh=%d (%X)' % (olefile.sectorsize, sizeLow, sizeHigh, sizeHigh)) - olefile._raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') + olefile.raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') self.size = sizeLow else: - self.size = sizeLow + (long(sizeHigh)<<32) + self.size = sizeLow + (long(sizeHigh) << 32) debug(' - size: %d (sizeLow=%d, sizeHigh=%d)' % (self.size, sizeLow, sizeHigh)) self.clsid = _clsid(clsid) # a storage should have a null size, BUT some implementations such as # Word 8 for Mac seem to allow non-null values => Potential defect: if self.entry_type == STGTY_STORAGE and self.size != 0: - olefile._raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') + olefile.raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') # check if stream is not already referenced elsewhere: - if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size>0: + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: if self.size < olefile.minisectorcutoff \ - and self.entry_type==STGTY_STREAM: # only streams can be in MiniFAT + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT # ministream object minifat = True else: minifat = False olefile._check_duplicate_stream(self.isectStart, minifat) - - def build_storage_tree(self): """ Read and build the red-black tree attached to this _OleDirectoryEntry @@ -905,23 +955,22 @@ class _OleDirectoryEntry: # (see rich comparison methods in this class) self.kids.sort() - def append_kids(self, child_sid): """ Walk through red-black tree of children of this directory entry to add all of them to the kids list. (recursive method) - child_sid : index of child directory entry to use, or None when called - first time for the root. (only used during recursion) + :param child_sid : index of child directory entry to use, or None when called + first time for the root. (only used during recursion) """ - #[PL] this method was added to use simple recursion instead of a complex + # [PL] this method was added to use simple recursion instead of a complex # algorithm. 
# if this is not a storage or a leaf of the tree, nothing to do: if child_sid == NOSTREAM: return # check if child SID is in the proper range: - if child_sid<0 or child_sid>=len(self.olefile.direntries): - self.olefile._raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') + if child_sid < 0 or child_sid >= len(self.olefile.direntries): + self.olefile.raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') # get child direntry: child = self.olefile._load_direntry(child_sid) #direntries[child_sid] debug('append_kids: child_sid=%d - %s - sid_left=%d, sid_right=%d, sid_child=%d' @@ -933,7 +982,7 @@ class _OleDirectoryEntry: # Check if its name is not already used (case-insensitive): name_lower = child.name.lower() if name_lower in self.kids_dict: - self.olefile._raise_defect(DEFECT_INCORRECT, + self.olefile.raise_defect(DEFECT_INCORRECT, "Duplicate filename in OLE storage") # Then the child_sid _OleDirectoryEntry object is appended to the # kids list and dictionary: @@ -941,7 +990,7 @@ class _OleDirectoryEntry: self.kids_dict[name_lower] = child # Check if kid was not already referenced in a storage: if child.used: - self.olefile._raise_defect(DEFECT_INCORRECT, + self.olefile.raise_defect(DEFECT_INCORRECT, 'OLE Entry referenced more than once') child.used = True # Finally walk through right side of the tree: @@ -949,7 +998,6 @@ class _OleDirectoryEntry: # Afterwards build kid's own tree if it's also a storage: child.build_storage_tree() - def __eq__(self, other): "Compare entries by name" return self.name == other.name @@ -969,7 +1017,6 @@ class _OleDirectoryEntry: #TODO: replace by the same function as MS implementation ? # (order by name length first, then case-insensitive order) - def dump(self, tab = 0): "Dump this entry, and all its subentries (for debug purposes only)" TYPES = ["(invalid)", "(storage)", "(stream)", "(lockbytes)", @@ -984,13 +1031,12 @@ class _OleDirectoryEntry: for kid in self.kids: kid.dump(tab + 2) - def getmtime(self): """ Return modification time of a directory entry. - return: None if modification time is null, a python datetime object - otherwise (UTC timezone) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) new in version 0.26 """ @@ -998,13 +1044,12 @@ class _OleDirectoryEntry: return None return filetime2datetime(self.modifyTime) - def getctime(self): """ Return creation time of a directory entry. - return: None if modification time is null, a python datetime object - otherwise (UTC timezone) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) new in version 0.26 """ @@ -1015,12 +1060,13 @@ class _OleDirectoryEntry: #--- OleFileIO ---------------------------------------------------------------- -class OleFileIO: +class OleFileIO(object): """ OLE container object This class encapsulates the interface to an OLE 2 structured - storage file. Use the {@link listdir} and {@link openstream} methods to + storage file. Use the :py:meth:`~PIL.OleFileIO.OleFileIO.listdir` and + :py:meth:`~PIL.OleFileIO.OleFileIO.openstream` methods to access the contents of this file. Object names are given as a list of strings, one for each subentry @@ -1044,37 +1090,62 @@ class OleFileIO: TIFF files). """ - def __init__(self, filename = None, raise_defects=DEFECT_FATAL): + def __init__(self, filename=None, raise_defects=DEFECT_FATAL, + write_mode=False, debug=False, path_encoding=DEFAULT_PATH_ENCODING): """ - Constructor for OleFileIO class. 
+ Constructor for the OleFileIO class. - filename: file to open. - raise_defects: minimal level for defects to be raised as exceptions. - (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a - security-oriented application, see source code for details) + :param filename: file to open. + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param raise_defects: minimal level for defects to be raised as exceptions. + (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a + security-oriented application, see source code for details) + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. + + :param debug: bool, set debug mode + + :param path_encoding: None or str, name of the codec to use for path + names (streams and storages), or None for Unicode. + Unicode by default on Python 3+, UTF-8 on Python 2.x. + (new in olefile 0.42, was hardcoded to Latin-1 until olefile v0.41) """ + set_debug_mode(debug) # minimal level for defects to be raised as exceptions: self._raise_defects_level = raise_defects # list of defects/issues not raised as exceptions: # tuples of (exception type, message) self.parsing_issues = [] + self.write_mode = write_mode + self.path_encoding = path_encoding + self._filesize = None + self.fp = None if filename: - self.open(filename) + self.open(filename, write_mode=write_mode) - - def _raise_defect(self, defect_level, message, exception_type=IOError): + def raise_defect(self, defect_level, message, exception_type=IOError): """ This method should be called for any defect found during file parsing. It may raise an IOError exception according to the minimal level chosen for the OleFileIO object. - defect_level: defect level, possible values are: - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect - DEFECT_POTENTIAL : a potential defect - DEFECT_INCORRECT : an error according to specifications, but parsing can go on - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible - message: string describing the defect, used with raised exception. - exception_type: exception class to be raised, IOError by default + :param defect_level: defect level, possible values are: + + - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect + - DEFECT_POTENTIAL : a potential defect + - DEFECT_INCORRECT : an error according to specifications, but parsing can go on + - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible + + :param message: string describing the defect, used with raised exception. + :param exception_type: exception class to be raised, IOError by default """ # added by [PL] if defect_level >= self._raise_defects_level: @@ -1083,32 +1154,68 @@ class OleFileIO: # just record the issue, no exception raised: self.parsing_issues.append((exception_type, message)) - - def open(self, filename): + def _decode_utf16_str(self, utf16_str, errors='replace'): """ - Open an OLE2 file. - Reads the header, FAT and directory. + Decode a string encoded in UTF-16 LE format, as found in the OLE + directory or in property streams. Return a string encoded + according to the path_encoding specified for the OleFileIO object. 
- filename: string-like or file-like object + :param utf16_str: bytes string encoded in UTF-16 LE format + :param errors: str, see python documentation for str.decode() + :return: str, encoded according to path_encoding """ - #[PL] check if filename is a string-like or file-like object: + unicode_str = utf16_str.decode('UTF-16LE', errors) + if self.path_encoding: + # an encoding has been specified for path names: + return unicode_str.encode(self.path_encoding, errors) + else: + # path_encoding=None, return the Unicode string as-is: + return unicode_str + + def open(self, filename, write_mode=False): + """ + Open an OLE2 file in read-only or read/write mode. + Read and parse the header, FAT and directory. + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. (ignored if filename is not a path) + """ + self.write_mode = write_mode + # [PL] check if filename is a string-like or file-like object: # (it is better to check for a read() method) if hasattr(filename, 'read'): - # file-like object + #TODO: also check seek and tell methods? + # file-like object: use it directly self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + # convert it to BytesIO + self.fp = io.BytesIO(filename) else: # string-like object: filename of file on disk - #TODO: if larger than 1024 bytes, this could be the actual data => BytesIO - self.fp = open(filename, "rb") - # old code fails if filename is not a plain string: - #if isinstance(filename, (bytes, basestring)): - # self.fp = open(filename, "rb") - #else: - # self.fp = filename + if self.write_mode: + # open file in mode 'read with update, binary' + # According to https://docs.python.org/2/library/functions.html#open + # 'w' would truncate the file, 'a' may only append on some Unixes + mode = 'r+b' + else: + # read-only mode by default + mode = 'rb' + self.fp = open(filename, mode) # obtain the filesize by using seek and tell, which should work on most # file-like objects: #TODO: do it above, using getsize with filename when possible? #TODO: fix code to fail with clear exception when filesize cannot be obtained + filesize = 0 self.fp.seek(0, os.SEEK_END) try: filesize = self.fp.tell() @@ -1124,7 +1231,7 @@ class OleFileIO: header = self.fp.read(512) if len(header) != 512 or header[:8] != MAGIC: - self._raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") + self.raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") # [PL] header structure according to AAF specifications: ##Header @@ -1165,7 +1272,7 @@ class OleFileIO: # '<' indicates little-endian byte ordering for Intel (cf. 
struct module help) fmt_header = '<8s16sHHHHHHLLLLLLLLLL' header_size = struct.calcsize(fmt_header) - debug( "fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4) ) + debug("fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4)) header1 = header[:header_size] ( self.Sig, @@ -1186,61 +1293,78 @@ class OleFileIO: self.sectDifStart, self.csectDif ) = struct.unpack(fmt_header, header1) - debug( struct.unpack(fmt_header, header1)) + debug(struct.unpack(fmt_header, header1)) if self.Sig != MAGIC: # OLE signature should always be present - self._raise_defect(DEFECT_FATAL, "incorrect OLE signature") + self.raise_defect(DEFECT_FATAL, "incorrect OLE signature") if self.clsid != bytearray(16): # according to AAF specs, CLSID should always be zero - self._raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") - debug( "MinorVersion = %d" % self.MinorVersion ) - debug( "DllVersion = %d" % self.DllVersion ) + self.raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") + debug("MinorVersion = %d" % self.MinorVersion) + debug("DllVersion = %d" % self.DllVersion) if self.DllVersion not in [3, 4]: # version 3: usual format, 512 bytes per sector # version 4: large format, 4K per sector - self._raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") - debug( "ByteOrder = %X" % self.ByteOrder ) + self.raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") + debug("ByteOrder = %X" % self.ByteOrder) if self.ByteOrder != 0xFFFE: # For now only common little-endian documents are handled correctly - self._raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") + self.raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") # TODO: add big-endian support for documents created on Mac ? + # But according to [MS-CFB] ? v20140502, ByteOrder MUST be 0xFFFE. 
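        # SectorShift of 9 gives 2**9 = 512-byte sectors (DllVersion 3);
        # SectorShift of 12 gives 2**12 = 4096-byte sectors (DllVersion 4).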
self.SectorSize = 2**self.SectorShift - debug( "SectorSize = %d" % self.SectorSize ) + debug("SectorSize = %d" % self.SectorSize) if self.SectorSize not in [512, 4096]: - self._raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") - if (self.DllVersion==3 and self.SectorSize!=512) \ - or (self.DllVersion==4 and self.SectorSize!=4096): - self._raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") + self.raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") + if (self.DllVersion == 3 and self.SectorSize != 512) \ + or (self.DllVersion == 4 and self.SectorSize != 4096): + self.raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") self.MiniSectorSize = 2**self.MiniSectorShift - debug( "MiniSectorSize = %d" % self.MiniSectorSize ) + debug("MiniSectorSize = %d" % self.MiniSectorSize) if self.MiniSectorSize not in [64]: - self._raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") if self.Reserved != 0 or self.Reserved1 != 0: - self._raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") - debug( "csectDir = %d" % self.csectDir ) - if self.SectorSize==512 and self.csectDir!=0: - self._raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") - debug( "csectFat = %d" % self.csectFat ) - debug( "sectDirStart = %X" % self.sectDirStart ) - debug( "signature = %d" % self.signature ) + self.raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") + debug("csectDir = %d" % self.csectDir) + # Number of directory sectors (only allowed if DllVersion != 3) + if self.SectorSize == 512 and self.csectDir != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") + debug("csectFat = %d" % self.csectFat) + # csectFat = number of FAT sectors in the file + debug("sectDirStart = %X" % self.sectDirStart) + # sectDirStart = 1st sector containing the directory + debug("signature = %d" % self.signature) # Signature should be zero, BUT some implementations do not follow this # rule => only a potential defect: + # (according to MS-CFB, may be != 0 for applications supporting file + # transactions) if self.signature != 0: - self._raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") - debug( "MiniSectorCutoff = %d" % self.MiniSectorCutoff ) - debug( "MiniFatStart = %X" % self.MiniFatStart ) - debug( "csectMiniFat = %d" % self.csectMiniFat ) - debug( "sectDifStart = %X" % self.sectDifStart ) - debug( "csectDif = %d" % self.csectDif ) + self.raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") + debug("MiniSectorCutoff = %d" % self.MiniSectorCutoff) + # MS-CFB: This integer field MUST be set to 0x00001000. This field + # specifies the maximum size of a user-defined data stream allocated + # from the mini FAT and mini stream, and that cutoff is 4096 bytes. + # Any user-defined data stream larger than or equal to this cutoff size + # must be allocated as normal sectors from the FAT. 
+ if self.MiniSectorCutoff != 0x1000: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorCutoff in OLE header") + debug("MiniFatStart = %X" % self.MiniFatStart) + debug("csectMiniFat = %d" % self.csectMiniFat) + debug("sectDifStart = %X" % self.sectDifStart) + debug("csectDif = %d" % self.csectDif) # calculate the number of sectors in the file # (-1 because header doesn't count) - self.nb_sect = ( (filesize + self.SectorSize-1) // self.SectorSize) - 1 - debug( "Number of sectors in the file: %d" % self.nb_sect ) + self.nb_sect = ((filesize + self.SectorSize-1) // self.SectorSize) - 1 + debug("Number of sectors in the file: %d" % self.nb_sect) + #TODO: change this test, because an OLE file MAY contain other data + # after the last sector. - # file clsid (probably never used, so we don't store it) - clsid = _clsid(header[8:24]) + # file clsid + self.clsid = _clsid(header[8:24]) + + #TODO: remove redundant attributes, and fix the code which uses them? self.sectorsize = self.SectorSize #1 << i16(header, 30) self.minisectorsize = self.MiniSectorSize #1 << i16(header, 32) self.minisectorcutoff = self.MiniSectorCutoff # i32(header, 56) @@ -1257,27 +1381,26 @@ class OleFileIO: # Load file allocation tables self.loadfat(header) - # Load direcory. This sets both the direntries list (ordered by sid) + # Load directory. This sets both the direntries list (ordered by sid) # and the root (ordered by hierarchy) members. self.loaddirectory(self.sectDirStart)#i32(header, 48)) self.ministream = None self.minifatsect = self.MiniFatStart #i32(header, 60) - def close(self): """ close the OLE file, to release the file object """ self.fp.close() - def _check_duplicate_stream(self, first_sect, minifat=False): """ Checks if a stream has not been already referenced elsewhere. This method should only be called once for each known stream, and only if stream size is not null. - first_sect: index of first sector of the stream in FAT - minifat: if True, stream is located in the MiniFAT, else in the FAT + + :param first_sect: int, index of first sector of the stream in FAT + :param minifat: bool, if True, stream is located in the MiniFAT, else in the FAT """ if minifat: debug('_check_duplicate_stream: sect=%d in MiniFAT' % first_sect) @@ -1285,24 +1408,23 @@ class OleFileIO: else: debug('_check_duplicate_stream: sect=%d in FAT' % first_sect) # some values can be safely ignored (not a real stream): - if first_sect in (DIFSECT,FATSECT,ENDOFCHAIN,FREESECT): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): return used_streams = self._used_streams_fat #TODO: would it be more efficient using a dict or hash values, instead # of a list of long ? if first_sect in used_streams: - self._raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') + self.raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') else: used_streams.append(first_sect) - def dumpfat(self, fat, firstindex=0): "Displays a part of FAT in human-readable form for debugging purpose" # [PL] added only for debug if not DEBUG_MODE: return # dictionary to convert special FAT values in human-readable strings - VPL=8 # valeurs par ligne (8+1 * 8+1 = 81) + VPL = 8 # values per line (8+1 * 8+1 = 81) fatnames = { FREESECT: "..free..", ENDOFCHAIN: "[ END. 
]", @@ -1319,26 +1441,28 @@ class OleFileIO: index = l*VPL print("%8X:" % (firstindex+index), end=" ") for i in range(index, index+VPL): - if i>=nbsect: + if i >= nbsect: break sect = fat[i] - if sect in fatnames: - nom = fatnames[sect] + aux = sect & 0xFFFFFFFF # JYTHON-WORKAROUND + if aux in fatnames: + name = fatnames[aux] else: if sect == i+1: - nom = " --->" + name = " --->" else: - nom = "%8X" % sect - print(nom, end=" ") + name = "%8X" % sect + print(name, end=" ") print() - def dumpsect(self, sector, firstindex=0): "Displays a sector in a human-readable form, for debugging purpose." if not DEBUG_MODE: return - VPL=8 # number of values per line (8+1 * 8+1 = 81) + VPL = 8 # number of values per line (8+1 * 8+1 = 81) tab = array.array(UINT32, sector) + if sys.byteorder == 'big': + tab.byteswap() nbsect = len(tab) nlines = (nbsect+VPL-1)//VPL print("index", end=" ") @@ -1349,11 +1473,11 @@ class OleFileIO: index = l*VPL print("%8X:" % (firstindex+index), end=" ") for i in range(index, index+VPL): - if i>=nbsect: + if i >= nbsect: break sect = tab[i] - nom = "%8X" % sect - print(nom, end=" ") + name = "%8X" % sect + print(name, end=" ") print() def sect2array(self, sect): @@ -1367,12 +1491,12 @@ class OleFileIO: a.byteswap() return a - def loadfat_sect(self, sect): """ Adds the indexes of the given sector to the FAT - sect: string containing the first FAT sector, or array of long integers - return: index of last FAT sector. + + :param sect: string containing the first FAT sector, or array of long integers + :returns: index of last FAT sector. """ # a FAT sector is an array of ulong integers. if isinstance(sect, array.array): @@ -1384,9 +1508,11 @@ class OleFileIO: self.dumpsect(sect) # The FAT is a sector chain starting at the first index of itself. for isect in fat1: - #print("isect = %X" % isect) + isect = isect & 0xFFFFFFFF # JYTHON-WORKAROUND + debug("isect = %X" % isect) if isect == ENDOFCHAIN or isect == FREESECT: # the end of the sector chain has been reached + debug("found end of sector chain") break # read the FAT sector s = self.getsect(isect) @@ -1396,17 +1522,17 @@ class OleFileIO: self.fat = self.fat + nextfat return isect - def loadfat(self, header): """ Load the FAT table. """ - # The header contains a sector numbers - # for the first 109 FAT sectors. Additional sectors are - # described by DIF blocks + # The 1st sector of the file contains sector numbers for the first 109 + # FAT sectors, right after the header which is 76 bytes long. + # (always 109, whatever the sector size: 512 bytes = 76+4*109) + # Additional sectors are described by DIF blocks sect = header[76:512] - debug( "len(sect)=%d, so %d integers" % (len(sect), len(sect)//4) ) + debug("len(sect)=%d, so %d integers" % (len(sect), len(sect)//4)) #fat = [] # [PL] FAT is an array of 32 bits unsigned ints, it's more effective # to use an array than a list in Python. @@ -1416,7 +1542,7 @@ class OleFileIO: #self.dumpfat(self.fat) ## for i in range(0, len(sect), 4): ## ix = i32(sect, i) -## #[PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## # [PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: ## if ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: ## break ## s = self.getsect(ix) @@ -1428,28 +1554,31 @@ class OleFileIO: if self.csectFat <= 109: # there must be at least 109 blocks in header and the rest in # DIFAT, so number of sectors must be >109. 
- self._raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') + self.raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') if self.sectDifStart >= self.nb_sect: # initial DIFAT block index must be valid - self._raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') - debug( "DIFAT analysis..." ) + self.raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') + debug("DIFAT analysis...") # We compute the necessary number of DIFAT sectors : - # (each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) - nb_difat = (self.csectFat-109 + 126)//127 - debug( "nb_difat = %d" % nb_difat ) + # Number of pointers per DIFAT sector = (sectorsize/4)-1 + # (-1 because the last pointer is the next DIFAT sector number) + nb_difat_sectors = (self.sectorsize//4)-1 + # (if 512 bytes: each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) + nb_difat = (self.csectFat-109 + nb_difat_sectors-1)//nb_difat_sectors + debug("nb_difat = %d" % nb_difat) if self.csectDif != nb_difat: raise IOError('incorrect DIFAT') isect_difat = self.sectDifStart for i in iterrange(nb_difat): - debug( "DIFAT block %d, sector %X" % (i, isect_difat) ) + debug("DIFAT block %d, sector %X" % (i, isect_difat)) #TODO: check if corresponding FAT SID = DIFSECT sector_difat = self.getsect(isect_difat) difat = self.sect2array(sector_difat) self.dumpsect(sector_difat) - self.loadfat_sect(difat[:127]) + self.loadfat_sect(difat[:nb_difat_sectors]) # last DIFAT pointer is next DIFAT sector: - isect_difat = difat[127] - debug( "next DIFAT sector: %X" % isect_difat ) + isect_difat = difat[nb_difat_sectors] + debug("next DIFAT sector: %X" % isect_difat) # checks: if isect_difat not in [ENDOFCHAIN, FREESECT]: # last DIFAT pointer value must be ENDOFCHAIN or FREESECT @@ -1467,7 +1596,6 @@ class OleFileIO: debug('\nFAT:') self.dumpfat(self.fat) - def loadminifat(self): """ Load the MiniFAT table. @@ -1489,11 +1617,11 @@ class OleFileIO: (self.minifatsect, self.csectMiniFat, used_size, stream_size, nb_minisectors)) if used_size > stream_size: # This is not really a problem, but may indicate a wrong implementation: - self._raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT') + self.raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT') # In any case, first read stream_size: s = self._open(self.minifatsect, stream_size, force_FAT=True).read() - #[PL] Old code replaced by an array: - #self.minifat = [i32(s, i) for i in range(0, len(s), 4)] + # [PL] Old code replaced by an array: + # self.minifat = [i32(s, i) for i in range(0, len(s), 4)] self.minifat = self.sect2array(s) # Then shrink the array to used size, to avoid indexes out of MiniStream: debug('MiniFAT shrunk from %d to %d sectors' % (len(self.minifat), nb_minisectors)) @@ -1505,32 +1633,65 @@ class OleFileIO: def getsect(self, sect): """ Read given sector from file on disk. - sect: sector index - returns a string containing the sector data. + + :param sect: int, sector index + :returns: a string containing the sector data. """ - # [PL] this original code was wrong when sectors are 4KB instead of + # From [MS-CFB]: A sector number can be converted into a byte offset + # into the file by using the following formula: + # (sector number + 1) x Sector Size. + # This implies that sector #0 of the file begins at byte offset Sector + # Size, not at 0. 
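        # For example, with 512-byte sectors, sector #0 starts at byte offset
        # (0+1)*512 = 512 and sector #3 starts at (3+1)*512 = 2048.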
+ + # [PL] the original code in PIL was wrong when sectors are 4KB instead of # 512 bytes: - #self.fp.seek(512 + self.sectorsize * sect) - #[PL]: added safety checks: - #print("getsect(%X)" % sect) + # self.fp.seek(512 + self.sectorsize * sect) + # [PL]: added safety checks: + # print("getsect(%X)" % sect) try: self.fp.seek(self.sectorsize * (sect+1)) except: debug('getsect(): sect=%X, seek=%d, filesize=%d' % (sect, self.sectorsize*(sect+1), self._filesize)) - self._raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') sector = self.fp.read(self.sectorsize) if len(sector) != self.sectorsize: debug('getsect(): sect=%X, read=%d, sectorsize=%d' % (sect, len(sector), self.sectorsize)) - self._raise_defect(DEFECT_FATAL, 'incomplete OLE sector') + self.raise_defect(DEFECT_FATAL, 'incomplete OLE sector') return sector + def write_sect(self, sect, data, padding=b'\x00'): + """ + Write given sector to file on disk. + + :param sect: int, sector index + :param data: bytes, sector data + :param padding: single byte, padding character if data < sector size + """ + if not isinstance(data, bytes): + raise TypeError("write_sect: data must be a bytes string") + if not isinstance(padding, bytes) or len(padding) != 1: + raise TypeError("write_sect: padding must be a bytes string of 1 char") + #TODO: we could allow padding=None for no padding at all + try: + self.fp.seek(self.sectorsize * (sect+1)) + except: + debug('write_sect(): sect=%X, seek=%d, filesize=%d' % + (sect, self.sectorsize*(sect+1), self._filesize)) + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + if len(data) < self.sectorsize: + # add padding + data += padding * (self.sectorsize - len(data)) + elif len(data) < self.sectorsize: + raise ValueError("Data is larger than sector size") + self.fp.write(data) def loaddirectory(self, sect): """ Load the directory. - sect: sector index of directory stream. + + :param sect: sector index of directory stream. """ # The directory is stored in a standard # substream, independent of its size. @@ -1539,14 +1700,14 @@ class OleFileIO: # (stream size is not known in advance) self.directory_fp = self._open(sect) - #[PL] to detect malformed documents and avoid DoS attacks, the maximum + # [PL] to detect malformed documents and avoid DoS attacks, the maximum # number of directory entries can be calculated: max_entries = self.directory_fp.size // 128 debug('loaddirectory: size=%d, max_entries=%d' % (self.directory_fp.size, max_entries)) # Create list of directory entries - #self.direntries = [] + # self.direntries = [] # We start with a list of "None" object self.direntries = [None] * max_entries ## for sid in iterrange(max_entries): @@ -1561,22 +1722,23 @@ class OleFileIO: # read and build all storage trees, starting from the root: self.root.build_storage_tree() - - def _load_direntry (self, sid): + def _load_direntry(self, sid): """ Load a directory entry from the directory. This method should only be called once for each storage/stream when loading the directory. - sid: index of storage/stream in the directory. - return: a _OleDirectoryEntry object - raise: IOError if the entry has always been referenced. + + :param sid: index of storage/stream in the directory. + :returns: a _OleDirectoryEntry object + + :exception IOError: if the entry has always been referenced. 
""" # check if SID is OK: - if sid<0 or sid>=len(self.direntries): - self._raise_defect(DEFECT_FATAL, "OLE directory index out of range") + if sid < 0 or sid >= len(self.direntries): + self.raise_defect(DEFECT_FATAL, "OLE directory index out of range") # check if entry was already referenced: if self.direntries[sid] is not None: - self._raise_defect(DEFECT_INCORRECT, + self.raise_defect(DEFECT_INCORRECT, "double reference for OLE stream/storage") # if exception not raised, return the object return self.direntries[sid] @@ -1585,23 +1747,21 @@ class OleFileIO: self.direntries[sid] = _OleDirectoryEntry(entry, sid, self) return self.direntries[sid] - def dumpdirectory(self): """ Dump directory (for debugging only) """ self.root.dump() - def _open(self, start, size = 0x7FFFFFFF, force_FAT=False): """ Open a stream, either in FAT or MiniFAT according to its size. (openstream helper) - start: index of first sector - size: size of stream (or nothing if size is unknown) - force_FAT: if False (default), stream will be opened in FAT or MiniFAT - according to size. If True, it will always be opened in FAT. + :param start: index of first sector + :param size: size of stream (or nothing if size is unknown) + :param force_FAT: if False (default), stream will be opened in FAT or MiniFAT + according to size. If True, it will always be opened in FAT. """ debug('OleFileIO.open(): sect=%d, size=%d, force_FAT=%s' % (start, size, str(force_FAT))) @@ -1618,66 +1778,71 @@ class OleFileIO: (self.root.isectStart, size_ministream)) self.ministream = self._open(self.root.isectStart, size_ministream, force_FAT=True) - return _OleStream(self.ministream, start, size, 0, - self.minisectorsize, self.minifat, - self.ministream.size) + return _OleStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) else: # standard stream - return _OleStream(self.fp, start, size, 512, - self.sectorsize, self.fat, self._filesize) - + return _OleStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) def _list(self, files, prefix, node, streams=True, storages=False): """ - (listdir helper) - files: list of files to fill in - prefix: current location in storage tree (list of names) - node: current node (_OleDirectoryEntry object) - streams: bool, include streams if True (True by default) - new in v0.26 - storages: bool, include storages if True (False by default) - new in v0.26 - (note: the root storage is never included) + listdir helper + + :param files: list of files to fill in + :param prefix: current location in storage tree (list of names) + :param node: current node (_OleDirectoryEntry object) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) """ prefix = prefix + [node.name] for entry in node.kids: - if entry.kids: + if entry.entry_type == STGTY_STORAGE: # this is a storage if storages: # add it to the list files.append(prefix[1:] + [entry.name]) # check its kids self._list(files, prefix, entry, streams, storages) - else: + elif entry.entry_type == STGTY_STREAM: # this is a stream if streams: # add it to the list files.append(prefix[1:] + [entry.name]) - + else: + self.raise_defect(DEFECT_INCORRECT, 'The directory tree contains an entry which is not a stream nor a storage.') def listdir(self, streams=True, 
storages=False): """ - Return a list of streams stored in this file + Return a list of streams and/or storages stored in this file - streams: bool, include streams if True (True by default) - new in v0.26 - storages: bool, include storages if True (False by default) - new in v0.26 - (note: the root storage is never included) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + :returns: list of stream and/or storage paths """ files = [] self._list(files, [], self.root, streams, storages) return files - def _find(self, filename): """ Returns directory entry of given filename. (openstream helper) Note: this method is case-insensitive. - filename: path of stream in storage tree (except root entry), either: + :param filename: path of stream in storage tree (except root entry), either: + - a string using Unix path syntax, for example: 'storage_1/storage_1.2/stream' - - a list of storage filenames, path to the desired stream/storage. + - or a list of storage filenames, path to the desired stream/storage. Example: ['storage_1', 'storage_1.2', 'stream'] - return: sid of requested filename - raise IOError if file not found + + :returns: sid of requested filename + :exception IOError: if file not found """ # if filename is a string instead of a list, split it on slashes to @@ -1695,18 +1860,20 @@ class OleFileIO: node = kid return node.sid - def openstream(self, filename): """ Open a stream as a read-only file object (BytesIO). + Note: filename is case-insensitive. + + :param filename: path of stream in storage tree (except root entry), either: - filename: path of stream in storage tree (except root entry), either: - a string using Unix path syntax, for example: 'storage_1/storage_1.2/stream' - - a list of storage filenames, path to the desired stream/storage. + - or a list of storage filenames, path to the desired stream/storage. Example: ['storage_1', 'storage_1.2', 'stream'] - return: file object (read-only) - raise IOError if filename not found, or if this is not a stream. + + :returns: file object (read-only) + :exception IOError: if filename not found, or if this is not a stream. """ sid = self._find(filename) entry = self.direntries[sid] @@ -1714,14 +1881,74 @@ class OleFileIO: raise IOError("this file is not a stream") return self._open(entry.isectStart, entry.size) + def write_stream(self, stream_name, data): + """ + Write a stream to disk. For now, it is only possible to replace an + existing stream by data of the same size. + + :param stream_name: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :param data: bytes, data to be written, must be the same size as the original + stream. 
+ """ + if not isinstance(data, bytes): + raise TypeError("write_stream: data must be a bytes string") + sid = self._find(stream_name) + entry = self.direntries[sid] + if entry.entry_type != STGTY_STREAM: + raise IOError("this is not a stream") + size = entry.size + if size != len(data): + raise ValueError("write_stream: data must be the same size as the existing stream") + if size < self.minisectorcutoff: + raise NotImplementedError("Writing a stream in MiniFAT is not implemented yet") + sect = entry.isectStart + # number of sectors to write + nb_sectors = (size + (self.sectorsize-1)) // self.sectorsize + debug('nb_sectors = %d' % nb_sectors) + for i in range(nb_sectors): + # try: + # self.fp.seek(offset + self.sectorsize * sect) + # except: + # debug('sect=%d, seek=%d' % + # (sect, offset+self.sectorsize*sect)) + # raise IOError('OLE sector index out of range') + # extract one sector from data, the last one being smaller: + if i < (nb_sectors-1): + data_sector = data[i*self.sectorsize:(i+1)*self.sectorsize] + #TODO: comment this if it works + assert(len(data_sector) == self.sectorsize) + else: + data_sector = data[i*self.sectorsize:] + # TODO: comment this if it works + debug('write_stream: size=%d sectorsize=%d data_sector=%d size%%sectorsize=%d' + % (size, self.sectorsize, len(data_sector), size % self.sectorsize)) + assert(len(data_sector) % self.sectorsize == size % self.sectorsize) + self.write_sect(sect, data_sector) +# self.fp.write(data_sector) + # jump to next sector in the FAT: + try: + sect = self.fat[sect] + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') def get_type(self, filename): """ Test if given filename exists as a stream or a storage in the OLE container, and return its type. - filename: path of stream in storage tree. (see openstream for syntax) - return: False if object does not exist, its entry type (>0) otherwise: + :param filename: path of stream in storage tree. (see openstream for syntax) + :returns: False if object does not exist, its entry type (>0) otherwise: + - STGTY_STREAM: a stream - STGTY_STORAGE: a storage - STGTY_ROOT: the root entry @@ -1733,15 +1960,14 @@ class OleFileIO: except: return False - def getmtime(self, filename): """ Return modification time of a stream/storage. - filename: path of stream/storage in storage tree. (see openstream for - syntax) - return: None if modification time is null, a python datetime object - otherwise (UTC timezone) + :param filename: path of stream/storage in storage tree. (see openstream for + syntax) + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) new in version 0.26 """ @@ -1749,15 +1975,14 @@ class OleFileIO: entry = self.direntries[sid] return entry.getmtime() - def getctime(self, filename): """ Return creation time of a stream/storage. - filename: path of stream/storage in storage tree. (see openstream for - syntax) - return: None if creation time is null, a python datetime object - otherwise (UTC timezone) + :param filename: path of stream/storage in storage tree. 
(see openstream for + syntax) + :returns: None if creation time is null, a python datetime object + otherwise (UTC timezone) new in version 0.26 """ @@ -1765,14 +1990,14 @@ class OleFileIO: entry = self.direntries[sid] return entry.getctime() - def exists(self, filename): """ Test if given filename exists as a stream or a storage in the OLE container. + Note: filename is case-insensitive. - filename: path of stream in storage tree. (see openstream for syntax) - return: True if object exist, else False. + :param filename: path of stream in storage tree. (see openstream for syntax) + :returns: True if object exist, else False. """ try: sid = self._find(filename) @@ -1780,14 +2005,14 @@ class OleFileIO: except: return False - def get_size(self, filename): """ Return size of a stream in the OLE container, in bytes. - filename: path of stream in storage tree (see openstream for syntax) - return: size in bytes (long integer) - raise: IOError if file not found, TypeError if this is not a stream. + :param filename: path of stream in storage tree (see openstream for syntax) + :returns: size in bytes (long integer) + :exception IOError: if file not found + :exception TypeError: if this is not a stream. """ sid = self._find(filename) entry = self.direntries[sid] @@ -1796,7 +2021,6 @@ class OleFileIO: raise TypeError('object is not an OLE stream') return entry.size - def get_rootentry_name(self): """ Return root entry name. Should usually be 'Root Entry' or 'R' in most @@ -1804,19 +2028,20 @@ class OleFileIO: """ return self.root.name - def getproperties(self, filename, convert_time=False, no_conversion=None): """ Return properties described in substream. - filename: path of stream in storage tree (see openstream for syntax) - convert_time: bool, if True timestamps will be converted to Python datetime - no_conversion: None or list of int, timestamps not to be converted - (for example total editing time is not a real timestamp) - return: a dictionary of values indexed by id (integer) + :param filename: path of stream in storage tree (see openstream for syntax) + :param convert_time: bool, if True timestamps will be converted to Python datetime + :param no_conversion: None or list of int, timestamps not to be converted + (for example total editing time is not a real timestamp) + + :returns: a dictionary of values indexed by id (integer) """ + # REFERENCE: [MS-OLEPS] https://msdn.microsoft.com/en-us/library/dd942421.aspx # make sure no_conversion is a list, just to simplify code below: - if no_conversion == None: + if no_conversion is None: no_conversion = [] # stream path as a string to report exceptions: streampath = filename @@ -1847,7 +2072,7 @@ class OleFileIO: # a fatal error when parsing the whole file msg = 'Error while parsing properties header in stream %s: %s' % ( repr(streampath), exc) - self._raise_defect(DEFECT_INCORRECT, msg, type(exc)) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) return data for i in range(num_props): @@ -1857,7 +2082,7 @@ class OleFileIO: offset = i32(s, 12+i*8) type = i32(s, offset) - debug ('property id=%d: type=%d offset=%X' % (id, type, offset)) + debug('property id=%d: type=%d offset=%X' % (id, type, offset)) # test for common types first (should perhaps use # a dictionary instead?) 
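Before the type-by-type decoding continues in the next hunk, here is a rough usage sketch of the read-side API shown above; the import name, the document name and the stream names are illustrative assumptions, not something the code guarantees:

    import olefile                                   # assumed module name (olefile.py)

    ole = olefile.OleFileIO('example.doc')           # placeholder file name
    print(ole.listdir(streams=True, storages=True))  # each path is a list of names
    if ole.exists('WordDocument'):                   # lookup is case-insensitive
        data = ole.openstream('WordDocument').read()
        print(ole.get_size('WordDocument'), len(data))
    # property streams begin with \x05; id 2 is commonly the document title
    props = ole.getproperties('\x05SummaryInformation', convert_time=True)
    print(props.get(2))
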
@@ -1866,14 +2091,14 @@ class OleFileIO: value = i16(s, offset+4) if value >= 32768: value = value - 65536 - elif type == VT_UI2: # 2-byte unsigned integer + elif type == VT_UI2: # 2-byte unsigned integer value = i16(s, offset+4) elif type in (VT_I4, VT_INT, VT_ERROR): # VT_I4: 32-bit signed integer # VT_ERROR: HRESULT, similar to 32-bit signed integer, # see http://msdn.microsoft.com/en-us/library/cc230330.aspx value = i32(s, offset+4) - elif type in (VT_UI4, VT_UINT): # 4-byte unsigned integer + elif type in (VT_UI4, VT_UINT): # 4-byte unsigned integer value = i32(s, offset+4) # FIXME elif type in (VT_BSTR, VT_LPSTR): # CodePageString, see http://msdn.microsoft.com/en-us/library/dd942354.aspx @@ -1895,14 +2120,14 @@ class OleFileIO: # "the string should NOT contain embedded or additional trailing # null characters." count = i32(s, offset+4) - value = _unicode(s[offset+8:offset+8+count*2]) + value = self._decode_utf16_str(s[offset+8:offset+8+count*2]) elif type == VT_FILETIME: - value = long(i32(s, offset+4)) + (long(i32(s, offset+8))<<32) + value = long(i32(s, offset+4)) + (long(i32(s, offset+8)) << 32) # FILETIME is a 64-bit int: "number of 100ns periods # since Jan 1,1601". if convert_time and id not in no_conversion: debug('Converting property #%d to python datetime, value=%d=%fs' - %(id, value, float(value)/10000000)) + % (id, value, float(value) / 10000000)) # convert FILETIME to Python datetime.datetime # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/ _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0) @@ -1927,7 +2152,7 @@ class OleFileIO: value = bool(i16(s, offset+4)) else: value = None # everything else yields "None" - debug ('property id=%d: type=%d not implemented in parser yet' % (id, type)) + debug('property id=%d: type=%d not implemented in parser yet' % (id, type)) # missing: VT_EMPTY, VT_NULL, VT_R4, VT_R8, VT_CY, VT_DATE, # VT_DECIMAL, VT_I1, VT_I8, VT_UI8, @@ -1939,8 +2164,8 @@ class OleFileIO: # type of items, e.g. VT_VECTOR|VT_BSTR # see http://msdn.microsoft.com/en-us/library/dd942011.aspx - #print("%08x" % id, repr(value), end=" ") - #print("(%s)" % VT[i32(s, offset) & 0xFFF]) + # print("%08x" % id, repr(value), end=" ") + # print("(%s)" % VT[i32(s, offset) & 0xFFF]) data[id] = value except BaseException as exc: @@ -1948,7 +2173,7 @@ class OleFileIO: # a DEFECT_INCORRECT, because parsing can go on msg = 'Error while parsing property id %d in stream %s: %s' % ( id, repr(streampath), exc) - self._raise_defect(DEFECT_INCORRECT, msg, type(exc)) + self.raise_defect(DEFECT_INCORRECT, msg, type(exc)) return data @@ -1971,107 +2196,110 @@ class OleFileIO: if __name__ == "__main__": - import sys - # [PL] display quick usage info if launched from command-line if len(sys.argv) <= 1: - print(__doc__) - print(""" -Launched from command line, this script parses OLE files and prints info. + print('olefile version %s %s - %s' % (__version__, __date__, __author__)) + print( +""" +Launched from the command line, this script parses OLE files and prints info. -Usage: OleFileIO_PL.py [-d] [-c] [file2 ...] +Usage: olefile.py [-d] [-c] [file2 ...] 
Options: --d : debug mode (display a lot of debug information, for developers only) +-d : debug mode (displays a lot of debug information, for developers only) -c : check all streams (for debugging purposes) + +For more information, see http://www.decalage.info/olefile """) sys.exit() check_streams = False for filename in sys.argv[1:]: -## try: - # OPTIONS: - if filename == '-d': - # option to switch debug mode on: - set_debug_mode(True) - continue - if filename == '-c': - # option to switch check streams mode on: - check_streams = True - continue + # try: + # OPTIONS: + if filename == '-d': + # option to switch debug mode on: + set_debug_mode(True) + continue + if filename == '-c': + # option to switch check streams mode on: + check_streams = True + continue - ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) - print("-" * 68) - print(filename) - print("-" * 68) - ole.dumpdirectory() + ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) + print("-" * 68) + print(filename) + print("-" * 68) + ole.dumpdirectory() + for streamname in ole.listdir(): + if streamname[-1][0] == "\005": + print(streamname, ": properties") + props = ole.getproperties(streamname, convert_time=True) + props = sorted(props.items()) + for k, v in props: + # [PL]: avoid to display too large or binary values: + if isinstance(v, (basestring, bytes)): + if len(v) > 50: + v = v[:50] + if isinstance(v, bytes): + # quick and dirty binary check: + for c in (1, 2, 3, 4, 5, 6, 7, 11, 12, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31): + if c in bytearray(v): + v = '(binary data)' + break + print(" ", k, v) + + if check_streams: + # Read all streams to check if there are errors: + print('\nChecking streams...') for streamname in ole.listdir(): - if streamname[-1][0] == "\005": - print(streamname, ": properties") - props = ole.getproperties(streamname, convert_time=True) - props = sorted(props.items()) - for k, v in props: - #[PL]: avoid to display too large or binary values: - if isinstance(v, (basestring, bytes)): - if len(v) > 50: - v = v[:50] - if isinstance(v, bytes): - # quick and dirty binary check: - for c in (1,2,3,4,5,6,7,11,12,14,15,16,17,18,19,20, - 21,22,23,24,25,26,27,28,29,30,31): - if c in bytearray(v): - v = '(binary data)' - break - print(" ", k, v) - - if check_streams: - # Read all streams to check if there are errors: - print('\nChecking streams...') - for streamname in ole.listdir(): - # print name using repr() to convert binary chars to \xNN: - print('-', repr('/'.join(streamname)),'-', end=' ') - st_type = ole.get_type(streamname) - if st_type == STGTY_STREAM: - print('size %d' % ole.get_size(streamname)) - # just try to read stream in memory: - ole.openstream(streamname) - else: - print('NOT a stream : type=%d' % st_type) - print() - -## for streamname in ole.listdir(): -## # print name using repr() to convert binary chars to \xNN: -## print('-', repr('/'.join(streamname)),'-', end=' ') -## print(ole.getmtime(streamname)) -## print() - - print('Modification/Creation times of all directory entries:') - for entry in ole.direntries: - if entry is not None: - print('- %s: mtime=%s ctime=%s' % (entry.name, - entry.getmtime(), entry.getctime())) + # print name using repr() to convert binary chars to \xNN: + print('-', repr('/'.join(streamname)), '-', end=' ') + st_type = ole.get_type(streamname) + if st_type == STGTY_STREAM: + print('size %d' % ole.get_size(streamname)) + # just try to read stream in memory: + ole.openstream(streamname) + else: + print('NOT a stream 
: type=%d' % st_type) print() - # parse and display metadata: - meta = ole.get_metadata() - meta.dump() - print() - #[PL] Test a few new methods: - root = ole.get_rootentry_name() - print('Root entry name: "%s"' % root) - if ole.exists('worddocument'): - print("This is a Word document.") - print("type of stream 'WordDocument':", ole.get_type('worddocument')) - print("size :", ole.get_size('worddocument')) - if ole.exists('macros/vba'): - print("This document may contain VBA macros.") +# for streamname in ole.listdir(): +# # print name using repr() to convert binary chars to \xNN: +# print('-', repr('/'.join(streamname)),'-', end=' ') +# print(ole.getmtime(streamname)) +# print() - # print parsing issues: - print('\nNon-fatal issues raised during parsing:') - if ole.parsing_issues: - for exctype, msg in ole.parsing_issues: - print('- %s: %s' % (exctype.__name__, msg)) - else: - print('None') + print('Modification/Creation times of all directory entries:') + for entry in ole.direntries: + if entry is not None: + print('- %s: mtime=%s ctime=%s' % (entry.name, + entry.getmtime(), entry.getctime())) + print() + + # parse and display metadata: + meta = ole.get_metadata() + meta.dump() + print() + # [PL] Test a few new methods: + root = ole.get_rootentry_name() + print('Root entry name: "%s"' % root) + if ole.exists('worddocument'): + print("This is a Word document.") + print("type of stream 'WordDocument':", ole.get_type('worddocument')) + print("size :", ole.get_size('worddocument')) + if ole.exists('macros/vba'): + print("This document may contain VBA macros.") + + # print parsing issues: + print('\nNon-fatal issues raised during parsing:') + if ole.parsing_issues: + for exctype, msg in ole.parsing_issues: + print('- %s: %s' % (exctype.__name__, msg)) + else: + print('None') ## except IOError as v: ## print("***", "cannot read", file, "-", v) + +# this code was developed while listening to The Wedding Present "Sea Monsters" diff --git a/Darwin/lib/python3.4/site-packages/PIL/PSDraw.py b/Darwin/lib/python3.5/site-packages/PIL/PSDraw.py similarity index 79% rename from Darwin/lib/python3.4/site-packages/PIL/PSDraw.py rename to Darwin/lib/python3.5/site-packages/PIL/PSDraw.py index 88593bb..d4e7b18 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PSDraw.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PSDraw.py @@ -15,14 +15,14 @@ # See the README file for information on usage and redistribution. # -from __future__ import print_function - from PIL import EpsImagePlugin +import sys ## # Simple Postscript graphics interface. -class PSDraw: + +class PSDraw(object): """ Sets up printing to the given file. If **file** is omitted, :py:attr:`sys.stdout` is assumed. @@ -30,29 +30,34 @@ class PSDraw: def __init__(self, fp=None): if not fp: - import sys fp = sys.stdout self.fp = fp - def begin_document(self, id = None): + def _fp_write(self, to_write): + if bytes is str or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): """Set up printing of a document. (Write Postscript DSC header.)""" # FIXME: incomplete - self.fp.write("%!PS-Adobe-3.0\n" - "save\n" - "/showpage { } def\n" - "%%EndComments\n" - "%%BeginDocument\n") - #self.fp.write(ERROR_PS) # debugging! - self.fp.write(EDROFF_PS) - self.fp.write(VDI_PS) - self.fp.write("%%EndProlog\n") + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! 
+ self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") self.isofont = {} def end_document(self): """Ends printing. (Write Postscript DSC footer.)""" - self.fp.write("%%EndDocument\n" - "restore showpage\n" - "%%End\n") + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") if hasattr(self.fp, "flush"): self.fp.flush() @@ -65,19 +70,11 @@ class PSDraw: """ if font not in self.isofont: # reencode font - self.fp.write("/PSDraw-%s ISOLatin1Encoding /%s E\n" %\ - (font, font)) + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) self.isofont[font] = 1 # rough - self.fp.write("/F0 %d /PSDraw-%s F\n" % (size, font)) - - def setink(self, ink): - """ - .. warning:: - - This has been in the PIL API for ages but was never implemented. - """ - print("*** NOT YET IMPLEMENTED ***") + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) def line(self, xy0, xy1): """ @@ -86,7 +83,7 @@ class PSDraw: left corner of the page). """ xy = xy0 + xy1 - self.fp.write("%d %d %d %d Vl\n" % xy) + self._fp_write("%d %d %d %d Vl\n" % xy) def rectangle(self, box): """ @@ -101,7 +98,7 @@ class PSDraw: %d %d M %d %d 0 Vr\n """ - self.fp.write("%d %d M %d %d 0 Vr\n" % box) + self._fp_write("%d %d M %d %d 0 Vr\n" % box) def text(self, xy, text): """ @@ -111,16 +108,16 @@ class PSDraw: text = "\\(".join(text.split("(")) text = "\\)".join(text.split(")")) xy = xy + (text,) - self.fp.write("%d %d M (%s) S\n" % xy) + self._fp_write("%d %d M (%s) S\n" % xy) - def image(self, box, im, dpi = None): + def image(self, box, im, dpi=None): """Draw a PIL image, centered in the given box.""" # default resolution depends on mode if not dpi: if im.mode == "1": - dpi = 200 # fax + dpi = 200 # fax else: - dpi = 100 # greyscale + dpi = 100 # greyscale # image size (on paper) x = float(im.size[0] * 72) / dpi y = float(im.size[1] * 72) / dpi @@ -128,19 +125,21 @@ class PSDraw: xmax = float(box[2] - box[0]) ymax = float(box[3] - box[1]) if x > xmax: - y = y * xmax / x; x = xmax + y = y * xmax / x + x = xmax if y > ymax: - x = x * ymax / y; y = ymax + x = x * ymax / y + y = ymax dx = (xmax - x) / 2 + box[0] dy = (ymax - y) / 2 + box[1] - self.fp.write("gsave\n%f %f translate\n" % (dx, dy)) + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) if (x, y) != im.size: # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) sx = x / im.size[0] sy = y / im.size[1] - self.fp.write("%f %f scale\n" % (sx, sy)) + self._fp_write("%f %f scale\n" % (sx, sy)) EpsImagePlugin._save(im, self.fp, None, 0) - self.fp.write("\ngrestore\n") + self._fp_write("\ngrestore\n") # -------------------------------------------------------------------- # Postscript driver diff --git a/Darwin/lib/python3.4/site-packages/PIL/PaletteFile.py b/Darwin/lib/python3.5/site-packages/PIL/PaletteFile.py similarity index 97% rename from Darwin/lib/python3.4/site-packages/PIL/PaletteFile.py rename to Darwin/lib/python3.5/site-packages/PIL/PaletteFile.py index 5627f7b..ef50fee 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PaletteFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PaletteFile.py @@ -15,10 +15,11 @@ from PIL._binary import o8 + ## # File handler for Teragon-style palette files. 
-class PaletteFile: +class PaletteFile(object): rawmode = "RGB" @@ -49,7 +50,6 @@ class PaletteFile: self.palette = b"".join(self.palette) - def getpalette(self): return self.palette, self.rawmode diff --git a/Darwin/lib/python3.5/site-packages/PIL/PalmImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..4f415ff --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/PalmImagePlugin.py @@ -0,0 +1,241 @@ +# +# The Python Imaging Library. +# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from PIL import Image, ImageFile, _binary + +__version__ = "1.0" + +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + 
(102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), (0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues),)) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for i in range(len(_Palm8BitColormapValues)): + palettedata = palettedata + _Palm8BitColormapValues[i] + for i in range(256 - len(_Palm8BitColormapValues)): + palettedata = palettedata + (0, 0, 0) + image.putpalette(palettedata) + return image + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = { + "custom-colormap": 0x4000, + "is-compressed": 0x8000, + "has-transparent": 0x2000, + } + +_COMPRESSION_TYPES = { + "none": 0xFF, + "rle": 0x01, + "scanline": 0x00, + } + +o8 = _binary.o8 +o16b = _binary.o16be + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + +def _save(im, fp, filename, check=0): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif (im.mode == "L" and + "bpp" in im.encoderinfo and + im.encoderinfo["bpp"] in (1, 2, 4)): + + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): + + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
+ bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "1": + + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + + raise IOError("cannot write mode %s as Palm" % im.mode) + + if check: + return check + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == 'RGB': + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2])) + elif colormapmode == 'RGBA': + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2])) + + # now convert data to raw form + ImageFile._save( + im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PcdImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PcdImagePlugin.py similarity index 56% rename from Darwin/lib/python3.4/site-packages/PIL/PcdImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PcdImagePlugin.py index 70066e7..b53635a 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PcdImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PcdImagePlugin.py @@ -15,13 +15,13 @@ # -__version__ = "0.1" - - from PIL import Image, ImageFile, _binary +__version__ = "0.1" + i8 = _binary.i8 + ## # Image plugin for PhotoCD images. This plugin only reads the 768x512 # image from the file; higher resolutions are encoded in a proprietary @@ -43,36 +43,17 @@ class PcdImageFile(ImageFile.ImageFile): orientation = i8(s[1538]) & 3 if orientation == 1: - self.tile_post_rotate = 90 # hack + self.tile_post_rotate = 90 # hack elif orientation == 3: self.tile_post_rotate = -90 self.mode = "RGB" - self.size = 768, 512 # FIXME: not correct for rotated images! 
- self.tile = [("pcd", (0,0)+self.size, 96*2048, None)] - - def draft(self, mode, size): - - if len(self.tile) != 1: - return - - d, e, o, a = self.tile[0] - - if size: - scale = max(self.size[0] / size[0], self.size[1] / size[1]) - for s, o in [(4,0*2048), (2,0*2048), (1,96*2048)]: - if scale >= s: - break - # e = e[0], e[1], (e[2]-e[0]+s-1)/s+e[0], (e[3]-e[1]+s-1)/s+e[1] - # self.size = ((self.size[0]+s-1)/s, (self.size[1]+s-1)/s) - - self.tile = [(d, e, o, a)] - - return self + self.size = 768, 512 # FIXME: not correct for rotated images! + self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] # # registry -Image.register_open("PCD", PcdImageFile) +Image.register_open(PcdImageFile.format, PcdImageFile) -Image.register_extension("PCD", ".pcd") +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PcfFontFile.py b/Darwin/lib/python3.5/site-packages/PIL/PcfFontFile.py similarity index 90% rename from Darwin/lib/python3.4/site-packages/PIL/PcfFontFile.py rename to Darwin/lib/python3.5/site-packages/PIL/PcfFontFile.py index c40d398..c200690 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PcfFontFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PcfFontFile.py @@ -23,20 +23,20 @@ from PIL import _binary # -------------------------------------------------------------------- # declarations -PCF_MAGIC = 0x70636601 # "\x01fcp" +PCF_MAGIC = 0x70636601 # "\x01fcp" -PCF_PROPERTIES = (1<<0) -PCF_ACCELERATORS = (1<<1) -PCF_METRICS = (1<<2) -PCF_BITMAPS = (1<<3) -PCF_INK_METRICS = (1<<4) -PCF_BDF_ENCODINGS = (1<<5) -PCF_SWIDTHS = (1<<6) -PCF_GLYPH_NAMES = (1<<7) -PCF_BDF_ACCELERATORS = (1<<8) +PCF_PROPERTIES = (1 << 0) +PCF_ACCELERATORS = (1 << 1) +PCF_METRICS = (1 << 2) +PCF_BITMAPS = (1 << 3) +PCF_INK_METRICS = (1 << 4) +PCF_BDF_ENCODINGS = (1 << 5) +PCF_SWIDTHS = (1 << 6) +PCF_GLYPH_NAMES = (1 << 7) +PCF_BDF_ACCELERATORS = (1 << 8) BYTES_PER_ROW = [ - lambda bits: ((bits+7) >> 3), + lambda bits: ((bits+7) >> 3), lambda bits: ((bits+15) >> 3) & ~1, lambda bits: ((bits+31) >> 3) & ~3, lambda bits: ((bits+63) >> 3) & ~7, @@ -48,9 +48,11 @@ l32 = _binary.i32le b16 = _binary.i16be b32 = _binary.i32be + def sz(s, o): return s[o:s.index(b"\0", o)] + ## # Font file plugin for the X11 PCF format. 
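A stand-alone check of the BYTES_PER_ROW padding table defined above (copied here verbatim so the snippet runs on its own; the 20-pixel row width is an arbitrary example), showing how a glyph row grows with the pad unit selected by the bitmap format field:

    BYTES_PER_ROW = [
        lambda bits: ((bits + 7) >> 3),           # pad each row to 1 byte
        lambda bits: ((bits + 15) >> 3) & ~1,     # pad each row to 2 bytes
        lambda bits: ((bits + 31) >> 3) & ~3,     # pad each row to 4 bytes
        lambda bits: ((bits + 63) >> 3) & ~7,     # pad each row to 8 bytes
    ]
    print([f(20) for f in BYTES_PER_ROW])         # -> [3, 4, 4, 8]
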
@@ -122,7 +124,7 @@ class PcfFontFile(FontFile.FontFile): for i in range(nprops): p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) if nprops & 3: - fp.seek(4 - (nprops & 3), 1) # pad + fp.seek(4 - (nprops & 3), 1) # pad data = fp.read(i32(fp.read(4))) @@ -202,16 +204,16 @@ class PcfFontFile(FontFile.FontFile): for i in range(4): bitmapSizes.append(i32(fp.read(4))) - byteorder = format & 4 # non-zero => MSB - bitorder = format & 8 # non-zero => MSB - padindex = format & 3 + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 bitmapsize = bitmapSizes[padindex] offsets.append(bitmapsize) data = fp.read(bitmapsize) - pad = BYTES_PER_ROW[padindex] + pad = BYTES_PER_ROW[padindex] mode = "1;R" if bitorder: mode = "1" @@ -245,6 +247,6 @@ class PcfFontFile(FontFile.FontFile): try: encoding[i+firstCol] = encodingOffset except IndexError: - break # only load ISO-8859-1 glyphs + break # only load ISO-8859-1 glyphs return encoding diff --git a/Darwin/lib/python3.4/site-packages/PIL/PcxImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PcxImagePlugin.py similarity index 83% rename from Darwin/lib/python3.4/site-packages/PIL/PcxImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PcxImagePlugin.py index 4f6d5a3..9440d53 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PcxImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PcxImagePlugin.py @@ -25,17 +25,24 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.6" +from __future__ import print_function +import logging from PIL import Image, ImageFile, ImagePalette, _binary +logger = logging.getLogger(__name__) + i8 = _binary.i8 i16 = _binary.i16le o8 = _binary.o8 +__version__ = "0.6" + + def _accept(prefix): return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] + ## # Image plugin for Paintbrush images. @@ -52,23 +59,20 @@ class PcxImageFile(ImageFile.ImageFile): raise SyntaxError("not a PCX file") # image - bbox = i16(s,4), i16(s,6), i16(s,8)+1, i16(s,10)+1 + bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: raise SyntaxError("bad PCX image size") - if Image.DEBUG: - print ("BBox: %s %s %s %s" % bbox) - + logger.debug("BBox: %s %s %s %s", *bbox) # format version = i8(s[1]) bits = i8(s[3]) planes = i8(s[65]) - stride = i16(s,66) - if Image.DEBUG: - print ("PCX version %s, bits %s, planes %s, stride %s" % - (version, bits, planes, stride)) + stride = i16(s, 66) + logger.debug("PCX version %s, bits %s, planes %s, stride %s", + version, bits, planes, stride) - self.info["dpi"] = i16(s,12), i16(s,14) + self.info["dpi"] = i16(s, 12), i16(s, 14) if bits == 1 and planes == 1: mode = rawmode = "1" @@ -104,9 +108,8 @@ class PcxImageFile(ImageFile.ImageFile): self.size = bbox[2]-bbox[0], bbox[3]-bbox[1] bbox = (0, 0) + self.size - if Image.DEBUG: - print ("size: %sx%s" % self.size) - + logger.debug("size: %sx%s", *self.size) + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] # -------------------------------------------------------------------- @@ -122,6 +125,7 @@ SAVE = { o16 = _binary.o16le + def _save(im, fp, filename, check=0): try: @@ -138,12 +142,10 @@ def _save(im, fp, filename, check=0): stride += stride % 2 # Stride needs to be kept in sync with the PcxEncode.c version. # Ideally it should be passed in in the state, but the bytes value - # gets overwritten. + # gets overwritten. 
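The "stride += stride % 2" context line kept above only rounds an odd byte count up to the next even value, since PCX stores an even number of bytes per scanline plane; a minimal illustration:

    # illustration only: odd strides grow by one byte, even strides are unchanged
    for stride in (13, 14):
        print(stride + stride % 2)   # prints 14 both times
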
- - if Image.DEBUG: - print ("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d" % ( - im.size[0], bits, stride)) + logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], bits, stride) # under windows, we could determine the current screen size with # "Image.core.display_mode()[1]", but I think that's overkill... @@ -163,13 +165,13 @@ def _save(im, fp, filename, check=0): assert fp.tell() == 128 - ImageFile._save(im, fp, [("pcx", (0,0)+im.size, 0, + ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, (rawmode, bits*planes))]) if im.mode == "P": # colour palette fp.write(o8(12)) - fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes elif im.mode == "L": # greyscale palette fp.write(o8(12)) @@ -179,7 +181,7 @@ def _save(im, fp, filename, check=0): # -------------------------------------------------------------------- # registry -Image.register_open("PCX", PcxImageFile, _accept) -Image.register_save("PCX", _save) +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) -Image.register_extension("PCX", ".pcx") +Image.register_extension(PcxImageFile.format, ".pcx") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PdfImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PdfImagePlugin.py similarity index 52% rename from Darwin/lib/python3.4/site-packages/PIL/PdfImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PdfImagePlugin.py index 5113f09..7decf0e 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PdfImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PdfImagePlugin.py @@ -20,12 +20,12 @@ # Image plugin for PDF images (output only). ## -__version__ = "0.4" - from PIL import Image, ImageFile from PIL._binary import i8 import io +__version__ = "0.4" + # # -------------------------------------------------------------------- @@ -51,19 +51,23 @@ def _endobj(fp): fp.write("endobj\n") +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + ## # (Internal) Image save plugin for the PDF format. -def _save(im, fp, filename): +def _save(im, fp, filename, save_all=False): resolution = im.encoderinfo.get("resolution", 72.0) # # make sure image data is available im.load() - xref = [0]*(5+1) # placeholders + xref = [0] - class TextWriter: + class TextWriter(object): def __init__(self, fp): self.fp = fp @@ -78,11 +82,6 @@ def _save(im, fp, filename): fp.write("%PDF-1.2\n") fp.write("% created by PIL PDF driver " + __version__ + "\n") - # - # Get image characteristics - - width, height = im.size - # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports # Flatedecode (zip compression). @@ -125,7 +124,7 @@ def _save(im, fp, filename): # # catalogue - xref[1] = fp.tell() + xref.append(fp.tell()) _obj( fp, 1, Type="/Catalog", @@ -134,89 +133,108 @@ def _save(im, fp, filename): # # pages + numberOfPages = 1 + if save_all: + try: + numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. 
It is a single frame image + pass + pages = [str(pageNumber*3+4)+" 0 R" + for pageNumber in range(0, numberOfPages)] - xref[2] = fp.tell() + xref.append(fp.tell()) _obj( fp, 2, Type="/Pages", - Count=1, - Kids="[4 0 R]") + Count=len(pages), + Kids="["+"\n".join(pages)+"]") _endobj(fp) - # - # image + for pageNumber in range(0, numberOfPages): + im.seek(pageNumber) - op = io.BytesIO() + # + # image - if filter == "/ASCIIHexDecode": - if bits == 1: - # FIXME: the hex encoder doesn't support packed 1-bit - # images; do things the hard way... - data = im.tobytes("raw", "1") - im = Image.new("L", (len(data), 1), None) - im.putdata(data) - ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) - elif filter == "/DCTDecode": - Image.SAVE["JPEG"](im, op, filename) - elif filter == "/FlateDecode": - ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) - elif filter == "/RunLengthDecode": - ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) - else: - raise ValueError("unsupported PDF filter (%s)" % filter) + op = io.BytesIO() - xref[3] = fp.tell() - _obj( - fp, 3, - Type="/XObject", - Subtype="/Image", - Width=width, # * 72.0 / resolution, - Height=height, # * 72.0 / resolution, - Length=len(op.getvalue()), - Filter=filter, - BitsPerComponent=bits, - DecodeParams=params, - ColorSpace=colorspace) + if filter == "/ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... + data = im.tobytes("raw", "1") + im = Image.new("L", (len(data), 1), None) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "/FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) + else: + raise ValueError("unsupported PDF filter (%s)" % filter) - fp.write("stream\n") - fp.fp.write(op.getvalue()) - fp.write("\nendstream\n") + # + # Get image characteristics - _endobj(fp) + width, height = im.size - # - # page + xref.append(fp.tell()) + _obj( + fp, pageNumber*3+3, + Type="/XObject", + Subtype="/Image", + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Length=len(op.getvalue()), + Filter=filter, + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace) - xref[4] = fp.tell() - _obj(fp, 4) - fp.write( - "<<\n/Type /Page\n/Parent 2 0 R\n" - "/Resources <<\n/ProcSet [ /PDF %s ]\n" - "/XObject << /image 3 0 R >>\n>>\n" - "/MediaBox [ 0 0 %d %d ]\n/Contents 5 0 R\n>>\n" % ( - procset, - int(width * 72.0 / resolution), - int(height * 72.0 / resolution))) - _endobj(fp) + fp.write("stream\n") + fp.fp.write(op.getvalue()) + fp.write("\nendstream\n") - # - # page contents + _endobj(fp) - op = TextWriter(io.BytesIO()) + # + # page - op.write( - "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( - int(width * 72.0 / resolution), - int(height * 72.0 / resolution))) + xref.append(fp.tell()) + _obj(fp, pageNumber*3+4) + fp.write( + "<<\n/Type /Page\n/Parent 2 0 R\n" + "/Resources <<\n/ProcSet [ /PDF %s ]\n" + "/XObject << /image %d 0 R >>\n>>\n" + "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( + procset, + pageNumber*3+3, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + pageNumber*3+5)) + _endobj(fp) - xref[5] = fp.tell() - _obj(fp, 5, Length=len(op.fp.getvalue())) + # + # page contents - fp.write("stream\n") - 
fp.fp.write(op.fp.getvalue()) - fp.write("\nendstream\n") + op = TextWriter(io.BytesIO()) - _endobj(fp) + op.write( + "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution))) + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) + + fp.write("stream\n") + fp.fp.write(op.fp.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) # # trailer @@ -226,12 +244,14 @@ def _save(im, fp, filename): fp.write("%010d 00000 n \n" % x) fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) fp.write("startxref\n%d\n%%%%EOF\n" % startxref) - fp.flush() + if hasattr(fp, "flush"): + fp.flush() # # -------------------------------------------------------------------- Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) Image.register_extension("PDF", ".pdf") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PixarImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PixarImagePlugin.py similarity index 94% rename from Darwin/lib/python3.4/site-packages/PIL/PixarImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PixarImagePlugin.py index a4c9032..7fef354 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PixarImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PixarImagePlugin.py @@ -19,15 +19,15 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.1" - from PIL import Image, ImageFile, _binary +__version__ = "0.1" + # # helpers i16 = _binary.i16le -i32 = _binary.i32le + ## # Image plugin for PIXAR raster images. @@ -57,7 +57,7 @@ class PixarImageFile(ImageFile.ImageFile): # FIXME: to be continued... # create tile descriptor (assuming "dumped") - self.tile = [("raw", (0,0)+self.size, 1024, (self.mode, 0, 1))] + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] # # -------------------------------------------------------------------- diff --git a/Darwin/lib/python3.4/site-packages/PIL/PngImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PngImagePlugin.py similarity index 69% rename from Darwin/lib/python3.4/site-packages/PIL/PngImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PngImagePlugin.py index e794ef7..d677882 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PngImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PngImagePlugin.py @@ -33,12 +33,15 @@ from __future__ import print_function -__version__ = "0.9" - +import logging import re +import zlib from PIL import Image, ImageFile, ImagePalette, _binary -import zlib + +__version__ = "0.9" + +logger = logging.getLogger(__name__) i8 = _binary.i8 i16 = _binary.i16be @@ -52,30 +55,46 @@ _MAGIC = b"\211PNG\r\n\032\n" _MODES = { # supported bits/color combinations, and corresponding modes/rawmodes - (1, 0): ("1", "1"), - (2, 0): ("L", "L;2"), - (4, 0): ("L", "L;4"), - (8, 0): ("L", "L"), - (16,0): ("I", "I;16B"), - (8, 2): ("RGB", "RGB"), - (16,2): ("RGB", "RGB;16B"), - (1, 3): ("P", "P;1"), - (2, 3): ("P", "P;2"), - (4, 3): ("P", "P;4"), - (8, 3): ("P", "P"), - (8, 4): ("LA", "LA"), - (16,4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available - (8, 6): ("RGBA", "RGBA"), - (16,6): ("RGBA", "RGBA;16B"), + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet 
available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), } _simple_palette = re.compile(b'^\xff+\x00\xff*$') +_null_palette = re.compile(b'^\x00*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + # -------------------------------------------------------------------- # Support classes. Suitable for PNG and related formats like MNG etc. -class ChunkStream: +class ChunkStream(object): def __init__(self, fp): @@ -113,8 +132,7 @@ class ChunkStream: def call(self, cid, pos, length): "Call the appropriate chunk handler" - if Image.DEBUG: - print("STREAM", cid, pos, length) + logger.debug("STREAM %s %s %s", cid, pos, length) return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) def crc(self, cid, data): @@ -123,15 +141,15 @@ class ChunkStream: crc1 = Image.core.crc32(data, Image.core.crc32(cid)) crc2 = i16(self.fp.read(2)), i16(self.fp.read(2)) if crc1 != crc2: - raise SyntaxError("broken PNG file"\ - "(bad header checksum in %s)" % cid) + raise SyntaxError("broken PNG file" + "(bad header checksum in %s)" % cid) def crc_skip(self, cid, data): "Read checksum. Used if the C module is not present" self.fp.read(4) - def verify(self, endchunk = b"IEND"): + def verify(self, endchunk=b"IEND"): # Simple approach; just calculate checksum for all remaining # blocks. Must be called directly after open. @@ -148,31 +166,100 @@ class ChunkStream: return cids -# -------------------------------------------------------------------- -# PNG chunk container (for use with save(pnginfo=)) +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information -class PngInfo: + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ def __init__(self): self.chunks = [] def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + self.chunks.append((cid, data)) + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + def add_text(self, key, value, zip=0): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, bool(zip)) + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=bool(zip)) + if not isinstance(key, bytes): key = key.encode('latin-1', 'strict') - if not isinstance(value, bytes): - value = value.encode('latin-1', 'replace') - if zip: - import zlib self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) else: self.add(b"tEXt", key + b"\0" + value) + # -------------------------------------------------------------------- # PNG image stream (IHDR/IEND) @@ -185,11 +272,19 @@ class PngStream(ChunkStream): # local copies of Image attributes self.im_info = {} self.im_text = {} - self.im_size = (0,0) + self.im_size = (0, 0) self.im_mode = None self.im_tile = None self.im_palette = None + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" % + self.text_memory) + def chunk_iCCP(self, pos, length): # ICC profile @@ -200,16 +295,16 @@ class PngStream(ChunkStream): # Compression method 1 byte (0) # Compressed profile n bytes (zlib with deflate compression) i = s.find(b"\0") - if Image.DEBUG: - print("iCCP profile name", s[:i]) - print("Compression method", i8(s[i])) + logger.debug("iCCP profile name %s", s[:i]) + logger.debug("Compression method %s", i8(s[i])) comp_method = i8(s[i]) if comp_method != 0: - raise SyntaxError("Unknown compression method %s in iCCP chunk" % comp_method) + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) try: - icc_profile = zlib.decompress(s[i+2:]) + icc_profile = _safe_zlib_decompress(s[i+2:]) except zlib.error: - icc_profile = None # FIXME + icc_profile = None # FIXME self.im_info["icc_profile"] = icc_profile return s @@ -231,7 +326,7 @@ class PngStream(ChunkStream): def chunk_IDAT(self, pos, length): # image data - self.im_tile = [("zip", (0,0)+self.im_size, pos, self.im_rawmode)] + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] self.im_idat = length raise EOFError @@ -257,6 +352,8 @@ class PngStream(ChunkStream): i = s.find(b"\0") if i >= 0: self.im_info["transparency"] = i + elif _null_palette.match(s): + self.im_info["transparency"] = 0 else: self.im_info["transparency"] = s elif self.im_mode == "L": @@ -278,7 +375,7 @@ class PngStream(ChunkStream): s = ImageFile._safe_read(self.fp, length) px, py = i32(s), i32(s[4:]) unit = i8(s[8]) - if unit == 1: # meter + if unit == 1: # meter dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) self.im_info["dpi"] = dpi elif unit == 0: @@ -292,13 +389,17 @@ class PngStream(ChunkStream): try: k, v = s.split(b"\0", 1) except ValueError: - k = s; v = b"" # fallback for broken tEXt tags + # fallback for broken tEXt tags + k = s + v = b"" if k: if bytes is not str: k = k.decode('latin-1', 'strict') v = v.decode('latin-1', 'replace') self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + return s def chunk_zTXt(self, pos, length): @@ -308,16 +409,17 @@ class PngStream(ChunkStream): try: k, v = s.split(b"\0", 1) except ValueError: - k = s; v = b"" + k = s + v = b"" if v: comp_method = i8(v[0]) else: 
comp_method = 0 if comp_method != 0: - raise SyntaxError("Unknown compression method %s in zTXt chunk" % comp_method) - import zlib + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) try: - v = zlib.decompress(v[1:]) + v = _safe_zlib_decompress(v[1:]) except zlib.error: v = b"" @@ -327,14 +429,55 @@ class PngStream(ChunkStream): v = v.decode('latin-1', 'replace') self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + return s + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except zlib.error: + return s + else: + return s + if bytes is not str: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + # -------------------------------------------------------------------- # PNG reader def _accept(prefix): return prefix[:8] == _MAGIC + ## # Image plugin for PNG images. @@ -365,8 +508,7 @@ class PngImageFile(ImageFile.ImageFile): except EOFError: break except AttributeError: - if Image.DEBUG: - print(cid, pos, length, "(unknown)") + logger.debug("%s %s %s (unknown)", cid, pos, length) s = ImageFile._safe_read(self.fp, length) self.png.crc(cid, s) @@ -381,7 +523,7 @@ class PngImageFile(ImageFile.ImageFile): self.mode = self.png.im_mode self.size = self.png.im_size self.info = self.png.im_info - self.text = self.png.im_text # experimental + self.text = self.png.im_text # experimental self.tile = self.png.im_tile if self.png.im_palette: @@ -390,7 +532,6 @@ class PngImageFile(ImageFile.ImageFile): self.__idat = length # used by load_read() - def verify(self): "Verify PNG file" @@ -419,7 +560,7 @@ class PngImageFile(ImageFile.ImageFile): while self.__idat == 0: # end of chunk, skip forward to next one - self.fp.read(4) # CRC + self.fp.read(4) # CRC cid, pos, length = self.png.read() @@ -439,7 +580,6 @@ class PngImageFile(ImageFile.ImageFile): return self.fp.read(read_bytes) - def load_end(self): "internal: finished reading image data" @@ -456,21 +596,22 @@ o32 = _binary.o32be _OUTMODES = { # supported PIL modes, and corresponding rawmodes/bits/color combinations - "1": ("1", b'\x01\x00'), - "L;1": ("L;1", b'\x01\x00'), - "L;2": ("L;2", b'\x02\x00'), - "L;4": ("L;4", b'\x04\x00'), - "L": ("L", b'\x08\x00'), - "LA": ("LA", b'\x08\x04'), - "I": ("I;16B", b'\x10\x00'), - "P;1": ("P;1", b'\x01\x03'), - "P;2": ("P;2", b'\x02\x03'), - "P;4": ("P;4", b'\x04\x03'), - "P": ("P", b'\x08\x03'), - "RGB": ("RGB", b'\x08\x02'), - "RGBA":("RGBA", b'\x08\x06'), + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), } + def putchunk(fp, cid, *data): "Write a PNG chunk (including CRC field)" @@ -481,15 +622,18 @@ def putchunk(fp, cid, *data): hi, lo = Image.core.crc32(data, 
Image.core.crc32(cid)) fp.write(o16(hi) + o16(lo)) -class _idat: + +class _idat(object): # wrap output from the encoder in IDAT chunks def __init__(self, fp, chunk): self.fp = fp self.chunk = chunk + def write(self, data): self.chunk(self.fp, b"IDAT", data) + def _save(im, fp, filename, chunk=putchunk, check=0): # save an image to disk (called by the save method) @@ -527,9 +671,9 @@ def _save(im, fp, filename, chunk=putchunk, check=0): dictionary = b"" im.encoderconfig = ("optimize" in im.encoderinfo, - im.encoderinfo.get("compress_level", -1), - im.encoderinfo.get("compress_type", -1), - dictionary) + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + dictionary) # get the corresponding PNG mode try: @@ -546,8 +690,8 @@ def _save(im, fp, filename, chunk=putchunk, check=0): fp.write(_MAGIC) chunk(fp, b"IHDR", - o32(im.size[0]), o32(im.size[1]), # 0: size - mode, # 8: depth/type + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type b'\0', # 10: compression b'\0', # 11: filter category b'\0') # 12: interlace flag @@ -559,7 +703,8 @@ def _save(im, fp, filename, chunk=putchunk, check=0): palette_bytes += b'\0' chunk(fp, b"PLTE", palette_bytes) - transparency = im.encoderinfo.get('transparency',im.info.get('transparency', None)) + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) if transparency or transparency == 0: if im.mode == "P": @@ -588,10 +733,6 @@ def _save(im, fp, filename, chunk=putchunk, check=0): alpha_bytes = 2**bits chunk(fp, b"tRNS", alpha[:alpha_bytes]) - if 0: - # FIXME: to be supported some day - chunk(fp, b"gAMA", o32(int(gamma * 100000.0))) - dpi = im.encoderinfo.get("dpi") if dpi: chunk(fp, b"pHYs", @@ -616,14 +757,13 @@ def _save(im, fp, filename, chunk=putchunk, check=0): data = name + b"\0\0" + zlib.compress(im.info["icc_profile"]) chunk(fp, b"iCCP", data) - ImageFile._save(im, _idat(fp, chunk), [("zip", (0,0)+im.size, 0, rawmode)]) + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) chunk(fp, b"IEND", b"") - try: + if hasattr(fp, "flush"): fp.flush() - except: - pass # -------------------------------------------------------------------- @@ -632,10 +772,12 @@ def _save(im, fp, filename, chunk=putchunk, check=0): def getchunks(im, **params): """Return a list of PNG chunks representing this image.""" - class collector: + class collector(object): data = [] + def write(self, data): pass + def append(self, chunk): self.data.append(chunk) @@ -659,9 +801,9 @@ def getchunks(im, **params): # -------------------------------------------------------------------- # Registry -Image.register_open("PNG", PngImageFile, _accept) -Image.register_save("PNG", _save) +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) -Image.register_extension("PNG", ".png") +Image.register_extension(PngImageFile.format, ".png") -Image.register_mime("PNG", "image/png") +Image.register_mime(PngImageFile.format, "image/png") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PpmImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PpmImagePlugin.py similarity index 83% rename from Darwin/lib/python3.4/site-packages/PIL/PpmImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PpmImagePlugin.py index 070efd1..68073ca 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PpmImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PpmImagePlugin.py @@ -15,24 +15,25 @@ # -__version__ = "0.2" - import string from PIL import 
Image, ImageFile +__version__ = "0.2" + # # -------------------------------------------------------------------- b_whitespace = string.whitespace try: import locale - locale_lang,locale_enc = locale.getlocale() + locale_lang, locale_enc = locale.getlocale() if locale_enc is None: - locale_lang,locale_enc = locale.getdefaultlocale() + locale_lang, locale_enc = locale.getdefaultlocale() b_whitespace = b_whitespace.decode(locale_enc) -except: pass -b_whitespace = b_whitespace.encode('ascii','ignore') +except: + pass +b_whitespace = b_whitespace.encode('ascii', 'ignore') MODES = { # standard @@ -47,9 +48,11 @@ MODES = { b"PyCMYK": "CMYK" } + def _accept(prefix): return prefix[0:1] == b"P" and prefix[1] in b"0456y" + ## # Image plugin for PBM, PGM, and PPM images. @@ -58,8 +61,8 @@ class PpmImageFile(ImageFile.ImageFile): format = "PPM" format_description = "Pbmplus image" - def _token(self, s = b""): - while True: # read until next whitespace + def _token(self, s=b""): + while True: # read until next whitespace c = self.fp.read(1) if not c or c in b_whitespace: break @@ -90,6 +93,8 @@ class PpmImageFile(ImageFile.ImageFile): s = self.fp.read(1) if s not in b_whitespace: break + if s == b"": + raise ValueError("File does not extend beyond magic number") if s != b"#": break s = self.fp.readline() @@ -104,14 +109,14 @@ class PpmImageFile(ImageFile.ImageFile): # maxgrey if s > 255: if not mode == 'L': - raise ValueError("Too many colors for band: %s" %s) + raise ValueError("Too many colors for band: %s" % s) if s < 2**16: self.mode = 'I' rawmode = 'I;16B' else: - self.mode = 'I'; + self.mode = 'I' rawmode = 'I;32B' - + self.size = xsize, ysize self.tile = [("raw", (0, 0, xsize, ysize), @@ -123,6 +128,7 @@ class PpmImageFile(ImageFile.ImageFile): # self.mode = self.im.mode # self.size = self.im.size + # # -------------------------------------------------------------------- @@ -152,7 +158,7 @@ def _save(im, fp, filename): fp.write(b"65535\n") elif rawmode == "I;32B": fp.write(b"2147483648\n") - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, 0, 1))]) + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) # ALTERNATIVE: save via builtin debug function # im._dump(filename) @@ -160,9 +166,9 @@ def _save(im, fp, filename): # # -------------------------------------------------------------------- -Image.register_open("PPM", PpmImageFile, _accept) -Image.register_save("PPM", _save) +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) -Image.register_extension("PPM", ".pbm") -Image.register_extension("PPM", ".pgm") -Image.register_extension("PPM", ".ppm") +Image.register_extension(PpmImageFile.format, ".pbm") +Image.register_extension(PpmImageFile.format, ".pgm") +Image.register_extension(PpmImageFile.format, ".ppm") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PsdImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/PsdImagePlugin.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/PIL/PsdImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/PsdImagePlugin.py index 9e64e7c..d06e320 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PsdImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PsdImagePlugin.py @@ -28,8 +28,8 @@ MODES = { (2, 8): ("P", 1), (3, 8): ("RGB", 3), (4, 8): ("CMYK", 4), - (7, 8): ("L", 1), # FIXME: multilayer - (8, 8): ("L", 1), # duotone + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone (9, 8): ("LAB", 3) } @@ -40,12 +40,14 @@ i8 
= _binary.i8 i16 = _binary.i16be i32 = _binary.i32be + # --------------------------------------------------------------------. # read PSD images def _accept(prefix): return prefix[:4] == b"8BPS" + ## # Image plugin for Photoshop images. @@ -100,12 +102,12 @@ class PsdImageFile(ImageFile.ImageFile): id = i16(read(2)) name = read(i8(read(1))) if not (len(name) & 1): - read(1) # padding + read(1) # padding data = read(i32(read(4))) if (len(data) & 1): - read(1) # padding + read(1) # padding self.resources.append((id, name, data)) - if id == 1039: # ICC profile + if id == 1039: # ICC profile self.info["icc_profile"] = data # @@ -130,6 +132,14 @@ class PsdImageFile(ImageFile.ImageFile): self._fp = self.fp self.frame = 0 + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + def seek(self, layer): # seek to given layer (1..max) if layer == self.frame: @@ -159,6 +169,7 @@ class PsdImageFile(ImageFile.ImageFile): if self.mode == "P": Image.Image.load(self) + def _layerinfo(file): # read layerinfo block layers = [] @@ -166,8 +177,10 @@ def _layerinfo(file): for i in range(abs(i16(read(2)))): # bounding box - y0 = i32(read(4)); x0 = i32(read(4)) - y1 = i32(read(4)); x1 = i32(read(4)) + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) # image info info = [] @@ -197,7 +210,7 @@ def _layerinfo(file): elif mode == ["A", "B", "G", "R"]: mode = "RGBA" else: - mode = None # unknown + mode = None # unknown # skip over blend flags and extra information filler = read(12) @@ -207,8 +220,10 @@ def _layerinfo(file): if size: length = i32(read(4)) if length: - mask_y = i32(read(4)); mask_x = i32(read(4)) - mask_h = i32(read(4)) - mask_y; mask_w = i32(read(4)) - mask_x + mask_y = i32(read(4)) + mask_x = i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x file.seek(length - 16, 1) combined += length + 4 @@ -219,7 +234,8 @@ def _layerinfo(file): length = i8(read(1)) if length: - # Don't know the proper encoding, Latin-1 should be a good guess + # Don't know the proper encoding, + # Latin-1 should be a good guess name = read(length).decode('latin-1', 'replace') combined += length + 1 @@ -239,6 +255,7 @@ def _layerinfo(file): return layers + def _maketile(file, mode, bbox, channels): tile = None @@ -283,13 +300,13 @@ def _maketile(file, mode, bbox, channels): file.seek(offset) if offset & 1: - read(1) # padding + read(1) # padding return tile # -------------------------------------------------------------------- # registry -Image.register_open("PSD", PsdImageFile, _accept) +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) -Image.register_extension("PSD", ".psd") +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/Darwin/lib/python3.4/site-packages/PIL/PyAccess.py b/Darwin/lib/python3.5/site-packages/PIL/PyAccess.py similarity index 75% rename from Darwin/lib/python3.4/site-packages/PIL/PyAccess.py rename to Darwin/lib/python3.5/site-packages/PIL/PyAccess.py index f76beb8..0d4c8b2 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/PyAccess.py +++ b/Darwin/lib/python3.5/site-packages/PIL/PyAccess.py @@ -16,16 +16,21 @@ # * Implements the pixel access object following Access. # * Does not implement the line functions, as they don't appear to be used # * Taking only the tuple form, which is used from python. -# * Fill.c uses the integer form, but it's still going to use the old Access.c implementation. 
+# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. # from __future__ import print_function -from cffi import FFI +import logging import sys -DEBUG = 0 - +from cffi import FFI + + +logger = logging.getLogger(__name__) + + defs = """ struct Pixel_RGBA { unsigned char r,g,b,a; @@ -39,8 +44,8 @@ ffi.cdef(defs) class PyAccess(object): - - def __init__(self, img, readonly = False): + + def __init__(self, img, readonly=False): vals = dict(img.im.unsafe_ptrs) self.readonly = readonly self.image8 = ffi.cast('unsigned char **', vals['image8']) @@ -48,13 +53,15 @@ class PyAccess(object): self.image = ffi.cast('unsigned char **', vals['image']) self.xsize = vals['xsize'] self.ysize = vals['ysize'] - - if DEBUG: - print (vals) + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + #logger.debug("%s", vals) self._post_init() - def _post_init(): pass - + def _post_init(self): + pass + def __setitem__(self, xy, color): """ Modifies the pixel at x,y. The color is given as a single @@ -62,11 +69,12 @@ class PyAccess(object): multi-band images :param xy: The pixel coordinate, given as (x, y). - :param value: The pixel value. + :param value: The pixel value. """ - if self.readonly: raise ValueError('Attempt to putpixel a read only image') - (x,y) = self.check_xy(xy) - return self.set_pixel(x,y,color) + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) def __getitem__(self, xy): """ @@ -75,95 +83,101 @@ class PyAccess(object): images :param xy: The pixel coordinate, given as (x, y). + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. """ - - (x,y) = self.check_xy(xy) - return self.get_pixel(x,y) + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) putpixel = __setitem__ getpixel = __getitem__ def check_xy(self, xy): - (x,y) = xy + (x, y) = xy if not (0 <= x < self.xsize and 0 <= y < self.ysize): raise ValueError('pixel location out of range') return xy + class _PyAccess32_2(PyAccess): """ PA, LA, stored in first and last bytes of a 32 bit word """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x,y): + + def get_pixel(self, x, y): pixel = self.pixels[y][x] return (pixel.r, pixel.a) - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): pixel = self.pixels[y][x] # tuple - pixel.r = min(color[0],255) - pixel.a = min(color[1],255) - + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + class _PyAccess32_3(PyAccess): """ RGB and friends, stored in the first three bytes of a 32 bit word """ - + def _post_init(self, *args, **kwargs): self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x,y): + + def get_pixel(self, x, y): pixel = self.pixels[y][x] return (pixel.r, pixel.g, pixel.b) - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): pixel = self.pixels[y][x] # tuple - pixel.r = min(color[0],255) - pixel.g = min(color[1],255) - pixel.b = min(color[2],255) + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + class _PyAccess32_4(PyAccess): """ RGBA etc, all 4 bytes of a 32 bit word """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) - - def get_pixel(self, x,y): + + def get_pixel(self, x, y): pixel = self.pixels[y][x] return (pixel.r, pixel.g, 
pixel.b, pixel.a) - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): pixel = self.pixels[y][x] # tuple - pixel.r = min(color[0],255) - pixel.g = min(color[1],255) - pixel.b = min(color[2],255) - pixel.a = min(color[3],255) + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + - class _PyAccess8(PyAccess): """ 1, L, P, 8 bit images stored as uint8 """ def _post_init(self, *args, **kwargs): self.pixels = self.image8 - - def get_pixel(self, x,y): + + def get_pixel(self, x, y): return self.pixels[y][x] - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): try: # integer - self.pixels[y][x] = min(color,255) + self.pixels[y][x] = min(color, 255) except: # tuple - self.pixels[y][x] = min(color[0],255) + self.pixels[y][x] = min(color[0], 255) + class _PyAccessI16_N(PyAccess): """ I;16 access, native bitendian without conversion """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast('unsigned short **', self.image) - def get_pixel(self, x,y): + def get_pixel(self, x, y): return self.pixels[y][x] - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): try: # integer self.pixels[y][x] = min(color, 65535) @@ -171,35 +185,37 @@ class _PyAccessI16_N(PyAccess): # tuple self.pixels[y][x] = min(color[0], 65535) + class _PyAccessI16_L(PyAccess): """ I;16L access, with conversion """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast('struct Pixel_I16 **', self.image) - def get_pixel(self, x,y): + def get_pixel(self, x, y): pixel = self.pixels[y][x] return pixel.l + pixel.r * 256 - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): pixel = self.pixels[y][x] try: color = min(color, 65535) - except: + except TypeError: color = min(color[0], 65535) pixel.l = color & 0xFF pixel.r = color >> 8 + class _PyAccessI16_B(PyAccess): """ I;16B access, with conversion """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast('struct Pixel_I16 **', self.image) - def get_pixel(self, x,y): + def get_pixel(self, x, y): pixel = self.pixels[y][x] - return pixel.l *256 + pixel.r + return pixel.l * 256 + pixel.r - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): pixel = self.pixels[y][x] try: color = min(color, 65535) @@ -209,17 +225,19 @@ class _PyAccessI16_B(PyAccess): pixel.l = color >> 8 pixel.r = color & 0xFF + class _PyAccessI32_N(PyAccess): """ Signed Int32 access, native endian """ def _post_init(self, *args, **kwargs): self.pixels = self.image32 - def get_pixel(self, x,y): + def get_pixel(self, x, y): return self.pixels[y][x] - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): self.pixels[y][x] = color + class _PyAccessI32_Swap(PyAccess): """ I;32L/B access, with byteswapping conversion """ def _post_init(self, *args, **kwargs): @@ -228,24 +246,26 @@ class _PyAccessI32_Swap(PyAccess): def reverse(self, i): orig = ffi.new('int *', i) chars = ffi.cast('unsigned char *', orig) - chars[0],chars[1],chars[2],chars[3] = chars[3], chars[2],chars[1],chars[0] + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] return ffi.cast('int *', chars)[0] - - def get_pixel(self, x,y): + + def get_pixel(self, x, y): return self.reverse(self.pixels[y][x]) - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): self.pixels[y][x] = self.reverse(color) + class _PyAccessF(PyAccess): """ 32 bit float access """ def _post_init(self, *args, **kwargs): self.pixels = ffi.cast('float **', 
self.image32) - def get_pixel(self, x,y): + def get_pixel(self, x, y): return self.pixels[y][x] - def set_pixel(self, x,y, color): + def set_pixel(self, x, y, color): try: # not a tuple self.pixels[y][x] = color @@ -261,6 +281,7 @@ mode_map = {'1': _PyAccess8, 'PA': _PyAccess32_2, 'RGB': _PyAccess32_3, 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, 'YCbCr': _PyAccess32_3, 'RGBA': _PyAccess32_4, 'RGBa': _PyAccess32_4, @@ -274,7 +295,7 @@ if sys.byteorder == 'little': mode_map['I;16'] = _PyAccessI16_N mode_map['I;16L'] = _PyAccessI16_N mode_map['I;16B'] = _PyAccessI16_B - + mode_map['I;32L'] = _PyAccessI32_N mode_map['I;32B'] = _PyAccessI32_Swap else: @@ -284,14 +305,13 @@ else: mode_map['I;32L'] = _PyAccessI32_Swap mode_map['I;32B'] = _PyAccessI32_N - -def new(img, readonly=False): + +def new(img, readonly=False): access_type = mode_map.get(img.mode, None) if not access_type: - if DEBUG: print ("PyAccess Not Implemented: %s" % img.mode) + logger.debug("PyAccess Not Implemented: %s", img.mode) return None - if DEBUG: print ("New PyAccess: %s" % img.mode) return access_type(img, readonly) - +# End of file diff --git a/Darwin/lib/python3.4/site-packages/PIL/SgiImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/SgiImagePlugin.py similarity index 71% rename from Darwin/lib/python3.4/site-packages/PIL/SgiImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/SgiImagePlugin.py index b60df47..f890c7e 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/SgiImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/SgiImagePlugin.py @@ -18,18 +18,17 @@ # -__version__ = "0.2" - - from PIL import Image, ImageFile, _binary +__version__ = "0.2" + i8 = _binary.i8 i16 = _binary.i16be -i32 = _binary.i32be def _accept(prefix): - return i16(prefix) == 474 + return len(prefix) >= 2 and i16(prefix) == 474 + ## # Image plugin for SGI images. @@ -44,7 +43,7 @@ class SgiImageFile(ImageFile.ImageFile): # HEAD s = self.fp.read(512) if i16(s) != 474: - raise SyntaxError("not an SGI image file") + raise ValueError("Not an SGI image file") # relevant header entries compression = i8(s[2]) @@ -60,30 +59,31 @@ class SgiImageFile(ImageFile.ImageFile): elif layout == (1, 3, 4): self.mode = "RGBA" else: - raise SyntaxError("unsupported SGI image mode") + raise ValueError("Unsupported SGI image mode") # size self.size = i16(s[6:]), i16(s[8:]) - # decoder info if compression == 0: offset = 512 pagesize = self.size[0]*self.size[1]*layout[0] self.tile = [] for layer in self.mode: - self.tile.append(("raw", (0,0)+self.size, offset, (layer,0,-1))) + self.tile.append( + ("raw", (0, 0)+self.size, offset, (layer, 0, -1))) offset = offset + pagesize elif compression == 1: - self.tile = [("sgi_rle", (0,0)+self.size, 512, (self.mode, 0, -1))] + raise ValueError("SGI RLE encoding not supported") # # registry -Image.register_open("SGI", SgiImageFile, _accept) +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) -Image.register_extension("SGI", ".bw") -Image.register_extension("SGI", ".rgb") -Image.register_extension("SGI", ".rgba") +Image.register_extension(SgiImageFile.format, ".bw") +Image.register_extension(SgiImageFile.format, ".rgb") +Image.register_extension(SgiImageFile.format, ".rgba") +Image.register_extension(SgiImageFile.format, ".sgi") -Image.register_extension("SGI", ".sgi") # really? 
+# End of file diff --git a/Darwin/lib/python3.4/site-packages/PIL/SpiderImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/SpiderImagePlugin.py similarity index 93% rename from Darwin/lib/python3.4/site-packages/PIL/SpiderImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/SpiderImagePlugin.py index 306b348..6344a15 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/SpiderImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/SpiderImagePlugin.py @@ -48,7 +48,7 @@ def isInt(f): return 1 else: return 0 - except: + except ValueError: return 0 iforms = [1, 3, -11, -12, -21, -22] @@ -127,12 +127,12 @@ class SpiderImageFile(ImageFile.ImageFile): if self.istack == 0 and self.imgnumber == 0: # stk=0, img=0: a regular 2D image offset = hdrlen - self.nimages = 1 + self._nimages = 1 elif self.istack > 0 and self.imgnumber == 0: # stk>0, img=0: Opening the stack for the first time self.imgbytes = int(h[12]) * int(h[2]) * 4 self.hdrlen = hdrlen - self.nimages = int(h[26]) + self._nimages = int(h[26]) # Point to the first image in the stack offset = hdrlen * 2 self.imgnumber = 1 @@ -154,6 +154,14 @@ class SpiderImageFile(ImageFile.ImageFile): (self.rawmode, 0, 1))] self.__fp = self.fp # FIXME: hack + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + # 1st image index is zero (although SPIDER imgnumber starts at 1) def tell(self): if self.imgnumber < 1: @@ -164,7 +172,7 @@ class SpiderImageFile(ImageFile.ImageFile): def seek(self, frame): if self.istack == 0: return - if frame >= self.nimages: + if frame >= self._nimages: raise EOFError("attempt to seek past end of file") self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) self.fp = self.__fp @@ -173,11 +181,11 @@ class SpiderImageFile(ImageFile.ImageFile): # returns a byte image after rescaling to 0..255 def convert2byte(self, depth=255): - (min, max) = self.getextrema() + (minimum, maximum) = self.getextrema() m = 1 - if max != min: - m = depth / (max-min) - b = -m * min + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum return self.point(lambda i, m=m, b=b: i * m + b).convert("L") # returns a ImageTk.PhotoImage object, after rescaling to 0..255 @@ -271,14 +279,14 @@ def _save(im, fp, filename): def _save_spider(im, fp, filename): # get the filename extension and register it with Image - fn, ext = os.path.splitext(filename) + ext = os.path.splitext(filename)[1] Image.register_extension("SPIDER", ext) _save(im, fp, filename) # -------------------------------------------------------------------- -Image.register_open("SPIDER", SpiderImageFile) -Image.register_save("SPIDER", _save_spider) +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) if __name__ == "__main__": diff --git a/Darwin/lib/python3.4/site-packages/PIL/SunImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/SunImagePlugin.py similarity index 83% rename from Darwin/lib/python3.4/site-packages/PIL/SunImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/SunImagePlugin.py index 0db02ad..af63144 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/SunImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/SunImagePlugin.py @@ -17,17 +17,16 @@ # -__version__ = "0.3" - - from PIL import Image, ImageFile, ImagePalette, _binary -i16 = _binary.i16be +__version__ = "0.3" + i32 = _binary.i32be def _accept(prefix): - return i32(prefix) == 0x59a66a95 + return len(prefix) >= 4 and 
i32(prefix) == 0x59a66a95 + ## # Image plugin for Sun raster files. @@ -70,13 +69,13 @@ class SunImageFile(ImageFile.ImageFile): stride = (((self.size[0] * depth + 7) // 8) + 3) & (~3) if compression == 1: - self.tile = [("raw", (0,0)+self.size, offset, (rawmode, stride))] + self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] elif compression == 2: - self.tile = [("sun_rle", (0,0)+self.size, offset, rawmode)] + self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] # # registry -Image.register_open("SUN", SunImageFile, _accept) +Image.register_open(SunImageFile.format, SunImageFile, _accept) -Image.register_extension("SUN", ".ras") +Image.register_extension(SunImageFile.format, ".ras") diff --git a/Darwin/lib/python3.4/site-packages/PIL/TarIO.py b/Darwin/lib/python3.5/site-packages/PIL/TarIO.py similarity index 99% rename from Darwin/lib/python3.4/site-packages/PIL/TarIO.py rename to Darwin/lib/python3.5/site-packages/PIL/TarIO.py index bba493e..4e5115b 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/TarIO.py +++ b/Darwin/lib/python3.5/site-packages/PIL/TarIO.py @@ -16,6 +16,7 @@ from PIL import ContainerIO + ## # A file object that provides read access to a given member of a TAR # file. diff --git a/Darwin/lib/python3.4/site-packages/PIL/TgaImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/TgaImagePlugin.py similarity index 82% rename from Darwin/lib/python3.4/site-packages/PIL/TgaImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/TgaImagePlugin.py index 55790db..a75ce29 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/TgaImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/TgaImagePlugin.py @@ -17,10 +17,10 @@ # -__version__ = "0.3" - from PIL import Image, ImageFile, ImagePalette, _binary +__version__ = "0.3" + # # -------------------------------------------------------------------- @@ -28,7 +28,6 @@ from PIL import Image, ImageFile, ImagePalette, _binary i8 = _binary.i8 i16 = _binary.i16le -i32 = _binary.i32le MODES = { @@ -42,9 +41,6 @@ MODES = { } -def _accept(prefix): - return prefix[0:1] == b"\0" - ## # Image plugin for Targa files. @@ -58,7 +54,7 @@ class TgaImageFile(ImageFile.ImageFile): # process header s = self.fp.read(18) - id = i8(s[0]) + idlen = i8(s[0]) colormaptype = i8(s[1]) imagetype = i8(s[2]) @@ -70,7 +66,7 @@ class TgaImageFile(ImageFile.ImageFile): self.size = i16(s[12:]), i16(s[14:]) # validate header fields - if id != 0 or colormaptype not in (0, 1) or\ + if colormaptype not in (0, 1) or\ self.size[0] <= 0 or self.size[1] <= 0 or\ depth not in (1, 8, 16, 24, 32): raise SyntaxError("not a TGA file") @@ -79,7 +75,7 @@ class TgaImageFile(ImageFile.ImageFile): if imagetype in (3, 11): self.mode = "L" if depth == 1: - self.mode = "1" # ??? + self.mode = "1" # ??? 
elif imagetype in (1, 9): self.mode = "P" elif imagetype in (2, 10): @@ -103,22 +99,25 @@ class TgaImageFile(ImageFile.ImageFile): if imagetype & 8: self.info["compression"] = "tga_rle" + if idlen: + self.info["id_section"] = self.fp.read(idlen) + if colormaptype: # read palette start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) if mapdepth == 16: - self.palette = ImagePalette.raw("BGR;16", - b"\0"*2*start + self.fp.read(2*size)) + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) elif mapdepth == 24: - self.palette = ImagePalette.raw("BGR", - b"\0"*3*start + self.fp.read(3*size)) + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) elif mapdepth == 32: - self.palette = ImagePalette.raw("BGRA", - b"\0"*4*start + self.fp.read(4*size)) + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) # setup tile descriptor try: - rawmode = MODES[(imagetype&7, depth)] + rawmode = MODES[(imagetype & 7, depth)] if imagetype & 8: # compressed self.tile = [("tga_rle", (0, 0)+self.size, @@ -127,7 +126,7 @@ class TgaImageFile(ImageFile.ImageFile): self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), (rawmode, 0, orientation))] except KeyError: - pass # cannot decode + pass # cannot decode # # -------------------------------------------------------------------- @@ -145,6 +144,7 @@ SAVE = { "RGBA": ("BGRA", 32, 0, 2), } + def _save(im, fp, filename, check=0): try: @@ -185,13 +185,14 @@ def _save(im, fp, filename, check=0): if colormaptype: fp.write(im.im.getpalette("RGB", "BGR")) - ImageFile._save(im, fp, [("raw", (0,0)+im.size, 0, (rawmode, 0, orientation))]) + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) # # -------------------------------------------------------------------- # Registry -Image.register_open("TGA", TgaImageFile, _accept) -Image.register_save("TGA", _save) +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) -Image.register_extension("TGA", ".tga") +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/Darwin/lib/python3.5/site-packages/PIL/TiffImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..1f97422 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,1341 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. 
+# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from PIL import Image, ImageFile +from PIL import ImagePalette +from PIL import _binary + +import collections +from fractions import Fraction +import io +import itertools +from numbers import Number +import os +import struct +import sys +import warnings + +from .TiffTags import TAGS_V2, TYPES, TagInfo + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. +READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +i8 = _binary.i8 +o8 = _binary.o8 + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()]) + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), 
()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + # ? + (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I;16S", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I;16BS", "I;16BS"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I;32BS", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 2, (1, 1, 1, 1), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBA"), # OSX Grab + (MM, 2, (1, 1, 1, 1), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBA"), # OSX Grab + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] + + +def 
_accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + f = Fraction.from_float(1 / val if inv else val).limit_denominator(max_val) + n_d = (f.numerator, f.denominator) + return n_d[::-1] if inv else n_d + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + + +class ImageFileDirectory_v2(collections.MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. + + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def as_dict(self): + """Return a dictionary of the image's tags. + + use `dict(ifd)` instead. + + .. 
deprecated:: 3.0.0 + """ + # FIXME Deprecate: use dict(self) + return dict(self) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. + """ + return dict((TAGS_V2.get(code, TagInfo()).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if bytes is str: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if bytes is str: + basetypes += unicode, + + info = TAGS_V2.get(tag, TagInfo()) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + try: + self.tagtype[tag] = info.type + except KeyError: + self.tagtype[tag] = 7 + if all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if bytes is str: + # Never treat data as binary by default on Python 2. + self.tagtype[tag] = 2 + else: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and bytes is not str: + values = [value.encode("ascii", 'replace') if isinstance(value, str) else value + for value in values] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + if info.length == 1: + if legacy_api and self.tagtype[tag] in [5, 10]: + values = values, + dest[tag], = values + else: + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from PIL.TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from PIL.TiffTags import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{0}{1}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), (4, "L", "long"), + (6, "b", "signed byte"), (8, "h", "signed short"), + (9, "l", "signed long"), (11, "f", "float"), (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. 
+ def load_byte(self, data, legacy_api=True): + return (data if legacy_api else + tuple(map(ord, data) if bytes is str else data)) + + @_register_writer(1) # Basic type, except for the legacy API. + def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if sys.version_info[0] == 2: + value = value.decode('ascii', 'replace') + return b"" + value.encode('ascii', 'replace') + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack("{0}L".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else a / b + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31)) + for frac in values) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack("{0}l".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else a / b + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30)) + for frac in values) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise IOError("Corrupt EXIF data. " + + "Expecting to read %d bytes but only got %d. " % + (size, len(ret))) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): + tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12)) + if DEBUG: + tagname = TAGS_V2.get(tag, TagInfo()).name + typname = TYPES.get(typ, "unknown") + print("tag: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + if DEBUG: + print("- unsupported type", typ) + continue # ignore unsupported type + size = count * unit_size + if size > 4: + here = fp.tell() + offset, = self._unpack("L", data) + if DEBUG: + print("Tag Location: %s - Data Location: %s" % + (here, offset), end=" ") + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn("Possibly corrupt EXIF data. " + "Expecting to read %d bytes but only got %d. " + "Skipping tag %s" % (size, len(data), tag)) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + if DEBUG: + if size > 32: + print("- value: " % size) + else: + print("- value:", self[tag]) + + self.next, = self._unpack("L", self._ensure_read(fp, 4)) + except IOError as msg: + warnings.warn(str(msg)) + return + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + # FIXME What about tagdata? 
+ fp.write(self._pack("H", len(self._tags_v2))) + + entries = [] + offset = fp.tell() + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + if DEBUG: + print("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + if DEBUG: + tagname = TAGS_V2.get(tag, TagInfo()).name + typname = TYPES.get(typ, "unknown") + print("save: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + if len(data) >= 16: + print("- value: " % len(data)) + else: + print("- value:", values) + + # count is sum of lengths for string and arbitrary data + count = len(data) if typ in [2, 7] else len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError( + "multistrip support not yet implemented") + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + if DEBUG > 1: + print(tag, typ, count, repr(value), repr(data)) + fp.write(self._pack("HHL4s", tag, typ, count, value)) + + # -- overwrite here for multi-page -- + fp.write(b"\0\0\0\0") # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + fp.write(data) + if len(data) & 1: + fp.write(b"\0") + + return offset + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print ifd[key] + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + def __init__(self, *args, **kwargs): + ImageFileDirectory_v2.__init__(self, *args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + @classmethod + def from_v2(cls, original): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. 
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + self._seek(max(frame, 0)) # Questionable backwards compatibility. + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + def _decoder(self, rawmode, layer, tile=None): + "Setup decoder contexts" + + args = None + if rawmode == "RGB" and self._planar_configuration == 2: + rawmode = rawmode[layer] + compression = self._compression + if compression == "raw": + args = (rawmode, 0, 1) + elif compression == "jpeg": + args = rawmode, "" + if JPEGTABLES in self.tag_v2: + # Hack to handle abbreviated JPEG headers + # FIXME This will fail with more than one value + self.tile_prefix, = self.tag_v2[JPEGTABLES] + elif compression == "packbits": + args = rawmode + elif compression == "tiff_lzw": + args = rawmode + if PREDICTOR in self.tag_v2: + # Section 14: Differencing Predictor + self.decoderconfig = (self.tag_v2[PREDICTOR],) + + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + return args + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = self.tile[0][3] + (self.tag_v2.offset,) + decoder = Image._getdecoder(self.mode, 'libtiff', args, + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. + if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. 
just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if hasattr(self.fp, 'close'): + if not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + self.load_end() + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self.size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + format = self.tag_v2.get(SAMPLEFORMAT, (1,)) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, photo, format, fillorder, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION, (1, 1)) + yres = self.tag_v2.get(Y_RESOLUTION, (1, 1)) + + if xres and not isinstance(xres, tuple): + xres = (xres, 1.) + if yres and not isinstance(yres, tuple): + yres = (yres, 1.) + if xres and yres: + xres = xres[0] / (xres[1] or 1) + yres = yres[0] / (yres[1] or 1) + resunit = self.tag_v2.get(RESOLUTION_UNIT, 1) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = l = 0 + self.tile = [] + if STRIPOFFSETS in self.tag_v2: + # striped image + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", + "group4", "tiff_jpeg", + "tiff_adobe_deflate", + "tiff_thunderscan", + "tiff_deflate", + "tiff_sgilog", + "tiff_sgilog24", + "tiff_raw_16"]: + # if DEBUG: + # print "Activating g4 compression for whole file" + + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # replace the existing load function with our + # _load_libtiff function. + + self.load = self._load_libtiff + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + + # libtiff closes the file descriptor, so pass in a dup. 
+ try: + fp = hasattr(self.fp, "fileno") and \ + os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + key = ( + self.tag_v2.prefix, photo, format, 1, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: + rawmode = 'I;16N' + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, fp) + self.tile.append( + (self._compression, + (0, 0, w, ysize), + 0, a)) + a = None + + else: + for i in range(len(offsets)): + a = self._decoder(rawmode, l, i) + self.tile.append( + (self._compression, + (0, min(y, ysize), w, min(y+h, ysize)), + offsets[i], a)) + if DEBUG: + print("tiles: ", self.tile) + y = y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + elif TILEOFFSETS in self.tag_v2: + # tiled image + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + a = None + for o in self.tag_v2[TILEOFFSETS]: + if not a: + a = self._decoder(rawmode, l) + # FIXME: this doesn't work if the image size + # is not a multiple of the tile size... 
+ self.tile.append( + (self._compression, + (x, y, x+w, y+h), + o, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', + im.info.get('compression', 'raw')) + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype.get(key, None) + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + name_with_spaces = name.replace("_", " ") + if "_" in name and 
name_with_spaces in im.encoderinfo: + warnings.warn("%r is deprecated; use %r instead" % + (name_with_spaces, name), DeprecationWarning) + ifd[key] = im.encoderinfo[name.replace("_", " ")] + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # ICC Profile crashes. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS, ROWSPERSTRIP, ICCPROFILE] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for k, v in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + if k not in atts and k not in blocklist: + if isinstance(v, unicode if bytes is str else str): + atts[k] = v.encode('ascii', 'replace') + b"\0" + else: + atts[k] = v + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
+ if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + # print(im.mode, compression, a, im.encoderconfig) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) + +Image.register_extension(TiffImageFile.format, ".tif") +Image.register_extension(TiffImageFile.format, ".tiff") + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/Darwin/lib/python3.5/site-packages/PIL/TiffTags.py b/Darwin/lib/python3.5/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000..012d676 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/TiffTags.py @@ -0,0 +1,315 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=4, length=0, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + +## +# Map tag numbers to tag info. 
+# +# id: (Name, Type, Length, enum_values) +# +TAGS_V2 = { + + 254: ("NewSubfileType", 4, 1), + 255: ("SubfileType", 3, 1), + 256: ("ImageWidth", 4, 1), + 257: ("ImageLength", 4, 1), + 258: ("BitsPerSample", 3, 0), + 259: ("Compression", 3, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4, + "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", 3, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RBG Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Thresholding", 3, 1), + 264: ("CellWidth", 3, 1), + 265: ("CellHeight", 3, 1), + 266: ("FillOrder", 3, 1), + 269: ("DocumentName", 2, 1), + + 270: ("ImageDescription", 2, 1), + 271: ("Make", 2, 1), + 272: ("Model", 2, 1), + 273: ("StripOffsets", 4, 0), + 274: ("Orientation", 3, 1), + 277: ("SamplesPerPixel", 3, 1), + 278: ("RowsPerStrip", 4, 1), + 279: ("StripByteCounts", 4, 0), + + 280: ("MinSampleValue", 4, 0), + 281: ("MaxSampleValue", 3, 0), + 282: ("XResolution", 5, 1), + 283: ("YResolution", 5, 1), + 284: ("PlanarConfiguration", 3, 1, {"Contigous": 1, "Separate": 2}), + 285: ("PageName", 2, 1), + 286: ("XPosition", 5, 1), + 287: ("YPosition", 5, 1), + 288: ("FreeOffsets", 4, 1), + 289: ("FreeByteCounts", 4, 1), + + 290: ("GrayResponseUnit", 3, 1), + 291: ("GrayResponseCurve", 3, 0), + 292: ("T4Options", 4, 1), + 293: ("T6Options", 4, 1), + 296: ("ResolutionUnit", 3, 1, {"inch": 1, "cm": 2}), + 297: ("PageNumber", 3, 2), + + 301: ("TransferFunction", 3, 0), + 305: ("Software", 2, 1), + 306: ("DateTime", 2, 1), + + 315: ("Artist", 2, 1), + 316: ("HostComputer", 2, 1), + 317: ("Predictor", 3, 1), + 318: ("WhitePoint", 5, 2), + 319: ("PrimaryChromaticies", 3, 6), + + 320: ("ColorMap", 3, 0), + 321: ("HalftoneHints", 3, 2), + 322: ("TileWidth", 4, 1), + 323: ("TileLength", 4, 1), + 324: ("TileOffsets", 4, 0), + 325: ("TileByteCounts", 4, 0), + + 332: ("InkSet", 3, 1), + 333: ("InkNames", 2, 1), + 334: ("NumberOfInks", 3, 1), + 336: ("DotRange", 3, 0), + 337: ("TargetPrinter", 2, 1), + 338: ("ExtraSamples", 1, 0), + 339: ("SampleFormat", 3, 0), + + 340: ("SMinSampleValue", 12, 0), + 341: ("SMaxSampleValue", 12, 0), + 342: ("TransferRange", 3, 6), + + # obsolete JPEG tags + 512: ("JPEGProc", 3, 1), + 513: ("JPEGInterchangeFormat", 4, 1), + 514: ("JPEGInterchangeFormatLength", 4, 1), + 515: ("JPEGRestartInterval", 3, 1), + 517: ("JPEGLosslessPredictors", 3, 0), + 518: ("JPEGPointTransforms", 3, 0), + 519: ("JPEGQTables", 4, 0), + 520: ("JPEGDCTables", 4, 0), + 521: ("JPEGACTables", 4, 0), + + 529: ("YCbCrCoefficients", 5, 3), + 530: ("YCbCrSubSampling", 3, 2), + 531: ("YCbCrPositioning", 3, 1), + 532: ("ReferenceBlackWhite", 4, 0), + + 33432: ("Copyright", 2, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", 3, 1), + + # MPInfo + 45056: ("MPFVersion", 7, 1), + 45057: ("NumberOfImages", 4, 1), + 45058: ("MPEntry", 7, 1), + 45059: ("ImageUIDList", 7, 0), + 45060: ("TotalFrames", 4, 1), + 45313: ("MPIndividualNum", 4, 1), + 45569: ("PanOrientation", 4, 1), + 45570: ("PanOverlap_H", 5, 1), + 45571: ("PanOverlap_V", 5, 1), + 45572: ("BaseViewpointNum", 4, 1), + 45573: ("ConvergenceAngle", 10, 1), + 45574: ("BaselineLength", 5, 1), + 45575: ("VerticalDivergence", 10, 1), + 45576: ("AxisDistance_X", 10, 1), + 45577: ("AxisDistance_Y", 10, 1), + 45578: ("AxisDistance_Z", 10, 1), + 45579: ("YawAngle", 10, 1), + 45580: ("PitchAngle", 10, 1), + 45581: ("RollAngle", 10, 1), + + 50741: 
("MakerNoteSafety", 3, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", 5, 1), + 50838: ("ImageJMetaDataByteCounts", 4, 1), + 50839: ("ImageJMetaData", 7, 1) +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 33434: 'ExposureTime', + 33437: 'FNumber', + 33723: 'IptcNaaInfo', + 34377: 'PhotoshopInfo', + 34675: 'ICCProfile', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34853: 'GPSInfoIFD', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42240: 'Gamma', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. 
+ TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } diff --git a/Darwin/lib/python3.4/site-packages/PIL/WalImageFile.py b/Darwin/lib/python3.5/site-packages/PIL/WalImageFile.py similarity index 98% rename from Darwin/lib/python3.4/site-packages/PIL/WalImageFile.py rename to Darwin/lib/python3.5/site-packages/PIL/WalImageFile.py index d494bfd..fc2bb30 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/WalImageFile.py +++ b/Darwin/lib/python3.5/site-packages/PIL/WalImageFile.py @@ -1,5 +1,3 @@ -# -*- coding: iso-8859-1 -*- -# # The Python Imaging Library. # $Id$ # @@ -33,6 +31,7 @@ except ImportError: i32 = _binary.i32le + ## # Load texture from a Quake2 WAL texture file. #
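Editor's note (not part of the diff): WAL textures are loaded through this module directly rather than via Image.open(). A minimal usage sketch, with an illustrative file name; per the module's own docs the bundled Quake2 palette is attached to the returned "P"-mode image by default:

    from PIL import WalImageFile

    # "arena.wal" is a placeholder path, not a file shipped with this change.
    img = WalImageFile.open("arena.wal")
    print(img.size, img.mode)   # e.g. (128, 128) 'P'
    img.save("arena.png")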
@@ -75,7 +74,7 @@ def open(filename): quake2palette = ( - # default palette taken from piffo 0.93 by Hans Häggström + # default palette taken from piffo 0.93 by Hans Häggström b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" diff --git a/Darwin/lib/python3.4/site-packages/PIL/WebPImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/WebPImagePlugin.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/PIL/WebPImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/WebPImagePlugin.py index ab60c8d..6837b53 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/WebPImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/WebPImagePlugin.py @@ -12,7 +12,7 @@ _VALID_WEBP_MODES = { _VP8_MODES_BY_IDENTIFIER = { b"VP8 ": "RGB", b"VP8X": "RGBA", - b"VP8L": "RGBA", # lossless + b"VP8L": "RGBA", # lossless } @@ -30,7 +30,8 @@ class WebPImageFile(ImageFile.ImageFile): format_description = "WebP image" def _open(self): - data, width, height, self.mode, icc_profile, exif = _webp.WebPDecode(self.fp.read()) + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) if icc_profile: self.info["icc_profile"] = icc_profile @@ -72,8 +73,8 @@ def _save(im, fp, filename): fp.write(data) -Image.register_open("WEBP", WebPImageFile, _accept) -Image.register_save("WEBP", _save) +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +Image.register_save(WebPImageFile.format, _save) -Image.register_extension("WEBP", ".webp") -Image.register_mime("WEBP", "image/webp") +Image.register_extension(WebPImageFile.format, ".webp") +Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/Darwin/lib/python3.4/site-packages/PIL/WmfImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/WmfImagePlugin.py similarity index 92% rename from Darwin/lib/python3.4/site-packages/PIL/WmfImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/WmfImagePlugin.py index 40b2037..3163210 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/WmfImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/WmfImagePlugin.py @@ -15,15 +15,16 @@ # See the README file for information on usage and redistribution. # -__version__ = "0.2" - from PIL import Image, ImageFile, _binary +__version__ = "0.2" + _handler = None if str != bytes: long = int + ## # Install application-specific WMF image handler. # @@ -36,14 +37,14 @@ def register_handler(handler): if hasattr(Image.core, "drawwmf"): # install default handler (windows only) - class WmfHandler: + class WmfHandler(object): def open(self, im): im.mode = "RGB" self.bbox = im.info["wmf_bbox"] def load(self, im): - im.fp.seek(0) # rewind + im.fp.seek(0) # rewind return Image.frombytes( "RGB", im.size, Image.core.drawwmf(im.fp.read(), im.size, self.bbox), @@ -56,6 +57,7 @@ if hasattr(Image.core, "drawwmf"): word = _binary.i16le + def short(c, o=0): v = word(c, o) if v >= 32768: @@ -64,6 +66,7 @@ def short(c, o=0): dword = _binary.i32le + # # -------------------------------------------------------------------- # Read WMF file @@ -74,6 +77,7 @@ def _accept(prefix): prefix[:4] == b"\x01\x00\x00\x00" ) + ## # Image plugin for Windows metafiles. 
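Editor's note (not part of the diff): register_handler() above is how an application supplies its own WMF renderer; outside Windows there is no built-in rasterizer. A rough sketch with a hypothetical handler class, following the open()/load() protocol the default WmfHandler uses:

    from PIL import Image, WmfImagePlugin

    class StubWmfHandler(object):
        # Hypothetical handler: a real one would rasterize the metafile.
        def open(self, im):
            im.mode = "RGB"          # size/bbox are already parsed by the stub plugin
        def load(self, im):
            return Image.new("RGB", im.size, "white")

    WmfImagePlugin.register_handler(StubWmfHandler())
    im = Image.open("drawing.wmf")   # placeholder filename
    im.load()                        # delegates to StubWmfHandler.load()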
@@ -95,8 +99,10 @@ class WmfStubImageFile(ImageFile.StubImageFile):
         inch = word(s, 14)

         # get bounding box
-        x0 = short(s, 6); y0 = short(s, 8)
-        x1 = short(s, 10); y1 = short(s, 12)
+        x0 = short(s, 6)
+        y0 = short(s, 8)
+        x1 = short(s, 10)
+        y1 = short(s, 12)

         # normalize size to 72 dots per inch
         size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch
@@ -115,8 +121,10 @@ class WmfStubImageFile(ImageFile.StubImageFile):
             # enhanced metafile

             # get bounding box
-            x0 = dword(s, 8); y0 = dword(s, 12)
-            x1 = dword(s, 16); y1 = dword(s, 20)
+            x0 = dword(s, 8)
+            y0 = dword(s, 12)
+            x1 = dword(s, 16)
+            y1 = dword(s, 20)

             # get frame (in 0.01 millimeter units)
             frame = dword(s, 24), dword(s, 28), dword(s, 32), dword(s, 36)
diff --git a/Darwin/lib/python3.4/site-packages/PIL/XVThumbImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/XVThumbImagePlugin.py
similarity index 99%
rename from Darwin/lib/python3.4/site-packages/PIL/XVThumbImagePlugin.py
rename to Darwin/lib/python3.5/site-packages/PIL/XVThumbImagePlugin.py
index e5bf55a..9d4b704 100644
--- a/Darwin/lib/python3.4/site-packages/PIL/XVThumbImagePlugin.py
+++ b/Darwin/lib/python3.5/site-packages/PIL/XVThumbImagePlugin.py
@@ -17,10 +17,10 @@
 # FIXME: make save work (this requires quantization support)
 #

-__version__ = "0.1"
-
 from PIL import Image, ImageFile, ImagePalette, _binary

+__version__ = "0.1"
+
 o8 = _binary.o8

 # standard color palette for thumbnails (RGB332)
@@ -30,6 +30,7 @@ for r in range(8):
         for b in range(4):
             PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3))

+
 ##
 # Image plugin for XV thumbnail images.

diff --git a/Darwin/lib/python3.4/site-packages/PIL/XbmImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/XbmImagePlugin.py
similarity index 83%
rename from Darwin/lib/python3.4/site-packages/PIL/XbmImagePlugin.py
rename to Darwin/lib/python3.5/site-packages/PIL/XbmImagePlugin.py
index 799d727..bca8828 100644
--- a/Darwin/lib/python3.4/site-packages/PIL/XbmImagePlugin.py
+++ b/Darwin/lib/python3.5/site-packages/PIL/XbmImagePlugin.py
@@ -19,15 +19,15 @@
 # See the README file for information on usage and redistribution.
 #

-__version__ = "0.6"
-
 import re
 from PIL import Image, ImageFile

+__version__ = "0.6"
+
 # XBM header
 xbm_head = re.compile(
-    b"\s*#define[ \t]+[^_]*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
-    b"#define[ \t]+[^_]*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
+    b"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
+    b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
     b"(?P<hotspot>"
     b"#define[ \t]+[^_]*_x_hot[ \t]+(?P<xhot>[0-9]+)[\r\n]+"
     b"#define[ \t]+[^_]*_y_hot[ \t]+(?P<yhot>[0-9]+)[\r\n]+"
@@ -35,9 +35,11 @@ xbm_head = re.compile(
     b"[\\000-\\377]*_bits\\[\\]"
 )

+
 def _accept(prefix):
     return prefix.lstrip()[:7] == b"#define"

+
 ##
 # Image plugin for X11 bitmaps.
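Editor's note (not part of the diff): the named groups in xbm_head (width, height, and the optional hotspot block) are what the plugin's _open() reads back from the match. A small, self-contained check of the two leading patterns:

    import re

    xbm_head = re.compile(
        b"\s*#define[ \t]+.*_width[ \t]+(?P<width>[0-9]+)[\r\n]+"
        b"#define[ \t]+.*_height[ \t]+(?P<height>[0-9]+)[\r\n]+"
    )
    sample = b"#define im_width 16\n#define im_height 8\n"
    m = xbm_head.match(sample)
    print(int(m.group("width")), int(m.group("height")))   # 16 8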
@@ -81,14 +83,14 @@ def _save(im, fp, filename): fp.write(b"static char im_bits[] = {\n") - ImageFile._save(im, fp, [("xbm", (0,0)+im.size, 0, None)]) + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) fp.write(b"};\n") -Image.register_open("XBM", XbmImageFile, _accept) -Image.register_save("XBM", _save) +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) -Image.register_extension("XBM", ".xbm") +Image.register_extension(XbmImageFile.format, ".xbm") -Image.register_mime("XBM", "image/xbm") +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/Darwin/lib/python3.4/site-packages/PIL/XpmImagePlugin.py b/Darwin/lib/python3.5/site-packages/PIL/XpmImagePlugin.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/PIL/XpmImagePlugin.py rename to Darwin/lib/python3.5/site-packages/PIL/XpmImagePlugin.py index 701a23b..556adb8 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/XpmImagePlugin.py +++ b/Darwin/lib/python3.5/site-packages/PIL/XpmImagePlugin.py @@ -15,13 +15,12 @@ # -__version__ = "0.2" - - import re from PIL import Image, ImageFile, ImagePalette from PIL._binary import i8, o8 +__version__ = "0.2" + # XPM header xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") @@ -29,6 +28,7 @@ xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") def _accept(prefix): return prefix[:9] == b"/* XPM */" + ## # Image plugin for X11 pixel maps. @@ -86,9 +86,9 @@ class XpmImageFile(ImageFile.ImageFile): elif rgb[0:1] == b"#": # FIXME: handle colour names (see ImagePalette.py) rgb = int(rgb[1:], 16) - palette[c] = o8((rgb >> 16) & 255) +\ - o8((rgb >> 8) & 255) +\ - o8(rgb & 255) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) else: # unknown colour raise ValueError("cannot read this XPM file") @@ -123,8 +123,8 @@ class XpmImageFile(ImageFile.ImageFile): # # Registry -Image.register_open("XPM", XpmImageFile, _accept) +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) -Image.register_extension("XPM", ".xpm") +Image.register_extension(XpmImageFile.format, ".xpm") -Image.register_mime("XPM", "image/xpm") +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/Darwin/lib/python3.4/site-packages/PIL/__init__.py b/Darwin/lib/python3.5/site-packages/PIL/__init__.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/PIL/__init__.py rename to Darwin/lib/python3.5/site-packages/PIL/__init__.py index d5894c4..6756d7a 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/__init__.py +++ b/Darwin/lib/python3.5/site-packages/PIL/__init__.py @@ -12,7 +12,7 @@ # ;-) VERSION = '1.1.7' # PIL version -PILLOW_VERSION = '2.5.3' # Pillow +PILLOW_VERSION = '3.0.0' # Pillow _plugins = ['BmpImagePlugin', 'BufrStubImagePlugin', @@ -36,6 +36,7 @@ _plugins = ['BmpImagePlugin', 'McIdasImagePlugin', 'MicImagePlugin', 'MpegImagePlugin', + 'MpoImagePlugin', 'MspImagePlugin', 'PalmImagePlugin', 'PcdImagePlugin', diff --git a/Darwin/lib/python3.4/site-packages/PIL/_binary.py b/Darwin/lib/python3.5/site-packages/PIL/_binary.py similarity index 66% rename from Darwin/lib/python3.4/site-packages/PIL/_binary.py rename to Darwin/lib/python3.5/site-packages/PIL/_binary.py index 71b2b78..2f5e8ff 100644 --- a/Darwin/lib/python3.4/site-packages/PIL/_binary.py +++ b/Darwin/lib/python3.5/site-packages/PIL/_binary.py @@ -11,21 +11,24 @@ # See the README file for information on usage and redistribution. 
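Editor's note (not part of the diff): the _binary.py hunk below replaces hand-rolled byte shifting with struct.pack/struct.unpack. A quick sanity check that the two forms agree for the 16-bit little-endian helpers:

    from struct import pack, unpack

    i = 0x1001
    assert pack("<H", i) == bytes((i & 255, (i >> 8) & 255))    # new o16le == old o16le
    assert unpack("<H", b"\x01\x10")[0] == 0x01 | (0x10 << 8)   # new i16le == old i16le
    print("ok")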
 #
+from struct import unpack, pack
+
 if bytes is str:
     def i8(c):
         return ord(c)

     def o8(i):
-        return chr(i&255)
+        return chr(i & 255)
 else:
     def i8(c):
         return c if c.__class__ is int else c[0]

     def o8(i):
-        return bytes((i&255,))
+        return bytes((i & 255,))
+

 # Input, le = little endian, be = big endian
-#TODO: replace with more readable struct.unpack equivalent
+# TODO: replace with more readable struct.unpack equivalent
 def i16le(c, o=0):
     """
     Converts a 2-bytes (16 bits) string to an integer.
@@ -33,7 +36,8 @@ def i16le(c, o=0):
     c: string containing bytes to convert
     o: offset of bytes to convert in string
     """
-    return i8(c[o]) | (i8(c[o+1])<<8)
+    return unpack("<H", c[o:o+2])[0]
+

 def i16be(c, o=0):
-    return (i8(c[o])<<8) | i8(c[o+1])
+    return unpack(">H", c[o:o+2])[0]
+
 def i32be(c, o=0):
-    return (i8(c[o])<<24) | (i8(c[o+1])<<16) | (i8(c[o+2])<<8) | i8(c[o+3])
+    return unpack(">I", c[o:o+4])[0]
+
 # Output, le = little endian, be = big endian
 def o16le(i):
-    return o8(i) + o8(i>>8)
+    return pack("<H", i)
+

 def o32le(i):
-    return o8(i) + o8(i>>8) + o8(i>>16) + o8(i>>24)
+    return pack("<I", i)
+

 def o16be(i):
-    return o8(i>>8) + o8(i)
+    return pack(">H", i)
+
 def o32be(i):
-    return o8(i>>24) + o8(i>>16) + o8(i>>8) + o8(i)
+    return pack(">I", i)
+# End of file
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imaging.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imaging.cpython-35m-darwin.so
new file mode 100755
index 0000000..50d68eb
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imaging.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imagingcms.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imagingcms.cpython-35m-darwin.so
new file mode 100755
index 0000000..108e117
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imagingcms.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imagingft.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imagingft.cpython-35m-darwin.so
new file mode 100755
index 0000000..2a3fd5b
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imagingft.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imagingmath.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imagingmath.cpython-35m-darwin.so
new file mode 100755
index 0000000..4726cae
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imagingmath.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imagingmorph.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imagingmorph.cpython-35m-darwin.so
new file mode 100755
index 0000000..2791fdc
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imagingmorph.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.5/site-packages/PIL/_imagingtk.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_imagingtk.cpython-35m-darwin.so
new file mode 100755
index 0000000..a3add98
Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_imagingtk.cpython-35m-darwin.so differ
diff --git a/Darwin/lib/python3.4/site-packages/PIL/_util.py b/Darwin/lib/python3.5/site-packages/PIL/_util.py
similarity index 99%
rename from Darwin/lib/python3.4/site-packages/PIL/_util.py
rename to Darwin/lib/python3.5/site-packages/PIL/_util.py
index eb5c2c2..51c6f68 100644
--- a/Darwin/lib/python3.4/site-packages/PIL/_util.py
+++ b/Darwin/lib/python3.5/site-packages/PIL/_util.py
@@ -3,20 +3,25 @@ import os
 if bytes is str:
     def isStringType(t):
         return isinstance(t, basestring)
+
     def isPath(f):
         return isinstance(f, basestring)
 else:
     def isStringType(t):
         return
isinstance(t, str) + def isPath(f): return isinstance(f, (bytes, str)) + # Checks if an object is a string, and that it points to a directory. def isDirectory(f): return isPath(f) and os.path.isdir(f) + class deferred_error(object): def __init__(self, ex): self.ex = ex + def __getattr__(self, elt): raise self.ex diff --git a/Darwin/lib/python3.5/site-packages/PIL/_webp.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/PIL/_webp.cpython-35m-darwin.so new file mode 100755 index 0000000..26cc91a Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/PIL/_webp.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/PIL/features.py b/Darwin/lib/python3.5/site-packages/PIL/features.py new file mode 100644 index 0000000..fd87f09 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/PIL/features.py @@ -0,0 +1,67 @@ +from PIL import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._imagingtk", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", + "transp_webp": ("WEBP", "WebPDecoderBuggyAlpha") +} + + +def check_module(feature): + if feature not in modules: + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + method_to_call = None + if type(module) is tuple: + module, method_to_call = module + + try: + imported_module = __import__(module) + except ImportError: + # If a method is being checked, None means that + # rather than the method failing, the module required for the method + # failed to be imported first + return None if method_to_call else False + + if method_to_call: + method = getattr(imported_module, method_to_call) + return method() is True + else: + return True + + +def get_supported_modules(): + supported_modules = [] + for feature in modules: + if check_module(feature): + supported_modules.append(feature) + return supported_modules + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + supported_codecs = [] + for feature in codecs: + if check_codec(feature): + supported_codecs.append(feature) + return supported_codecs diff --git a/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..a7e6ee9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,54 @@ +Pillow +====== + +Python Imaging Library (Fork) +----------------------------- + +Pillow is the friendly PIL fork by `Alex Clark and Contributors `_. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. + +.. image:: https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build + :target: https://travis-ci.org/python-pillow/Pillow + :alt: Travis CI build status (Linux) + +.. image:: https://img.shields.io/travis/python-pillow/pillow-wheels/latest.svg?label=OS%20X%20build + :target: https://travis-ci.org/python-pillow/pillow-wheels + :alt: Travis CI build status (OS X) + +.. image:: https://img.shields.io/appveyor/ci/Pythonpillow/pillow/master.svg?label=Windows%20build + :target: https://ci.appveyor.com/project/Pythonpillow/pillow + :alt: AppVeyor CI build status (Windows) + +.. 
image:: https://img.shields.io/pypi/v/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Latest PyPI version + +.. image:: https://img.shields.io/pypi/dm/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Number of PyPI downloads + +.. image:: https://coveralls.io/repos/python-pillow/Pillow/badge.svg?branch=master&service=github + :target: https://coveralls.io/r/python-pillow/Pillow?branch=master + :alt: Code coverage + +.. image:: https://landscape.io/github/python-pillow/Pillow/master/landscape.svg + :target: https://landscape.io/github/python-pillow/Pillow/master + :alt: Code health + +More Information +---------------- + +- `Documentation `_ + + - `Installation `_ + - `Handbook `_ + +- `Contribute `_ + + - `Issues `_ + - `Pull requests `_ + +- `Changelog `_ + + - `Pre-fork `_ + + diff --git a/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/METADATA b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/METADATA new file mode 100644 index 0000000..1bcc7d7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/METADATA @@ -0,0 +1,81 @@ +Metadata-Version: 2.0 +Name: Pillow +Version: 3.0.0 +Summary: Python Imaging Library (Fork) +Home-page: http://python-pillow.github.io/ +Author: Alex Clark (Fork Author) +Author-email: aclark@aclark.net +License: Standard PIL License +Keywords: Imaging +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Scanners +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy + +Pillow +====== + +Python Imaging Library (Fork) +----------------------------- + +Pillow is the friendly PIL fork by `Alex Clark and Contributors `_. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. + +.. image:: https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build + :target: https://travis-ci.org/python-pillow/Pillow + :alt: Travis CI build status (Linux) + +.. image:: https://img.shields.io/travis/python-pillow/pillow-wheels/latest.svg?label=OS%20X%20build + :target: https://travis-ci.org/python-pillow/pillow-wheels + :alt: Travis CI build status (OS X) + +.. image:: https://img.shields.io/appveyor/ci/Pythonpillow/pillow/master.svg?label=Windows%20build + :target: https://ci.appveyor.com/project/Pythonpillow/pillow + :alt: AppVeyor CI build status (Windows) + +.. image:: https://img.shields.io/pypi/v/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Latest PyPI version + +.. image:: https://img.shields.io/pypi/dm/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Number of PyPI downloads + +.. 
image:: https://coveralls.io/repos/python-pillow/Pillow/badge.svg?branch=master&service=github + :target: https://coveralls.io/r/python-pillow/Pillow?branch=master + :alt: Code coverage + +.. image:: https://landscape.io/github/python-pillow/Pillow/master/landscape.svg + :target: https://landscape.io/github/python-pillow/Pillow/master + :alt: Code health + +More Information +---------------- + +- `Documentation `_ + + - `Installation `_ + - `Handbook `_ + +- `Contribute `_ + + - `Issues `_ + - `Pull requests `_ + +- `Changelog `_ + + - `Pre-fork `_ + + diff --git a/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/RECORD new file mode 100644 index 0000000..67034e7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/RECORD @@ -0,0 +1,222 @@ +PIL/__init__.py,sha256=2m1fUdHWXtVbQ3dcz9F7SxPMAeM27-6N18YmvWXLRnk,1554 +PIL/_binary.py,sha256=io-M3LN24r6l4JEb3famSD-VsbO9NKNdX6paf8gNVm4,1408 +PIL/_imaging.cpython-35m-darwin.so,sha256=HYWdcLZCPkHSY8fjoX741q6mSmvOc6bjlBmrg8iMshU,719328 +PIL/_imagingcms.cpython-35m-darwin.so,sha256=6qcOmpKgHEdhO-YhC8eY7w3wvF49JmbdE7c4K_iVRaQ,43652 +PIL/_imagingft.cpython-35m-darwin.so,sha256=hr_XWXqwolbn-jitoJdf1qdTHsrEXBKnQ6S2CxpBiEo,41280 +PIL/_imagingmath.cpython-35m-darwin.so,sha256=EEDKcVk4sitmKUw20hbVOphCq8Rx_PaR32I2SL4JptI,55068 +PIL/_imagingmorph.cpython-35m-darwin.so,sha256=S_4kkObre4SGIo6pYJOEmUSBEjSYoKAMi0VXR1JPQeM,26284 +PIL/_imagingtk.cpython-35m-darwin.so,sha256=vinFau9o7RG8clan7aYxHytQs5ivvesF6FVJBZVW0lo,26628 +PIL/_util.py,sha256=b3m5R7BosKxQ1NJud7vuxdmFouSfSYoaJ2P7WqlGtcE,553 +PIL/_webp.cpython-35m-darwin.so,sha256=mhXvO5-mFDTXzsJkW1-YfWmWwThv6k5hWINacmYd39w,32240 +PIL/BdfFontFile.py,sha256=Ua5NnIroZuhYrHtvMqFgZA8p4nXhjE6Cs0KsH048ts0,3355 +PIL/BmpImagePlugin.py,sha256=yvVehULJzrhCdfyd90ZoSROXi9XbqPVAgvlKcPcOQtY,12465 +PIL/BufrStubImagePlugin.py,sha256=bnQDTRwjOe-uDVkmO8KuW6-DsN2-eNL7zWh7aCdeOeY,1504 +PIL/ContainerIO.py,sha256=UpxILbI9Ln0kL5rBQtPucOiYTo0XftTJPEFQToM3vzg,2605 +PIL/CurImagePlugin.py,sha256=AzCO8venAYEcg3CutKaGl2Q7LSf7i8oR0sWogeIuVRc,1970 +PIL/DcxImagePlugin.py,sha256=pqM5S2HZbqZMfqk3EjGJuCwun7gW45WCK6xCKHKPd68,2002 +PIL/EpsImagePlugin.py,sha256=V1wUjjKjPK1n2c-3y3E2XmWmETj8hIyDkFUW-xF4Vgc,12539 +PIL/ExifTags.py,sha256=klIomCR-gXvHbeRRPmFg-PFJWEWw3DO_ZFPjmnoiPiA,5092 +PIL/features.py,sha256=Sst8D0wPI4ONak8tQHNilZ-vdDAtiKoRprK264-PZ-0,1613 +PIL/FitsStubImagePlugin.py,sha256=bOyEulT-U_VQkkS9vErugqldfjVAUzgcZufrO3XS7yc,1656 +PIL/FliImagePlugin.py,sha256=pm4TiJjYD2hdIzdF_erSqMGovQo1CmhIkYbwROeNVW0,4824 +PIL/FontFile.py,sha256=YObcDjI_-T4q7kxf-JLJuhVxyC7zklFy0N6b3Ji5lGA,2793 +PIL/FpxImagePlugin.py,sha256=hcdmDpmKlkvIJnXq-2k1QV9IX-CTLZzUtxgvp3elQSM,6288 +PIL/GbrImagePlugin.py,sha256=0vx5Nn7K2SuSNDOZlMr7UL5NI-Xry1SMtOgsWh3YsDY,1642 +PIL/GdImageFile.py,sha256=sgFVwaYV5FZDza1U4VzVh5ru2K4XMdQ7slAhB8F-ebc,2182 +PIL/GifImagePlugin.py,sha256=gQ0doUmE40NAMO3jcaWJk5uMXwsf4AzRuhStjPgBudg,21393 +PIL/GimpGradientFile.py,sha256=K71Bj58GMicSwxywhEfQA2rkjBSkfi595R-loWn4vGY,3347 +PIL/GimpPaletteFile.py,sha256=NyYbhFn0ZenrzDZHyq76xjw1j6PslhLJTZX81AXO9dI,1341 +PIL/GribStubImagePlugin.py,sha256=BrMgUAZs1bfvNcgphqlDG0Bbvd5PY-pHEn_1zGmVHSo,1505 +PIL/Hdf5StubImagePlugin.py,sha256=AthIydrNqzKj_68Pa29b08fKKMu089A_Ef_G49aLLbo,1549 +PIL/IcnsImagePlugin.py,sha256=pT-H9tJPhcWVuIdue1ZcxDcwg6y0DJJaYsQj9uFeEsM,10668 +PIL/IcoImagePlugin.py,sha256=42H53MY25mlE90nCJVX6u7ydXaBS-jWZ86mCHoy_bmY,9213 +PIL/Image.py,sha256=VBxuzW65cmTvy9abNTSgnhsCAd13KG5G1nvmzVlmWYE,81182 
+PIL/ImageChops.py,sha256=jZCVJwGjIpbe4tKudbxpsLVvel4FvujQcd3HbFdtLCM,6184 +PIL/ImageCms.py,sha256=q-H1ZsD3ujjtHnm_VqsnDuut4rTXrehDkytUnSg-L_0,37141 +PIL/ImageColor.py,sha256=3CAnIw4NwIZ5W24_fPV4FTTd-0aU1GDMuoKN5VyU_z8,7981 +PIL/ImageDraw.py,sha256=ufokEIdmEGnDaHM8So9krJoNCXc9dtVvRj4omV-bdDk,13108 +PIL/ImageDraw2.py,sha256=-2J8X4_1edRkR09DJlhVY0BuP6N3vA0QPhELz-Lnh8Q,3231 +PIL/ImageEnhance.py,sha256=-oS6xX6PHULHa-lJk5fQeemTk-d-sqI8rsv2odTrXEc,3185 +PIL/ImageFile.py,sha256=9I3sfK3G-bd5TLYMX5mFmlmEQ2HhfYovjw0Z9_Oph8A,15785 +PIL/ImageFilter.py,sha256=EZxAfoxda-K4RAVNCfwht0AaH090I8Uh3PdHwJT15rM,6618 +PIL/ImageFont.py,sha256=tMl7YjWs7elQZdHU-AmYMSrWU3JA5SCJBJGaBylZhxo,19557 +PIL/ImageGrab.py,sha256=pFwxC4I7eAGZht7fLrLNmN4prc4FxT5DkKAFW4DwO6M,1747 +PIL/ImageMath.py,sha256=KF_7l0uVUexavD5PaE9en6A24yvJhHVR5wb8KN0mpfc,7462 +PIL/ImageMode.py,sha256=BvfsmKlPmT1attWhs_SkjfdDRL5G2ZPy82S1QZVF8OE,1306 +PIL/ImageMorph.py,sha256=xeUd90xH-GxD7ki1l17XaF3ECoNl-K9ViTwMhlk9l7s,8365 +PIL/ImageOps.py,sha256=gGT5MoU-VAX5FNVIVnJhf9miuEZBrTdWzc2rmyYDlxA,13895 +PIL/ImagePalette.py,sha256=pnrnyGg8S6UjRUCGt3KbuPqvZwSkJlR9PFJnrE1sR1I,6738 +PIL/ImagePath.py,sha256=30TyaVd4FNYg08dq_H2lPTkbFFRYBG1RpZsmjscxpyQ,1239 +PIL/ImageQt.py,sha256=zHf-KX_DqdtMxhDVslORZo66pBR_zhe8hwiVxVWp3Cg,5661 +PIL/ImageSequence.py,sha256=FcwPuCTIC2v37YGK5-eeebe7Jj5sOx6a0ePOCx8g06k,947 +PIL/ImageShow.py,sha256=zFofzMjn4cL2L0ShXHaBmiJUZWt3ArXWVkKeD3K59Sk,4795 +PIL/ImageStat.py,sha256=7tfTNkkBFduqOHspgu-qVckndwomvJIsToV27yNFqX0,3847 +PIL/ImageTk.py,sha256=Oujs08p7jta7H9a0UZYw_MMDLqSGncjqguqLABgUMDM,9108 +PIL/ImageTransform.py,sha256=rksN-m5SPqnTuINfQt_4Pkm_Qbfef78Lx-qK1CteeE0,2876 +PIL/ImageWin.py,sha256=vQjPXEEbmS0Xv8aIWQwOM2ihPObeeINJ-DckOftR8wQ,7473 +PIL/ImImagePlugin.py,sha256=FTiPg7uAgT2NS9ngb5X1dZA41ZQM3tkjbQWzpIF6LaE,10309 +PIL/ImtImagePlugin.py,sha256=bLDwFTA3FLPzODQtw1FEwLuToJP91QwvqCTzeH73-TY,2223 +PIL/IptcImagePlugin.py,sha256=EF3X7DyiGOY3YKsPKXYsKCxsSDX6FR2_KG5H-nh4kNw,6969 +PIL/Jpeg2KImagePlugin.py,sha256=OAkVUMLPT4I_1QlTcW7scju3WsCqr0BilBdn7C2aXoM,7866 +PIL/JpegImagePlugin.py,sha256=EQLvtCvdhay-LlQrswHtwHgWGli89gA49Rrybbn6UcI,25408 +PIL/JpegPresets.py,sha256=jcMjcWmpnQhMtgAtMdltz-eztO0Y1-x45sLN_VA7e4g,12365 +PIL/McIdasImagePlugin.py,sha256=ZYk1D96P7gO5A9FgFSbCrB1ewgVpx7rP46rbWAMvRa0,1755 +PIL/MicImagePlugin.py,sha256=CYiHPyfGsp46k1i3jvWKmGL9Z1PHSvORvKvjdVWGVao,2365 +PIL/MpegImagePlugin.py,sha256=34yMKXGAg9tL_FxrGTu0g1gMI6ZX-p75rvUkugx9rPw,1880 +PIL/MpoImagePlugin.py,sha256=fPCgu5BXjjgef6yvJKN1md-HNy-90kOSQ8OzjgfkGu0,3022 +PIL/MspImagePlugin.py,sha256=lJRcxXQx7M-Hl3KTuA-SUuTh6COYz34c529CjTq-D7E,2263 +PIL/OleFileIO-README.md,sha256=7BhnoybuHsGmbaGvQt4-juROtNpIzQZ9UvrXoEsJv0w,17799 +PIL/OleFileIO.py,sha256=gjkWRTrgE9VN7BKeLI2dVAbK7nQTQys-pu1TPSbBAqM,102741 +PIL/PaletteFile.py,sha256=WyvvsHKi7afbjiOoFmx4yF8uusrMTz5_bXF86-68SWI,1113 +PIL/PalmImagePlugin.py,sha256=31oLjnm66aIwNQHVNeeuMKvC4KsVh-zgsuRdDmWeMrk,9267 +PIL/PcdImagePlugin.py,sha256=GIG6PN2Rd96xHk1eqt0wUFfha_HW2XI0oHI32GpHLPw,1303 +PIL/PcfFontFile.py,sha256=FE67pIhqGQ2PYOQ_4ImeOcJNcGr2Onxi9Eb_BYiqvr0,6194 +PIL/PcxImagePlugin.py,sha256=6W_CaLtY9BMai4FKs6HWHphAFDTh_IhjhYaAB7J_yAE,5335 +PIL/PdfImagePlugin.py,sha256=GX-U5Y9ExhuX1_xMMbivEmzBDJCbTaMcEfMKolRclH4,6752 +PIL/PixarImagePlugin.py,sha256=RdOw24KeRI2P6xqeQ2aO8dN855IkIvXzzfYGWetDw7E,1594 +PIL/PngImagePlugin.py,sha256=ePAEtZCI8j8odI8IMeX9PiYXUJ224C9FM_iZ3Wgp2m4,23409 +PIL/PpmImagePlugin.py,sha256=lRip9Z7vQ3qfq7WKLII3g2uxP7w_3HRhoiVUue3nz_Q,4769 +PIL/PsdImagePlugin.py,sha256=YRqZC0Krrhoclu9XWU9IPwxjL3Wc_9tBYBXc9J7L4N8,7601 
+PIL/PSDraw.py,sha256=kf_m9DtTQhea7VmZXwnI_WdeGInsRG_c1biN18ManyE,6855 +PIL/PyAccess.py,sha256=iBohka8NLGDg1zw4x5jzRIoIXO1F50aRAoBdU9gxBlE,8718 +PIL/SgiImagePlugin.py,sha256=adiXiaA7EOHQ3w6oO0ySjQvBZHLqz1jefZ7Liji-N5Y,2181 +PIL/SpiderImagePlugin.py,sha256=F52u1q-koMReVYNXcox6NItLnKL0am9WLQB3z29I7Fo,9414 +PIL/SunImagePlugin.py,sha256=CSgKe1FOxEDg19fed2UoVnfsgcoJ8Sa3DYEJQSi9P2E,1972 +PIL/TarIO.py,sha256=5TS3r1DEi4pCHCTW2Xqcwh1ff0oTgcm2bQRdZZx1Ftc,1223 +PIL/TgaImagePlugin.py,sha256=N_JH5yf8Ka-II2PD3DK7oIDlW-jE7b6e2fKGGagC05w,4991 +PIL/TiffImagePlugin.py,sha256=IKncShdnhcPYCqh7G1JVVomcxtJFBfEdHOZU4zCBh_Y,48845 +PIL/TiffTags.py,sha256=AIUZ0oZkkOlMBpb2uuER7qv_aRYNGvEdih2yc6GNjGg,9273 +PIL/WalImageFile.py,sha256=Ty1vo2r6Nzxytin_FKAjuPQg1_veSpr8MNJB1aa8vfo,5506 +PIL/WebPImagePlugin.py,sha256=JSTb5X05ns6OnZarFRUYTHWm16BiEnTY4IRWGUBeevo,2017 +PIL/WmfImagePlugin.py,sha256=iikUDdzOPZWWpm5I2mPnEe9Ao1GwbREvq5KHjA_8-40,4147 +PIL/XbmImagePlugin.py,sha256=TkLzvPcmUrmubEnk4EYHxgB02aGNzu555AveFcJMKME,2505 +PIL/XpmImagePlugin.py,sha256=E7sxq_QI7b4fm2rLrnkIiDsvH5LmyJn_-6QWlRgzGI8,3130 +PIL/XVThumbImagePlugin.py,sha256=VivoMNY2Ge2uTdHMxCpt7-U57vbxXiGjpIPyVNjomIw,1845 +PIL/.dylibs/libfreetype.6.dylib,sha256=avH5Ra5v5B6RY8hpRuGnrAOPHtr-ujIAes6IYL59uuw,1761184 +PIL/.dylibs/libjpeg.9.dylib,sha256=ffmG9TezWoHQSopriiS-lYdL4YH8VMvtCrRXhstxf0A,717656 +PIL/.dylibs/liblcms2.2.dylib,sha256=jrfgiMwNr5FoQF_sHkjCNHiflfS5pIWqpSKBOTdDmBc,945468 +PIL/.dylibs/libpng16.16.dylib,sha256=eOSQX0T_66EDbO1h8JYUWqYPoUdmci-Unt60konBUDs,589148 +PIL/.dylibs/libtiff.5.dylib,sha256=m8z24UpM6JBqAMfhfmIRim41a8h64wtJ1YwloIkZTSc,1242044 +PIL/.dylibs/libwebp.5.dylib,sha256=r-BX4r1fPgwJ5G66LCn1hnhxbUI9Nmc59ug3l-ncob4,1091172 +PIL/.dylibs/libwebpdemux.1.dylib,sha256=3Gqx0t1ppSV-WBStaCup1mwyQ7_1VDfmFYP8imu1ePQ,51584 +PIL/.dylibs/libwebpmux.1.dylib,sha256=MjxAjSuxLyIRvp2RqpI8rHcC2IrVc1mynkPvtZ_umIk,91420 +PIL/.dylibs/libz.1.2.8.dylib,sha256=Ych-WZ0EiEjk31uWeIruBirBhTusc0Naj7ryhwTWHJ0,249092 +../../../bin/createfontdatachunk.py,sha256=Fq7Z96COeAGZe6ku17ktW4uzX_lEgs_cmFRWrOMz4xo,538 +../../../bin/enhancer.py,sha256=duWmYjo2__GpiYQTjHeeZR5f0i6fK7udIkEq9L-1ej0,1497 +../../../bin/explode.py,sha256=VwCANn7yubleSj0HXgF1CiuryiVkE3sqFgwg6nymLcs,2428 +../../../bin/gifmaker.py,sha256=XM_OCPWYc3TqnB-GwF38wMClk_q_aQPaaTxZFlLNoBE,631 +../../../bin/painter.py,sha256=Tjy0c7DP2c_UhIWA9mEOt3JjQpBxKY-25r1amIjgrvo,2024 +../../../bin/pilconvert.py,sha256=4MdeEuxPZ1BX5QxdL1it8k05fKB0jWp2xBYsMO-2o_U,2348 +../../../bin/pildriver.py,sha256=m3Et64ONgcvGhcfTlEuFcxcMvjBk8RbGCfMtu0eTBaI,15493 +../../../bin/pilfile.py,sha256=v8sZYbiPl7qladMcQEkvvaQRRG-hznWHT_LPe5S89o4,2656 +../../../bin/pilfont.py,sha256=g56o2yfPoXaOChQwzESzXO93UydKMk3Ug-pGqpbmZvo,1017 +../../../bin/pilprint.py,sha256=HWBSm4Kl_ej1obgajKkOBFEAP92bj-G2BQWRmPZ8ajo,2384 +../../../bin/player.py,sha256=5ZmYQXo_LTPhvwXXd0WALfWHYlFCwFMX8OxgwgpHccU,2163 +../../../bin/thresholder.py,sha256=1wOG_xiVpBQJy4sqihQuAquhxFk-N6B6ICdZ6N1mv2U,1784 +../../../bin/viewer.py,sha256=R9xolyX6ofsdrETvZXiLY0E357Sv1CwZH3cS9wvSL1M,957 +Pillow-3.0.0.dist-info/DESCRIPTION.rst,sha256=LelDlqUIT_eh0ZAPDQ6KRsGtRu1CTr9atEg3gVcUbcY,2131 +Pillow-3.0.0.dist-info/METADATA,sha256=O1QbSwJKEabZJIIq2N1QImewX2kM814nsUMd92xh2z0,3306 +Pillow-3.0.0.dist-info/metadata.json,sha256=7v8T2qvwu5x62KsB88twk24Ciy4qtbGfLRU7FXtPQM0,1241 +Pillow-3.0.0.dist-info/RECORD,, +Pillow-3.0.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +Pillow-3.0.0.dist-info/WHEEL,sha256=PDKbaZw4S6tFBK4Wu3E7RG8JTElWZ5rVAcyYIGagy50,109 
+Pillow-3.0.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +PIL/__pycache__/EpsImagePlugin.cpython-35.pyc,, +PIL/__pycache__/TiffTags.cpython-35.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-35.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-35.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-35.pyc,, +../../../bin/__pycache__/player.cpython-35.pyc,, +../../../bin/__pycache__/enhancer.cpython-35.pyc,, +../../../bin/__pycache__/pilconvert.cpython-35.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-35.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-35.pyc,, +PIL/__pycache__/TarIO.cpython-35.pyc,, +PIL/__pycache__/BdfFontFile.cpython-35.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageEnhance.cpython-35.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-35.pyc,, +../../../bin/__pycache__/viewer.cpython-35.pyc,, +PIL/__pycache__/ImageWin.cpython-35.pyc,, +PIL/__pycache__/ImageTk.cpython-35.pyc,, +PIL/__pycache__/SunImagePlugin.cpython-35.pyc,, +../../../bin/__pycache__/explode.cpython-35.pyc,, +PIL/__pycache__/PSDraw.cpython-35.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ExifTags.cpython-35.pyc,, +PIL/__pycache__/ImagePalette.cpython-35.pyc,, +PIL/__pycache__/PyAccess.cpython-35.pyc,, +PIL/__pycache__/_util.cpython-35.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageFont.cpython-35.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-35.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-35.pyc,, +PIL/__pycache__/JpegPresets.cpython-35.pyc,, +PIL/__pycache__/FontFile.cpython-35.pyc,, +PIL/__pycache__/ImageCms.cpython-35.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageShow.cpython-35.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/Image.cpython-35.pyc,, +../../../bin/__pycache__/gifmaker.cpython-35.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-35.pyc,, +PIL/__pycache__/WalImageFile.cpython-35.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-35.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageChops.cpython-35.pyc,, +PIL/__pycache__/FitsStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-35.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageSequence.cpython-35.pyc,, +PIL/__pycache__/ImageGrab.cpython-35.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageStat.cpython-35.pyc,, +../../../bin/__pycache__/pilprint.cpython-35.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-35.pyc,, +../../../bin/__pycache__/painter.cpython-35.pyc,, +PIL/__pycache__/ImageMode.cpython-35.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageDraw2.cpython-35.pyc,, +../../../bin/__pycache__/pildriver.cpython-35.pyc,, +../../../bin/__pycache__/createfontdatachunk.cpython-35.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-35.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageOps.cpython-35.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImagePath.cpython-35.pyc,, +PIL/__pycache__/OleFileIO.cpython-35.pyc,, +PIL/__pycache__/ImageColor.cpython-35.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-35.pyc,, +../../../bin/__pycache__/pilfont.cpython-35.pyc,, 
+PIL/__pycache__/PngImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PcfFontFile.cpython-35.pyc,, +PIL/__pycache__/PaletteFile.cpython-35.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageMorph.cpython-35.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageFilter.cpython-35.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ContainerIO.cpython-35.pyc,, +PIL/__pycache__/ImageTransform.cpython-35.pyc,, +../../../bin/__pycache__/pilfile.cpython-35.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageFile.cpython-35.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageMath.cpython-35.pyc,, +../../../bin/__pycache__/thresholder.cpython-35.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-35.pyc,, +PIL/__pycache__/GdImageFile.cpython-35.pyc,, +PIL/__pycache__/__init__.cpython-35.pyc,, +PIL/__pycache__/features.cpython-35.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-35.pyc,, +PIL/__pycache__/_binary.cpython-35.pyc,, +PIL/__pycache__/ImageQt.cpython-35.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageDraw.cpython-35.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/WHEEL new file mode 100644 index 0000000..d2f35a2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-cp35m-macosx_10_6_intel + diff --git a/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/metadata.json new file mode 100644 index 0000000..8a432d2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Python Imaging Library (Fork)", "classifiers": ["Development Status :: 6 - Mature", "Topic :: Multimedia :: Graphics", "Topic :: Multimedia :: Graphics :: Capture :: Digital Camera", "Topic :: Multimedia :: Graphics :: Capture :: Scanners", "Topic :: Multimedia :: Graphics :: Capture :: Screen Capture", "Topic :: Multimedia :: Graphics :: Graphics Conversion", "Topic :: Multimedia :: Graphics :: Viewers", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"project_urls": {"Home": "http://python-pillow.github.io/"}, "contacts": [{"email": "aclark@aclark.net", "name": "Alex Clark (Fork Author)", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "keywords": ["Imaging"], "license": "Standard PIL License", "metadata_version": "2.0", "name": "Pillow", "version": "3.0.0"} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/top_level.txt similarity index 100% rename from Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/top_level.txt rename to 
Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/zip-safe b/Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/zip-safe similarity index 100% rename from Darwin/lib/python3.4/site-packages/Pillow-2.5.3-py3.4.egg-info/zip-safe rename to Darwin/lib/python3.5/site-packages/Pillow-3.0.0.dist-info/zip-safe diff --git a/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..479eaf5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/DESCRIPTION.rst @@ -0,0 +1,137 @@ +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. 
SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. + + + diff --git a/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/METADATA b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/METADATA new file mode 100644 index 0000000..a947267 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/METADATA @@ -0,0 +1,157 @@ +Metadata-Version: 2.0 +Name: SQLAlchemy +Version: 0.9.7 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent + +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. 
SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. 
The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. + + + diff --git a/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/RECORD new file mode 100644 index 0000000..3bba30a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/RECORD @@ -0,0 +1,367 @@ +SQLAlchemy-0.9.7.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013 +SQLAlchemy-0.9.7.dist-info/METADATA,sha256=BJMEdxvRA6_2F3TeCnFjCS87MJ-9mQDfNhMasw7zlxw,5785 +SQLAlchemy-0.9.7.dist-info/RECORD,, +SQLAlchemy-0.9.7.dist-info/WHEEL,sha256=Er7DBTU_C2g_rTGCxcwhCKegQSKoYLj1ncusWiwlKwM,111 +SQLAlchemy-0.9.7.dist-info/metadata.json,sha256=L8ZvHjkvIuuc2wYjqxQXcMfbjzJukQrYyJWZgDtacI8,948 +SQLAlchemy-0.9.7.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=1e-MTh9yzDNEKrfMPLTPbxzQxq2--QE4H-tc03eT5uE,2072 +sqlalchemy/cprocessors.cpython-35m-darwin.so,sha256=FEiXsJ5pdsq_PuqNiR7jrnTn99nThaja3fdifVbizV4,17012 +sqlalchemy/cresultproxy.cpython-35m-darwin.so,sha256=s_Y7BzjDh2KutKXsl9PyCZMdpMs1nmy1WlKCnAipmbI,18844 +sqlalchemy/cutils.cpython-35m-darwin.so,sha256=bGZ5-TxVNVjfkNxMTkbugVNeRQHI3mKXadQ2TXXvFu0,9756 +sqlalchemy/events.py,sha256=n0Z8zkW0Fdwdv82HLssEKhNNkCJP1nj3jZrztZinkYc,39724 +sqlalchemy/exc.py,sha256=aIZHSzr2SgBoyfNcjm4cWc5e81B953kSq2BskM6fBQY,11392 +sqlalchemy/inspection.py,sha256=zCZPzSx4EwImFRXU8vySI8xglZP_Nx4UEyKtjGMynhs,3093 +sqlalchemy/interfaces.py,sha256=SzmUZ1pL-7b4vEH361UFoCDW6GdM64UFXn5m-VIwgIA,10967 +sqlalchemy/log.py,sha256=4EG734XnC0sZJ-jFZJKJ1ONCJq1rAZCyu4SyAT8q3yQ,6712 +sqlalchemy/pool.py,sha256=LOxfK-5Mpuz6RO462sGSFCXieXauqcoMdMZt28XO0to,43830 +sqlalchemy/processors.py,sha256=h-deajMZbjXpfiOlOVNfBAxuIQf3fq9FemazYk1BGho,5220 +sqlalchemy/schema.py,sha256=Kr_6g5anin76KPkgWA_uagf-EzPLTWvsbFuKorJNHf4,1106 +sqlalchemy/types.py,sha256=E-EC4GrZeg_aEgGdXoXIw7iSgLtYJMNccYoDEXtF81s,1635 +sqlalchemy/connectors/__init__.py,sha256=-3OdiI200TYZzctdcC8z5tgV__t9sdhukPWJBHjlszA,278 +sqlalchemy/connectors/mxodbc.py,sha256=JZj-z_sY-BYiAM-T8PK9m-WN3vhfObmzAP6eb8Abpag,5348 +sqlalchemy/connectors/mysqldb.py,sha256=ZSi4E_f5Or7bqsI6Zv6cjLGEMAN0bedv7sHnCkrwwbM,4748 +sqlalchemy/connectors/pyodbc.py,sha256=vAvQwk3wDt1wRkiBwuwe0hC-7Fla4ekqBJLO8WPS_xs,5890 +sqlalchemy/connectors/zxJDBC.py,sha256=cVhbJ3PqmVD0KJ7m6FRtR9c5KhHIG-hhm-NX-4rgd5E,1868 
+sqlalchemy/databases/__init__.py,sha256=CKXfBXKaWADu571n8lJR8cndndTHDUAAyK-a_0JLGjg,879 +sqlalchemy/dialects/__init__.py,sha256=XtI5s53JyccSQo2GIGNa89bXn8mtePccSukfR_-qipc,1027 +sqlalchemy/dialects/postgres.py,sha256=_FjxoU0BVULlj6PZBMB-2-c4WM6zviIvzxCgtXhxhsY,614 +sqlalchemy/dialects/drizzle/__init__.py,sha256=AOyB8JGeTbwWfpM5wVLGhbyzX2MRKPFIAoU00IMtrzw,573 +sqlalchemy/dialects/drizzle/base.py,sha256=dLQxRslE6oyugIeSCdlrVP4DYuCgWiH0kgbpuD59-KM,14995 +sqlalchemy/dialects/drizzle/mysqldb.py,sha256=myT7EXJg9ToVBrbUkkfmGgCb5Xu2PAr3xnFeA-pvS3s,1270 +sqlalchemy/dialects/firebird/__init__.py,sha256=bmFjix7gx64rr2luqs9O2mYm-NnpcgguH951XuW6eyc,664 +sqlalchemy/dialects/firebird/base.py,sha256=nnUrSBfI_chqZmA32KOeMzx8cgpEE5Tr9RNkoZA_Qpk,28061 +sqlalchemy/dialects/firebird/fdb.py,sha256=mr4KaJgHzpFk6W7g8qBPDLy6WAQkdnR5gMipWmM_6EE,4325 +sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=WaIPAEIqQJ3QRpYk44IwwC-3t4X4jxv_LWK9CI0xBf4,6299 +sqlalchemy/dialects/mssql/__init__.py,sha256=dzp85H5bMoja-EsD08ctKaope5e-bjr9r6QoxL9TJXo,1081 +sqlalchemy/dialects/mssql/adodbapi.py,sha256=j1K_qpA_v8Uc6zX1PGSGsqnHECUQIx_mnxMr7h9pd3A,2493 +sqlalchemy/dialects/mssql/base.py,sha256=TY7YAXMg_ZKGtaWKrSOV1VTVpZ2oGh68ga_MbQYZvX4,58663 +sqlalchemy/dialects/mssql/information_schema.py,sha256=-E4WAgB0yYoncty676FvmZL9DEqXCEj0bGwvFc2aZD4,6418 +sqlalchemy/dialects/mssql/mxodbc.py,sha256=pIXO0sxf_5z2R68jIeILnC1aH5s8HyOadIzr7H13dxw,3856 +sqlalchemy/dialects/mssql/pymssql.py,sha256=DXoU2r3dcuxMh_QLB713iPAANfcsRqcMhhmBFKg_k9o,2978 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=gHTjyRc9VOEJDZqs1daBmCPTDZGPQzhoOet2kxy7r2Q,9437 +sqlalchemy/dialects/mssql/zxjdbc.py,sha256=x33xz8OTS9ffrtRV9lWxbReLS4qW3pq2lp1aljDHGA8,2144 +sqlalchemy/dialects/mysql/__init__.py,sha256=Ii4p3TOckR9l50WvkstGq1piE_eoySn5ZXPw8R7D_rI,1171 +sqlalchemy/dialects/mysql/base.py,sha256=Oqh2Z2mcFDVlUL0DwPy3Q7bSzJQiC1JxCdMrAMwMyEo,109960 +sqlalchemy/dialects/mysql/cymysql.py,sha256=MWtGXcS4f5JpP1zBLzejWz3y7w5-saWlwQfH7_i1aao,2349 +sqlalchemy/dialects/mysql/gaerdbms.py,sha256=oe9BfWjFJglITk6s_wQQYe5vs5h9zYcMUuUx2FmE7J8,2724 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=FX7HEsbiqE98IbpI3paFzfP2geLYsTadXox_lYmL9eE,3911 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=Gf9QLAY2o4Eu9IeagLw4e-v5uQGHb7sJgP_COnwn8MM,3259 +sqlalchemy/dialects/mysql/oursql.py,sha256=DGwyO-b7etGb76XdDrcQ-hwtVu_yIzU-8IF_iYw5LqU,8756 +sqlalchemy/dialects/mysql/pymysql.py,sha256=TLsJeMHMZGvOTlbO2JhQqnjcALKmAgTedQJCBCnME0Q,1232 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=agCzgILoQdSVOKup3OUMvt9RkDAtwyg8yQgIibo14XE,2640 +sqlalchemy/dialects/mysql/zxjdbc.py,sha256=MyCM6TGRxzjrhvsow6MhwLScB4qQTZn1DmyhhhPGcNg,3803 +sqlalchemy/dialects/oracle/__init__.py,sha256=hO304rf8aiIq9--QQSmuvi8MBZBcZhNzjI48cs1JTZo,797 +sqlalchemy/dialects/oracle/base.py,sha256=cG2Ov7EJ8GKg18mXLCiKLC0WO0ssdONykAPydaTCpJQ,49391 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=rneYv6pQOXVM2ztFBCO6bBcVAfR7vdtV8voEbvBYiIY,37737 +sqlalchemy/dialects/oracle/zxjdbc.py,sha256=c_nHf8X1GM0OkqXAKgQiTEved3ZDDzDop6dG_SpE55w,8034 +sqlalchemy/dialects/postgresql/__init__.py,sha256=JdIQ3kAuikIwYNEc4W39-BKnOepAb3jpdN7RXtwru9E,1251 +sqlalchemy/dialects/postgresql/base.py,sha256=EV642aX-WlOLDNirz33f50GXOGasLG8TBw8qaggo0GI,88366 +sqlalchemy/dialects/postgresql/constraints.py,sha256=OAIZmNYW3PEuWVagjj9p-CpCcdh6tM_PTObwjqD_vVs,2543 +sqlalchemy/dialects/postgresql/hstore.py,sha256=_HhwrAGGEk8KlKQZrqzJ_PSLHlH9Cd4wemN0ascv-Uk,11402 +sqlalchemy/dialects/postgresql/json.py,sha256=ezCEhBZKnobL2epNbT3re9AWQxvtYHpsQxK7fmB3XcI,11066 
+sqlalchemy/dialects/postgresql/pg8000.py,sha256=Uiu6RhLLn42UpCu27Z957dhet59zZh4z3jmbjTfgLJg,5428 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=cjIOs6k-EKVGhbsec4sx8MNq7Nk-c1z4hnW5ZJYOn4U,20761 +sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=ZxaL0d8xA0yhlhOdK09lLtJdWH90vdq57EITBrrdRms,2173 +sqlalchemy/dialects/postgresql/ranges.py,sha256=q3pc7jeUOc83lkgE3WVN8PlqKzeYJjuTHuXeRvY-s2s,4814 +sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=c9_JUHsjiaTbmxqoe3v2YS0oIgkk5xOL0e6ZSaUM_EI,1397 +sqlalchemy/dialects/sqlite/__init__.py,sha256=evr3TsIXnZIKD7QY-CHC-MVvkt28SyV0sCJrIjpnQJM,723 +sqlalchemy/dialects/sqlite/base.py,sha256=jQ5kDBsYuP4yoENO6lBJ792YDY9cHJRfPXq4etec0mI,39398 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=dHrZk8Ut8sgNpVJ2-Byx_xJoxn55zLS_whmGSjABNCk,13249 +sqlalchemy/dialects/sybase/__init__.py,sha256=4G1LG5YqVaE2QDIJANqomedZXRQrVIwMW3y2lKWurVU,894 +sqlalchemy/dialects/sybase/base.py,sha256=ArOCZBItXEupylulxBzjr9Z81st06ZgGfqtfLJEurWE,28629 +sqlalchemy/dialects/sybase/mxodbc.py,sha256=1Qmk1XbcjhJwu0TH6U1QjHRhnncclR9jqdMCWsasbYM,901 +sqlalchemy/dialects/sybase/pyodbc.py,sha256=FsrKZP9k9UBMp_sTBIhe50QWwJmcpDw6FAzXmEhaq2s,2102 +sqlalchemy/dialects/sybase/pysybase.py,sha256=c9LCZ0IM4wW5fwMvvpgaflDlZJKcan4Pq7QwFE1LAUw,3208 +sqlalchemy/engine/__init__.py,sha256=o2daUscphvcFRkjOPMl0BJLGO6PXev_L8eDcx-7Zafg,15923 +sqlalchemy/engine/base.py,sha256=3_F3iPisYSVePe8G2vIsZqqEz29qfSvmsijwdgsyNYI,70311 +sqlalchemy/engine/default.py,sha256=W2TuN72wafDfLQU-ud74Fpl6pL8TjD5YdftrPrclyAY,34371 +sqlalchemy/engine/interfaces.py,sha256=pzFtwvtRIFze67WKMFKzVivZbSsXGQeEngoY-2DnL8g,30327 +sqlalchemy/engine/reflection.py,sha256=GocL2XvTxrmSdqn4ybWiRTlfsfiLSZcUISSJ_M1Bur8,21545 +sqlalchemy/engine/result.py,sha256=Is8N8TcQISjwBPwRxdsiOgyKqIDV8cJ15wBcwQWnsEw,34979 +sqlalchemy/engine/strategies.py,sha256=oVroyOyomN2q_OJfpHMhBa_0q-n_vWzI2H1cZIZb5G4,8715 +sqlalchemy/engine/threadlocal.py,sha256=UBtauPUQjOPHuPIcBIxpiKUmOm_jpcUrkftbJ32Gt4E,4103 +sqlalchemy/engine/url.py,sha256=9kL6hfESlqwlBcc5E_2a8YsuGF1EseMBm_DQyUAJ4XA,7521 +sqlalchemy/engine/util.py,sha256=wh2y0Uwt9O1ulnT6YhxAShcDXZTtigQZbiBnMxpvHeo,2338 +sqlalchemy/event/__init__.py,sha256=sk4pgB4dEPftPZMtgFctlqsPnWYjtcvqcpTVSg6mQ9M,419 +sqlalchemy/event/api.py,sha256=zLbcAKsKsYX5I3zmiwNIewwto1puGSWCm49crnAbHbk,3854 +sqlalchemy/event/attr.py,sha256=-ENxlontP0HnKFE9nyHMC4jS5pvMlXmkiK2ehAcdzLU,12566 +sqlalchemy/event/base.py,sha256=DU3EYBWflaGrwpEz_jocGQTsR3_nlKKWMFVThh4D6f4,7248 +sqlalchemy/event/legacy.py,sha256=vA9km6n_ZN1YSI5C-A9Jw4ptKfwZueTuvJbmtVYY1os,5818 +sqlalchemy/event/registry.py,sha256=bzPbXp2NTcYKQC7wsYUk-5rNMJMz5OwPGGsnzyI2ylQ,7470 +sqlalchemy/ext/__init__.py,sha256=wSCbYQ2KptpL8sNFiCbEzTjI2doWmcEAiRAqx58qLcY,235 +sqlalchemy/ext/associationproxy.py,sha256=XDr4UarpHG7ulrhiEsD8PYnYp4MUEUhpvjPfPdbayGw,32975 +sqlalchemy/ext/automap.py,sha256=r7F2VM0T--wecKH6uNFIWsYTElwwXCW9cOzF0llKz10,39713 +sqlalchemy/ext/compiler.py,sha256=m12MOPF6YbhY2OWJccWVRxfe2cigX0VcKzAhSr5FTI0,15770 +sqlalchemy/ext/horizontal_shard.py,sha256=T31IsHSpyJC27YLHsSxQ7R4uGIn4guICsVNNDWE994k,4814 +sqlalchemy/ext/hybrid.py,sha256=ZnPkE4ORZwA1md6rBYeyK-ySAcVsPZdLzRLXS3ZxmYo,27985 +sqlalchemy/ext/instrumentation.py,sha256=5MjuuikGz1x_itSm22PZMWr-los8v-5PK6HE5TKCSSM,14646 +sqlalchemy/ext/mutable.py,sha256=GCsgkaFyypUa1c3x7q9oNF2uoGM8d3LTv3DgDB9x5J8,23069 +sqlalchemy/ext/orderinglist.py,sha256=5vRTTK4Pdm2_IrbRIxWV8qHgDxktPVljrUBqwyLk-TE,13695 +sqlalchemy/ext/serializer.py,sha256=JiHdBiStZDlEvh5yzsd7lJAkmwfHJbsgDtcPwCRFOi0,5586 
+sqlalchemy/ext/declarative/__init__.py,sha256=kGdbexw3SfbgS8Uq7og9iTLDXV-Hk8CJnXn_4nulql0,47618 +sqlalchemy/ext/declarative/api.py,sha256=idvoFBxhqzQvhTEPY764L1RpdrrSCJS3yU_J9xTbqJY,17780 +sqlalchemy/ext/declarative/base.py,sha256=oXplGZM3G81DPKcaI_PsNVGjmpj9g7eqmDOXod3DzvU,20036 +sqlalchemy/ext/declarative/clsregistry.py,sha256=niIyzC-WeITMJeFQqkdZdeLxuKni-vHrs7-LJ3WZi_g,10314 +sqlalchemy/orm/__init__.py,sha256=-RsTaAlLe9a2GvHw74fN5jyB2brV-i0XXViPzPGp2gc,7976 +sqlalchemy/orm/attributes.py,sha256=aoHu08zOuMziAlwgVyO2cr_86m26PFnTx6YnZ0QgcGQ,55522 +sqlalchemy/orm/base.py,sha256=lijsuavy2C4UJd7RrKKx8U6IdxsXwG8xV_XdqQ6B31c,13181 +sqlalchemy/orm/collections.py,sha256=RmOIb6b-XrRD8sSJ-9qMDM3aRyps1aOZcbQlWt4Ojss,52951 +sqlalchemy/orm/dependency.py,sha256=zEruE4dj9m7qHC9nS0xIAVx4L0WB58c6eZHPsZF25QM,46072 +sqlalchemy/orm/deprecated_interfaces.py,sha256=CXg9nh2XUyrRYDVS20XVc3G3niHx84J8ko7PBXOXcAI,21941 +sqlalchemy/orm/descriptor_props.py,sha256=b1GyOu45jjvxgBMwhVXBSyBxhYM7IVlOGG5F_qOl5co,24455 +sqlalchemy/orm/dynamic.py,sha256=XQTf7ozzdkloQMUPPkwM02eE_sr0sYt-uQgpkdFt2SU,13338 +sqlalchemy/orm/evaluator.py,sha256=ZrExCzWmvVZS2ovM8t09cPzfKOqbF7Zm3b88nPviPQM,5032 +sqlalchemy/orm/events.py,sha256=6AMKAa-V_72GsGj3dP158FdiqXbF3JM9IeBpAQFPsjo,70274 +sqlalchemy/orm/exc.py,sha256=Otfiun4oExJpUp8Tjk2JcSt9w3BkL1JRi95wXo-tv60,5439 +sqlalchemy/orm/identity.py,sha256=e_xaDoNI06hLaiDNomYuvuAUs-TAh901dwqeQhZs320,7091 +sqlalchemy/orm/instrumentation.py,sha256=rl72nSRqgk4PXlY3NsZJ_jtkrdszi_AwlBFRvXzRaug,16787 +sqlalchemy/orm/interfaces.py,sha256=vNj2Jl_U_S2rlITAlqg90mz9RTdRmtZvNJHCUnekWig,18983 +sqlalchemy/orm/loading.py,sha256=mvvth2KOU2CTNNnQuhAtzQRjuFoEf63jTjQ3Seop56M,21257 +sqlalchemy/orm/mapper.py,sha256=SZ1V59o6e10sUyAT9XytHjCvzyGALMF99QajZcWAAd8,108109 +sqlalchemy/orm/path_registry.py,sha256=VzaWNV7iA8Z5XkXcHLP379H6gQm1qSfunZ4dRaxyLDY,7672 +sqlalchemy/orm/persistence.py,sha256=Dz3prpO_nHfPO67dDwO4-6buSOziqXQktXfcyIGseYI,40862 +sqlalchemy/orm/properties.py,sha256=AB5fBY8EEchTU7QYgWQMn93i7KJoFdKI15O9ofETo8k,9557 +sqlalchemy/orm/query.py,sha256=GCMI29Hj3XXYFMo7cyGwpThv2rRqNuFa8T4lpt4bDcg,129823 +sqlalchemy/orm/relationships.py,sha256=IjCW_ng5YXoUODGj7EbkS3q9r1YnE4Y788O34ZeFeBI,111050 +sqlalchemy/orm/scoping.py,sha256=OoRgem4nICXPZAeK4-Sbe2kDzWDtyBHd6fZtK3DZT4c,6101 +sqlalchemy/orm/session.py,sha256=DHchZuztFAGH256oCA9dDKJCznyvvN9EeoFEXEpKW9E,95874 +sqlalchemy/orm/state.py,sha256=_hEslw-lhYnJTcSMmVfmsAzerR1yxwwtLDvCDrw3PK0,21014 +sqlalchemy/orm/strategies.py,sha256=0RsDUGe0i1Di_eFXkWAW0ZcV19V-m0K9YxmT8lM9Q-k,54293 +sqlalchemy/orm/strategy_options.py,sha256=q1_-h6Vh-59mRgM9dGNApxjF18TfrnwXMRk7wpHvTrE,32170 +sqlalchemy/orm/sync.py,sha256=NDLMpllJjk_zF7rgCSy4WlaeEfP1TVSj-UeVU_QZbVs,4736 +sqlalchemy/orm/unitofwork.py,sha256=F70Dfg7kBtcqLUp2qBYlcQZIDx3zI4bpNsIJFRxcf90,23234 +sqlalchemy/orm/util.py,sha256=-wzG6p6NGztBvxHiosvTvShwSQpZdd3KKT2mw26l86o,35695 +sqlalchemy/sql/__init__.py,sha256=158RHlIfn8A-OyihHBGd1jNcd4Ed2-laRkgeGJIvs4w,1721 +sqlalchemy/sql/annotation.py,sha256=JXguy7w1i3jPtr7AMiJCZ5B-TaECZquA8GmM2laEcC4,6111 +sqlalchemy/sql/base.py,sha256=z_dXcqIZsqKaCsSgl7dz5t97Ndb3v0l8gBe53jYRwTE,21416 +sqlalchemy/sql/compiler.py,sha256=7_GOyLILz5GBozTCXmQAnOResx0IivYH4DCf7ZV9acs,109419 +sqlalchemy/sql/ddl.py,sha256=WjCxmUAHmlbFXe5zofpj_lYU6-TEqvCJR9cxThHTIlc,28693 +sqlalchemy/sql/default_comparator.py,sha256=4DYyP32ubGMPei66c-IMAMsNKE-xoXpptyZDEju5jL4,13132 +sqlalchemy/sql/dml.py,sha256=ruyX-MuW_FcOW7DyjqGlyXwQUQlpAHtx73LHxfVXwqI,29526 +sqlalchemy/sql/elements.py,sha256=tyrdO9Bzm6em1sNEnwJO04LJCz-b1AmomRTp9OuerKQ,121164 
+sqlalchemy/sql/expression.py,sha256=ERaOilpJJEWVXXCanhEY5x3Jeu5Q3pLuDSIQZCzq0bU,5668 +sqlalchemy/sql/functions.py,sha256=zI_Q6gqketUqehiNnDB6ezgDQC01iT7Up2jhPhWMTOg,16567 +sqlalchemy/sql/naming.py,sha256=FIdNBDvZwf1e-mW-Opjd_aKyeo986nWmYW0Dr9MOgCc,4588 +sqlalchemy/sql/operators.py,sha256=wsWdHm4sN6b6jfB8CTaFgBqww2jC-YjgpJJnmqzicVc,22510 +sqlalchemy/sql/schema.py,sha256=ts0hj8oYU9bx-amhfYpaDHdx4GFNwj1_avwHwm3eY5Q,132789 +sqlalchemy/sql/selectable.py,sha256=-CaVRHBIbgWpKe7kI5BSWY0x8AHTwUSJtq5d3XCQLwQ,109544 +sqlalchemy/sql/sqltypes.py,sha256=Zb7AMQlkMaVKTWQgR2o1JS4hwDjr6gjWl_POBaVGZ_0,54635 +sqlalchemy/sql/type_api.py,sha256=WsB-QZiEz1ez4g9OeXBKBqSnyluawX0ttG5stLmKJCI,37692 +sqlalchemy/sql/util.py,sha256=k2cBNkvVBl-B3AF5nD16Hq0NyWOE78iBbIf0b6PHA9U,19501 +sqlalchemy/sql/visitors.py,sha256=cohfnIfn4fD6O-qLhzhrwqrMaGhLlrRKvW0nTa72dHk,9943 +sqlalchemy/testing/__init__.py,sha256=RzQCY3RZ88UFBUhCGxnA82w1BsJ8M-0L-76-ex-Wt5o,1035 +sqlalchemy/testing/assertions.py,sha256=MoK89J6upTMKcPU23gTMIDb9Wk8qPvOkJd88y6lWNt0,15666 +sqlalchemy/testing/assertsql.py,sha256=fgA3QTe2vNQZzlGoBXmAhc0RA2JZKsbxyRAmr0FczZ8,11248 +sqlalchemy/testing/config.py,sha256=l34Qkqpz2Yu6BZxWm2zy7e5AqLyx_0adHLLpTq-bGew,2136 +sqlalchemy/testing/distutils_run.py,sha256=fzij-nmjhKKdS9j9THw8i4zWkC3syEzN5V4L9gx45YA,230 +sqlalchemy/testing/engines.py,sha256=uf4lllczl1qqmaOUrPNrW57cYgLLpJK8k3k8oduCTcg,13030 +sqlalchemy/testing/entities.py,sha256=1JpVCXMLwzSPpzetTqERD_3Zk3tiQBIDpBO9OVoVa9k,2992 +sqlalchemy/testing/exclusions.py,sha256=gjgLNk2PRBtsYbi_CsWGf6MxRLB8y4b9QkFbAWRfPuA,10053 +sqlalchemy/testing/fixtures.py,sha256=ys7TZ1uP9wF_OQXgTUNcXiUTOQZhQYgzc9fzjD420mQ,10587 +sqlalchemy/testing/mock.py,sha256=F0ticsEqquR82laaAqhJaTNDk-wJBjY8UqQmrFrvTLM,620 +sqlalchemy/testing/pickleable.py,sha256=9I1ADF_Tw-E6UgTHOuKGZP7_ZM72XhmEXNp_1qmh2l4,2641 +sqlalchemy/testing/profiling.py,sha256=FeAWcKQrVh_-ow06yUIF3P5gQU8YCeIkXKsV142U5eU,10280 +sqlalchemy/testing/requirements.py,sha256=xTZPvrj3wr-4xDsatmX1mp7mPMxgS_4XH5WLaZhi8Yg,17718 +sqlalchemy/testing/runner.py,sha256=q2HZNYXYgcJytV8IHw4btb7sD6muov4WtJzJFF6zuFc,1625 +sqlalchemy/testing/schema.py,sha256=nn4K5mjQbRQuvnuux43h9feYrXjZvZKpKEJLJioljGM,3433 +sqlalchemy/testing/util.py,sha256=nDHZOwsKgX1-ecRgZOzXzMrKp11HXfMCd5LsQB94ES4,5304 +sqlalchemy/testing/warnings.py,sha256=VskMM5G9imDqSfOQvWsriJ21b_CrfcgD5VOCWJtmwps,1682 +sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/testing/plugin/noseplugin.py,sha256=Ql26MG8e5ZWzCDisrvYR3thsA0wS__iEHFq-0yGSgsg,2771 +sqlalchemy/testing/plugin/plugin_base.py,sha256=VMClps-DhTrrmx4rRCX5eXktKwZ5R-z-RlcRSmnt0lo,15384 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=B2Wsh3ANSi3mw9zzuheRJ_HSpXfzilnCgl0WEh1nDmE,4368 +sqlalchemy/testing/suite/__init__.py,sha256=lbOCv0BGIpGJmk9L4I4zUL_W6_n-LGXJRTPvlvM4kDQ,419 +sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838 +sqlalchemy/testing/suite/test_insert.py,sha256=QQVLHnw58kUZWwGCVH7E1LL3I3R2b9mxl7MWhTro0hA,6746 +sqlalchemy/testing/suite/test_reflection.py,sha256=5rjLsnHZvQx0GQ9s6rkQaI_JcBVdQl-KFw8KolgXTk0,19895 +sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685 +sqlalchemy/testing/suite/test_select.py,sha256=qmSQE2EaVSf1Zwi_4kiBWstLZD2NvC3l8NbCfcnAhr8,2506 +sqlalchemy/testing/suite/test_sequence.py,sha256=i7tWJnVqfZDTopHs8i4NEDZnhsxjDoOQW0khixKIAnU,3806 +sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088 
+sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582 +sqlalchemy/util/__init__.py,sha256=goz0YsuGbgmDfmXJ5bmx9H0JjKDAKflB2glTMAfbtK0,2350 +sqlalchemy/util/_collections.py,sha256=G3xVqiiFI-stTqFwJ93epgHws5_el6W-0KsS0JXxeH4,26052 +sqlalchemy/util/compat.py,sha256=HhYut_7bky-P3acKHpXs5CsX9Bri8FDLRgfAnG9bEyg,5926 +sqlalchemy/util/deprecations.py,sha256=CYQ712rSop1CXF-0Kr0eAo-bEa4g8YJvHoOiBxbfIhw,4403 +sqlalchemy/util/langhelpers.py,sha256=Yc9l67mVY-O8NZLJqI4ZOvbsC9eibVDm_V8Rhjhzbj4,37539 +sqlalchemy/util/queue.py,sha256=XAJ2hKAwepp3mCw5M1-Ksn1t7XS-mHoGhIhwzusklWw,6548 +sqlalchemy/util/topological.py,sha256=wmuAjgNqxrGWFrI3KHzUAD8ppaD6gRsxLtoG1D3nKDI,2594 +sqlalchemy/sql/__pycache__/functions.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-35.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-35.pyc,, +sqlalchemy/__pycache__/log.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-35.pyc,, +sqlalchemy/dialects/drizzle/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-35.pyc,, +sqlalchemy/__pycache__/pool.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-35.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-35.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-35.pyc,, +sqlalchemy/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/exc.cpython-35.pyc,, +sqlalchemy/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-35.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/types.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-35.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-35.pyc,, 
+sqlalchemy/orm/__pycache__/strategies.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-35.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-35.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-35.pyc,, +sqlalchemy/event/__pycache__/api.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-35.pyc,, +sqlalchemy/event/__pycache__/base.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/associationproxy.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/inspection.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-35.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-35.pyc,, 
+sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-35.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-35.pyc,, +sqlalchemy/dialects/__pycache__/postgres.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-35.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/threadlocal.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-35.pyc,, +sqlalchemy/dialects/drizzle/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-35.pyc,, +sqlalchemy/dialects/drizzle/__pycache__/mysqldb.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-35.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-35.pyc,, +sqlalchemy/__pycache__/events.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-35.pyc,, 
+sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/api.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/mysqldb.cpython-35.pyc,, +sqlalchemy/__pycache__/processors.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/WHEEL new file mode 100644 index 0000000..b37ab74 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-cp35m-macosx_10_11_x86_64 + diff --git a/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/metadata.json new file mode 100644 index 0000000..f0f1347 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Database Abstraction Library", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"project_urls": {"Home": "http://www.sqlalchemy.org"}, "contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "version": "0.9.7", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)"]}]} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/top_level.txt similarity index 100% rename from Darwin/lib/python3.4/site-packages/SQLAlchemy-0.9.7-py3.4.egg-info/top_level.txt rename to Darwin/lib/python3.5/site-packages/SQLAlchemy-0.9.7.dist-info/top_level.txt diff --git a/Darwin/lib/python3.5/site-packages/_cffi_backend.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/_cffi_backend.cpython-35m-darwin.so new file mode 100755 index 0000000..b98e448 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/_cffi_backend.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/site-packages/_markerlib/__init__.py b/Darwin/lib/python3.5/site-packages/_markerlib/__init__.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/_markerlib/__init__.py rename to Darwin/lib/python3.5/site-packages/_markerlib/__init__.py diff --git a/Darwin/lib/python3.4/site-packages/_markerlib/markers.py 
b/Darwin/lib/python3.5/site-packages/_markerlib/markers.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/_markerlib/markers.py rename to Darwin/lib/python3.5/site-packages/_markerlib/markers.py diff --git a/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ef4aa7b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,13 @@ + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. + +Contact +------- + +`Mailing list `_ + + diff --git a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/METADATA similarity index 54% rename from Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/PKG-INFO rename to Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/METADATA index e19ce47..fe866a3 100644 --- a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/PKG-INFO +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/METADATA @@ -1,23 +1,11 @@ -Metadata-Version: 1.1 +Metadata-Version: 2.0 Name: cffi -Version: 0.8.6 +Version: 1.3.1 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski Author-email: python-cffi@googlegroups.com License: MIT -Description: - CFFI - ==== - - Foreign Function Interface for Python calling C code. - Please see the `Documentation `_. - - Contact - ------- - - `Mailing list `_ - Platform: UNKNOWN Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 @@ -26,3 +14,21 @@ Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.2 Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. 
+ +Contact +------- + +`Mailing list `_ + + diff --git a/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/RECORD new file mode 100644 index 0000000..dca70d1 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/RECORD @@ -0,0 +1,40 @@ +_cffi_backend.cpython-35m-darwin.so,sha256=mkTxtrtzemGhwkxVnUY6GppmJVvTPIXsaJkuuAPthBw,160144 +cffi/__init__.py,sha256=r5B-3Lnefe0bXvHXABDSii5221yluCrDvozJL8fUkVs,483 +cffi/_cffi_include.h,sha256=CbPNyXjyin_j46zDjkeL9hE8tkCskP2z2f1a-JnnHCY,9590 +cffi/api.py,sha256=ESNSlvaO6V6FiMl8qoyxKzRtIuKrBV4DMi6DSmFwBGU,30759 +cffi/backend_ctypes.py,sha256=L5uklowUxOBVFvv9tzYHch1oH62oYmJBe5DB8pbTTyY,40130 +cffi/cffi_opcode.py,sha256=YOXp8F5L07cEuZNHYwkiMtiORgxuFwaFTtCNnZEjvKU,5453 +cffi/commontypes.py,sha256=iOD1UdO_teZ8b1YLdTl-2K261gzJF9DGZQ9KS2q7cHo,2531 +cffi/cparser.py,sha256=oCRaWOmdUADquvrRfWY-mWJ9d_PfLJqhz7oUFev0C2k,33818 +cffi/ffiplatform.py,sha256=vajBaS3dzcd9JQzhAO7O9dmYXWotYA9Fgvb6BaxV4vc,3399 +cffi/gc_weakref.py,sha256=ijoJUjCI0I4BTfyhxE9RxvAFnbbKWMVc4U0os99mf4M,642 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=dRhR8Y_seBprLciRQKgUNNvAndNLu3ohBq7NWJ-KWxk,21110 +cffi/parse_c_type.h,sha256=DJyh7BrsGHslXPf9H7fAhoWEAoP7LK-GnWCKs3KQEgk,5630 +cffi/recompiler.py,sha256=wnQU64LpGmc4UxCOx26MBZcqut4-IjVD7w4FPX7G4RM,52062 +cffi/setuptools_ext.py,sha256=P5AQdnfwTdbHwvMQaIWIYKOW8zvzUZeLnSOxu7r-ufk,6158 +cffi/vengine_cpy.py,sha256=6uz_cMSdBUmkRAyoug5K7g_i0QuiWgACyLb1NUx4QO4,41320 +cffi/vengine_gen.py,sha256=Jy40OZrSkX2xyJulXkKWutXjOzD6Nvwd5p8g_r-aVHk,26583 +cffi/verifier.py,sha256=2KvPQhU6hQKaoqdBZ-LCWyNlJF7ez9RAlfX91iUgpR8,11468 +cffi-1.3.1.dist-info/DESCRIPTION.rst,sha256=9ijQLbcqTWNF-iV0RznFiBeBCNrjArA0P-eutKUPw98,220 +cffi-1.3.1.dist-info/METADATA,sha256=f0tWhW9wi7Awt7EZPNFy6Ma-NLE-Ju4TcMQ0U65p8yw,1039 +cffi-1.3.1.dist-info/RECORD,, +cffi-1.3.1.dist-info/WHEEL,sha256=6ZpjqpfPOHT7Ptn6fVpRf_4YnS_lFiLYHyrxGcJykKU,111 +cffi-1.3.1.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.3.1.dist-info/metadata.json,sha256=x_8aZiRzzZT4fXLkMJipP6jODIz-McoKGPk7X4NNaps,1070 +cffi-1.3.1.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi/__pycache__/setuptools_ext.cpython-35.pyc,, +cffi/__pycache__/gc_weakref.cpython-35.pyc,, +cffi/__pycache__/verifier.cpython-35.pyc,, +cffi/__pycache__/recompiler.cpython-35.pyc,, +cffi/__pycache__/ffiplatform.cpython-35.pyc,, +cffi/__pycache__/api.cpython-35.pyc,, +cffi/__pycache__/model.cpython-35.pyc,, +cffi/__pycache__/__init__.cpython-35.pyc,, +cffi/__pycache__/cffi_opcode.cpython-35.pyc,, +cffi/__pycache__/vengine_cpy.cpython-35.pyc,, +cffi/__pycache__/cparser.cpython-35.pyc,, +cffi/__pycache__/lock.cpython-35.pyc,, +cffi/__pycache__/commontypes.cpython-35.pyc,, +cffi/__pycache__/vengine_gen.cpython-35.pyc,, +cffi/__pycache__/backend_ctypes.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/WHEEL new file mode 100644 index 0000000..62cd579 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-cp35m-macosx_10_10_x86_64 + diff --git a/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/entry_points.txt b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/entry_points.txt new file mode 100644 index 
0000000..eee7e0f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/metadata.json new file mode 100644 index 0000000..7dca4d0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Foreign Function Interface for Python calling C code.", "classifiers": ["Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"project_urls": {"Home": "http://cffi.readthedocs.org"}, "contacts": [{"email": "python-cffi@googlegroups.com", "name": "Armin Rigo, Maciej Fijalkowski", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}, "python.exports": {"distutils.setup_keywords": {"cffi_modules": "cffi.setuptools_ext:cffi_modules"}}}, "license": "MIT", "metadata_version": "2.0", "name": "cffi", "run_requires": [{"requires": ["pycparser"]}], "extras": [], "version": "1.3.1"} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/top_level.txt similarity index 100% rename from Darwin/lib/python3.4/site-packages/cffi-0.8.6-py3.4.egg-info/top_level.txt rename to Darwin/lib/python3.5/site-packages/cffi-1.3.1.dist-info/top_level.txt diff --git a/Darwin/lib/python3.5/site-packages/cffi/__init__.py b/Darwin/lib/python3.5/site-packages/cffi/__init__.py new file mode 100644 index 0000000..a7c0678 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/__init__.py @@ -0,0 +1,13 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI, CDefError, FFIError +from .ffiplatform import VerificationError, VerificationMissing + +__version__ = "1.3.1" +__version_info__ = (1, 3, 1) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. 
+__version_verifier_modules__ = "0.8.6" diff --git a/Darwin/lib/python3.5/site-packages/cffi/_cffi_include.h b/Darwin/lib/python3.5/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..9b32293 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/_cffi_include.h @@ -0,0 +1,229 @@ +#define _CFFI_ +#include +#ifdef __cplusplus +extern "C" { +#endif +#include +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (CTypeDescrObject *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + +_CFFI_UNUSED_FN +static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected, + const char *fnname) +{ + if (PyTuple_GET_SIZE(args_tuple) != expected) { + PyErr_Format(PyExc_TypeError, + "%.150s() takes exactly %zd arguments (%zd given)", + fnname, expected, PyTuple_GET_SIZE(args_tuple)); + return NULL; + } + return &PyTuple_GET_ITEM(args_tuple, 0); /* pointer to the first item, + the others follow */ +} + +#endif +/********** end CPython-specific section **********/ + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define 
_cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef __cplusplus +} +#endif diff --git a/Darwin/lib/python3.4/site-packages/cffi/api.py b/Darwin/lib/python3.5/site-packages/cffi/api.py similarity index 63% rename from Darwin/lib/python3.4/site-packages/cffi/api.py rename to Darwin/lib/python3.5/site-packages/cffi/api.py index aed9715..0a98e05 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/api.py +++ b/Darwin/lib/python3.5/site-packages/cffi/api.py @@ -55,7 +55,8 @@ class FFI(object): # _cffi_backend.so compiled. import _cffi_backend as backend from . import __version__ - assert backend.__version__ == __version__ + assert backend.__version__ == __version__, \ + "version mismatch, %s != %s" % (backend.__version__, __version__) # (If you insist you can also try to pass the option # 'backend=backend_ctypes.CTypesBackend()', but don't # rely on it! It's probably not going to work well.) @@ -69,6 +70,8 @@ class FFI(object): self._function_caches = [] self._libraries = [] self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None if hasattr(backend, 'set_ffi'): backend.set_ffi(self) for name in backend.__dict__: @@ -77,6 +80,7 @@ class FFI(object): # with self._lock: self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) if isinstance(backend, types.ModuleType): # _cffi_backend: attach these constants to the class if not hasattr(FFI, 'NULL'): @@ -105,6 +109,11 @@ class FFI(object): if override: for cache in self._function_caches: cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) def dlopen(self, name, flags=0): """Load and return a dynamic library identified by 'name'. @@ -189,13 +198,16 @@ class FFI(object): cdecl = self._typeof(cdecl) return self._backend.alignof(cdecl) - def offsetof(self, cdecl, fieldname): + def offsetof(self, cdecl, *fields_or_indexes): """Return the offset of the named field inside the given - structure, which must be given as a C type name. + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. """ if isinstance(cdecl, basestring): cdecl = self._typeof(cdecl) - return self._backend.typeoffsetof(cdecl, fieldname)[1] + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] def new(self, cdecl, init=None): """Allocate an instance according to the specified C type and @@ -224,6 +236,30 @@ class FFI(object): cdecl = self._typeof(cdecl) return self._backend.newp(cdecl, init) + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. 
a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + def cast(self, cdecl, source): """Similar to a C cast: returns an instance of the named C type initialized with the given 'source'. The source is @@ -264,7 +300,33 @@ class FFI(object): """ return self._backend.buffer(cdata, size) - def callback(self, cdecl, python_callable=None, error=None): + def from_buffer(self, python_buffer): + """Return a that points to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types str, + unicode, or bytearray (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + """ + return self._backend.from_buffer(self.BCharA, python_buffer) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): """Return a callback object or a decorator making such a callback object. 'cdecl' must name a C function pointer type. The callback invokes the specified 'python_callable' (which may @@ -276,7 +338,8 @@ class FFI(object): if not callable(python_callable): raise TypeError("the 'python_callable' argument " "is not callable") - return self._backend.callback(cdecl, python_callable, error) + return self._backend.callback(cdecl, python_callable, + error, onerror) if isinstance(cdecl, basestring): cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) if python_callable is None: @@ -305,6 +368,13 @@ class FFI(object): data. Later, when this new cdata object is garbage-collected, 'destructor(old_cdata_object)' will be called. """ + try: + gcp = self._backend.gcp + except AttributeError: + pass + else: + return gcp(cdata, destructor) + # with self._lock: try: gc_weakrefs = self.gc_weakrefs @@ -335,9 +405,23 @@ class FFI(object): which requires binary compatibility in the signatures. 
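A minimal usage sketch of the new allocator/buffer helpers documented in the docstrings above (illustrative only, not part of the patch; all names and sizes are made up):

    import array
    import cffi

    ffi = cffi.FFI()

    # new_allocator(): behaves like ffi.new(), but with optional custom
    # alloc/free callables; with both left as None the default is used
    alloc = ffi.new_allocator(should_clear_after_alloc=True)
    p = alloc("int[10]")              # zero-filled, released like ffi.new() results

    # memmove(): byte-wise copy between cdata and Python buffer objects
    buf = ffi.new("char[16]")
    ffi.memmove(buf, b"hello", 5)

    # from_buffer(): a 'char[]' cdata sharing the memory of a buffer object
    a = array.array("B", [0] * 64)
    view = ffi.from_buffer(a)         # no copy; keep 'a' alive while using it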
""" from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. self.verifier = Verifier(self, source, tmpdir, **kwargs) lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). self._libraries.append(lib) return lib @@ -356,15 +440,29 @@ class FFI(object): with self._lock: return model.pointer_cache(self, ctype) - def addressof(self, cdata, field=None): + def addressof(self, cdata, *fields_or_indexes): """Return the address of a . - If 'field' is specified, return the address of this field. + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. """ ctype = self._backend.typeof(cdata) - ctype, offset = self._backend.typeoffsetof(ctype, field) + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 ctypeptr = self._pointer_to(ctype) return self._backend.rawaddressof(ctypeptr, cdata, offset) + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + def include(self, ffi_to_include): """Includes the typedefs, structs, unions and enums defined in another FFI instance. Usage is similar to a #include in C, @@ -374,12 +472,19 @@ class FFI(object): variables, which must anyway be accessed directly from the lib object returned by the original FFI instance. """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") with ffi_to_include._lock: with self._lock: self._parser.include(ffi_to_include._parser) self._cdefsources.append('[') self._cdefsources.extend(ffi_to_include._cdefsources) self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) def new_handle(self, x): return self._backend.newp_handle(self.BVoidP, x) @@ -387,6 +492,112 @@ class FFI(object): def from_handle(self, x): return self._backend.from_handle(x) + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def set_source(self, module_name, source, source_extension='.c', **kwds): + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.'): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + 
return recompile(self, module_name, source, tmpdir=tmpdir, + source_extension=source_extension, **kwds) + def _load_backend_lib(backend, name, flags): if name is None: @@ -414,7 +625,7 @@ def _make_ffi_library(ffi, libname, flags): def make_accessor_locked(name): key = 'function ' + name if key in ffi._parser._declarations: - tp = ffi._parser._declarations[key] + tp, _ = ffi._parser._declarations[key] BType = ffi._get_cached_btype(tp) try: value = backendlib.load_function(BType, name) @@ -425,7 +636,7 @@ def _make_ffi_library(ffi, libname, flags): # key = 'variable ' + name if key in ffi._parser._declarations: - tp = ffi._parser._declarations[key] + tp, _ = ffi._parser._declarations[key] BType = ffi._get_cached_btype(tp) read_variable = backendlib.read_variable write_variable = backendlib.write_variable @@ -436,12 +647,23 @@ def _make_ffi_library(ffi, libname, flags): # if not copied_enums: from . import model - for key, tp in ffi._parser._declarations.items(): + error = None + for key, (tp, _) in ffi._parser._declarations.items(): if not isinstance(tp, model.EnumType): continue + try: + tp.check_not_partial() + except Exception as e: + error = e + continue for enumname, enumval in zip(tp.enumerators, tp.enumvalues): if enumname not in library.__dict__: library.__dict__[enumname] = enumval + if error is not None: + if name in library.__dict__: + return # ignore error, about a different enum + raise error + for key, val in ffi._parser._int_constants.items(): if key not in library.__dict__: library.__dict__[key] = val @@ -450,6 +672,11 @@ def _make_ffi_library(ffi, libname, flags): if name in library.__dict__: return # + key = 'constant ' + name + if key in ffi._parser._declarations: + raise NotImplementedError("fetching a non-integer constant " + "after dlopen()") + # raise AttributeError(name) # def make_accessor(name): diff --git a/Darwin/lib/python3.4/site-packages/cffi/backend_ctypes.py b/Darwin/lib/python3.5/site-packages/cffi/backend_ctypes.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/cffi/backend_ctypes.py rename to Darwin/lib/python3.5/site-packages/cffi/backend_ctypes.py index 2b2b481..b061cda 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/backend_ctypes.py +++ b/Darwin/lib/python3.5/site-packages/cffi/backend_ctypes.py @@ -2,11 +2,10 @@ import ctypes, ctypes.util, operator, sys from . 
import model if sys.version_info < (3,): - integer_types = (int, long) bytechr = chr else: unicode = str - integer_types = int + long = int xrange = range bytechr = lambda num: bytes([num]) @@ -169,6 +168,7 @@ class CTypesGenericArray(CTypesData): class CTypesGenericPtr(CTypesData): __slots__ = ['_address', '_as_ctype_ptr'] _automatic_casts = False + kind = "pointer" @classmethod def _newp(cls, init): @@ -180,7 +180,7 @@ class CTypesGenericPtr(CTypesData): address = 0 elif isinstance(source, CTypesData): address = source._cast_to_integer() - elif isinstance(source, integer_types): + elif isinstance(source, (int, long)): address = source else: raise TypeError("bad type for cast to %r: %r" % @@ -357,7 +357,7 @@ class CTypesBackend(object): is_signed = (ctype(-1).value == -1) # def _cast_source_to_int(source): - if isinstance(source, (integer_types, float)): + if isinstance(source, (int, long, float)): source = int(source) elif isinstance(source, CTypesData): source = source._cast_to_integer() @@ -370,10 +370,12 @@ class CTypesBackend(object): (CTypesPrimitive, type(source).__name__)) return source # + kind1 = kind class CTypesPrimitive(CTypesGenericPrimitive): __slots__ = ['_value'] _ctype = ctype _reftypename = '%s &' % name + kind = kind1 def __init__(self, value): self._value = value @@ -396,7 +398,7 @@ class CTypesBackend(object): if kind == 'bool': @classmethod def _cast_from(cls, source): - if not isinstance(source, (integer_types, float)): + if not isinstance(source, (int, long, float)): source = _cast_source_to_int(source) return cls(bool(source)) def __int__(self): @@ -435,7 +437,7 @@ class CTypesBackend(object): if kind == 'int' or kind == 'byte' or kind == 'bool': @staticmethod def _to_ctypes(x): - if not isinstance(x, integer_types): + if not isinstance(x, (int, long)): if isinstance(x, CTypesData): x = int(x) else: @@ -462,7 +464,7 @@ class CTypesBackend(object): if kind == 'float': @staticmethod def _to_ctypes(x): - if not isinstance(x, (integer_types, float, CTypesData)): + if not isinstance(x, (int, long, float, CTypesData)): raise TypeError("float expected, got %s" % type(x).__name__) return ctype(x).value @@ -526,14 +528,14 @@ class CTypesBackend(object): self._own = True def __add__(self, other): - if isinstance(other, integer_types): + if isinstance(other, (int, long)): return self._new_pointer_at(self._address + other * self._bitem_size) else: return NotImplemented def __sub__(self, other): - if isinstance(other, integer_types): + if isinstance(other, (int, long)): return self._new_pointer_at(self._address - other * self._bitem_size) elif type(self) is type(other): @@ -608,7 +610,7 @@ class CTypesBackend(object): def __init__(self, init): if length is None: - if isinstance(init, integer_types): + if isinstance(init, (int, long)): len1 = init init = None elif kind == 'char' and isinstance(init, bytes): @@ -683,7 +685,7 @@ class CTypesBackend(object): return CTypesPtr._arg_to_ctypes(value) def __add__(self, other): - if isinstance(other, integer_types): + if isinstance(other, (int, long)): return CTypesPtr._new_pointer_at( ctypes.addressof(self._blob) + other * ctypes.sizeof(BItem._ctype)) @@ -703,12 +705,13 @@ class CTypesBackend(object): class struct_or_union(base_ctypes_class): pass struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind # class CTypesStructOrUnion(CTypesBaseStructOrUnion): __slots__ = ['_blob'] _ctype = struct_or_union _reftypename = '%s &' % (name,) - _kind = kind + _kind = kind = kind1 # CTypesStructOrUnion._fix_class() return 
CTypesStructOrUnion @@ -986,7 +989,8 @@ class CTypesBackend(object): def cast(self, BType, source): return BType._cast_from(source) - def callback(self, BType, source, error): + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented return BType(source, error) typeof = type @@ -994,27 +998,42 @@ class CTypesBackend(object): def getcname(self, BType, replace_with): return BType._get_c_name(replace_with) - def typeoffsetof(self, BType, fieldname): - if fieldname is not None and issubclass(BType, CTypesGenericPtr): - BType = BType._BItem - if not issubclass(BType, CTypesBaseStructOrUnion): - raise TypeError("expected a struct or union ctype") - if fieldname is None: - return (BType, 0) - else: + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") BField = BType._bfield_types[fieldname] if BField is Ellipsis: raise TypeError("not supported for bitfields") return (BField, BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) - def rawaddressof(self, BTypePtr, cdata, offset): + def rawaddressof(self, BTypePtr, cdata, offset=None): if isinstance(cdata, CTypesBaseStructOrUnion): ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): ptr = type(cdata)._to_ctypes(cdata) else: raise TypeError("expected a ") - if offset != 0: + if offset: ptr = ctypes.cast( ctypes.c_void_p( ctypes.cast(ptr, ctypes.c_void_p).value + offset), diff --git a/Darwin/lib/python3.5/site-packages/cffi/cffi_opcode.py b/Darwin/lib/python3.5/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..0da15a5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,178 @@ + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + from .ffiplatform import VerificationError + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 
+OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 + +_NUM_PRIM = 48 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/Darwin/lib/python3.5/site-packages/cffi/commontypes.py b/Darwin/lib/python3.5/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..3d11aae --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/commontypes.py @@ -0,0 +1,76 @@ +import sys +from . 
import api, model + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise api.FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise api.FFIError("Unsupported type: %r. Please file a bug " + "if you think it should be." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/Darwin/lib/python3.4/site-packages/cffi/cparser.py b/Darwin/lib/python3.5/site-packages/cffi/cparser.py similarity index 61% rename from Darwin/lib/python3.4/site-packages/cffi/cparser.py rename to Darwin/lib/python3.5/site-packages/cffi/cparser.py index a53d4c3..b3b07d7 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/cparser.py +++ b/Darwin/lib/python3.5/site-packages/cffi/cparser.py @@ -1,4 +1,3 @@ - from . 
import api, model from .commontypes import COMMON_TYPES, resolve_common_type try: @@ -16,15 +15,22 @@ try: except ImportError: lock = None -_r_comment = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE) -_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)\s+(.*?)$", - re.MULTILINE) +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) _r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") _r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") _r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") _r_words = re.compile(r"\w+|\S") _parser_cache = None -_r_int_literal = re.compile(r"^0?x?[0-9a-f]+u?l?$", re.IGNORECASE) +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") def _get_parser(): global _parser_cache @@ -32,6 +38,48 @@ def _get_parser(): _parser_cache = pycparser.CParser() return _parser_cache +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + def _preprocess(csource): # Remove comments. NOTE: this only work because the cdef() section # should not contain any string literal! @@ -40,8 +88,21 @@ def _preprocess(csource): macros = {} for match in _r_define.finditer(csource): macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() macros[macroname] = macrovalue csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. 
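The effect of the substitutions that follow can be sketched like this (illustrative only; the regexes are the ones added just above, the declarations are made up):

    import re

    _r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
    _r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
    _r_cdecl = re.compile(r"\b__cdecl\b")

    csource = "int WINAPI f(int); int (__stdcall *fp)(long); int __cdecl g(void);"
    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
    csource = _r_cdecl.sub(' ', csource)
    # WINAPI/__stdcall are now spelled "volatile volatile const" (and __cdecl is
    # dropped); _parse_function_type() later maps that back to abi='__stdcall'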
+ csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) # Replace "[...]" with "[__dotdotdotarray__]" csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) # Replace "...}" with "__dotdotdotNUM__}". This construction should @@ -73,9 +134,13 @@ def _common_type_names(csource): # but should be fine for all the common types. look_for_words = set(COMMON_TYPES) look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') look_for_words.add('typedef') words_used = set() is_typedef = False + paren = 0 previous_word = '' for word in _r_words.findall(csource): if word in look_for_words: @@ -86,6 +151,15 @@ def _common_type_names(csource): is_typedef = False elif word == 'typedef': is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) else: # word in COMMON_TYPES words_used.add(word) previous_word = word @@ -96,11 +170,14 @@ class Parser(object): def __init__(self): self._declarations = {} + self._included_declarations = set() self._anonymous_counter = 0 self._structnode2type = weakref.WeakKeyDictionary() self._override = False self._packed = False self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None def _parse(self, csource): csource, macros = _preprocess(csource) @@ -187,9 +264,10 @@ class Parser(object): if not decl.name: raise api.CDefError("typedef does not declare any name", decl) + quals = 0 if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) - and decl.type.type.names == ['__dotdotdot__']): - realtype = model.unknown_type(decl.name) + and decl.type.type.names[-1] == '__dotdotdot__'): + realtype = self._get_unknown_type(decl) elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and isinstance(decl.type.type.type, @@ -197,8 +275,9 @@ class Parser(object): decl.type.type.type.names == ['__dotdotdot__']): realtype = model.unknown_ptr_type(decl.name) else: - realtype = self._get_type(decl.type, name=decl.name) - self._declare('typedef ' + decl.name, realtype) + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name) + self._declare('typedef ' + decl.name, realtype, quals=quals) else: raise api.CDefError("unrecognized construct", decl) except api.FFIError as e: @@ -209,95 +288,140 @@ class Parser(object): def _add_constants(self, key, val): if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations raise api.FFIError( "multiple declarations of constant: %s" % (key,)) self._int_constants[key] = val + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + def _process_macros(self, macros): for key, value in macros.items(): value = value.strip() - match = _r_int_literal.search(value) - if match is not None: - int_str = match.group(0).lower().rstrip("ul") - - # "010" is not valid oct in py3 - if (int_str.startswith("0") 
and - int_str != "0" and - not int_str.startswith("0x")): - int_str = "0o" + int_str[1:] - - pyvalue = int(int_str, 0) - self._add_constants(key, pyvalue) + if _r_int_literal.match(value): + self._add_integer_constant(key, value) elif value == '...': self._declare('macro ' + key, value) else: - raise api.CDefError('only supports the syntax "#define ' - '%s ..." (literally) or "#define ' - '%s 0x1FF" for now' % (key, key)) + raise api.CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) def _parse_decl(self, decl): node = decl.type if isinstance(node, pycparser.c_ast.FuncDecl): - tp = self._get_type(node, name=decl.name) + tp, quals = self._get_type_and_quals(node, name=decl.name) assert isinstance(tp, model.RawFunctionType) - tp = self._get_type_pointer(tp) + tp = self._get_type_pointer(tp, quals) self._declare('function ' + decl.name, tp) else: if isinstance(node, pycparser.c_ast.Struct): - # XXX do we need self._declare in any of those? - if node.decls is not None: - self._get_struct_union_enum_type('struct', node) + self._get_struct_union_enum_type('struct', node) elif isinstance(node, pycparser.c_ast.Union): - if node.decls is not None: - self._get_struct_union_enum_type('union', node) + self._get_struct_union_enum_type('union', node) elif isinstance(node, pycparser.c_ast.Enum): - if node.values is not None: - self._get_struct_union_enum_type('enum', node) + self._get_struct_union_enum_type('enum', node) elif not decl.name: raise api.CDefError("construct does not declare any variable", decl) # if decl.name: - tp = self._get_type(node, partial_length_ok=True) - if self._is_constant_globalvar(node): - self._declare('constant ' + decl.name, tp) + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + tp = self._get_type_pointer(tp, quals) + self._declare('function ' + decl.name, tp) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) else: - self._declare('variable ' + decl.name, tp) + self._declare('variable ' + decl.name, tp, quals=quals) def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] assert not macros exprnode = ast.ext[-1].type.args.params[0] if isinstance(exprnode, pycparser.c_ast.ID): raise api.CDefError("unknown identifier '%s'" % (exprnode.name,)) - return self._get_type(exprnode.type) + return self._get_type_and_quals(exprnode.type) - def _declare(self, name, obj): + def _declare(self, name, obj, included=False, quals=0): if name in self._declarations: - if self._declarations[name] is obj: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: return if not self._override: raise api.FFIError( "multiple declarations of %s (for interactive usage, " "try cdef(xx, 
override=True))" % (name,)) assert '__dotdotdot__' not in name.split() - self._declarations[name] = obj + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) - def _get_type_pointer(self, type, const=False): + def _extract_quals(self, type): + quals = 0 + if isinstance(type, (pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): if isinstance(type, model.RawFunctionType): return type.as_function_pointer() - if const: - return model.ConstPointerType(type) - return model.PointerType(type) + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) - def _get_type(self, typenode, name=None, partial_length_ok=False): + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): # first, dereference typedefs, if we have it already parsed, we're good if (isinstance(typenode, pycparser.c_ast.TypeDecl) and isinstance(typenode.type, pycparser.c_ast.IdentifierType) and len(typenode.type.names) == 1 and ('typedef ' + typenode.type.names[0]) in self._declarations): - type = self._declarations['typedef ' + typenode.type.names[0]] - return type + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals # if isinstance(typenode, pycparser.c_ast.ArrayDecl): # array type @@ -306,15 +430,19 @@ class Parser(object): else: length = self._parse_constant( typenode.dim, partial_length_ok=partial_length_ok) - return model.ArrayType(self._get_type(typenode.type), length) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals # if isinstance(typenode, pycparser.c_ast.PtrDecl): # pointer type - const = (isinstance(typenode.type, pycparser.c_ast.TypeDecl) - and 'const' in typenode.type.quals) - return self._get_type_pointer(self._get_type(typenode.type), const) + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals # if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) type = typenode.type if isinstance(type, pycparser.c_ast.IdentifierType): # assume a primitive type. 
get it from .names, but reduce @@ -342,41 +470,52 @@ class Parser(object): names = newnames + names ident = ' '.join(names) if ident == 'void': - return model.void_type + return model.void_type, quals if ident == '__dotdotdot__': raise api.FFIError(':%d: bad usage of "..."' % typenode.coord.line) - return resolve_common_type(ident) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) # if isinstance(type, pycparser.c_ast.Struct): # 'struct foobar' - return self._get_struct_union_enum_type('struct', type, name) + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals # if isinstance(type, pycparser.c_ast.Union): # 'union foobar' - return self._get_struct_union_enum_type('union', type, name) + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals # if isinstance(type, pycparser.c_ast.Enum): # 'enum foobar' - return self._get_struct_union_enum_type('enum', type, name) + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals # if isinstance(typenode, pycparser.c_ast.FuncDecl): # a function type - return self._parse_function_type(typenode, name) + return self._parse_function_type(typenode, name), 0 # # nested anonymous structs or unions end up here if isinstance(typenode, pycparser.c_ast.Struct): return self._get_struct_union_enum_type('struct', typenode, name, - nested=True) + nested=True), 0 if isinstance(typenode, pycparser.c_ast.Union): return self._get_struct_union_enum_type('union', typenode, name, - nested=True) + nested=True), 0 # raise api.FFIError(":%d: bad or unsupported type declaration" % typenode.coord.line) def _parse_function_type(self, typenode, funcname=None): params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise api.CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) ellipsis = ( len(params) > 0 and isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and @@ -389,31 +528,28 @@ class Parser(object): raise api.CDefError( "%s: a function with only '(...)' as argument" " is not correct C" % (funcname or 'in expression')) - elif (len(params) == 1 and - isinstance(params[0].type, pycparser.c_ast.TypeDecl) and - isinstance(params[0].type.type, pycparser.c_ast.IdentifierType) - and list(params[0].type.type.names) == ['void']): - del params[0] - args = [self._as_func_arg(self._get_type(argdeclnode.type)) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) for argdeclnode in params] - result = self._get_type(typenode.type) - return model.RawFunctionType(tuple(args), result, ellipsis) + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
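Once detected, the abi tag travels with the function type; a rough sketch of where it becomes visible again (illustrative only, using the model classes changed further down in this patch):

    from cffi import model

    tp = model.RawFunctionType((model.PrimitiveType("int"),),
                               model.PrimitiveType("int"),
                               ellipsis=False, abi="__stdcall")
    ptr = tp.as_function_pointer()
    ptr.get_c_name("fp")    # roughly "int(__stdcall * fp)(int)"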
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) - def _as_func_arg(self, type): + def _as_func_arg(self, type, quals): if isinstance(type, model.ArrayType): - return model.PointerType(type.item) + return model.PointerType(type.item, quals) elif isinstance(type, model.RawFunctionType): return type.as_function_pointer() else: return type - def _is_constant_globalvar(self, typenode): - if isinstance(typenode, pycparser.c_ast.PtrDecl): - return 'const' in typenode.quals - if isinstance(typenode, pycparser.c_ast.TypeDecl): - return 'const' in typenode.quals - return False - def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): # First, a level of caching on the exact 'type' node of the AST. # This is obscure, but needed because pycparser "unrolls" declarations @@ -452,7 +588,7 @@ class Parser(object): else: explicit_name = name key = '%s %s' % (kind, name) - tp = self._declarations.get(key, None) + tp, _ = self._declarations.get(key, (None, None)) # if tp is None: if kind == 'struct': @@ -460,6 +596,8 @@ class Parser(object): elif kind == 'union': tp = model.UnionType(explicit_name, None, None, None) elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") tp = self._build_enum_type(explicit_name, type.values) else: raise AssertionError("kind = %r" % (kind,)) @@ -492,6 +630,7 @@ class Parser(object): fldnames = [] fldtypes = [] fldbitsize = [] + fldquals = [] for decl in type.decls: if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and ''.join(decl.type.names) == '__dotdotdot__'): @@ -505,7 +644,8 @@ class Parser(object): else: bitsize = self._parse_constant(decl.bitsize) self._partial_length = False - type = self._get_type(decl.type, partial_length_ok=True) + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) if self._partial_length: self._make_partial(tp, nested) if isinstance(type, model.StructType) and type.partial: @@ -513,14 +653,19 @@ class Parser(object): fldnames.append(decl.name or '') fldtypes.append(type) fldbitsize.append(bitsize) + fldquals.append(fqual) tp.fldnames = tuple(fldnames) tp.fldtypes = tuple(fldtypes) tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) if fldbitsize != [-1] * len(fldbitsize): if isinstance(tp, model.StructType) and tp.partial: raise NotImplementedError("%s: using both bitfields and '...;'" % (tp,)) tp.packed = self._packed + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) return tp def _make_partial(self, tp, nested): @@ -532,9 +677,24 @@ class Parser(object): def _parse_constant(self, exprnode, partial_length_ok=False): # for now, limited to expressions that are an immediate number - # or negative number + # or positive/negative number if isinstance(exprnode, pycparser.c_ast.Constant): - return int(exprnode.value, 0) + s = exprnode.value + if s.startswith('0'): + if s.startswith('0x') or s.startswith('0X'): + return int(s, 16) + return int(s, 8) + elif '1' <= s[0] <= '9': + return int(s, 10) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise api.CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return 
self._parse_constant(exprnode.expr) # if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and exprnode.op == '-'): @@ -555,19 +715,21 @@ class Parser(object): def _build_enum_type(self, explicit_name, decls): if decls is not None: - enumerators1 = [enum.name for enum in decls.enumerators] - enumerators = [s for s in enumerators1 - if not _r_enum_dotdotdot.match(s)] - partial = len(enumerators) < len(enumerators1) - enumerators = tuple(enumerators) + partial = False + enumerators = [] enumvalues = [] nextenumvalue = 0 - for enum in decls.enumerators[:len(enumerators)]: + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue if enum.value is not None: nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) enumvalues.append(nextenumvalue) self._add_constants(enum.name, nextenumvalue) nextenumvalue += 1 + enumerators = tuple(enumerators) enumvalues = tuple(enumvalues) tp = model.EnumType(explicit_name, enumerators, enumvalues) tp.partial = partial @@ -576,9 +738,35 @@ class Parser(object): return tp def include(self, other): - for name, tp in other._declarations.items(): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include kind = name.split(' ', 1)[0] - if kind in ('typedef', 'struct', 'union', 'enum'): - self._declare(name, tp) + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) for k, v in other._int_constants.items(): self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + assert typenames[-1] == '__dotdotdot__' + if len(typenames) == 1: + return model.unknown_type(decl.name) + + if (typenames[:-1] == ['float'] or + typenames[:-1] == ['double']): + # not for 'long double' so far + result = model.UnknownFloatType(decl.name) + else: + for t in typenames[:-1]: + if t not in ['int', 'short', 'long', 'signed', + 'unsigned', 'char']: + raise api.FFIError(':%d: bad usage of "..."' % + decl.coord.line) + result = model.UnknownIntegerType(decl.name) + + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef %s... 
%s'" % ( + ' '.join(typenames[:-1]), decl.name) + + return result diff --git a/Darwin/lib/python3.4/site-packages/cffi/ffiplatform.py b/Darwin/lib/python3.5/site-packages/cffi/ffiplatform.py similarity index 96% rename from Darwin/lib/python3.4/site-packages/cffi/ffiplatform.py rename to Darwin/lib/python3.5/site-packages/cffi/ffiplatform.py index 4515d6c..62ca4f1 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/ffiplatform.py +++ b/Darwin/lib/python3.5/site-packages/cffi/ffiplatform.py @@ -1,4 +1,4 @@ -import os +import sys, os class VerificationError(Exception): @@ -11,6 +11,9 @@ class VerificationMissing(Exception): """ +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + def get_extension(srcfilename, modname, sources=(), **kwds): from distutils.core import Extension allsources = [srcfilename] diff --git a/Darwin/lib/python3.4/site-packages/cffi/gc_weakref.py b/Darwin/lib/python3.5/site-packages/cffi/gc_weakref.py similarity index 52% rename from Darwin/lib/python3.4/site-packages/cffi/gc_weakref.py rename to Darwin/lib/python3.5/site-packages/cffi/gc_weakref.py index a2c0967..d0ffb23 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/gc_weakref.py +++ b/Darwin/lib/python3.5/site-packages/cffi/gc_weakref.py @@ -2,18 +2,21 @@ from weakref import ref class GcWeakrefs(object): - # code copied and adapted from WeakKeyDictionary. - def __init__(self, ffi): self.ffi = ffi - self.data = data = {} - def remove(k): - destructor, cdata = data.pop(k) - destructor(cdata) - self.remove = remove + self.data = {} def build(self, cdata, destructor): # make a new cdata of the same type as the original one new_cdata = self.ffi.cast(self.ffi._backend.typeof(cdata), cdata) - self.data[ref(new_cdata, self.remove)] = destructor, cdata + # + def remove(key): + # careful, this function is not protected by any lock + old_key = self.data.pop(index) + assert old_key is key + destructor(cdata) + # + key = ref(new_cdata, remove) + index = object() + self.data[index] = key return new_cdata diff --git a/Darwin/lib/python3.4/site-packages/cffi/lock.py b/Darwin/lib/python3.5/site-packages/cffi/lock.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/cffi/lock.py rename to Darwin/lib/python3.5/site-packages/cffi/lock.py diff --git a/Darwin/lib/python3.4/site-packages/cffi/model.py b/Darwin/lib/python3.5/site-packages/cffi/model.py similarity index 76% rename from Darwin/lib/python3.4/site-packages/cffi/model.py rename to Darwin/lib/python3.5/site-packages/cffi/model.py index 371153f..5783034 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/model.py +++ b/Darwin/lib/python3.5/site-packages/cffi/model.py @@ -1,14 +1,32 @@ -import types +import types, sys import weakref from .lock import allocate_lock +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. 
+ # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + class BaseTypeByIdentity(object): is_array_type = False is_raw_function = False - def get_c_name(self, replace_with='', context='a C file'): + def get_c_name(self, replace_with='', context='a C file', quals=0): result = self.c_name_with_marker assert result.count('&') == 1 # some logic duplication with ffi.getctype()... :-( @@ -18,6 +36,7 @@ class BaseTypeByIdentity(object): replace_with = '(%s)' % replace_with elif not replace_with[0] in '[(': replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) result = result.replace('&', replace_with) if '$' in result: from .ffiplatform import VerificationError @@ -32,6 +51,9 @@ class BaseTypeByIdentity(object): def has_c_name(self): return '$' not in self._get_c_name() + def is_integer_type(self): + return False + def get_cached_btype(self, ffi, finishlist, can_delay=False): try: BType = ffi._cached_btypes[self] @@ -73,7 +95,11 @@ class VoidType(BaseType): void_type = VoidType() -class PrimitiveType(BaseType): +class BasePrimitiveType(BaseType): + pass + + +class PrimitiveType(BasePrimitiveType): _attrs_ = ('name',) ALL_PRIMITIVE_TYPES = { @@ -102,8 +128,26 @@ class PrimitiveType(BaseType): 'uint32_t': 'i', 'int64_t': 'i', 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', 'intptr_t': 'i', 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', 'ptrdiff_t': 'i', 'size_t': 'i', 'ssize_t': 'i', @@ -125,19 +169,48 @@ class PrimitiveType(BaseType): return global_cache(self, ffi, 'new_primitive_type', self.name) -class BaseFunctionType(BaseType): - _attrs_ = ('args', 'result', 'ellipsis') +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) - def __init__(self, args, result, ellipsis): + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): self.args = args self.result = result self.ellipsis = ellipsis + self.abi = abi # reprargs = [arg._get_c_name() for arg in self.args] if self.ellipsis: reprargs.append('...') reprargs = reprargs or ['void'] replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] self.c_name_with_marker = ( self.result.c_name_with_marker.replace('&', replace_with)) @@ -155,7 +228,7 @@ class RawFunctionType(BaseFunctionType): "type, not a pointer-to-function type" % (self,)) def 
as_function_pointer(self): - return FunctionPtrType(self.args, self.result, self.ellipsis) + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) class FunctionPtrType(BaseFunctionType): @@ -166,21 +239,29 @@ class FunctionPtrType(BaseFunctionType): args = [] for tp in self.args: args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass return global_cache(self, ffi, 'new_function_type', - tuple(args), result, self.ellipsis) + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) class PointerType(BaseType): - _attrs_ = ('totype',) - _base_pattern = " *&" - _base_pattern_array = "(*&)" + _attrs_ = ('totype', 'quals') - def __init__(self, totype): + def __init__(self, totype, quals=0): self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") if totype.is_array_type: - extra = self._base_pattern_array - else: - extra = self._base_pattern + extra = "(%s)" % (extra.lstrip(),) self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) def build_backend_type(self, ffi, finishlist): @@ -189,10 +270,8 @@ class PointerType(BaseType): voidp_type = PointerType(void_type) - -class ConstPointerType(PointerType): - _base_pattern = " const *&" - _base_pattern_array = "(const *&)" +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) const_voidp_type = ConstPointerType(void_type) @@ -200,8 +279,8 @@ const_voidp_type = ConstPointerType(void_type) class NamedPointerType(PointerType): _attrs_ = ('totype', 'name') - def __init__(self, totype, name): - PointerType.__init__(self, totype) + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) self.name = name self.c_name_with_marker = name + '&' @@ -219,7 +298,7 @@ class ArrayType(BaseType): elif length == '...': brackets = '&[/*...*/]' else: - brackets = '&[%d]' % length + brackets = '&[%s]' % length self.c_name_with_marker = ( self.item.c_name_with_marker.replace('&', brackets)) @@ -235,6 +314,8 @@ class ArrayType(BaseType): BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) +char_array_type = ArrayType(PrimitiveType('char'), None) + class StructOrUnionOrEnum(BaseTypeByIdentity): _attrs_ = ('name',) @@ -255,26 +336,38 @@ class StructOrUnionOrEnum(BaseTypeByIdentity): class StructOrUnion(StructOrUnionOrEnum): fixedlayout = None - completed = False + completed = 0 partial = False packed = False - def __init__(self, name, fldnames, fldtypes, fldbitsize): + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): self.name = name self.fldnames = fldnames self.fldtypes = fldtypes self.fldbitsize = fldbitsize + self.fldquals = fldquals self.build_c_name_with_marker() + def has_anonymous_struct_fields(self): + if self.fldtypes is None: + return False + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + return True + return False + def enumfields(self): - for name, type, bitsize in zip(self.fldnames, self.fldtypes, - self.fldbitsize): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): if name == '' and 
isinstance(type, StructOrUnion): # nested anonymous struct/union for result in type.enumfields(): yield result else: - yield (name, type, bitsize) + yield (name, type, bitsize, quals) def force_flatten(self): # force the struct or union to have a declaration that lists @@ -283,13 +376,16 @@ class StructOrUnion(StructOrUnionOrEnum): names = [] types = [] bitsizes = [] - for name, type, bitsize in self.enumfields(): + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): names.append(name) types.append(type) bitsizes.append(bitsize) + fldquals.append(quals) self.fldnames = tuple(names) self.fldtypes = tuple(types) self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) def get_cached_btype(self, ffi, finishlist, can_delay=False): BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, @@ -305,12 +401,13 @@ class StructOrUnion(StructOrUnionOrEnum): "for '%s'" % (self.name,)) return BType = ffi._cached_btypes[self] - if self.fldtypes is None: - return # not completing it: it's an opaque struct # self.completed = 1 # - if self.fixedlayout is None: + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: fldtypes = [tp.get_cached_btype(ffi, finishlist) for tp in self.fldtypes] lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) @@ -417,12 +514,17 @@ class EnumType(StructOrUnionOrEnum): if self.baseinttype is not None: return self.baseinttype.get_cached_btype(ffi, finishlist) # + from . import api if self.enumvalues: smallest_value = min(self.enumvalues) largest_value = max(self.enumvalues) else: - smallest_value = 0 - largest_value = 0 + import warnings + warnings.warn("%r has no values explicitly defined; next version " + "will refuse to guess which integer type it is " + "meant to be (unsigned/signed, int/long)" + % self._get_c_name()) + smallest_value = largest_value = 0 if smallest_value < 0: # needs a signed type sign = 1 candidate1 = PrimitiveType("int") @@ -449,11 +551,12 @@ def unknown_type(name, structname=None): structname = '$%s' % name tp = StructType(structname, None, None, None) tp.force_the_name(name) + tp.origin = "unknown_type" return tp def unknown_ptr_type(name, structname=None): if structname is None: - structname = '*$%s' % name + structname = '$$%s' % name tp = StructType(structname, None, None, None) return NamedPointerType(tp, name) @@ -478,7 +581,7 @@ def global_cache(srctype, ffi, funcname, *args, **kwds): try: res = getattr(ffi._backend, funcname)(*args) except NotImplementedError as e: - raise NotImplementedError("%r: %s" % (srctype, e)) + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) # note that setdefault() on WeakValueDictionary is not atomic # and contains a rare bug (http://bugs.python.org/issue19542); # we have to use a lock and do it ourselves diff --git a/Darwin/lib/python3.5/site-packages/cffi/parse_c_type.h b/Darwin/lib/python3.5/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..693ea9b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/parse_c_type.h @@ -0,0 +1,169 @@ + +/* See doc/misc/parse_c_type.rst in the source of CFFI for more information */ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define 
_CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 + +#define _CFFI__NUM_PRIM 48 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + 
int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/Darwin/lib/python3.5/site-packages/cffi/recompiler.py b/Darwin/lib/python3.5/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..56b9efb --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/recompiler.py @@ -0,0 +1,1290 @@ +import os, sys, io +from . import ffiplatform, model +from .cffi_opcode import * + +VERSION = "0x2601" + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + 
format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + 
self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in tp.enumfields(): + self._do_collect_type(self._field_type(tp, name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # then paste the C source given by the user, verbatim. + prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + prnt(' 0, /* flags */') + prnt('};') + prnt() + # + # the init function + base_module_name = self.module_name.split('.')[-1] + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + prnt(' p[0] = (const void *)%s;' % VERSION) + prnt(' p[1] = &_cffi_type_context;') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, VERSION)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, VERSION)) + prnt('}') + prnt('#endif') + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. 
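For illustration only (not part of the patch): the write_py_source_to_f() method that follows, driven through make_py_source() defined further down in this same file, is what emits the pure-Python "out-of-line ABI mode" module. A minimal sketch of calling that path, assuming cffi >= 1.0 is importable; the module name "_example_abi" and the cdef are made up for the example.

from cffi import FFI
from cffi import recompiler

ffi = FFI()
ffi.cdef("int printf(const char *format, ...);")

# Writes _example_abi.py containing the serialized _types/_globals tables;
# returns False when the file on disk is already up to date.
updated = recompiler.make_py_source(ffi, "_example_abi", "_example_abi.py")
print("written" if updated else "already up-to-date")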
+ + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = %s," % (VERSION,)) + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, 
tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = (%s)alloca((size_t)datasize);' % ( + tovar, tp.get_c_name(''))) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _generate_cpy_typedef_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, 
name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt(' PyObject **aa;') + prnt() + prnt(' aa = _cffi_unpack_args(args, %d, "%s");' % (len(rng), name)) + prnt(' if (aa == NULL)') + prnt(' return NULL;') + for i in rng: + prnt(' arg%d = aa[%d];' % (i, i)) + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. 
+ difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if isinstance(tp_result, model.StructOrUnion): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(" (void)((p->%s) << 1); /* check that '%s.%s' is " + "an integer */" % (fname, cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) 
+ while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or tp.has_anonymous_struct_fields(): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(tp.enumfields()) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
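For illustration only (not part of the patch): a rough sketch of how integer constants and macros declared in cdef() travel through the sections that follow (_generate_cpy_const and the macro handlers) and surface as plain attributes of the generated lib object. Assumes cffi >= 1.0; the module name "_const_example" and the constant names are made up, and the final compile step is only indicated in a comment since it needs a C compiler.

from cffi import FFI

ffi = FFI()
ffi.cdef("""
    #define MAGIC ...
    #define LIMIT 10
""")
# "..." asks the C compiler for the value; an explicit value (LIMIT) is
# checked against the real C source at build time via check_value.
ffi.set_source("_const_example", """
    #define MAGIC 0x2601
    #define LIMIT 10
""")

# After ffi.compile(), the generated module exposes them directly:
#   from _const_example import lib
#   assert lib.MAGIC == 0x2601 and lib.LIMIT == 10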
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise ffiplatform.VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) << 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise ffiplatform.VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) << 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = 
_emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise ffiplatform.VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file): + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file) + +def make_py_source(ffi, module_name, target_py_file): + return _make_c_or_py_source(ffi, module_name, None, target_py_file) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file) + if call_c_compiler: + cwd = os.getcwd() + try: + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext) + finally: + os.chdir(cwd) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = 
make_py_source(ffi, module_name, c_file) + if call_c_compiler: + return c_file + else: + return None, updated + +def _verify(ffi, module_name, preamble, *args, **kwds): + # FOR TESTS ONLY + from testing.udir import udir + import imp + assert module_name not in sys.modules, "module name conflict: %r" % ( + module_name,) + kwds.setdefault('tmpdir', str(udir)) + outputfilename = recompile(ffi, module_name, preamble, *args, **kwds) + module = imp.load_dynamic(module_name, outputfilename) + # + # hack hack hack: copy all *bound methods* from module.ffi back to the + # ffi instance. Then calls like ffi.new() will invoke module.ffi.new(). + for name in dir(module.ffi): + if not name.startswith('_'): + attr = getattr(module.ffi, name) + if attr is not getattr(ffi, name, object()): + setattr(ffi, name, attr) + def typeof_disabled(*args, **kwds): + raise NotImplementedError + ffi._typeof = typeof_disabled + return module.lib diff --git a/Darwin/lib/python3.5/site-packages/cffi/setuptools_ext.py b/Darwin/lib/python3.5/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..9c6436d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,161 @@ +import os + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' 
must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + from distutils.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. + if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. 
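The comment block above describes the API-only `pre_run` hook but only in prose; the following sketch (not part of the diff — the package name, build script path, and print call are illustrative assumptions) shows roughly how a project's setup.py might opt into it. Because cffi's `build_ext_make_mod` subclasses whatever `build_ext` class is registered in `cmdclass`, a `pre_run(self, ext, ffi)` method defined there is inherited and called just before the C source is generated.

```python
# Hedged sketch of a setup.py using the 'pre_run' hook described above.
from distutils.command.build_ext import build_ext
from setuptools import setup

class build_ext_with_pre_run(build_ext):
    def pre_run(self, ext, ffi):
        # 'ext' is the distutils Extension, 'ffi' the FFI about to be
        # turned into C code by cffi's recompiler.
        print("generating C source for", ext.name)

setup(
    name="example-pkg",                      # hypothetical package name
    setup_requires=["cffi>=1.0.0"],
    cffi_modules=["example_build.py:ffi"],   # hypothetical build script
    cmdclass={"build_ext": build_ext_with_pre_run},
)
```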
+ + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + dist.cmdclass['build_py'] = build_py_make_mod + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/Darwin/lib/python3.4/site-packages/cffi/vengine_cpy.py b/Darwin/lib/python3.5/site-packages/cffi/vengine_cpy.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/cffi/vengine_cpy.py rename to Darwin/lib/python3.5/site-packages/cffi/vengine_cpy.py index 31793f0..9f82988 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/vengine_cpy.py +++ b/Darwin/lib/python3.5/site-packages/cffi/vengine_cpy.py @@ -65,7 +65,7 @@ class VCPythonEngine(object): # The following two 'chained_list_constants' items contains # the head of these two chained lists, as a string that gives the # call to do, if any. - self._chained_list_constants = ['0', '0'] + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] # prnt = self._prnt # first paste some standard set of lines that are mostly '#define' @@ -138,15 +138,26 @@ class VCPythonEngine(object): prnt() prnt('#endif') - def load_library(self): + def load_library(self, flags=None): # XXX review all usages of 'self' here! 
# import it as a new extension module + imp.acquire_lock() try: - module = imp.load_dynamic(self.verifier.get_module_name(), - self.verifier.modulefilename) - except ImportError as e: - error = "importing %r: %s" % (self.verifier.modulefilename, e) - raise ffiplatform.VerificationError(error) + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise ffiplatform.VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() # # call loading_cpy_struct() to get the struct layout inferred by # the C compiler @@ -186,7 +197,10 @@ class VCPythonEngine(object): return library def _get_declarations(self): - return sorted(self.ffi._parser._declarations.items()) + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst def _generate(self, step_name): for name, tp in self._get_declarations(): @@ -228,7 +242,8 @@ class VCPythonEngine(object): converter = '_cffi_to_c_int' extraarg = ', %s' % tp.name else: - converter = '_cffi_to_c_%s' % (tp.name.replace(' ', '_'),) + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) errvalue = '-1' # elif isinstance(tp, model.PointerType): @@ -267,8 +282,8 @@ class VCPythonEngine(object): self._prnt(' if (datasize != 0) {') self._prnt(' if (datasize < 0)') self._prnt(' %s;' % errcode) - self._prnt(' %s = alloca(datasize);' % (tovar,)) - self._prnt(' memset((void *)%s, 0, datasize);' % (tovar,)) + self._prnt(' %s = alloca((size_t)datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) self._prnt(' if (_cffi_convert_array_from_object(' '(char *)%s, _cffi_type(%d), %s) < 0)' % ( tovar, self._gettypenum(tp), fromvar)) @@ -336,7 +351,7 @@ class VCPythonEngine(object): prnt = self._prnt numargs = len(tp.args) if numargs == 0: - argname = 'no_arg' + argname = 'noarg' elif numargs == 1: argname = 'arg0' else: @@ -386,6 +401,9 @@ class VCPythonEngine(object): prnt(' Py_END_ALLOW_THREADS') prnt() # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') if result_code: prnt(' return %s;' % self._convert_expr_from_c(tp.result, 'result', 'result type')) @@ -452,7 +470,8 @@ class VCPythonEngine(object): prnt('static void %s(%s *p)' % (checkfuncname, cname)) prnt('{') prnt(' /* only to generate compile-time warnings or errors */') - for fname, ftype, fbitsize in tp.enumfields(): + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): if (isinstance(ftype, model.PrimitiveType) and ftype.is_integer_type()) or fbitsize >= 0: # accept all integers, but complain on float or double @@ -461,7 +480,8 @@ class VCPythonEngine(object): # only accept exactly the type declared. 
try: prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname), fname)) + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) except ffiplatform.VerificationError as e: prnt(' /* %s */' % str(e)) # cannot verify it, ignore prnt('}') @@ -472,7 +492,7 @@ class VCPythonEngine(object): prnt(' static Py_ssize_t nums[] = {') prnt(' sizeof(%s),' % cname) prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize in tp.enumfields(): + for fname, ftype, fbitsize, fqual in tp.enumfields(): if fbitsize >= 0: continue # xxx ignore fbitsize for now prnt(' offsetof(%s, %s),' % (cname, fname)) @@ -482,6 +502,8 @@ class VCPythonEngine(object): prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) prnt(' -1') prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') prnt(' return _cffi_get_struct_layout(nums);') prnt(' /* the next line is not executed, but compiled */') prnt(' %s(0);' % (checkfuncname,)) @@ -534,7 +556,7 @@ class VCPythonEngine(object): check(layout[0], ffi.sizeof(BStruct), "wrong total size") check(layout[1], ffi.alignof(BStruct), "wrong total alignment") i = 2 - for fname, ftype, fbitsize in tp.enumfields(): + for fname, ftype, fbitsize, fqual in tp.enumfields(): if fbitsize >= 0: continue # xxx ignore fbitsize for now check(layout[i], ffi.offsetof(BStruct, fname), @@ -578,7 +600,8 @@ class VCPythonEngine(object): # constants, likely declared with '#define' def _generate_cpy_const(self, is_int, name, tp=None, category='const', - vartp=None, delayed=True, size_too=False): + vartp=None, delayed=True, size_too=False, + check_value=None): prnt = self._prnt funcname = '_cffi_%s_%s' % (category, name) prnt('static int %s(PyObject *lib)' % funcname) @@ -590,6 +613,9 @@ class VCPythonEngine(object): else: assert category == 'const' # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # if not is_int: if category == 'var': realexpr = '&' + name @@ -637,6 +663,27 @@ class VCPythonEngine(object): # ---------- # enums + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + def _enum_funcname(self, prefix, name): # "$enum_$1" => "___D_enum____D_1" name = name.replace('$', '___D_') @@ -653,25 +700,8 @@ class VCPythonEngine(object): prnt('static int %s(PyObject *lib)' % funcname) prnt('{') for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - if enumvalue < 0: - prnt(' if ((%s) >= 0 || (long)(%s) != %dL) {' % ( - enumerator, enumerator, enumvalue)) - else: - prnt(' if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % ( - enumerator, enumerator, enumvalue)) - prnt(' char buf[64];') - prnt(' if ((%s) < 0)' % enumerator) - prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator) - prnt(' else') - prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % - enumerator) - prnt(' PyErr_Format(_cffi_VerificationError,') - prnt(' "enum %s: %s has the real value %s, ' - 'not %s",') - 
prnt(' "%s", "%s", buf, "%d");' % ( - name, enumerator, enumvalue)) - prnt(' return -1;') - prnt(' }') + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) prnt(' return %s;' % self._chained_list_constants[True]) self._chained_list_constants[True] = funcname + '(lib)' prnt('}') @@ -695,8 +725,11 @@ class VCPythonEngine(object): # macros: for now only for integers def _generate_cpy_macro_decl(self, tp, name): - assert tp == '...' - self._generate_cpy_const(True, name) + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) _generate_cpy_macro_collecttype = _generate_nothing _generate_cpy_macro_method = _generate_nothing @@ -783,6 +816,24 @@ cffimod_header = r''' typedef unsigned __int16 uint16_t; typedef unsigned __int32 uint32_t; typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; # else # include # endif @@ -828,15 +879,19 @@ cffimod_header = r''' PyLong_FromLongLong((long long)(x))) #define _cffi_from_c_int(x, type) \ - (((type)-1) > 0 ? /* unsigned */ \ - (sizeof(type) < sizeof(long) ? PyInt_FromLong(x) : \ - sizeof(type) == sizeof(long) ? PyLong_FromUnsignedLong(x) : \ - PyLong_FromUnsignedLongLong(x)) \ - : (sizeof(type) <= sizeof(long) ? PyInt_FromLong(x) : \ - PyLong_FromLongLong(x))) + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) #define _cffi_to_c_int(o, type) \ - (sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ : (type)_cffi_to_c_i8(o)) : \ sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ : (type)_cffi_to_c_i16(o)) : \ @@ -844,7 +899,7 @@ cffimod_header = r''' : (type)_cffi_to_c_i32(o)) : \ sizeof(type) == 8 ? (((type)-1) > 0 ? 
(type)_cffi_to_c_u64(o) \ : (type)_cffi_to_c_i64(o)) : \ - (Py_FatalError("unsupported size for type " #type), 0)) + (Py_FatalError("unsupported size for type " #type), (type)0))) #define _cffi_to_c_i8 \ ((int(*)(PyObject *))_cffi_exports[1]) @@ -907,6 +962,7 @@ static PyObject *_cffi_setup(PyObject *self, PyObject *args) { PyObject *library; int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, &library)) return NULL; diff --git a/Darwin/lib/python3.4/site-packages/cffi/vengine_gen.py b/Darwin/lib/python3.5/site-packages/cffi/vengine_gen.py similarity index 77% rename from Darwin/lib/python3.4/site-packages/cffi/vengine_gen.py rename to Darwin/lib/python3.5/site-packages/cffi/vengine_gen.py index 133ec7f..9cc3853 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/vengine_gen.py +++ b/Darwin/lib/python3.5/site-packages/cffi/vengine_gen.py @@ -58,12 +58,12 @@ class VGenericEngine(object): modname = self.verifier.get_module_name() prnt("void %s%s(void) { }\n" % (prefix, modname)) - def load_library(self): + def load_library(self, flags=0): # import it with the CFFI backend backend = self.ffi._backend # needs to make a path that contains '/', on Posix filename = os.path.join(os.curdir, self.verifier.modulefilename) - module = backend.load_library(filename) + module = backend.load_library(filename, flags) # # call loading_gen_struct() to get the struct layout inferred by # the C compiler @@ -87,7 +87,10 @@ class VGenericEngine(object): return library def _get_declarations(self): - return sorted(self.ffi._parser._declarations.items()) + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst def _generate(self, step_name): for name, tp in self._get_declarations(): @@ -149,15 +152,25 @@ class VGenericEngine(object): context = 'argument of %s' % name arglist = [type.get_c_name(' %s' % arg, context) for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type arglist = ', '.join(arglist) or 'void' wrappername = '_cffi_f_%s' % name self.export_symbols.append(wrappername) - funcdecl = ' %s(%s)' % (wrappername, arglist) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) context = 'result of %s' % name - prnt(tp.result.get_c_name(funcdecl, context)) + prnt(tpresult.get_c_name(funcdecl, context)) prnt('{') # - if not isinstance(tp.result, model.VoidType): + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): result_code = 'return ' else: result_code = '' @@ -174,15 +187,26 @@ class VGenericEngine(object): else: indirections = [] base_tp = tp - if any(isinstance(typ, model.StructOrUnion) for typ in tp.args): + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): indirect_args = [] for i, typ in enumerate(tp.args): if isinstance(typ, model.StructOrUnion): typ = model.PointerType(typ) indirections.append((i, typ)) indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = model.PointerType(indirect_result) + indirect_args.insert(0, 
indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type tp = model.FunctionPtrType(tuple(indirect_args), - tp.result, tp.ellipsis) + indirect_result, tp.ellipsis) BFunc = self.ffi._get_cached_btype(tp) wrappername = '_cffi_f_%s' % name newfunction = module.load_function(BFunc, wrappername) @@ -195,9 +219,16 @@ class VGenericEngine(object): def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): backend = self.ffi._backend BType = self.ffi._get_cached_btype(tp) - def newfunc(*args): - args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] - return oldfunc(*args) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) newfunc._cffi_base_type = base_tp return newfunc @@ -235,7 +266,8 @@ class VGenericEngine(object): prnt('static void %s(%s *p)' % (checkfuncname, cname)) prnt('{') prnt(' /* only to generate compile-time warnings or errors */') - for fname, ftype, fbitsize in tp.enumfields(): + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): if (isinstance(ftype, model.PrimitiveType) and ftype.is_integer_type()) or fbitsize >= 0: # accept all integers, but complain on float or double @@ -244,7 +276,8 @@ class VGenericEngine(object): # only accept exactly the type declared. try: prnt(' { %s = &p->%s; (void)tmp; }' % ( - ftype.get_c_name('*tmp', 'field %r'%fname), fname)) + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) except ffiplatform.VerificationError as e: prnt(' /* %s */' % str(e)) # cannot verify it, ignore prnt('}') @@ -255,7 +288,7 @@ class VGenericEngine(object): prnt(' static intptr_t nums[] = {') prnt(' sizeof(%s),' % cname) prnt(' offsetof(struct _cffi_aligncheck, y),') - for fname, ftype, fbitsize in tp.enumfields(): + for fname, ftype, fbitsize, fqual in tp.enumfields(): if fbitsize >= 0: continue # xxx ignore fbitsize for now prnt(' offsetof(%s, %s),' % (cname, fname)) @@ -317,7 +350,7 @@ class VGenericEngine(object): check(layout[0], ffi.sizeof(BStruct), "wrong total size") check(layout[1], ffi.alignof(BStruct), "wrong total alignment") i = 2 - for fname, ftype, fbitsize in tp.enumfields(): + for fname, ftype, fbitsize, fqual in tp.enumfields(): if fbitsize >= 0: continue # xxx ignore fbitsize for now check(layout[i], ffi.offsetof(BStruct, fname), @@ -354,11 +387,20 @@ class VGenericEngine(object): # ---------- # constants, likely declared with '#define' - def _generate_gen_const(self, is_int, name, tp=None, category='const'): + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): prnt = self._prnt funcname = '_cffi_%s_%s' % (category, name) self.export_symbols.append(funcname) - if is_int: + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: assert category == 'const' prnt('int %s(long long *out_value)' % funcname) prnt('{') @@ -367,12 +409,17 @@ class VGenericEngine(object): prnt('}') else: assert tp is not None - prnt(tp.get_c_name(' %s(void)' % funcname, name),) - prnt('{') + assert check_value is None if category == 'var': ampersand = '&' else: ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + 
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') prnt(' return (%s%s);' % (ampersand, name)) prnt('}') prnt() @@ -383,9 +430,13 @@ class VGenericEngine(object): _loading_gen_constant = _loaded_noop - def _load_constant(self, is_int, tp, name, module): + def _load_constant(self, is_int, tp, name, module, check_value=None): funcname = '_cffi_const_%s' % name - if is_int: + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: BType = self.ffi._typeof_locked("long long*")[0] BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] function = module.load_function(BFunc, funcname) @@ -396,9 +447,15 @@ class VGenericEngine(object): BLongLong = self.ffi._typeof_locked("long long")[0] value += (1 << (8*self.ffi.sizeof(BLongLong))) else: - BFunc = self.ffi._typeof_locked(tp.get_c_name('(*)(void)', name))[0] + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] function = module.load_function(BFunc, funcname) value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] return value def _loaded_gen_constant(self, tp, name, module, library): @@ -410,6 +467,36 @@ class VGenericEngine(object): # ---------- # enums + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise ffiplatform.VerificationError(error) + def _enum_funcname(self, prefix, name): # "$enum_$1" => "___D_enum____D_1" name = name.replace('$', '___D_') @@ -427,24 +514,7 @@ class VGenericEngine(object): prnt('int %s(char *out_error)' % funcname) prnt('{') for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): - if enumvalue < 0: - prnt(' if ((%s) >= 0 || (long)(%s) != %dL) {' % ( - enumerator, enumerator, enumvalue)) - else: - prnt(' if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % ( - enumerator, enumerator, enumvalue)) - prnt(' char buf[64];') - prnt(' if ((%s) < 0)' % enumerator) - prnt(' sprintf(buf, "%%ld", (long)(%s));' % enumerator) - prnt(' else') - prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % - enumerator) - prnt(' sprintf(out_error,' - ' "%s has the real value %s, not %s",') - prnt(' "%s", buf, "%d");' % ( - enumerator[:100], enumvalue)) - prnt(' return -1;') - prnt(' }') + self._check_int_constant_value(enumerator, enumvalue) prnt(' return 0;') prnt('}') prnt() @@ -456,16 +526,8 @@ class VGenericEngine(object): tp.enumvalues = tuple(enumvalues) tp.partial_resolved = True else: - BType = self.ffi._typeof_locked("char[]")[0] - BFunc = 
self.ffi._typeof_locked("int(*)(char*)")[0] funcname = self._enum_funcname(prefix, name) - function = module.load_function(BFunc, funcname) - p = self.ffi.new(BType, 256) - if function(p) < 0: - error = self.ffi.string(p) - if sys.version_info >= (3,): - error = str(error, 'utf-8') - raise ffiplatform.VerificationError(error) + self._load_known_int_constant(module, funcname) def _loaded_gen_enum(self, tp, name, module, library): for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): @@ -476,13 +538,21 @@ class VGenericEngine(object): # macros: for now only for integers def _generate_gen_macro_decl(self, tp, name): - assert tp == '...' - self._generate_gen_const(True, name) + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) _loading_gen_macro = _loaded_noop def _loaded_gen_macro(self, tp, name, module, library): - value = self._load_constant(True, tp, name, module) + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) setattr(library, name, value) type(library)._cffi_dir.append(name) @@ -565,6 +635,24 @@ cffimod_header = r''' typedef unsigned __int16 uint16_t; typedef unsigned __int32 uint32_t; typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; # else # include # endif diff --git a/Darwin/lib/python3.4/site-packages/cffi/verifier.py b/Darwin/lib/python3.5/site-packages/cffi/verifier.py similarity index 70% rename from Darwin/lib/python3.4/site-packages/cffi/verifier.py rename to Darwin/lib/python3.5/site-packages/cffi/verifier.py index 9603a7e..01728ae 100644 --- a/Darwin/lib/python3.4/site-packages/cffi/verifier.py +++ b/Darwin/lib/python3.5/site-packages/cffi/verifier.py @@ -1,12 +1,47 @@ -import sys, os, binascii, imp, shutil -from . import __version__ +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ from . 
import ffiplatform +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass + class Verifier(object): def __init__(self, ffi, preamble, tmpdir=None, modulename=None, - ext_package=None, tag='', force_generic_engine=False, **kwds): + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise ffiplatform.VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) self.ffi = ffi self.preamble = preamble if not modulename: @@ -14,14 +49,15 @@ class Verifier(object): vengine_class = _locate_engine_class(ffi, force_generic_engine) self._vengine = vengine_class(self) self._vengine.patch_extension_kwds(kwds) - self.kwds = kwds + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) # if modulename: if tag: raise TypeError("can't specify both 'modulename' and 'tag'") else: - key = '\x00'.join([sys.version[:3], __version__, preamble, - flattened_kwds] + + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + ffi._cdefsources) if sys.version_info >= (3,): key = key.encode('utf-8') @@ -33,7 +69,7 @@ class Verifier(object): k1, k2) suffix = _get_so_suffixes()[0] self.tmpdir = tmpdir or _caller_dir_pycache() - self.sourcefilename = os.path.join(self.tmpdir, modulename + '.c') + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) self.ext_package = ext_package self._has_source = False @@ -86,6 +122,7 @@ class Verifier(object): return basename def get_extension(self): + _hack_at_distutils() # backward compatibility hack if not self._has_source: with self.ffi._lock: if not self._has_source: @@ -97,6 +134,20 @@ class Verifier(object): def generates_python_module(self): return self._vengine._gen_python_module + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + # ---------- def _locate_module(self): @@ -118,19 +169,36 @@ class Verifier(object): self._vengine.collect_types() self._has_module = True - def _write_source(self, file=None): - must_close = (file is None) - if must_close: - _ensure_dir(self.sourcefilename) - file = open(self.sourcefilename, 'w') + def _write_source_to(self, file): self._vengine._f = file try: self._vengine.write_source_to_f() finally: del 
self._vengine._f - if must_close: - file.close() - if must_close: + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. + f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag self._has_source = True def _compile_module(self): @@ -148,7 +216,10 @@ class Verifier(object): def _load_library(self): assert self._has_module - return self._vengine.load_library() + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() # ____________________________________________________________ @@ -181,6 +252,9 @@ _TMPDIR = None def _caller_dir_pycache(): if _TMPDIR: return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result filename = sys._getframe(2).f_code.co_filename return os.path.abspath(os.path.join(os.path.dirname(filename), '__pycache__')) @@ -222,11 +296,7 @@ def cleanup_tmpdir(tmpdir=None, keep_so=False): pass def _get_so_suffixes(): - suffixes = [] - for suffix, mode, type in imp.get_suffixes(): - if type == imp.C_EXTENSION: - suffixes.append(suffix) - + suffixes = _extension_suffixes() if not suffixes: # bah, no C_EXTENSION available. Occurs on pypy without cpyext if sys.platform == 'win32': diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..951d7db --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,57 @@ +Cryptography +============ + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + +.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/cryptography?branch=master + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+. + +``cryptography`` includes both high level recipes, and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' + >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. 
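The description above pairs the high-level Fernet recipe with a mention of the "low level interfaces"; as a complementary sketch (editorial, not part of the packaged DESCRIPTION.rst), this is roughly how a message digest is computed through the hazmat modules added in this diff. Note that in cryptography 1.1.1 the backend argument is still required.

```python
# Hedged sketch of the "low level interfaces" mentioned above: a SHA-256
# digest via the hazmat layer shipped in this wheel.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"A really secret message. Not for prying eyes.")
print(digest.finalize().hex())
```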
+ +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get +involved. + + +.. _`documentation`: https://cryptography.io/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + + diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/METADATA b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/METADATA new file mode 100644 index 0000000..ebad722 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.0 +Name: cryptography +Version: 1.1.1 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Home-page: https://github.com/pyca/cryptography +Author: The cryptography developers +Author-email: cryptography-dev@python.org +License: BSD or Apache License, Version 2.0 +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Dist: cffi (>=1.1.0) +Requires-Dist: idna (>=2.0) +Requires-Dist: pyasn1 (>=0.1.8) +Requires-Dist: setuptools +Requires-Dist: six (>=1.4.1) + +Cryptography +============ + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + +.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/cryptography?branch=master + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+. + +``cryptography`` includes both high level recipes, and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! 
+ >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' + >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get +involved. + + +.. _`documentation`: https://cryptography.io/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + + diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/RECORD new file mode 100644 index 0000000..94dc40b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/RECORD @@ -0,0 +1,142 @@ +cryptography/__about__.py,sha256=0VyI8rx0r85wL6444eyvqF0rH0CLz4wbkdplVRhde98,817 +cryptography/__init__.py,sha256=Tkv1Zn9sQRZGTj8HGlgZy-PjBytscZdRSaMg-ahKjxY,752 +cryptography/exceptions.py,sha256=EEEhVmlWPJO0RNC9xAO--IzJDrFR_4u1bXPBIs0jEBM,1513 +cryptography/fernet.py,sha256=S1wID45we0yrimIoMOMYu89sI32rhoCgCnygzG0peSU,4295 +cryptography/utils.py,sha256=FjGlVCuTVnDNUqurk-tMHXLgCbeLE1jjpes4vfrowZ8,3455 +cryptography/hazmat/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/backends/__init__.py,sha256=oQFGi9nQ6d7IiWDMaoleb2taGLgqQEvXXkgOxSTW5bQ,1325 +cryptography/hazmat/backends/interfaces.py,sha256=zLVR3CcFbG_lV5X4plBDY8Mn_b0yiNT8o3uwNznXgEk,9325 +cryptography/hazmat/backends/multibackend.py,sha256=DVEhJecIJCJjZ1QfZ-r8bC9r9DRnD1_pJJ2sq77NIc4,14946 +cryptography/hazmat/backends/commoncrypto/__init__.py,sha256=aihmGquS6-l9Bcx6W8-nPO7BbpBlFg86IjPMIOKtJDk,341 +cryptography/hazmat/backends/commoncrypto/backend.py,sha256=GjHLdBKBCIg0hoSEuf_PFAaJfcxUHr6ShHh1LJG7NIg,8577 +cryptography/hazmat/backends/commoncrypto/ciphers.py,sha256=E1T9KtsnqeUVk4dl64ESwL9WiACdDUEqTDbc2JgtBaw,7946 +cryptography/hazmat/backends/commoncrypto/hashes.py,sha256=AJl9-ALt2HV3F7GfkMVc7z2Rj_E4msk9juscmSjx5bE,2040 +cryptography/hazmat/backends/commoncrypto/hmac.py,sha256=THOz8zjgFb-fbzOjQvKy7fr0Ri9Qw8fzr2hAe3N9iPo,2188 +cryptography/hazmat/backends/openssl/__init__.py,sha256=k4DMe228_hTuB2kY3Lwk62JdI3EmCd7VkV01zJm57ps,336 +cryptography/hazmat/backends/openssl/backend.py,sha256=to0tMJxCG6LL4EjyvB4CD756BPZbX4eELG1ozBRa0RE,79554 +cryptography/hazmat/backends/openssl/ciphers.py,sha256=67Paxuvbw8U_AcHvOm17RLIN9iinQgxtT89icnNcUHY,8759 +cryptography/hazmat/backends/openssl/cmac.py,sha256=SeZ7v_2sm4_GJmB0S39DMcCYr16X7bwQIBHAYG6Qkms,2797 +cryptography/hazmat/backends/openssl/dsa.py,sha256=-90bcweQjZmaGonnvz0_EN_I7d6wC5wvMyA_GmiVg1k,8353 +cryptography/hazmat/backends/openssl/ec.py,sha256=erW-j19CR4MAwue4PG_IfJScIi2tC6S9ojn3lWgNObU,10116 +cryptography/hazmat/backends/openssl/hashes.py,sha256=6KnIaZAAXz8vbMy1-1oZ_mptSaLyNSA1TFGiXmT7AkQ,2594 +cryptography/hazmat/backends/openssl/hmac.py,sha256=ZXLmKVRRObmlbvmLxruv3etarkLRCTkYh_70nKJlp9w,3067 +cryptography/hazmat/backends/openssl/rsa.py,sha256=dQPI4Q4VgYDzHHW34CbVjzKRni7JQhqn4ZxrdtVWRlQ,23375 +cryptography/hazmat/backends/openssl/utils.py,sha256=q2XWHoedrfB6bUqxV4Omr_ebp_R7VvSHJCw5sQN3Iag,737 +cryptography/hazmat/backends/openssl/x509.py,sha256=C0k2uUUdC8t4BdFJuFyGq4FQBEWbCZ2__kAqU_vor9Y,34381 
+cryptography/hazmat/bindings/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/bindings/_constant_time.cpython-35m-darwin.so,sha256=NUmp8xkrKwTHJFDXiEWCqs_5YpTpSqGiy2FmfnG2pok,22776 +cryptography/hazmat/bindings/_openssl.cpython-35m-darwin.so,sha256=sfXxCyDQMm9fXTGCRYHjIEmMC5CFpCdmmkfSJkDgY6w,3332512 +cryptography/hazmat/bindings/_padding.cpython-35m-darwin.so,sha256=skm5f4GZGmlrwPlCOQ8B-3wzPNocpNWFQq4n3wC1604,22728 +cryptography/hazmat/bindings/commoncrypto/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/bindings/commoncrypto/binding.py,sha256=cO7kSFGxbBSnszoA1EXQSlxC0vA0EcrfphNdUK61DJ8,410 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=YCWstm3ccC3X149zV-9In2kujZYUIZyRe1AgkIfcPUg,12334 +cryptography/hazmat/bindings/openssl/binding.py,sha256=cUkhv5fYBxSYO0f3Ii0WRTRFbzqjdcC6ALT8s-S1Zss,5922 +cryptography/hazmat/primitives/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/primitives/cmac.py,sha256=qGZcqMN57eGkqYx7Ypd23WOBkMkrwCb5M17QbqrvZW0,2235 +cryptography/hazmat/primitives/constant_time.py,sha256=rsSJc99kyQ2VwNVP9w-0mr80sZnppgWcm9LfQitftF0,798 +cryptography/hazmat/primitives/hashes.py,sha256=Lw9UpEzn5flkQJUK4_psFo7vxsTBBVqRtb5dvZ2F1Ro,3954 +cryptography/hazmat/primitives/hmac.py,sha256=pp6gwio7HR6QWZNhnPEyuG9Zj3-gKoR6UM7fhphSvRo,2353 +cryptography/hazmat/primitives/keywrap.py,sha256=gJp1qggkxvQXlrl262aW4sfpBWWhLQYhNWNYGy0M3_A,3050 +cryptography/hazmat/primitives/padding.py,sha256=ORXdpuspku0CcYr0WrOnKtM6WwxhzLl7XyUG5_mFcMU,3635 +cryptography/hazmat/primitives/serialization.py,sha256=dWaO3RlNcmygbknNIN_VbaSNHZXw6qhvfiTrMqdVdCY,5152 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=WhUn3tGxoLAxGAsZHElJ2aOILXSh55AZi04MBudYmQA,1020 +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=iEk2dnAdro7SqH9VU9v39rSWQoyJpf2nCAR9J3WmZ60,4183 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=_2KFCkIuYRz2Ni1TPcvLxXOJH-D8rXhIJwcAG4R4FoU,6292 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=sChzpKwmvpmhuYHzgdWEBqoK4bM-wohMz7oceM1CjM4,8632 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=OR6Nm2KTdVJ-NNdRkBVhIPGj9cJJdF2LEkHfAFLA_EY,1803 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=QpJ0q_Fg97cvXMVb5fFcQ9s9c-3U9QIz0GehVO8oOGo,9908 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=rMjag4WEA_DVwNg2hYMUqy49jlUeCpvype0mP-P5eLk,2029 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=Kqf2BvvQ--KiFwfZ7oCRSfwrdi5wnERm7ctO7cEQ2Fo,574 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=Jo_kxQQev5a1DBOvTD7GcbTGcd_6YfYgVRdFZdiWHF8,3419 +cryptography/hazmat/primitives/ciphers/base.py,sha256=bcuWG0v2ETvtJeEs1dfk-_Fm_FQKvAPMBlURilhNJRE,6169 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=TiOYjod8RwYATKkyZNFlA6H_o2n1aHMMKHSL8UxKmFM,4682 +cryptography/hazmat/primitives/interfaces/__init__.py,sha256=dgLoN6PdA_QqowwB_RzSeEqJjIPnAqTUTxio-8CFP-8,884 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=nod5HjPswjZr8wFp6Tsu6en9blHYF3khgXI5R0zIcnM,771 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=WTooWTAkHUCtaMAUouAluNbg71qXxPj_AHLyuRnPDR8,4109 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=sf1YjTp8wVqvTEiga4Vee9km_ToHc3oVDLS7luATCyI,3675 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=YiFNDI4CbGRH0OQXDMXG8DfYap32vtjvA-Zo-n_oAE4,2185 
+cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=yOK2qAlWTHABVIvwT55jl_B6UR5ELyLFlcVtH16HxGc,2363 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=BWrm3DKDoAa281E7U_nzz8v44OmAiXmlIycFcsehwfE,288 +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=EwMzI0B8GUa3FpGKLryDve4_raIiDIQAsWCyxeUcht8,2516 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=3x9sPZ70VbSWSPfembr57imJkoq6-JgPiKjvmuKjyaA,1532 +cryptography/hazmat/primitives/twofactor/utils.py,sha256=71gX1bJeP9TQa-HbSPzeUUJwVY78ALYQNvuusknUO4U,954 +cryptography/x509/__init__.py,sha256=0pW6bAecLJ8-OU5ZZ12slzKOXTjkR3lW4nxC0kkK-yU,6335 +cryptography/x509/base.py,sha256=A4S55pAnk6JJ9is0vDLnNhp8h-A0-ET8dVuru1ch544,13734 +cryptography/x509/extensions.py,sha256=2adK1yCxe9fFaHapgfIaDuihIGhiNBVm5Ils0mdtUBI,29152 +cryptography/x509/general_name.py,sha256=eX7S0xNRfMexpn9TMbzNH-T8p5lZxVneJTE0qATMw44,7405 +cryptography/x509/name.py,sha256=I-dCbj2Fv3pAKzszivYfFOdIu0CvrUqmUKOtXxvR-7c,2116 +cryptography/x509/oid.py,sha256=r9tqsnpPENJE-b60iz3UoM2lB0eiuWt2T_YRBZLcD1s,9192 +cryptography-1.1.1.dist-info/DESCRIPTION.rst,sha256=pVyhl71rmRmivUoJQGAS4Jy93myVUgINRBhxIFzqI6w,1940 +cryptography-1.1.1.dist-info/METADATA,sha256=Y6-2h35477K4fTFiLlus0wqRIF1MEEExiuze7r2VvpI,3445 +cryptography-1.1.1.dist-info/RECORD,, +cryptography-1.1.1.dist-info/WHEEL,sha256=PDKbaZw4S6tFBK4Wu3E7RG8JTElWZ5rVAcyYIGagy50,109 +cryptography-1.1.1.dist-info/entry_points.txt,sha256=aG4jSsppK5sGk3sJ88DGQsXxbseci9MN5ZgbY9VKvyk,80 +cryptography-1.1.1.dist-info/metadata.json,sha256=duNBhdEZeqXn5tHraZdqmeUp5EtSAq_iFjNmiq74XEI,1755 +cryptography-1.1.1.dist-info/top_level.txt,sha256=QCkYQE4HJBpqIr-aBqbOZ70NlfbejKnDE6ODbNgUwwg,46 +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-35.pyc,, +cryptography/__pycache__/__about__.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/bindings/commoncrypto/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-35.pyc,, +cryptography/x509/__pycache__/extensions.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-35.pyc,, +cryptography/__pycache__/exceptions.cpython-35.pyc,, +cryptography/hazmat/bindings/__pycache__/__init__.cpython-35.pyc,, +cryptography/__pycache__/fernet.cpython-35.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-35.pyc,, +cryptography/hazmat/bindings/commoncrypto/__pycache__/binding.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-35.pyc,, +cryptography/__pycache__/utils.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-35.pyc,, +cryptography/hazmat/primitives/interfaces/__pycache__/__init__.cpython-35.pyc,, +cryptography/x509/__pycache__/general_name.cpython-35.pyc,, 
+cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-35.pyc,, +cryptography/x509/__pycache__/base.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/multibackend.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/interfaces.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-35.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-35.pyc,, +cryptography/x509/__pycache__/oid.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/serialization.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/ciphers.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/backend.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-35.pyc,, +cryptography/x509/__pycache__/name.cpython-35.pyc,, +cryptography/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-35.pyc,, +cryptography/x509/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-35.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/WHEEL new file mode 100644 index 0000000..d2f35a2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-cp35m-macosx_10_6_intel + diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/entry_points.txt b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/entry_points.txt new file mode 100644 index 0000000..bfde650 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/entry_points.txt @@ -0,0 
+1,3 @@ +[cryptography.backends] +openssl = cryptography.hazmat.backends.openssl:backend + diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/metadata.json new file mode 100644 index 0000000..3db2800 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "cryptography is a package which provides cryptographic recipes and primitives to Python developers.", "classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: BSD", "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography"], "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/pyca/cryptography"}, "contacts": [{"email": "cryptography-dev@python.org", "name": "The cryptography developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}, "python.exports": {"cryptography.backends": {"openssl": "cryptography.hazmat.backends.openssl:backend"}}}, "license": "BSD or Apache License, Version 2.0", "metadata_version": "2.0", "name": "cryptography", "run_requires": [{"requires": ["cffi (>=1.1.0)", "idna (>=2.0)", "pyasn1 (>=0.1.8)", "setuptools", "six (>=1.4.1)"]}], "extras": [], "version": "1.1.1", "test_requires": [{"requires": ["cryptography-vectors (==1.1.1)", "hypothesis", "iso8601", "pretend", "pyasn1-modules", "pytest"]}]} \ No newline at end of file diff --git a/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/top_level.txt b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..e32461e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography-1.1.1.dist-info/top_level.txt @@ -0,0 +1,4 @@ +_constant_time +_openssl +_padding +cryptography diff --git a/Darwin/lib/python3.5/site-packages/cryptography/__about__.py b/Darwin/lib/python3.5/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..02e50ac --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/__about__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "cryptography" +__summary__ = ("cryptography is a package which provides cryptographic recipes" + " and primitives to Python developers.") +__uri__ = "https://github.com/pyca/cryptography" + +__version__ = "1.1.1" + +__author__ = "The cryptography developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2015 {0}".format(__author__) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..985ebd6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import sys +import warnings + +from cryptography.__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +if sys.version_info[:2] == (2, 6): + warnings.warn( + "Python 2.6 is no longer supported by the Python core team, please " + "upgrade your Python.", + DeprecationWarning + ) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/exceptions.py b/Darwin/lib/python3.5/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..3bf8a75 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/exceptions.py @@ -0,0 +1,70 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from enum import Enum + +from cryptography import utils +from cryptography.hazmat.primitives import twofactor + + +class _Reasons(Enum): + BACKEND_MISSING_INTERFACE = 0 + UNSUPPORTED_HASH = 1 + UNSUPPORTED_CIPHER = 2 + UNSUPPORTED_PADDING = 3 + UNSUPPORTED_MGF = 4 + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5 + UNSUPPORTED_ELLIPTIC_CURVE = 6 + UNSUPPORTED_SERIALIZATION = 7 + UNSUPPORTED_X509 = 8 + UNSUPPORTED_EXCHANGE_ALGORITHM = 9 + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message, reason=None): + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__(self, msg, err_code): + super(InternalError, self).__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass + + +InvalidToken = utils.deprecated( + twofactor.InvalidToken, + __name__, + ( + "The InvalidToken exception has moved to the " + "cryptography.hazmat.primitives.twofactor module" + ), + utils.DeprecatedIn09 +) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/fernet.py b/Darwin/lib/python3.5/site-packages/cryptography/fernet.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/cryptography/fernet.py rename to Darwin/lib/python3.5/site-packages/cryptography/fernet.py index cdb9bdc..6fbe9f2 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/fernet.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/fernet.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
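The reworked exceptions module above attaches an optional _Reasons value to UnsupportedAlgorithm and an error code to InternalError. A hedged sketch of how calling code typically reacts when a backend lacks a primitive; the hash choice is only illustrative:

    # Illustrative handling of UnsupportedAlgorithm as defined above.
    from cryptography.exceptions import UnsupportedAlgorithm
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes

    try:
        h = hashes.Hash(hashes.SHA256(), backend=default_backend())
        h.update(b"data")
        print(h.finalize())
    except UnsupportedAlgorithm as exc:
        # The private _reason attribute holds one of the _Reasons enum members.
        print("hash not supported by this backend:", exc)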
from __future__ import absolute_import, division, print_function @@ -90,7 +81,7 @@ class Fernet(object): except (TypeError, binascii.Error): raise InvalidToken - if six.indexbytes(data, 0) != 0x80: + if not data or six.indexbytes(data, 0) != 0x80: raise InvalidToken try: @@ -127,3 +118,24 @@ class Fernet(object): except ValueError: raise InvalidToken return unpadded + + +class MultiFernet(object): + def __init__(self, fernets): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg): + return self._fernets[0].encrypt(msg) + + def decrypt(self, msg, ttl=None): + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..256fee3 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,42 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import pkg_resources + +from cryptography.hazmat.backends.multibackend import MultiBackend + + +_available_backends_list = None + + +def _available_backends(): + global _available_backends_list + + if _available_backends_list is None: + _available_backends_list = [ + # setuptools 11.3 deprecated support for the require parameter to + # load(), and introduced the new resolve() method instead. + # This can be removed if/when we can assume setuptools>=11.3. At + # some point we may wish to add a warning, to push people along, + # but at present this would result in too many warnings. + ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False) + for ep in pkg_resources.iter_entry_points( + "cryptography.backends" + ) + ] + + return _available_backends_list + +_default_backend = None + + +def default_backend(): + global _default_backend + + if _default_backend is None: + _default_backend = MultiBackend(_available_backends()) + + return _default_backend diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py new file mode 100644 index 0000000..1d52a25 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
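MultiFernet, added above, tries each Fernet in order on decryption, which is what enables key rotation. A minimal usage sketch with freshly generated keys (the values are illustrative):

    # Encrypt with the newest key; still decrypt tokens made under an old key.
    from cryptography.fernet import Fernet, MultiFernet

    old = Fernet(Fernet.generate_key())
    new = Fernet(Fernet.generate_key())
    old_token = old.encrypt(b"secret")

    rotated = MultiFernet([new, old])          # newest key first
    assert rotated.decrypt(old_token) == b"secret"
    token = rotated.encrypt(b"secret")         # always produced with `new`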
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.commoncrypto.backend import backend + + +__all__ = ["backend"] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py similarity index 93% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py index 7bab979..315d67d 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function @@ -236,7 +227,8 @@ class Backend(object): else: raise InternalError( "The backend returned an unknown error, consider filing a bug." - " Code: {0}.".format(response) + " Code: {0}.".format(response), + response ) def _release_cipher_ctx(self, ctx): diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py similarity index 81% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py index 525500c..1ce8aec 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
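The CommonCrypto ciphers.py renamed above, and patched in the following hunks, sits behind the public Cipher API. A hedged sketch of the AES-GCM round trip that ends up in _GCMCipherContext on OS X; key, nonce and associated data are placeholders:

    # AES-GCM through the public API; backend cipher contexts do the real work.
    import os
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, iv = os.urandom(32), os.urandom(12)

    enc = Cipher(algorithms.AES(key), modes.GCM(iv), default_backend()).encryptor()
    enc.authenticate_additional_data(b"header")
    ct = enc.update(b"payload") + enc.finalize()

    dec = Cipher(
        algorithms.AES(key), modes.GCM(iv, enc.tag), default_backend()
    ).decryptor()
    dec.authenticate_additional_data(b"header")
    assert dec.update(ct) + dec.finalize() == b"payload"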
from __future__ import absolute_import, division, print_function @@ -17,13 +8,14 @@ from cryptography import utils from cryptography.exceptions import ( InvalidTag, UnsupportedAlgorithm, _Reasons ) -from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives import ciphers, constant_time +from cryptography.hazmat.primitives.ciphers import modes from cryptography.hazmat.primitives.ciphers.modes import ( CFB, CFB8, CTR, OFB ) -@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(ciphers.CipherContext) class _CipherContext(object): def __init__(self, backend, cipher, mode, operation): self._backend = backend @@ -40,7 +32,7 @@ class _CipherContext(object): # treat RC4 and other stream cipher block sizes). # This bug has been filed as rdar://15589470 self._bytes_processed = 0 - if (isinstance(cipher, interfaces.BlockCipherAlgorithm) and not + if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not isinstance(mode, (OFB, CFB, CFB8, CTR))): self._byte_block_size = cipher.block_size // 8 else: @@ -60,9 +52,9 @@ class _CipherContext(object): ctx = self._backend._ffi.new("CCCryptorRef *") ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) - if isinstance(mode, interfaces.ModeWithInitializationVector): + if isinstance(mode, modes.ModeWithInitializationVector): iv_nonce = mode.initialization_vector - elif isinstance(mode, interfaces.ModeWithNonce): + elif isinstance(mode, modes.ModeWithNonce): iv_nonce = mode.nonce else: iv_nonce = self._backend._ffi.NULL @@ -110,8 +102,8 @@ class _CipherContext(object): return self._backend._ffi.buffer(buf)[:outlen[0]] -@utils.register_interface(interfaces.AEADCipherContext) -@utils.register_interface(interfaces.AEADEncryptionContext) +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) class _GCMCipherContext(object): def __init__(self, backend, cipher, mode, operation): self._backend = backend @@ -151,6 +143,12 @@ class _GCMCipherContext(object): len(mode.initialization_vector) ) self._backend._check_cipher_response(res) + # CommonCrypto has a bug where calling update without at least one + # call to authenticate_additional_data will result in null byte output + # for ciphertext. The following empty byte string call prevents the + # issue, which is present in at least 10.8 and 10.9. + # Filed as rdar://18314544 + self.authenticate_additional_data(b"") def update(self, data): buf = self._backend._ffi.new("unsigned char[]", len(data)) @@ -164,6 +162,12 @@ class _GCMCipherContext(object): return self._backend._ffi.buffer(buf)[:] def finalize(self): + # CommonCrypto has a yet another bug where you must make at least one + # call to update. If you pass just AAD and call finalize without a call + # to update you'll get null bytes for tag. The following update call + # prevents this issue, which is present in at least 10.8 and 10.9. 
+ # Filed as rdar://18314580 + self.update(b"") tag_size = self._cipher.block_size // 8 tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) tag_len = self._backend._ffi.new("size_t *", tag_size) @@ -186,6 +190,4 @@ class _GCMCipherContext(object): ) self._backend._check_cipher_response(res) - @property - def tag(self): - return self._tag + tag = utils.read_only_property("_tag") diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py similarity index 70% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py index ebad720..a54e983 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py @@ -1,27 +1,18 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives import hashes -@utils.register_interface(interfaces.HashContext) +@utils.register_interface(hashes.HashContext) class _HashContext(object): def __init__(self, backend, algorithm, ctx=None): - self.algorithm = algorithm + self._algorithm = algorithm self._backend = backend if ctx is None: @@ -39,6 +30,8 @@ class _HashContext(object): self._ctx = ctx + algorithm = utils.read_only_property("_algorithm") + def copy(self): methods = self._backend._hash_mapping[self.algorithm.name] new_ctx = self._backend._ffi.new(methods.ctx) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py similarity index 65% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py index ec3a878..ae623d8 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py @@ -1,27 +1,21 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces -@utils.register_interface(interfaces.HashContext) +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) class _HMACContext(object): def __init__(self, backend, key, algorithm, ctx=None): - self.algorithm = algorithm + self._algorithm = algorithm self._backend = backend if ctx is None: ctx = self._backend._ffi.new("CCHmacContext *") @@ -39,6 +33,8 @@ class _HMACContext(object): self._ctx = ctx self._key = key + algorithm = utils.read_only_property("_algorithm") + def copy(self): copied_ctx = self._backend._ffi.new("CCHmacContext *") # CommonCrypto has no APIs for copying HMACs, so we have to copy the @@ -56,3 +52,8 @@ class _HMACContext(object): self.algorithm.digest_size) self._backend._lib.CCHmacFinal(self._ctx, buf) return self._backend._ffi.buffer(buf)[:] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/interfaces.py similarity index 62% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/interfaces.py index 5ed4996..92d9653 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/interfaces.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/interfaces.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function @@ -66,7 +57,22 @@ class HMACBackend(object): @abc.abstractmethod def create_hmac_ctx(self, key, algorithm): """ - Create a HashContext for calculating a message authentication code. 
+ Create a MACContext for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACBackend(object): + @abc.abstractmethod + def cmac_algorithm_supported(self, algorithm): + """ + Returns True if the block cipher is supported for CMAC by this backend + """ + + @abc.abstractmethod + def create_cmac_ctx(self, algorithm): + """ + Create a MACContext for calculating a message authentication code. """ @@ -96,39 +102,6 @@ class RSABackend(object): of key_size bits. """ - @abc.abstractmethod - def create_rsa_signature_ctx(self, private_key, padding, algorithm): - """ - Returns an object conforming to the AsymmetricSignatureContext - interface. - """ - - @abc.abstractmethod - def create_rsa_verification_ctx(self, public_key, signature, padding, - algorithm): - """ - Returns an object conforming to the AsymmetricVerificationContext - interface. - """ - - @abc.abstractmethod - def mgf1_hash_supported(self, algorithm): - """ - Return True if the hash algorithm is supported for MGF1 in PSS. - """ - - @abc.abstractmethod - def decrypt_rsa(self, private_key, ciphertext, padding): - """ - Returns decrypted bytes. - """ - - @abc.abstractmethod - def encrypt_rsa(self, public_key, plaintext, padding): - """ - Returns encrypted bytes. - """ - @abc.abstractmethod def rsa_padding_supported(self, padding): """ @@ -176,20 +149,6 @@ class DSABackend(object): Generate a DSAPrivateKey instance using key size only. """ - @abc.abstractmethod - def create_dsa_signature_ctx(self, private_key, algorithm): - """ - Returns an object conforming to the AsymmetricSignatureContext - interface. - """ - - @abc.abstractmethod - def create_dsa_verification_ctx(self, public_key, signature, algorithm): - """ - Returns an object conforming to the AsymmetricVerificationContext - interface. - """ - @abc.abstractmethod def dsa_hash_supported(self, algorithm): """ @@ -221,41 +180,6 @@ class DSABackend(object): """ -@six.add_metaclass(abc.ABCMeta) -class TraditionalOpenSSLSerializationBackend(object): - @abc.abstractmethod - def load_traditional_openssl_pem_private_key(self, data, password): - """ - Load a private key from PEM encoded data, using password if the data - is encrypted. - """ - - -@six.add_metaclass(abc.ABCMeta) -class PKCS8SerializationBackend(object): - @abc.abstractmethod - def load_pkcs8_pem_private_key(self, data, password): - """ - Load a private key from PEM encoded data, using password if the data - is encrypted. - """ - - -@six.add_metaclass(abc.ABCMeta) -class CMACBackend(object): - @abc.abstractmethod - def cmac_algorithm_supported(self, algorithm): - """ - Returns True if the block cipher is supported for CMAC by this backend - """ - - @abc.abstractmethod - def create_cmac_ctx(self, algorithm): - """ - Create a CMACContext for calculating a message authentication code. - """ - - @six.add_metaclass(abc.ABCMeta) class EllipticCurveBackend(object): @abc.abstractmethod @@ -280,13 +204,142 @@ class EllipticCurveBackend(object): """ @abc.abstractmethod - def elliptic_curve_public_key_from_numbers(self, numbers): + def load_elliptic_curve_public_numbers(self, numbers): """ Return an EllipticCurvePublicKey provider using the given numbers. """ @abc.abstractmethod - def elliptic_curve_private_key_from_numbers(self, numbers): + def load_elliptic_curve_private_numbers(self, numbers): """ - Return an EllipticCurvePublicKey provider using the given numbers. + Return an EllipticCurvePrivateKey provider using the given numbers. 
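Both the HMAC and CMAC backends above now hand back MACContext providers, whose public counterparts expose verify(). A small hedged sketch of computing and checking an HMAC tag; key and message are placeholders:

    # HMAC via the public API; verify() raises InvalidSignature on mismatch.
    import os
    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, hmac

    key = os.urandom(32)

    signer = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    signer.update(b"message")
    tag = signer.finalize()

    checker = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    checker.update(b"message")
    try:
        checker.verify(tag)
    except InvalidSignature:
        raise SystemExit("MAC check failed")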
+ """ + + @abc.abstractmethod + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PEMSerializationBackend(object): + @abc.abstractmethod + def load_pem_private_key(self, data, password): + """ + Loads a private key from PEM encoded data, using the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_pem_public_key(self, data): + """ + Loads a public key from PEM encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DERSerializationBackend(object): + @abc.abstractmethod + def load_der_private_key(self, data, password): + """ + Loads a private key from DER encoded data. Uses the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_der_public_key(self, data): + """ + Loads a public key from DER encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X509Backend(object): + @abc.abstractmethod + def load_pem_x509_certificate(self, data): + """ + Load an X.509 certificate from PEM encoded data. + """ + + @abc.abstractmethod + def load_der_x509_certificate(self, data): + """ + Load an X.509 certificate from DER encoded data. + """ + + @abc.abstractmethod + def load_der_x509_csr(self, data): + """ + Load an X.509 CSR from DER encoded data. + """ + + @abc.abstractmethod + def load_pem_x509_csr(self, data): + """ + Load an X.509 CSR from PEM encoded data. + """ + + @abc.abstractmethod + def create_x509_csr(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CSR from a CSR builder object. + """ + + @abc.abstractmethod + def create_x509_certificate(self, builder, private_key, algorithm): + """ + Create and sign an X.509 certificate from a CertificateBuilder object. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHBackend(object): + @abc.abstractmethod + def generate_dh_parameters(self, key_size): + """ + Generate a DHParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dh_private_key(self, parameters): + """ + Generate a DHPrivateKey instance with parameters as a DHParameters + object. + """ + + @abc.abstractmethod + def generate_dh_private_key_and_parameters(self, key_size): + """ + Generate a DHPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def load_dh_private_numbers(self, numbers): + """ + Returns a DHPrivateKey provider. + """ + + @abc.abstractmethod + def load_dh_public_numbers(self, numbers): + """ + Returns a DHPublicKey provider. + """ + + @abc.abstractmethod + def load_dh_parameter_numbers(self, numbers): + """ + Returns a DHParameters provider. + """ + + @abc.abstractmethod + def dh_exchange_algorithm_supported(self, exchange_algorithm): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + @abc.abstractmethod + def dh_parameters_supported(self, p, g): + """ + Returns whether the backend supports DH with these parameter values. 
""" diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/multibackend.py similarity index 64% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/multibackend.py index 35e2a09..bbaaf42 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/multibackend.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/multibackend.py @@ -1,37 +1,29 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm, _Reasons from cryptography.hazmat.backends.interfaces import ( - CMACBackend, CipherBackend, DSABackend, EllipticCurveBackend, HMACBackend, - HashBackend, PBKDF2HMACBackend, PKCS8SerializationBackend, - RSABackend, TraditionalOpenSSLSerializationBackend + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend ) @utils.register_interface(CMACBackend) @utils.register_interface(CipherBackend) +@utils.register_interface(DERSerializationBackend) @utils.register_interface(HashBackend) @utils.register_interface(HMACBackend) @utils.register_interface(PBKDF2HMACBackend) -@utils.register_interface(PKCS8SerializationBackend) @utils.register_interface(RSABackend) -@utils.register_interface(TraditionalOpenSSLSerializationBackend) @utils.register_interface(DSABackend) @utils.register_interface(EllipticCurveBackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) class MultiBackend(object): name = "multibackend" @@ -43,33 +35,33 @@ class MultiBackend(object): if isinstance(b, interface): yield b - def cipher_supported(self, algorithm, mode): + def cipher_supported(self, cipher, mode): return any( - b.cipher_supported(algorithm, mode) + b.cipher_supported(cipher, mode) for b in self._filtered_backends(CipherBackend) ) - def create_symmetric_encryption_ctx(self, algorithm, mode): + def create_symmetric_encryption_ctx(self, cipher, mode): for b in self._filtered_backends(CipherBackend): try: - return b.create_symmetric_encryption_ctx(algorithm, mode) + return b.create_symmetric_encryption_ctx(cipher, mode) except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( "cipher {0} in {1} mode is not supported by this backend.".format( - algorithm.name, mode.name if mode else mode), + cipher.name, mode.name if mode else mode), _Reasons.UNSUPPORTED_CIPHER ) - def create_symmetric_decryption_ctx(self, algorithm, mode): + def create_symmetric_decryption_ctx(self, cipher, mode): 
for b in self._filtered_backends(CipherBackend): try: - return b.create_symmetric_decryption_ctx(algorithm, mode) + return b.create_symmetric_decryption_ctx(cipher, mode) except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( "cipher {0} in {1} mode is not supported by this backend.".format( - algorithm.name, mode.name if mode else mode), + cipher.name, mode.name if mode else mode), _Reasons.UNSUPPORTED_CIPHER ) @@ -144,38 +136,6 @@ class MultiBackend(object): raise UnsupportedAlgorithm("RSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - def create_rsa_signature_ctx(self, private_key, padding, algorithm): - for b in self._filtered_backends(RSABackend): - return b.create_rsa_signature_ctx(private_key, padding, algorithm) - raise UnsupportedAlgorithm("RSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def create_rsa_verification_ctx(self, public_key, signature, padding, - algorithm): - for b in self._filtered_backends(RSABackend): - return b.create_rsa_verification_ctx(public_key, signature, - padding, algorithm) - raise UnsupportedAlgorithm("RSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def mgf1_hash_supported(self, algorithm): - for b in self._filtered_backends(RSABackend): - return b.mgf1_hash_supported(algorithm) - raise UnsupportedAlgorithm("RSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def decrypt_rsa(self, private_key, ciphertext, padding): - for b in self._filtered_backends(RSABackend): - return b.decrypt_rsa(private_key, ciphertext, padding) - raise UnsupportedAlgorithm("RSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def encrypt_rsa(self, public_key, plaintext, padding): - for b in self._filtered_backends(RSABackend): - return b.encrypt_rsa(public_key, plaintext, padding) - raise UnsupportedAlgorithm("RSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - def rsa_padding_supported(self, padding): for b in self._filtered_backends(RSABackend): return b.rsa_padding_supported(padding) @@ -214,19 +174,6 @@ class MultiBackend(object): raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - def create_dsa_verification_ctx(self, public_key, signature, algorithm): - for b in self._filtered_backends(DSABackend): - return b.create_dsa_verification_ctx(public_key, signature, - algorithm) - raise UnsupportedAlgorithm("DSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - - def create_dsa_signature_ctx(self, private_key, algorithm): - for b in self._filtered_backends(DSABackend): - return b.create_dsa_signature_ctx(private_key, algorithm) - raise UnsupportedAlgorithm("DSA is not supported by the backend.", - _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) - def dsa_hash_supported(self, algorithm): for b in self._filtered_backends(DSABackend): return b.dsa_hash_supported(algorithm) @@ -239,6 +186,24 @@ class MultiBackend(object): raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + def load_dsa_public_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_public_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_private_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + 
return b.load_dsa_private_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_parameter_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_parameter_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + def cmac_algorithm_supported(self, algorithm): return any( b.cmac_algorithm_supported(algorithm) @@ -282,10 +247,10 @@ class MultiBackend(object): _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) - def elliptic_curve_private_key_from_numbers(self, numbers): + def load_elliptic_curve_private_numbers(self, numbers): for b in self._filtered_backends(EllipticCurveBackend): try: - return b.elliptic_curve_private_key_from_numbers(numbers) + return b.load_elliptic_curve_private_numbers(numbers) except UnsupportedAlgorithm: continue @@ -294,10 +259,10 @@ class MultiBackend(object): _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) - def elliptic_curve_public_key_from_numbers(self, numbers): + def load_elliptic_curve_public_numbers(self, numbers): for b in self._filtered_backends(EllipticCurveBackend): try: - return b.elliptic_curve_public_key_from_numbers(numbers) + return b.load_elliptic_curve_public_numbers(numbers) except UnsupportedAlgorithm: continue @@ -306,22 +271,116 @@ class MultiBackend(object): _Reasons.UNSUPPORTED_ELLIPTIC_CURVE ) - def load_pkcs8_pem_private_key(self, data, password): - for b in self._filtered_backends(PKCS8SerializationBackend): - return b.load_pkcs8_pem_private_key(data, password) + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return any( + b.elliptic_curve_exchange_algorithm_supported(algorithm, curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def load_pem_private_key(self, data, password): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_private_key(data, password) raise UnsupportedAlgorithm( "This backend does not support this key serialization.", _Reasons.UNSUPPORTED_SERIALIZATION ) - def load_traditional_openssl_pem_private_key(self, data, password): - for b in self._filtered_backends( - TraditionalOpenSSLSerializationBackend - ): - return b.load_traditional_openssl_pem_private_key(data, password) + def load_pem_public_key(self, data): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_public_key(data) raise UnsupportedAlgorithm( "This backend does not support this key serialization.", _Reasons.UNSUPPORTED_SERIALIZATION ) + + def load_der_private_key(self, data, password): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_der_public_key(self, data): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_public_key(data) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_pem_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_certificate(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_certificate(data) + + 
raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_csr(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_csr(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_certificate(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_certificate(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..8eadeb6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..8e302a9 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,2102 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
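MultiBackend above now forwards X.509 loading and signing to any backend implementing X509Backend. A hedged sketch of the kind of request that eventually reaches create_x509_csr(); the subject name and key size are placeholders:

    # Build and sign a CSR; the selected backend's create_x509_csr() performs
    # the actual signing.
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(65537, 2048, default_backend())
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(x509.Name([
            x509.NameAttribute(NameOID.COMMON_NAME, u"example.com"),
        ]))
        .add_extension(
            x509.SubjectAlternativeName([x509.DNSName(u"example.com")]),
            critical=False,
        )
        .sign(key, hashes.SHA256(), default_backend())
    )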
+ +from __future__ import absolute_import, division, print_function + +import calendar +import collections +import datetime +import itertools +from contextlib import contextmanager + +import idna + +import six + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend +) +from cryptography.hazmat.backends.openssl.ciphers import ( + _AESCTRCipherContext, _CipherContext +) +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, _DSAPrivateKey, _DSAPublicKey +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, _EllipticCurvePublicKey +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, _RSAPublicKey +) +from cryptography.hazmat.backends.openssl.x509 import ( + _Certificate, _CertificateRevocationList, _CertificateSigningRequest, + _DISTPOINT_TYPE_FULLNAME, _DISTPOINT_TYPE_RELATIVENAME +) +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB +) +from cryptography.x509.oid import ExtensionOID, NameOID + + +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) + + +def _encode_asn1_int(backend, x): + """ + Converts a python integer to a ASN1_INTEGER. The returned ASN1_INTEGER will + not be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will be + discarded after use. + + """ + # Convert Python integer to OpenSSL "bignum" in case value exceeds + # machine's native integer limits (note: `int_to_bn` doesn't automatically + # GC). + i = backend._int_to_bn(x) + i = backend._ffi.gc(i, backend._lib.BN_free) + + # Wrap in a ASN.1 integer. Don't GC -- as documented. + i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL) + backend.openssl_assert(i != backend._ffi.NULL) + return i + + +def _encode_asn1_int_gc(backend, x): + i = _encode_asn1_int(backend, x) + i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free) + return i + + +def _encode_asn1_str(backend, data, length): + """ + Create an ASN1_OCTET_STRING from a Python byte string. + """ + s = backend._lib.ASN1_OCTET_STRING_new() + res = backend._lib.ASN1_OCTET_STRING_set(s, data, length) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_utf8_str(backend, string): + """ + Create an ASN1_UTF8STRING from a Python unicode string. + This object will be a ASN1_STRING with UTF8 type in OpenSSL and + can be decoded with ASN1_STRING_to_UTF8. 
+ """ + s = backend._lib.ASN1_UTF8STRING_new() + res = backend._lib.ASN1_STRING_set( + s, string.encode("utf8"), len(string.encode("utf8")) + ) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_str_gc(backend, data, length): + s = _encode_asn1_str(backend, data, length) + s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free) + return s + + +def _encode_inhibit_any_policy(backend, inhibit_any_policy): + asn1int = _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs) + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_ASN1_INTEGER(asn1int, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_name(backend, attributes): + """ + The X509_NAME created will not be gc'd. Use _encode_name_gc if needed. + """ + subject = backend._lib.X509_NAME_new() + for attribute in attributes: + value = attribute.value.encode('utf8') + obj = _txt2obj_gc(backend, attribute.oid.dotted_string) + if attribute.oid == NameOID.COUNTRY_NAME: + # Per RFC5280 Appendix A.1 countryName should be encoded as + # PrintableString, not UTF8String + type = backend._lib.MBSTRING_ASC + else: + type = backend._lib.MBSTRING_UTF8 + res = backend._lib.X509_NAME_add_entry_by_OBJ( + subject, obj, type, value, -1, -1, 0, + ) + backend.openssl_assert(res == 1) + return subject + + +def _encode_name_gc(backend, attributes): + subject = _encode_name(backend, attributes) + subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free) + return subject + + +def _encode_certificate_policies(backend, certificate_policies): + cp = backend._lib.sk_POLICYINFO_new_null() + backend.openssl_assert(cp != backend._ffi.NULL) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + for policy_info in certificate_policies: + pi = backend._lib.POLICYINFO_new() + backend.openssl_assert(pi != backend._ffi.NULL) + res = backend._lib.sk_POLICYINFO_push(cp, pi) + backend.openssl_assert(res >= 1) + oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string) + pi.policyid = oid + if policy_info.policy_qualifiers: + pqis = backend._lib.sk_POLICYQUALINFO_new_null() + backend.openssl_assert(pqis != backend._ffi.NULL) + for qualifier in policy_info.policy_qualifiers: + pqi = backend._lib.POLICYQUALINFO_new() + backend.openssl_assert(pqi != backend._ffi.NULL) + res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi) + backend.openssl_assert(res >= 1) + if isinstance(qualifier, six.text_type): + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_QUALIFIER.dotted_string + ) + pqi.d.cpsuri = _encode_asn1_str( + backend, + qualifier.encode("ascii"), + len(qualifier.encode("ascii")) + ) + else: + assert isinstance(qualifier, x509.UserNotice) + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_USER_NOTICE.dotted_string + ) + un = backend._lib.USERNOTICE_new() + backend.openssl_assert(un != backend._ffi.NULL) + pqi.d.usernotice = un + if qualifier.explicit_text: + un.exptext = _encode_asn1_utf8_str( + backend, qualifier.explicit_text + ) + + un.noticeref = _encode_notice_reference( + backend, qualifier.notice_reference + ) + + pi.qualifiers = pqis + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_CERTIFICATEPOLICIES(cp, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_notice_reference(backend, notice): + if notice is None: + return backend._ffi.NULL + else: + nr = backend._lib.NOTICEREF_new() + 
backend.openssl_assert(nr != backend._ffi.NULL) + # organization is a required field + nr.organization = _encode_asn1_utf8_str(backend, notice.organization) + + notice_stack = backend._lib.sk_ASN1_INTEGER_new_null() + nr.noticenos = notice_stack + for number in notice.notice_numbers: + num = _encode_asn1_int(backend, number) + res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num) + backend.openssl_assert(res >= 1) + + return nr + + +def _txt2obj(backend, name): + """ + Converts a Python string with an ASN.1 object ID in dotted form to a + ASN1_OBJECT. + """ + name = name.encode('ascii') + obj = backend._lib.OBJ_txt2obj(name, 1) + backend.openssl_assert(obj != backend._ffi.NULL) + return obj + + +def _txt2obj_gc(backend, name): + obj = _txt2obj(backend, name) + obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free) + return obj + + +def _encode_ocsp_nocheck(backend, ext): + """ + The OCSP No Check extension is defined as a null ASN.1 value. Rather than + calling OpenSSL we can return a Python bytestring value in a list. + """ + return [b"\x05\x00"], 2 + + +def _encode_key_usage(backend, key_usage): + set_bit = backend._lib.ASN1_BIT_STRING_set_bit + ku = backend._lib.ASN1_BIT_STRING_new() + ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free) + res = set_bit(ku, 0, key_usage.digital_signature) + backend.openssl_assert(res == 1) + res = set_bit(ku, 1, key_usage.content_commitment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 2, key_usage.key_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 3, key_usage.data_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 4, key_usage.key_agreement) + backend.openssl_assert(res == 1) + res = set_bit(ku, 5, key_usage.key_cert_sign) + backend.openssl_assert(res == 1) + res = set_bit(ku, 6, key_usage.crl_sign) + backend.openssl_assert(res == 1) + if key_usage.key_agreement: + res = set_bit(ku, 7, key_usage.encipher_only) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, key_usage.decipher_only) + backend.openssl_assert(res == 1) + else: + res = set_bit(ku, 7, 0) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, 0) + backend.openssl_assert(res == 1) + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_ASN1_BIT_STRING(ku, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_authority_key_identifier(backend, authority_keyid): + akid = backend._lib.AUTHORITY_KEYID_new() + backend.openssl_assert(akid != backend._ffi.NULL) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + if authority_keyid.key_identifier is not None: + akid.keyid = _encode_asn1_str( + backend, + authority_keyid.key_identifier, + len(authority_keyid.key_identifier) + ) + + if authority_keyid.authority_cert_issuer is not None: + akid.issuer = _encode_general_names( + backend, authority_keyid.authority_cert_issuer + ) + + if authority_keyid.authority_cert_serial_number is not None: + akid.serial = _encode_asn1_int( + backend, authority_keyid.authority_cert_serial_number + ) + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_AUTHORITY_KEYID(akid, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_basic_constraints(backend, basic_constraints): + constraints = backend._lib.BASIC_CONSTRAINTS_new() + constraints = backend._ffi.gc( + constraints, 
backend._lib.BASIC_CONSTRAINTS_free + ) + constraints.ca = 255 if basic_constraints.ca else 0 + if basic_constraints.ca and basic_constraints.path_length is not None: + constraints.pathlen = _encode_asn1_int( + backend, basic_constraints.path_length + ) + + # Fetch the encoded payload. + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_BASIC_CONSTRAINTS(constraints, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_authority_information_access(backend, authority_info_access): + aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null() + backend.openssl_assert(aia != backend._ffi.NULL) + aia = backend._ffi.gc( + aia, backend._lib.sk_ACCESS_DESCRIPTION_free + ) + for access_description in authority_info_access: + ad = backend._lib.ACCESS_DESCRIPTION_new() + method = _txt2obj( + backend, access_description.access_method.dotted_string + ) + gn = _encode_general_name(backend, access_description.access_location) + ad.method = method + ad.location = gn + res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad) + backend.openssl_assert(res >= 1) + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_AUTHORITY_INFO_ACCESS(aia, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_general_names(backend, names): + general_names = backend._lib.GENERAL_NAMES_new() + backend.openssl_assert(general_names != backend._ffi.NULL) + for name in names: + gn = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_NAME_push(general_names, gn) + backend.openssl_assert(res != 0) + + return general_names + + +def _encode_alt_name(backend, san): + general_names = _encode_general_names(backend, san) + general_names = backend._ffi.gc( + general_names, backend._lib.GENERAL_NAMES_free + ) + pp = backend._ffi.new("unsigned char **") + r = backend._lib.i2d_GENERAL_NAMES(general_names, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_subject_key_identifier(backend, ski): + asn1_str = _encode_asn1_str_gc(backend, ski.digest, len(ski.digest)) + pp = backend._ffi.new("unsigned char **") + r = backend._lib.i2d_ASN1_OCTET_STRING(asn1_str, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_general_name(backend, name): + if isinstance(name, x509.DNSName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_DNS + + ia5 = backend._lib.ASN1_IA5STRING_new() + backend.openssl_assert(ia5 != backend._ffi.NULL) + + if name.value.startswith(u"*."): + value = b"*." 
+ idna.encode(name.value[2:]) + else: + value = idna.encode(name.value) + + res = backend._lib.ASN1_STRING_set(ia5, value, len(value)) + backend.openssl_assert(res == 1) + gn.d.dNSName = ia5 + elif isinstance(name, x509.RegisteredID): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_RID + obj = backend._lib.OBJ_txt2obj( + name.value.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(obj != backend._ffi.NULL) + gn.d.registeredID = obj + elif isinstance(name, x509.DirectoryName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + dir_name = _encode_name(backend, name.value) + gn.type = backend._lib.GEN_DIRNAME + gn.d.directoryName = dir_name + elif isinstance(name, x509.IPAddress): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + ipaddr = _encode_asn1_str( + backend, name.value.packed, len(name.value.packed) + ) + gn.type = backend._lib.GEN_IPADD + gn.d.iPAddress = ipaddr + elif isinstance(name, x509.OtherName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + other_name = backend._lib.OTHERNAME_new() + backend.openssl_assert(other_name != backend._ffi.NULL) + + type_id = backend._lib.OBJ_txt2obj( + name.type_id.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(type_id != backend._ffi.NULL) + data = backend._ffi.new("unsigned char[]", name.value) + data_ptr_ptr = backend._ffi.new("unsigned char **") + data_ptr_ptr[0] = data + value = backend._lib.d2i_ASN1_TYPE( + backend._ffi.NULL, data_ptr_ptr, len(name.value) + ) + if value == backend._ffi.NULL: + backend._consume_errors() + raise ValueError("Invalid ASN.1 data") + other_name.type_id = type_id + other_name.value = value + gn.type = backend._lib.GEN_OTHERNAME + gn.d.otherName = other_name + elif isinstance(name, x509.RFC822Name): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_EMAIL + gn.d.rfc822Name = asn1_str + elif isinstance(name, x509.UniformResourceIdentifier): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_URI + gn.d.uniformResourceIdentifier = asn1_str + else: + raise ValueError( + "{0} is an unknown GeneralName type".format(name) + ) + + return gn + + +def _encode_extended_key_usage(backend, extended_key_usage): + eku = backend._lib.sk_ASN1_OBJECT_new_null() + eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free) + for oid in extended_key_usage: + obj = _txt2obj(backend, oid.dotted_string) + res = backend._lib.sk_ASN1_OBJECT_push(eku, obj) + backend.openssl_assert(res >= 1) + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_EXTENDED_KEY_USAGE( + backend._ffi.cast("EXTENDED_KEY_USAGE *", eku), pp + ) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +_CRLREASONFLAGS = { + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.privilege_withdrawn: 7, + x509.ReasonFlags.aa_compromise: 8, +} + + +def 
_encode_crl_distribution_points(backend, crl_distribution_points): + cdp = backend._lib.sk_DIST_POINT_new_null() + cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free) + for point in crl_distribution_points: + dp = backend._lib.DIST_POINT_new() + backend.openssl_assert(dp != backend._ffi.NULL) + + if point.reasons: + bitmask = backend._lib.ASN1_BIT_STRING_new() + backend.openssl_assert(bitmask != backend._ffi.NULL) + dp.reasons = bitmask + for reason in point.reasons: + res = backend._lib.ASN1_BIT_STRING_set_bit( + bitmask, _CRLREASONFLAGS[reason], 1 + ) + backend.openssl_assert(res == 1) + + if point.full_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_FULLNAME + dpn.name.fullname = _encode_general_names(backend, point.full_name) + dp.distpoint = dpn + + if point.relative_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_RELATIVENAME + name = _encode_name_gc(backend, point.relative_name) + relativename = backend._lib.sk_X509_NAME_ENTRY_dup(name.entries) + backend.openssl_assert(relativename != backend._ffi.NULL) + dpn.name.relativename = relativename + dp.distpoint = dpn + + if point.crl_issuer: + dp.CRLissuer = _encode_general_names(backend, point.crl_issuer) + + res = backend._lib.sk_DIST_POINT_push(cdp, dp) + backend.openssl_assert(res >= 1) + + pp = backend._ffi.new('unsigned char **') + r = backend._lib.i2d_CRL_DIST_POINTS(cdp, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _encode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_crl_distribution_points, + ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy, + ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck, +} + + +@utils.register_interface(CipherBackend) +@utils.register_interface(CMACBackend) +@utils.register_interface(DERSerializationBackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +class Backend(object): + """ + OpenSSL API binding interfaces. + """ + name = "openssl" + + def __init__(self): + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + # Set the default string mask for encoding ASN1 strings to UTF8. This + # is the default for newer OpenSSLs for several years and is + # recommended in RFC 2459. 
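The encoder table above (_EXTENSION_ENCODE_HANDLERS) is consulted when extensions are serialized during certificate or CSR signing. A hedged sketch of a self-signed certificate that exercises the BasicConstraints and SubjectAlternativeName encoders; names, dates and the serial number are placeholders:

    # Self-signed certificate; the extension encoders above produce the DER
    # extension bodies during sign().
    import datetime
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(65537, 2048, default_backend())
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
    now = datetime.datetime.utcnow()

    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)
        .public_key(key.public_key())
        .serial_number(1000)
        .not_valid_before(now)
        .not_valid_after(now + datetime.timedelta(days=30))
        .add_extension(
            x509.BasicConstraints(ca=True, path_length=None), critical=True
        )
        .add_extension(
            x509.SubjectAlternativeName([x509.DNSName(u"example.com")]),
            critical=False,
        )
        .sign(key, hashes.SHA256(), default_backend())
    )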
+ res = self._lib.ASN1_STRING_set_default_mask_asc(b"utf8only") + self.openssl_assert(res == 1) + + self._cipher_registry = {} + self._register_default_ciphers() + self.activate_osrandom_engine() + + def openssl_assert(self, ok): + return binding._openssl_assert(self._lib, ok) + + def activate_builtin_random(self): + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + def activate_osrandom_engine(self): + # Unregister and free the current engine. + self.activate_builtin_random() + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id) + self.openssl_assert(e != self._ffi.NULL) + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + self.openssl_assert(res == 1) + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Decrement the structural ref incremented by ENGINE_by_id. + res = self._lib.ENGINE_free(e) + self.openssl_assert(res == 1) + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + + def openssl_version_text(self): + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. + + Example: OpenSSL 1.0.1e 11 Feb 2013 + """ + return self._ffi.string( + self._lib.SSLeay_version(self._lib.SSLEAY_VERSION) + ).decode("ascii") + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def hash_supported(self, algorithm): + digest = self._lib.EVP_get_digestbyname(algorithm.name.encode("ascii")) + return digest != self._ffi.NULL + + def hmac_supported(self, algorithm): + return self.hash_supported(algorithm) + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher, mode): + if self._evp_cipher_supported(cipher, mode): + return True + elif isinstance(mode, CTR) and isinstance(cipher, AES): + return True + else: + return False + + def _evp_cipher_supported(self, cipher, mode): + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self): + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8]: + self.register_cipher_adapter( + AES, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: + self.register_cipher_adapter( + TripleDES, + mode_cls, + GetCipherByName("des-ede3-{mode.name}") + ) + self.register_cipher_adapter( + TripleDES, + ECB, + 
GetCipherByName("des-ede3") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + Blowfish, + mode_cls, + GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + SEED, + mode_cls, + GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + [CAST5, IDEA], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}") + ) + self.register_cipher_adapter( + ARC4, + type(None), + GetCipherByName("rc4") + ) + self.register_cipher_adapter( + AES, + GCM, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm): + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + return self.hmac_supported(algorithm) + else: + # OpenSSL < 1.0.0 has an explicit PBKDF2-HMAC-SHA1 function, + # so if the PBKDF2_HMAC function is missing we only support + # SHA1 via PBKDF2_HMAC_SHA1. + return isinstance(algorithm, hashes.SHA1) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + buf = self._ffi.new("char[]", length) + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + self.openssl_assert(evp_md != self._ffi.NULL) + res = self._lib.PKCS5_PBKDF2_HMAC( + key_material, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf + ) + self.openssl_assert(res == 1) + else: + if not isinstance(algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This version of OpenSSL only supports PBKDF2HMAC with " + "SHA1.", + _Reasons.UNSUPPORTED_HASH + ) + res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( + key_material, + len(key_material), + salt, + len(salt), + iterations, + length, + buf + ) + self.openssl_assert(res == 1) + + return self._ffi.buffer(buf)[:] + + def _consume_errors(self): + return binding._consume_errors(self._lib) + + def _bn_to_int(self, bn): + assert bn != self._ffi.NULL + if six.PY3: + # Python 3 has constant time from_bytes, so use that. 
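A small illustration of the int/bytes round trip the _bn_to_int helper relies on, with the FFI layer stripped away; the value is arbitrary:

# On Python 3 the big-endian buffer BN_bn2bin produces maps directly onto
# int.from_bytes, and int.to_bytes gives the buffer BN_bin2bn expects.
n = 0x10001
binary = n.to_bytes((n.bit_length() + 7) // 8, "big")   # analogous to BN_bn2bin output
assert int.from_bytes(binary, "big") == n               # the conversion _bn_to_int performs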
+ + bn_num_bytes = (self._lib.BN_num_bits(bn) + 7) // 8 + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + # A zero length means the BN has value 0 + self.openssl_assert(bin_len >= 0) + return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + + else: + # Under Python 2 the best we can do is hex() + + hex_cdata = self._lib.BN_bn2hex(bn) + self.openssl_assert(hex_cdata != self._ffi.NULL) + hex_str = self._ffi.string(hex_cdata) + self._lib.OPENSSL_free(hex_cdata) + return int(hex_str, 16) + + def _int_to_bn(self, num, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. + """ + assert bn is None or bn != self._ffi.NULL + + if bn is None: + bn = self._ffi.NULL + + if six.PY3: + # Python 3 has constant time to_bytes, so use that. + + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + self.openssl_assert(bn_ptr != self._ffi.NULL) + return bn_ptr + + else: + # Under Python 2 the best we can do is hex() + + hex_num = hex(num).rstrip("L").lstrip("0x").encode("ascii") or b"0" + bn_ptr = self._ffi.new("BIGNUM **") + bn_ptr[0] = bn + res = self._lib.BN_hex2bn(bn_ptr, hex_num) + self.openssl_assert(res != 0) + self.openssl_assert(bn_ptr[0] != self._ffi.NULL) + return bn_ptr[0] + + def generate_rsa_private_key(self, public_exponent, key_size): + rsa._verify_rsa_parameters(public_exponent, key_size) + + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + rsa_cdata, key_size, bn, self._ffi.NULL + ) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + return (public_exponent >= 3 and public_exponent & 1 != 0 and + key_size >= 512) + + def load_rsa_private_numbers(self, numbers): + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n + ) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.p = self._int_to_bn(numbers.p) + rsa_cdata.q = self._int_to_bn(numbers.q) + rsa_cdata.d = self._int_to_bn(numbers.d) + rsa_cdata.dmp1 = self._int_to_bn(numbers.dmp1) + rsa_cdata.dmq1 = self._int_to_bn(numbers.dmq1) + rsa_cdata.iqmp = self._int_to_bn(numbers.iqmp) + rsa_cdata.e = self._int_to_bn(numbers.public_numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def load_rsa_public_numbers(self, numbers): + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.e = self._int_to_bn(numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.n) + res 
= self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + + def _rsa_cdata_to_evp_pkey(self, rsa_cdata): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _bytes_to_bio(self, data): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. + """ + data_char_p = self._ffi.new("char[]", data) + bio = self._lib.BIO_new_mem_buf( + data_char_p, len(data) + ) + self.openssl_assert(bio != self._ffi.NULL) + + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) + + def _create_mem_bio(self): + """ + Creates an empty memory BIO. + """ + bio_method = self._lib.BIO_s_mem() + self.openssl_assert(bio_method != self._ffi.NULL) + bio = self._lib.BIO_new(bio_method) + self.openssl_assert(bio != self._ffi.NULL) + bio = self._ffi.gc(bio, self._lib.BIO_free) + return bio + + def _read_mem_bio(self, bio): + """ + Reads a memory BIO. This only works on memory BIOs. + """ + buf = self._ffi.new("char **") + buf_len = self._lib.BIO_get_mem_data(bio, buf) + self.openssl_assert(buf_len > 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + bio_data = self._ffi.buffer(buf[0], buf_len)[:] + return bio_data + + def _evp_pkey_to_private_key(self, evp_pkey): + """ + Return the appropriate type of PrivateKey given an evp_pkey cdata + pointer. + """ + + key_type = evp_pkey.type + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _evp_pkey_to_public_key(self, evp_pkey): + """ + Return the appropriate type of PublicKey given an evp_pkey cdata + pointer. 
+ """ + + key_type = evp_pkey.type + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _pem_password_cb(self, password): + """ + Generate a pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + Returns a tuple of (cdata function pointer, callback function). + """ + + def pem_password_cb(buf, size, writing, userdata): + pem_password_cb.called += 1 + + if not password: + pem_password_cb.exception = TypeError( + "Password was not given but private key is encrypted." + ) + return 0 + elif len(password) < size: + pw_buf = self._ffi.buffer(buf, size) + pw_buf[:len(password)] = password + return len(password) + else: + pem_password_cb.exception = ValueError( + "Passwords longer than {0} bytes are not supported " + "by this backend.".format(size - 1) + ) + return 0 + + pem_password_cb.called = 0 + pem_password_cb.exception = None + + return ( + self._ffi.callback("int (char *, int, int, void *)", + pem_password_cb), + pem_password_cb + ) + + def _mgf1_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_MGF1_MD: + return self.hash_supported(algorithm) + else: + return isinstance(algorithm, hashes.SHA1) + + def rsa_padding_supported(self, padding): + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + return self._mgf1_hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return isinstance(padding._mgf._algorithm, hashes.SHA1) + else: + return False + + def generate_dsa_parameters(self, key_size): + if key_size not in (1024, 2048, 3072): + raise ValueError("Key size must be 1024 or 2048 or 3072 bits.") + + if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and + key_size > 1024): + raise ValueError( + "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " + "support larger key sizes.") + + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, key_size, self._ffi.NULL, 0, + self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + + self.openssl_assert(res == 1) + + return _DSAParameters(self, ctx) + + def generate_dsa_private_key(self, parameters): + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + ctx.p = self._lib.BN_dup(parameters._dsa_cdata.p) + ctx.q = self._lib.BN_dup(parameters._dsa_cdata.q) + ctx.g = self._lib.BN_dup(parameters._dsa_cdata.g) + + 
self._lib.DSA_generate_key(ctx) + evp_pkey = self._dsa_cdata_to_evp_pkey(ctx) + + return _DSAPrivateKey(self, ctx, evp_pkey) + + def generate_dsa_private_key_and_parameters(self, key_size): + parameters = self.generate_dsa_parameters(key_size) + return self.generate_dsa_private_key(parameters) + + def load_dsa_private_numbers(self, numbers): + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.public_numbers.y) + dsa_cdata.priv_key = self._int_to_bn(numbers.x) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + + def load_dsa_public_numbers(self, numbers): + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.y) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + + def load_dsa_parameter_numbers(self, numbers): + dsa._check_dsa_parameters(numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.g) + + return _DSAParameters(self, dsa_cdata) + + def _dsa_cdata_to_evp_pkey(self, dsa_cdata): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def dsa_hash_supported(self, algorithm): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return isinstance(algorithm, hashes.SHA1) + else: + return self.hash_supported(algorithm) + + def dsa_parameters_supported(self, p, q, g): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return utils.bit_length(p) <= 1024 and utils.bit_length(q) <= 160 + else: + return True + + def cmac_algorithm_supported(self, algorithm): + return ( + self._lib.Cryptography_HAS_CMAC == 1 and + self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + ) + + def create_cmac_ctx(self, algorithm): + return _CMACContext(self, algorithm) + + def create_x509_csr(self, builder, private_key, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." 
+ ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty request. + x509_req = self._lib.X509_REQ_new() + self.openssl_assert(x509_req != self._ffi.NULL) + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + + # Set x509 version. + res = self._lib.X509_REQ_set_version(x509_req, x509.Version.v1.value) + self.openssl_assert(res == 1) + + # Set subject name. + res = self._lib.X509_REQ_set_subject_name( + x509_req, _encode_name_gc(self, builder._subject_name) + ) + self.openssl_assert(res == 1) + + # Set subject public key. + public_key = private_key.public_key() + res = self._lib.X509_REQ_set_pubkey( + x509_req, public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Add extensions. + extensions = self._lib.sk_X509_EXTENSION_new_null() + self.openssl_assert(extensions != self._ffi.NULL) + extensions = self._ffi.gc( + extensions, + self._lib.sk_X509_EXTENSION_free, + ) + for extension in builder._extensions: + try: + encode = _EXTENSION_ENCODE_HANDLERS[extension.oid] + except KeyError: + raise NotImplementedError('Extension not yet supported.') + + pp, r = encode(self, extension.value) + obj = _txt2obj_gc(self, extension.oid.dotted_string) + extension = self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, + obj, + 1 if extension.critical else 0, + _encode_asn1_str_gc(self, pp[0], r), + ) + self.openssl_assert(extension != self._ffi.NULL) + res = self._lib.sk_X509_EXTENSION_push(extensions, extension) + self.openssl_assert(res >= 1) + res = self._lib.X509_REQ_add_extensions(x509_req, extensions) + self.openssl_assert(res == 1) + + # Sign the request using the requester's private key. + res = self._lib.X509_REQ_sign( + x509_req, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateSigningRequest(self, x509_req) + + def create_x509_certificate(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateBuilder): + raise TypeError('Builder type mismatch.') + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty certificate. + x509_cert = self._lib.X509_new() + x509_cert = self._ffi.gc(x509_cert, backend._lib.X509_free) + + # Set the x509 version. + res = self._lib.X509_set_version(x509_cert, builder._version.value) + self.openssl_assert(res == 1) + + # Set the subject's name. + res = self._lib.X509_set_subject_name( + x509_cert, _encode_name(self, list(builder._subject_name)) + ) + self.openssl_assert(res == 1) + + # Set the subject's public key. 
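create_x509_certificate, which begins above, is normally reached through the public CertificateBuilder. A self-signed sketch assuming cryptography ~1.0; the subject, serial number, and validity window are placeholders:

# Sketch only: a self-signed certificate built through the public API.
import datetime
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(65537, 2048, default_backend())
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)                   # self-signed: issuer == subject
    .public_key(key.public_key())
    .serial_number(1000)
    .not_valid_before(datetime.datetime.utcnow())
    .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=30))
    .sign(key, hashes.SHA256(), default_backend())
)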
+ res = self._lib.X509_set_pubkey( + x509_cert, builder._public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Set the certificate serial number. + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_set_serialNumber(x509_cert, serial_number) + self.openssl_assert(res == 1) + + # Set the "not before" time. + res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notBefore(x509_cert), + calendar.timegm(builder._not_valid_before.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Set the "not after" time. + res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notAfter(x509_cert), + calendar.timegm(builder._not_valid_after.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Add extensions. + for i, extension in enumerate(builder._extensions): + try: + encode = _EXTENSION_ENCODE_HANDLERS[extension.oid] + except KeyError: + raise NotImplementedError('Extension not yet supported.') + + pp, r = encode(self, extension.value) + obj = _txt2obj_gc(self, extension.oid.dotted_string) + extension = self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, + obj, + 1 if extension.critical else 0, + _encode_asn1_str_gc(self, pp[0], r) + ) + self.openssl_assert(extension != self._ffi.NULL) + extension = self._ffi.gc(extension, self._lib.X509_EXTENSION_free) + res = self._lib.X509_add_ext(x509_cert, extension, i) + self.openssl_assert(res == 1) + + # Set the issuer name. + res = self._lib.X509_set_issuer_name( + x509_cert, _encode_name(self, list(builder._issuer_name)) + ) + self.openssl_assert(res == 1) + + # Sign the certificate with the issuer's private key. + res = self._lib.X509_sign( + x509_cert, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _Certificate(self, x509_cert) + + def load_pem_private_key(self, data, password): + return self._load_key( + self._lib.PEM_read_bio_PrivateKey, + self._evp_pkey_to_private_key, + data, + password, + ) + + def load_pem_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.PEM_read_bio_PUBKEY( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_der_private_key(self, data, password): + # OpenSSL has a function called d2i_AutoPrivateKey that can simplify + # this. Unfortunately it doesn't properly support PKCS8 on OpenSSL + # 0.9.8 so we can't use it. Instead we sequentially try to load it 3 + # different ways. 
First we'll try to load it as a traditional key + bio_data = self._bytes_to_bio(data) + key = self._evp_pkey_from_der_traditional_key(bio_data, password) + if not key: + # Okay so it's not a traditional key. Let's try + # PKCS8 unencrypted. OpenSSL 0.9.8 can't load unencrypted + # PKCS8 keys using d2i_PKCS8PrivateKey_bio so we do this instead. + # Reset the memory BIO so we can read the data again. + res = self._lib.BIO_reset(bio_data.bio) + self.openssl_assert(res == 1) + key = self._evp_pkey_from_der_unencrypted_pkcs8(bio_data, password) + + if key: + return self._evp_pkey_to_private_key(key) + else: + # Finally we try to load it with the method that handles encrypted + # PKCS8 properly. + return self._load_key( + self._lib.d2i_PKCS8PrivateKey_bio, + self._evp_pkey_to_private_key, + data, + password, + ) + + def _evp_pkey_from_der_traditional_key(self, bio_data, password): + key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL) + if key != self._ffi.NULL: + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." + ) + + return key + else: + self._consume_errors() + return None + + def _evp_pkey_from_der_unencrypted_pkcs8(self, bio_data, password): + info = self._lib.d2i_PKCS8_PRIV_KEY_INFO_bio( + bio_data.bio, self._ffi.NULL + ) + info = self._ffi.gc(info, self._lib.PKCS8_PRIV_KEY_INFO_free) + if info != self._ffi.NULL: + key = self._lib.EVP_PKCS82PKEY(info) + self.openssl_assert(key != self._ffi.NULL) + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." + ) + return key + else: + self._consume_errors() + return None + + def load_der_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.d2i_RSAPublicKey_bio( + mem_bio.bio, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_pem_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.PEM_read_bio_X509( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_der_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_pem_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.PEM_read_bio_X509_CRL( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_crl == 
self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_der_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_pem_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.PEM_read_bio_X509_REQ( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def load_der_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def _load_key(self, openssl_read_func, convert_func, data, password): + mem_bio = self._bytes_to_bio(data) + + password_callback, password_func = self._pem_password_cb(password) + + evp_pkey = openssl_read_func( + mem_bio.bio, + self._ffi.NULL, + password_callback, + self._ffi.NULL + ) + + if evp_pkey == self._ffi.NULL: + if password_func.exception is not None: + errors = self._consume_errors() + self.openssl_assert(errors) + raise password_func.exception + else: + self._handle_key_loading_error() + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and password_func.called == 0: + raise TypeError( + "Password was given but private key is not encrypted.") + + assert ( + (password is not None and password_func.called == 1) or + password is None + ) + + return convert_func(evp_pkey) + + def _handle_key_loading_error(self): + errors = self._consume_errors() + + if not errors: + raise ValueError("Could not unserialize key data.") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._lib.EVP_R_BAD_DECRYPT + ), + ( + self._lib.ERR_LIB_PKCS12, + self._lib.PKCS12_F_PKCS12_PBE_CRYPT, + self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR, + ) + ): + raise ValueError("Bad decrypt. 
Incorrect password?") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_GET_EVP_CIPHER_INFO, + self._lib.PEM_R_UNSUPPORTED_ENCRYPTION + ), + + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PBE_CIPHERINIT, + self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM + ) + ): + raise UnsupportedAlgorithm( + "PEM data is encrypted with an unsupported cipher", + _Reasons.UNSUPPORTED_CIPHER + ) + + elif any( + error[1:] == ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PKCS82PKEY, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM + ) + for error in errors + ): + raise UnsupportedAlgorithm( + "Unsupported public key algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + else: + assert errors[0][1] in ( + self._lib.ERR_LIB_EVP, + self._lib.ERR_LIB_PEM, + self._lib.ERR_LIB_ASN1, + ) + raise ValueError("Could not unserialize key data.") + + def elliptic_curve_supported(self, curve): + if self._lib.Cryptography_HAS_EC != 1: + return False + + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if ctx == self._ffi.NULL: + errors = self._consume_errors() + self.openssl_assert( + curve_nid == self._lib.NID_undef or + errors[0][1:] == ( + self._lib.ERR_LIB_EC, + self._lib.EC_F_EC_GROUP_NEW_BY_CURVE_NAME, + self._lib.EC_R_UNKNOWN_GROUP + ) + ) + return False + else: + self.openssl_assert(curve_nid != self._lib.NID_undef) + self._lib.EC_GROUP_free(ctx) + return True + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + if self._lib.Cryptography_HAS_EC != 1: + return False + + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + # Before 0.9.8m OpenSSL can't cope with digests longer than the curve. + if ( + self._lib.OPENSSL_VERSION_NUMBER < 0x009080df and + curve.key_size < signature_algorithm.algorithm.digest_size * 8 + ): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key(self, curve): + """ + Generate a new private key on the named curve. 
+ """ + + if self.elliptic_curve_supported(curve): + curve_nid = self._elliptic_curve_to_nid(curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + res = self._lib.EC_KEY_generate_key(ec_cdata) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ec_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {0}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + public = numbers.public_numbers + + curve_nid = self._elliptic_curve_to_nid(public.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, public.x, public.y) + + res = self._lib.EC_KEY_set_private_key( + ec_cdata, self._int_to_bn(numbers.private_value)) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_numbers(self, numbers): + curve_nid = self._elliptic_curve_to_nid(numbers.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, numbers.x, numbers.y) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return ( + self.elliptic_curve_supported(curve) and + self._lib.Cryptography_HAS_ECDH == 1 and + isinstance(algorithm, ec.ECDH) + ) + + def _ec_cdata_to_evp_pkey(self, ec_cdata): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _elliptic_curve_to_nid(self, curve): + """ + Get the NID for a curve name. + """ + + curve_aliases = { + "secp192r1": "prime192v1", + "secp256r1": "prime256v1" + } + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + return curve_nid + + @contextmanager + def _tmp_bn_ctx(self): + bn_ctx = self._lib.BN_CTX_new() + self.openssl_assert(bn_ctx != self._ffi.NULL) + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + self._lib.BN_CTX_start(bn_ctx) + try: + yield bn_ctx + finally: + self._lib.BN_CTX_end(bn_ctx) + + def _ec_key_determine_group_get_set_funcs(self, ctx): + """ + Given an EC_KEY determine the group and what methods are required to + get/set point coordinates. 
+ """ + self.openssl_assert(ctx != self._ffi.NULL) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + self.openssl_assert(nid_two_field != self._lib.NID_undef) + + group = self._lib.EC_KEY_get0_group(ctx) + self.openssl_assert(group != self._ffi.NULL) + + method = self._lib.EC_GROUP_method_of(group) + self.openssl_assert(method != self._ffi.NULL) + + nid = self._lib.EC_METHOD_get_field_type(method) + self.openssl_assert(nid != self._lib.NID_undef) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + set_func = self._lib.EC_POINT_set_affine_coordinates_GF2m + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + set_func = self._lib.EC_POINT_set_affine_coordinates_GFp + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert set_func and get_func + + return set_func, get_func, group + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): + """ + This is a port of EC_KEY_set_public_key_affine_coordinates that was + added in 1.0.1. + + Sets the public key point in the EC_KEY context to the affine x and y + values. + """ + + if x < 0 or y < 0: + raise ValueError( + "Invalid EC key. Both x and y must be non-negative." + ) + + set_func, get_func, group = ( + self._ec_key_determine_group_get_set_funcs(ctx) + ) + + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + bn_x = self._int_to_bn(x) + bn_y = self._int_to_bn(y) + + with self._tmp_bn_ctx() as bn_ctx: + check_x = self._lib.BN_CTX_get(bn_ctx) + check_y = self._lib.BN_CTX_get(bn_ctx) + + res = set_func(group, point, bn_x, bn_y, bn_ctx) + self.openssl_assert(res == 1) + + res = get_func(group, point, check_x, check_y, bn_ctx) + self.openssl_assert(res == 1) + + res = self._lib.BN_cmp(bn_x, check_x) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key X point.") + res = self._lib.BN_cmp(bn_y, check_y) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key Y point.") + + res = self._lib.EC_KEY_set_public_key(ctx, point) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ctx) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + return ctx + + def _private_key_bytes(self, encoding, format, encryption_algorithm, + evp_pkey, cdata): + if not isinstance(format, serialization.PrivateFormat): + raise TypeError( + "format must be an item from the PrivateFormat enum" + ) + + if not isinstance(encryption_algorithm, + serialization.KeySerializationEncryption): + raise TypeError( + "Encryption algorithm must be a KeySerializationEncryption " + "instance" + ) + + if isinstance(encryption_algorithm, serialization.NoEncryption): + password = b"" + passlen = 0 + evp_cipher = self._ffi.NULL + elif isinstance(encryption_algorithm, + serialization.BestAvailableEncryption): + # This is a curated value that we will update over time. 
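_private_key_bytes above is reached through private_bytes() on a key object; a minimal sketch assuming cryptography ~1.0, with a placeholder passphrase (BestAvailableEncryption maps to the curated aes-256-cbc choice noted in the comment):

# Sketch only: PKCS8 PEM serialization of a private key with encryption.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(65537, 2048, default_backend())
pem = key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.BestAvailableEncryption(b"passphrase"),  # placeholder
)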
+ evp_cipher = self._lib.EVP_get_cipherbyname( + b"aes-256-cbc" + ) + password = encryption_algorithm.password + passlen = len(password) + if passlen > 1023: + raise ValueError( + "Passwords longer than 1023 bytes are not supported by " + "this backend" + ) + else: + raise ValueError("Unsupported encryption type") + + if encoding is serialization.Encoding.PEM: + if format is serialization.PrivateFormat.PKCS8: + write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey + key = evp_pkey + else: + assert format is serialization.PrivateFormat.TraditionalOpenSSL + if evp_pkey.type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.PEM_write_bio_RSAPrivateKey + elif evp_pkey.type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.PEM_write_bio_DSAPrivateKey + else: + assert self._lib.Cryptography_HAS_EC == 1 + assert evp_pkey.type == self._lib.EVP_PKEY_EC + write_bio = self._lib.PEM_write_bio_ECPrivateKey + + key = cdata + elif encoding is serialization.Encoding.DER: + if format is serialization.PrivateFormat.TraditionalOpenSSL: + if not isinstance( + encryption_algorithm, serialization.NoEncryption + ): + raise ValueError( + "Encryption is not supported for DER encoded " + "traditional OpenSSL keys" + ) + + return self._private_key_bytes_traditional_der( + evp_pkey.type, cdata + ) + else: + assert format is serialization.PrivateFormat.PKCS8 + write_bio = self._lib.i2d_PKCS8PrivateKey_bio + key = evp_pkey + else: + raise TypeError("encoding must be an item from the Encoding enum") + + bio = self._create_mem_bio() + res = write_bio( + bio, + key, + evp_cipher, + password, + passlen, + self._ffi.NULL, + self._ffi.NULL + ) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _private_key_bytes_traditional_der(self, key_type, cdata): + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.i2d_RSAPrivateKey_bio + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + write_bio = self._lib.i2d_ECPrivateKey_bio + else: + self.openssl_assert(key_type == self._lib.EVP_PKEY_DSA) + write_bio = self._lib.i2d_DSAPrivateKey_bio + + bio = self._create_mem_bio() + res = write_bio(bio, cdata) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _public_key_bytes(self, encoding, format, evp_pkey, cdata): + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + + if format is serialization.PublicFormat.SubjectPublicKeyInfo: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PUBKEY + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_PUBKEY_bio + + key = evp_pkey + elif format is serialization.PublicFormat.PKCS1: + # Only RSA is supported here. 
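The PKCS1 branch of _public_key_bytes noted above applies only to RSA keys; a short sketch assuming cryptography ~1.0:

# Sketch only: PKCS1 PEM output, the RSA-only public serialization path.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

pub = rsa.generate_private_key(65537, 2048, default_backend()).public_key()
pkcs1_pem = pub.public_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PublicFormat.PKCS1,
)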
+ assert evp_pkey.type == self._lib.EVP_PKEY_RSA + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_RSAPublicKey + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_RSAPublicKey_bio + + key = cdata + else: + raise TypeError( + "format must be an item from the PublicFormat enum" + ) + + bio = self._create_mem_bio() + res = write_bio(bio, key) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _asn1_integer_to_int(self, asn1_int): + bn = self._lib.ASN1_INTEGER_to_BN(asn1_int, self._ffi.NULL) + self.openssl_assert(bn != self._ffi.NULL) + bn = self._ffi.gc(bn, self._lib.BN_free) + return self._bn_to_int(bn) + + def _asn1_string_to_bytes(self, asn1_string): + return self._ffi.buffer(asn1_string.data, asn1_string.length)[:] + + def _asn1_string_to_ascii(self, asn1_string): + return self._asn1_string_to_bytes(asn1_string).decode("ascii") + + def _asn1_string_to_utf8(self, asn1_string): + buf = self._ffi.new("unsigned char **") + res = self._lib.ASN1_STRING_to_UTF8(buf, asn1_string) + self.openssl_assert(res >= 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + buf = self._ffi.gc( + buf, lambda buffer: self._lib.OPENSSL_free(buffer[0]) + ) + return self._ffi.buffer(buf[0], res)[:].decode('utf8') + + def _asn1_to_der(self, asn1_type): + buf = self._ffi.new("unsigned char **") + res = self._lib.i2d_ASN1_TYPE(asn1_type, buf) + self.openssl_assert(res >= 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + buf = self._ffi.gc( + buf, lambda buffer: self._lib.OPENSSL_free(buffer[0]) + ) + return self._ffi.buffer(buf[0], res)[:] + + def _parse_asn1_time(self, asn1_time): + self.openssl_assert(asn1_time != self._ffi.NULL) + generalized_time = self._lib.ASN1_TIME_to_generalizedtime( + asn1_time, self._ffi.NULL + ) + self.openssl_assert(generalized_time != self._ffi.NULL) + generalized_time = self._ffi.gc( + generalized_time, self._lib.ASN1_GENERALIZEDTIME_free + ) + time = self._asn1_string_to_ascii( + self._ffi.cast("ASN1_STRING *", generalized_time) + ) + return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ") + + +class GetCipherByName(object): + def __init__(self, fmt): + self._fmt = fmt + + def __call__(self, backend, cipher, mode): + cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +backend = Backend() diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ciphers.py similarity index 70% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ciphers.py index c3a5499..a80708a 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/ciphers.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ciphers.py @@ -1,27 +1,18 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces -from cryptography.hazmat.primitives.ciphers.modes import GCM +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes -@utils.register_interface(interfaces.CipherContext) -@utils.register_interface(interfaces.AEADCipherContext) -@utils.register_interface(interfaces.AEADEncryptionContext) +@utils.register_interface(ciphers.CipherContext) +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) class _CipherContext(object): _ENCRYPT = 1 _DECRYPT = 0 @@ -33,7 +24,7 @@ class _CipherContext(object): self._operation = operation self._tag = None - if isinstance(self._cipher, interfaces.BlockCipherAlgorithm): + if isinstance(self._cipher, ciphers.BlockCipherAlgorithm): self._block_size = self._cipher.block_size else: self._block_size = 1 @@ -63,9 +54,9 @@ class _CipherContext(object): _Reasons.UNSUPPORTED_CIPHER ) - if isinstance(mode, interfaces.ModeWithInitializationVector): + if isinstance(mode, modes.ModeWithInitializationVector): iv_nonce = mode.initialization_vector - elif isinstance(mode, interfaces.ModeWithNonce): + elif isinstance(mode, modes.ModeWithNonce): iv_nonce = mode.nonce else: iv_nonce = self._backend._ffi.NULL @@ -75,24 +66,24 @@ class _CipherContext(object): self._backend._ffi.NULL, self._backend._ffi.NULL, operation) - assert res != 0 + self._backend.openssl_assert(res != 0) # set the key length to handle variable key ciphers res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( ctx, len(cipher.key) ) - assert res != 0 - if isinstance(mode, GCM): + self._backend.openssl_assert(res != 0) + if isinstance(mode, modes.GCM): res = self._backend._lib.EVP_CIPHER_CTX_ctrl( ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, len(iv_nonce), self._backend._ffi.NULL ) - assert res != 0 + self._backend.openssl_assert(res != 0) if operation == self._DECRYPT: res = self._backend._lib.EVP_CIPHER_CTX_ctrl( ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, len(mode.tag), mode.tag ) - assert res != 0 + self._backend.openssl_assert(res != 0) # pass key/iv res = self._backend._lib.EVP_CipherInit_ex( @@ -103,7 +94,7 @@ class _CipherContext(object): iv_nonce, operation ) - assert res != 0 + self._backend.openssl_assert(res != 0) # We purposely disable padding here as it's handled higher up in the # API. self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) @@ -116,7 +107,7 @@ class _CipherContext(object): # should be taken only when length is zero and mode is not GCM because # AES GCM can return improper tag values if you don't call update # with empty plaintext when authenticating AAD for ...reasons. 
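The GCM handling in _CipherContext above is exercised through the public Cipher API; a sketch assuming cryptography ~1.0, with illustrative key, nonce, and associated data:

# Sketch only: AES-GCM with additional authenticated data.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)
encryptor = Cipher(algorithms.AES(key), modes.GCM(iv), default_backend()).encryptor()
encryptor.authenticate_additional_data(b"header")
ct = encryptor.update(b"secret") + encryptor.finalize()
tag = encryptor.tag

decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, tag), default_backend()).decryptor()
decryptor.authenticate_additional_data(b"header")
pt = decryptor.update(ct) + decryptor.finalize()   # raises InvalidTag on mismatch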
- if len(data) == 0 and not isinstance(self._mode, GCM): + if len(data) == 0 and not isinstance(self._mode, modes.GCM): return b"" buf = self._backend._ffi.new("unsigned char[]", @@ -124,38 +115,44 @@ class _CipherContext(object): outlen = self._backend._ffi.new("int *") res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, len(data)) - assert res != 0 + self._backend.openssl_assert(res != 0) return self._backend._ffi.buffer(buf)[:outlen[0]] def finalize(self): + # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions) + # appears to have a bug where you must make at least one call to update + # even if you are only using authenticate_additional_data or the + # GCM tag will be wrong. An (empty) call to update resolves this + # and is harmless for all other versions of OpenSSL. + if isinstance(self._mode, modes.GCM): + self.update(b"") + buf = self._backend._ffi.new("unsigned char[]", self._block_size) outlen = self._backend._ffi.new("int *") res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) if res == 0: errors = self._backend._consume_errors() - if not errors and isinstance(self._mode, GCM): + if not errors and isinstance(self._mode, modes.GCM): raise InvalidTag - assert errors - - if errors[0][1:] == ( - self._backend._lib.ERR_LIB_EVP, - self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, - self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH - ) or errors[0][1:] == ( - self._backend._lib.ERR_LIB_EVP, - self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, - self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH - ): - raise ValueError( - "The length of the provided data is not a multiple of " - "the block length." + self._backend.openssl_assert( + errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) or errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH ) - else: - raise self._backend._unknown_error(errors[0]) + ) + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) - if (isinstance(self._mode, GCM) and + if (isinstance(self._mode, modes.GCM) and self._operation == self._ENCRYPT): block_byte_size = self._block_size // 8 tag_buf = self._backend._ffi.new( @@ -165,11 +162,11 @@ class _CipherContext(object): self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, block_byte_size, tag_buf ) - assert res != 0 + self._backend.openssl_assert(res != 0) self._tag = self._backend._ffi.buffer(tag_buf)[:] res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) - assert res == 1 + self._backend.openssl_assert(res == 1) return self._backend._ffi.buffer(buf)[:outlen[0]] def authenticate_additional_data(self, data): @@ -177,14 +174,12 @@ class _CipherContext(object): res = self._backend._lib.EVP_CipherUpdate( self._ctx, self._backend._ffi.NULL, outlen, data, len(data) ) - assert res != 0 + self._backend.openssl_assert(res != 0) - @property - def tag(self): - return self._tag + tag = utils.read_only_property("_tag") -@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(ciphers.CipherContext) class _AESCTRCipherContext(object): """ This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. 
It can @@ -194,11 +189,10 @@ class _AESCTRCipherContext(object): self._backend = backend self._key = self._backend._ffi.new("AES_KEY *") - assert self._key != self._backend._ffi.NULL res = self._backend._lib.AES_set_encrypt_key( cipher.key, len(cipher.key) * 8, self._key ) - assert res == 0 + self._backend.openssl_assert(res == 0) self._ecount = self._backend._ffi.new("char[]", 16) self._nonce = self._backend._ffi.new("char[16]", mode.nonce) self._num = self._backend._ffi.new("unsigned int *", 0) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/cmac.py similarity index 69% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/cmac.py index 7acf439..eaefc27 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/cmac.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -1,26 +1,19 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, interfaces from cryptography.hazmat.primitives.ciphers.modes import CBC -@utils.register_interface(interfaces.CMACContext) +@utils.register_interface(interfaces.MACContext) class _CMACContext(object): def __init__(self, backend, algorithm, ctx=None): if not backend.cmac_algorithm_supported(algorithm): @@ -40,7 +33,7 @@ class _CMACContext(object): ctx = self._backend._lib.CMAC_CTX_new() - assert ctx != self._backend._ffi.NULL + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) self._backend._lib.CMAC_Init( @@ -50,9 +43,11 @@ class _CMACContext(object): self._ctx = ctx + algorithm = utils.read_only_property("_algorithm") + def update(self, data): res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) - assert res == 1 + self._backend.openssl_assert(res == 1) def finalize(self): buf = self._backend._ffi.new("unsigned char[]", self._output_length) @@ -60,7 +55,7 @@ class _CMACContext(object): res = self._backend._lib.CMAC_Final( self._ctx, buf, length ) - assert res == 1 + self._backend.openssl_assert(res == 1) self._ctx = None @@ -74,7 +69,12 @@ class _CMACContext(object): res = self._backend._lib.CMAC_CTX_copy( copied_ctx, self._ctx ) - assert res == 1 + self._backend.openssl_assert(res == 1) return _CMACContext( self._backend, self._algorithm, ctx=copied_ctx ) + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/dsa.py similarity index 60% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/dsa.py index 5e7a26f..9b4c1af 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/dsa.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -1,28 +1,32 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
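The verify() method added to _CMACContext above compares the computed tag with constant_time.bytes_eq; through the public API that looks roughly like this (key and message are placeholders):

# Sketch only: CMAC tag generation and constant-time verification.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)
c = cmac.CMAC(algorithms.AES(key), backend=default_backend())
c.update(b"message to authenticate")
tag = c.finalize()

c2 = cmac.CMAC(algorithms.AES(key), backend=default_backend())
c2.update(b"message to authenticate")
c2.verify(tag)                                     # raises InvalidSignature on mismatch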
from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import InvalidSignature -from cryptography.hazmat.primitives import hashes, interfaces -from cryptography.hazmat.primitives.asymmetric import dsa -from cryptography.hazmat.primitives.interfaces import ( - DSAParametersWithNumbers, DSAPrivateKeyWithNumbers, DSAPublicKeyWithNumbers +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, dsa ) -@utils.register_interface(interfaces.AsymmetricVerificationContext) +def _truncate_digest_for_dsa(dsa_cdata, digest, backend): + """ + This function truncates digests that are longer than a given DS + key's length so they can be signed. OpenSSL does this for us in + 1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three + releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This + truncation is not required in 0.9.8 because DSA is limited to SHA-1. + """ + + order_bits = backend._lib.BN_num_bits(dsa_cdata.q) + return _truncate_digest(digest, order_bits) + + +@utils.register_interface(AsymmetricVerificationContext) class _DSAVerificationContext(object): def __init__(self, backend, public_key, signature, algorithm): self._backend = backend @@ -36,11 +40,12 @@ class _DSAVerificationContext(object): self._hash_ctx.update(data) def verify(self): - self._dsa_cdata = self._backend._ffi.gc(self._public_key._dsa_cdata, - self._backend._lib.DSA_free) - data_to_verify = self._hash_ctx.finalize() + data_to_verify = _truncate_digest_for_dsa( + self._public_key._dsa_cdata, data_to_verify, self._backend + ) + # The first parameter passed to DSA_verify is unused by OpenSSL but # must be an integer. 
res = self._backend._lib.DSA_verify( @@ -48,15 +53,11 @@ class _DSAVerificationContext(object): len(self._signature), self._public_key._dsa_cdata) if res != 1: - errors = self._backend._consume_errors() - assert errors - if res == -1: - assert errors[0].lib == self._backend._lib.ERR_LIB_ASN1 - + self._backend._consume_errors() raise InvalidSignature -@utils.register_interface(interfaces.AsymmetricSignatureContext) +@utils.register_interface(AsymmetricSignatureContext) class _DSASignatureContext(object): def __init__(self, backend, private_key, algorithm): self._backend = backend @@ -69,6 +70,9 @@ class _DSASignatureContext(object): def finalize(self): data_to_sign = self._hash_ctx.finalize() + data_to_sign = _truncate_digest_for_dsa( + self._private_key._dsa_cdata, data_to_sign, self._backend + ) sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata) sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) buflen = self._backend._ffi.new("unsigned int *") @@ -78,13 +82,13 @@ class _DSASignatureContext(object): res = self._backend._lib.DSA_sign( 0, data_to_sign, len(data_to_sign), sig_buf, buflen, self._private_key._dsa_cdata) - assert res == 1 - assert buflen[0] + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0]) return self._backend._ffi.buffer(sig_buf)[:buflen[0]] -@utils.register_interface(DSAParametersWithNumbers) +@utils.register_interface(dsa.DSAParametersWithNumbers) class _DSAParameters(object): def __init__(self, backend, dsa_cdata): self._backend = backend @@ -101,19 +105,18 @@ class _DSAParameters(object): return self._backend.generate_dsa_private_key(self) -@utils.register_interface(DSAPrivateKeyWithNumbers) +@utils.register_interface(dsa.DSAPrivateKeyWithSerialization) class _DSAPrivateKey(object): - def __init__(self, backend, dsa_cdata): + def __init__(self, backend, dsa_cdata, evp_pkey): self._backend = backend self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) - @property - def key_size(self): - return self._key_size + key_size = utils.read_only_property("_key_size") - def signer(self, algorithm): - return _DSASignatureContext(self._backend, self, algorithm) + def signer(self, signature_algorithm): + return _DSASignatureContext(self._backend, self, signature_algorithm) def private_numbers(self): return dsa.DSAPrivateNumbers( @@ -130,7 +133,7 @@ class _DSAPrivateKey(object): def public_key(self): dsa_cdata = self._backend._lib.DSA_new() - assert dsa_cdata != self._backend._ffi.NULL + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) dsa_cdata = self._backend._ffi.gc( dsa_cdata, self._backend._lib.DSA_free ) @@ -138,11 +141,12 @@ class _DSAPrivateKey(object): dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key) - return _DSAPublicKey(self._backend, dsa_cdata) + evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata) + return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey) def parameters(self): dsa_cdata = self._backend._lib.DSA_new() - assert dsa_cdata != self._backend._ffi.NULL + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) dsa_cdata = self._backend._ffi.gc( dsa_cdata, self._backend._lib.DSA_free ) @@ -151,21 +155,32 @@ class _DSAPrivateKey(object): dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) return _DSAParameters(self._backend, dsa_cdata) + def 
private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._dsa_cdata + ) -@utils.register_interface(DSAPublicKeyWithNumbers) + +@utils.register_interface(dsa.DSAPublicKeyWithSerialization) class _DSAPublicKey(object): - def __init__(self, backend, dsa_cdata): + def __init__(self, backend, dsa_cdata, evp_pkey): self._backend = backend self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) - @property - def key_size(self): - return self._key_size + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") - def verifier(self, signature, algorithm): return _DSAVerificationContext( - self._backend, self, signature, algorithm + self._backend, self, signature, signature_algorithm ) def public_numbers(self): @@ -180,7 +195,7 @@ class _DSAPublicKey(object): def parameters(self): dsa_cdata = self._backend._lib.DSA_new() - assert dsa_cdata != self._backend._ffi.NULL + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) dsa_cdata = self._backend._ffi.gc( dsa_cdata, self._backend._lib.DSA_free ) @@ -188,3 +203,16 @@ class _DSAPublicKey(object): dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) return _DSAParameters(self._backend, dsa_cdata) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "DSA public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + None + ) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..cfd559a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,299 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, ec +) + + +def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend): + """ + This function truncates digests that are longer than a given elliptic + curve key's length so they can be signed. Since elliptic curve keys are + much shorter than RSA keys many digests (e.g. SHA-512) may require + truncation. 
+ """ + + _lib = backend._lib + _ffi = backend._ffi + + group = _lib.EC_KEY_get0_group(ec_key_cdata) + + with backend._tmp_bn_ctx() as bn_ctx: + order = _lib.BN_CTX_get(bn_ctx) + backend.openssl_assert(order != _ffi.NULL) + + res = _lib.EC_GROUP_get_order(group, order, bn_ctx) + backend.openssl_assert(res == 1) + + order_bits = _lib.BN_num_bits(order) + + return _truncate_digest(digest, order_bits) + + +def _ec_key_curve_sn(backend, ec_key): + group = backend._lib.EC_KEY_get0_group(ec_key) + backend.openssl_assert(group != backend._ffi.NULL) + + nid = backend._lib.EC_GROUP_get_curve_name(group) + # The following check is to find EC keys with unnamed curves and raise + # an error for now. + if nid == backend._lib.NID_undef: + raise NotImplementedError( + "ECDSA certificates with unnamed curves are unsupported " + "at this time" + ) + + curve_name = backend._lib.OBJ_nid2sn(nid) + backend.openssl_assert(curve_name != backend._ffi.NULL) + + sn = backend._ffi.string(curve_name).decode('ascii') + return sn + + +def _mark_asn1_named_ec_curve(backend, ec_cdata): + """ + Set the named curve flag on the EC_KEY. This causes OpenSSL to + serialize EC keys along with their curve OID which makes + deserialization easier. + """ + + backend._lib.EC_KEY_set_asn1_flag( + ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE + ) + + +def _sn_to_elliptic_curve(backend, sn): + try: + return ec._CURVE_TYPES[sn]() + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(sn), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + +@utils.register_interface(AsymmetricSignatureContext) +class _ECDSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def finalize(self): + ec_key = self._private_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + max_size = self._backend._lib.ECDSA_size(ec_key) + self._backend.openssl_assert(max_size > 0) + + sigbuf = self._backend._ffi.new("char[]", max_size) + siglen_ptr = self._backend._ffi.new("unsigned int[]", 1) + res = self._backend._lib.ECDSA_sign( + 0, + digest, + len(digest), + sigbuf, + siglen_ptr, + ec_key + ) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] + + +@utils.register_interface(AsymmetricVerificationContext) +class _ECDSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def verify(self): + ec_key = self._public_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + res = self._backend._lib.ECDSA_verify( + 0, + digest, + len(digest), + self._signature, + len(self._signature), + ec_key + ) + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + return True + + +@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization) +class _EllipticCurvePrivateKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, 
ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def signer(self, signature_algorithm): + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSASignatureContext( + self._backend, self, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def exchange(self, algorithm, peer_public_key): + if not ( + self._backend.elliptic_curve_exchange_algorithm_supported( + algorithm, self.curve + ) + ): + raise UnsupportedAlgorithm( + "This backend does not support the ECDH algorithm.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8 + self._backend.openssl_assert(z_len > 0) + z_buf = self._backend._ffi.new("uint8_t[]", z_len) + peer_key = self._backend._lib.EC_KEY_get0_public_key( + peer_public_key._ec_key + ) + + r = self._backend._lib.ECDH_compute_key( + z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(r > 0) + return self._backend._ffi.buffer(z_buf)[:z_len] + + def public_key(self): + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + + public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) + self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL) + public_ec_key = self._backend._ffi.gc( + public_ec_key, self._backend._lib.EC_KEY_free + ) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + self._backend.openssl_assert(res == 1) + + evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key) + + return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey) + + def private_numbers(self): + bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key) + private_value = self._backend._bn_to_int(bn) + return ec.EllipticCurvePrivateNumbers( + private_value=private_value, + public_numbers=self.public_key().public_numbers() + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._ec_key + ) + + +@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization) +class _EllipticCurvePublicKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSAVerificationContext( + self._backend, self, signature, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def public_numbers(self): + set_func, get_func, group = ( + self._backend._ec_key_determine_group_get_set_funcs(self._ec_key) 
+ ) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + with self._backend._tmp_bn_ctx() as bn_ctx: + bn_x = self._backend._lib.BN_CTX_get(bn_ctx) + bn_y = self._backend._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self._backend.openssl_assert(res == 1) + + x = self._backend._bn_to_int(bn_x) + y = self._backend._bn_to_int(bn_y) + + return ec.EllipticCurvePublicNumbers( + x=x, + y=y, + curve=self._curve + ) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "EC public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + None + ) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hashes.py similarity index 68% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hashes.py index da91eef..02ce5f0 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hashes.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -1,28 +1,19 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives import hashes -@utils.register_interface(interfaces.HashContext) +@utils.register_interface(hashes.HashContext) class _HashContext(object): def __init__(self, backend, algorithm, ctx=None): - self.algorithm = algorithm + self._algorithm = algorithm self._backend = backend @@ -40,30 +31,32 @@ class _HashContext(object): ) res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, self._backend._ffi.NULL) - assert res != 0 + self._backend.openssl_assert(res != 0) self._ctx = ctx + algorithm = utils.read_only_property("_algorithm") + def copy(self): copied_ctx = self._backend._lib.EVP_MD_CTX_create() copied_ctx = self._backend._ffi.gc( copied_ctx, self._backend._lib.EVP_MD_CTX_destroy ) res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) - assert res != 0 + self._backend.openssl_assert(res != 0) return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) def update(self, data): res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) - assert res != 0 + self._backend.openssl_assert(res != 0) def finalize(self): buf = self._backend._ffi.new("unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE) outlen = self._backend._ffi.new("unsigned int *") res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) - assert res != 0 - assert outlen[0] == self.algorithm.digest_size + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx) - assert res == 1 + self._backend.openssl_assert(res == 1) return self._backend._ffi.buffer(buf)[:outlen[0]] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hmac.py similarity index 67% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hmac.py index 3f1576f..dcf2fba 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/hmac.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -1,28 +1,22 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function from cryptography import utils -from cryptography.exceptions import UnsupportedAlgorithm, _Reasons -from cryptography.hazmat.primitives import interfaces +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces -@utils.register_interface(interfaces.HashContext) +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) class _HMACContext(object): def __init__(self, backend, key, algorithm, ctx=None): - self.algorithm = algorithm + self._algorithm = algorithm self._backend = backend if ctx is None: @@ -42,11 +36,13 @@ class _HMACContext(object): res = self._backend._lib.Cryptography_HMAC_Init_ex( ctx, key, len(key), evp_md, self._backend._ffi.NULL ) - assert res != 0 + self._backend.openssl_assert(res != 0) self._ctx = ctx self._key = key + algorithm = utils.read_only_property("_algorithm") + def copy(self): copied_ctx = self._backend._ffi.new("HMAC_CTX *") self._backend._lib.HMAC_CTX_init(copied_ctx) @@ -56,7 +52,7 @@ class _HMACContext(object): res = self._backend._lib.Cryptography_HMAC_CTX_copy( copied_ctx, self._ctx ) - assert res != 0 + self._backend.openssl_assert(res != 0) return _HMACContext( self._backend, self._key, self.algorithm, ctx=copied_ctx ) @@ -65,7 +61,7 @@ class _HMACContext(object): res = self._backend._lib.Cryptography_HMAC_Update( self._ctx, data, len(data) ) - assert res != 0 + self._backend.openssl_assert(res != 0) def finalize(self): buf = self._backend._ffi.new("unsigned char[]", @@ -74,7 +70,12 @@ class _HMACContext(object): res = self._backend._lib.Cryptography_HMAC_Final( self._ctx, buf, outlen ) - assert res != 0 - assert outlen[0] == self.algorithm.digest_size + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) self._backend._lib.HMAC_CTX_cleanup(self._ctx) return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py similarity index 82% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py index 6f28c54..664f6d3 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/backends/openssl/rsa.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function @@ -19,21 +10,20 @@ from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons ) -from cryptography.hazmat.primitives import hashes, interfaces -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.asymmetric.padding import ( - MGF1, OAEP, PKCS1v15, PSS +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, rsa ) -from cryptography.hazmat.primitives.interfaces import ( - RSAPrivateKeyWithNumbers, RSAPublicKeyWithNumbers +from cryptography.hazmat.primitives.asymmetric.padding import ( + AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization ) def _get_rsa_pss_salt_length(pss, key_size, digest_size): - if pss._mgf._salt_length is not None: - salt = pss._mgf._salt_length - else: - salt = pss._salt_length + salt = pss._salt_length if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: # bit length - 1 per RFC 3447 @@ -46,6 +36,9 @@ def _get_rsa_pss_salt_length(pss, key_size, digest_size): def _enc_dec_rsa(backend, key, data, padding): + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Padding must be an instance of AsymmetricPadding.") + if isinstance(padding, PKCS1v15): padding_enum = backend._lib.RSA_PKCS1_PADDING elif isinstance(padding, OAEP): @@ -96,15 +89,15 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum): pkey_ctx = backend._lib.EVP_PKEY_CTX_new( key._evp_pkey, backend._ffi.NULL ) - assert pkey_ctx != backend._ffi.NULL + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) res = init(pkey_ctx) - assert res == 1 + backend.openssl_assert(res == 1) res = backend._lib.EVP_PKEY_CTX_set_rsa_padding( pkey_ctx, padding_enum) - assert res > 0 + backend.openssl_assert(res > 0) buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) - assert buf_size > 0 + backend.openssl_assert(buf_size > 0) outlen = backend._ffi.new("size_t *", buf_size) buf = backend._ffi.new("char[]", buf_size) res = crypt(pkey_ctx, buf, outlen, data, len(data)) @@ -121,7 +114,7 @@ def _enc_dec_rsa_098(backend, key, data, padding_enum): crypt = backend._lib.RSA_private_decrypt key_size = backend._lib.RSA_size(key._rsa_cdata) - assert key_size > 0 + backend.openssl_assert(key_size > 0) buf = backend._ffi.new("unsigned char[]", key_size) res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum) if res < 0: @@ -142,26 +135,30 @@ def _handle_rsa_enc_dec_error(backend, key): "larger key size." 
) else: - assert ( - errors[0].reason == backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01 or - errors[0].reason == backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02 - ) + decoding_errors = [ + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01, + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02, + ] + if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR: + decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR) + + assert errors[0].reason in decoding_errors raise ValueError("Decryption failed.") -@utils.register_interface(interfaces.AsymmetricSignatureContext) +@utils.register_interface(AsymmetricSignatureContext) class _RSASignatureContext(object): def __init__(self, backend, private_key, padding, algorithm): self._backend = backend self._private_key = private_key - if not isinstance(padding, interfaces.AsymmetricPadding): - raise TypeError( - "Expected provider of interfaces.AsymmetricPadding.") + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") self._pkey_size = self._backend._lib.EVP_PKEY_size( self._private_key._evp_pkey ) + self._backend.openssl_assert(self._pkey_size > 0) if isinstance(padding, PKCS1v15): if self._backend._lib.Cryptography_HAS_PKEY_CTX: @@ -178,7 +175,6 @@ class _RSASignatureContext(object): # Size of key in bytes - 2 is the maximum # PSS signature length (salt length is checked later) - assert self._pkey_size > 0 if self._pkey_size - algorithm.digest_size - 2 < 0: raise ValueError("Digest too large for key size. Use a larger " "key.") @@ -211,7 +207,7 @@ class _RSASignatureContext(object): def finalize(self): evp_md = self._backend._lib.EVP_get_digestbyname( self._algorithm.name.encode("ascii")) - assert evp_md != self._backend._ffi.NULL + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) return self._finalize_method(evp_md) @@ -219,18 +215,18 @@ class _RSASignatureContext(object): pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( self._private_key._evp_pkey, self._backend._ffi.NULL ) - assert pkey_ctx != self._backend._ffi.NULL + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) pkey_ctx = self._backend._ffi.gc(pkey_ctx, self._backend._lib.EVP_PKEY_CTX_free) res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) - assert res == 1 + self._backend.openssl_assert(res == 1) res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( pkey_ctx, evp_md) - assert res > 0 + self._backend.openssl_assert(res > 0) res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( pkey_ctx, self._padding_enum) - assert res > 0 + self._backend.openssl_assert(res > 0) if isinstance(self._padding, PSS): res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( pkey_ctx, @@ -240,17 +236,19 @@ class _RSASignatureContext(object): self._hash_ctx.algorithm.digest_size ) ) - assert res > 0 + self._backend.openssl_assert(res > 0) if self._backend._lib.Cryptography_HAS_MGF1_MD: # MGF1 MD is configurable in OpenSSL 1.0.1+ mgf1_md = self._backend._lib.EVP_get_digestbyname( self._padding._mgf._algorithm.name.encode("ascii")) - assert mgf1_md != self._backend._ffi.NULL + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( pkey_ctx, mgf1_md ) - assert res > 0 + self._backend.openssl_assert(res > 0) data_to_sign = self._hash_ctx.finalize() buflen = self._backend._ffi.new("size_t *") res = self._backend._lib.EVP_PKEY_sign( @@ -260,7 +258,7 @@ class _RSASignatureContext(object): data_to_sign, len(data_to_sign) ) - assert res == 1 + self._backend.openssl_assert(res == 1) buf = 
self._backend._ffi.new("unsigned char[]", buflen[0]) res = self._backend._lib.EVP_PKEY_sign( pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) @@ -272,8 +270,9 @@ class _RSASignatureContext(object): self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): reason = ("Salt length too long for key size. Try using " "MAX_LENGTH instead.") - elif (errors[0].reason == - self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY): + else: + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) reason = "Digest too large for key size. Use a larger key." assert reason is not None raise ValueError(reason) @@ -333,24 +332,24 @@ class _RSASignatureContext(object): self._private_key._rsa_cdata, self._backend._lib.RSA_NO_PADDING ) - assert sig_len != -1 + self._backend.openssl_assert(sig_len != -1) return self._backend._ffi.buffer(sig_buf)[:sig_len] -@utils.register_interface(interfaces.AsymmetricVerificationContext) +@utils.register_interface(AsymmetricVerificationContext) class _RSAVerificationContext(object): def __init__(self, backend, public_key, signature, padding, algorithm): self._backend = backend self._public_key = public_key self._signature = signature - if not isinstance(padding, interfaces.AsymmetricPadding): - raise TypeError( - "Expected provider of interfaces.AsymmetricPadding.") + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") self._pkey_size = self._backend._lib.EVP_PKEY_size( self._public_key._evp_pkey ) + self._backend.openssl_assert(self._pkey_size > 0) if isinstance(padding, PKCS1v15): if self._backend._lib.Cryptography_HAS_PKEY_CTX: @@ -367,7 +366,6 @@ class _RSAVerificationContext(object): # Size of key in bytes - 2 is the maximum # PSS signature length (salt length is checked later) - assert self._pkey_size > 0 if self._pkey_size - algorithm.digest_size - 2 < 0: raise ValueError( "Digest too large for key size. 
Check that you have the " @@ -402,7 +400,7 @@ class _RSAVerificationContext(object): def verify(self): evp_md = self._backend._lib.EVP_get_digestbyname( self._algorithm.name.encode("ascii")) - assert evp_md != self._backend._ffi.NULL + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) self._verify_method(evp_md) @@ -410,18 +408,18 @@ class _RSAVerificationContext(object): pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( self._public_key._evp_pkey, self._backend._ffi.NULL ) - assert pkey_ctx != self._backend._ffi.NULL + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) pkey_ctx = self._backend._ffi.gc(pkey_ctx, self._backend._lib.EVP_PKEY_CTX_free) res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) - assert res == 1 + self._backend.openssl_assert(res == 1) res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( pkey_ctx, evp_md) - assert res > 0 + self._backend.openssl_assert(res > 0) res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( pkey_ctx, self._padding_enum) - assert res > 0 + self._backend.openssl_assert(res > 0) if isinstance(self._padding, PSS): res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( pkey_ctx, @@ -431,16 +429,18 @@ class _RSAVerificationContext(object): self._hash_ctx.algorithm.digest_size ) ) - assert res > 0 + self._backend.openssl_assert(res > 0) if self._backend._lib.Cryptography_HAS_MGF1_MD: # MGF1 MD is configurable in OpenSSL 1.0.1+ mgf1_md = self._backend._lib.EVP_get_digestbyname( self._padding._mgf._algorithm.name.encode("ascii")) - assert mgf1_md != self._backend._ffi.NULL + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( pkey_ctx, mgf1_md ) - assert res > 0 + self._backend.openssl_assert(res > 0) data_to_verify = self._hash_ctx.finalize() res = self._backend._lib.EVP_PKEY_verify( @@ -453,7 +453,7 @@ class _RSAVerificationContext(object): # The previous call can return negative numbers in the event of an # error. This is not a signature failure but we need to fail if it # occurs. - assert res >= 0 + self._backend.openssl_assert(res >= 0) if res == 0: errors = self._backend._consume_errors() assert errors @@ -473,7 +473,7 @@ class _RSAVerificationContext(object): # The previous call can return negative numbers in the event of an # error. This is not a signature failure but we need to fail if it # occurs. 
- assert res >= 0 + self._backend.openssl_assert(res >= 0) if res == 0: errors = self._backend._consume_errors() assert errors @@ -511,26 +511,16 @@ class _RSAVerificationContext(object): raise InvalidSignature -@utils.register_interface(RSAPrivateKeyWithNumbers) +@utils.register_interface(RSAPrivateKeyWithSerialization) class _RSAPrivateKey(object): - def __init__(self, backend, rsa_cdata): + def __init__(self, backend, rsa_cdata, evp_pkey): self._backend = backend self._rsa_cdata = rsa_cdata - - evp_pkey = self._backend._lib.EVP_PKEY_new() - assert evp_pkey != self._backend._ffi.NULL - evp_pkey = self._backend._ffi.gc( - evp_pkey, self._backend._lib.EVP_PKEY_free - ) - res = self._backend._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) - assert res == 1 self._evp_pkey = evp_pkey self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) - @property - def key_size(self): - return self._key_size + key_size = utils.read_only_property("_key_size") def signer(self, padding, algorithm): return _RSASignatureContext(self._backend, self, padding, algorithm) @@ -544,13 +534,14 @@ class _RSAPrivateKey(object): def public_key(self): ctx = self._backend._lib.RSA_new() - assert ctx != self._backend._ffi.NULL + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e) ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n) res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL) - assert res == 1 - return _RSAPublicKey(self._backend, ctx) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx) + return _RSAPublicKey(self._backend, ctx, evp_pkey) def private_numbers(self): return rsa.RSAPrivateNumbers( @@ -566,29 +557,31 @@ class _RSAPrivateKey(object): ) ) + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._rsa_cdata + ) -@utils.register_interface(RSAPublicKeyWithNumbers) + +@utils.register_interface(RSAPublicKeyWithSerialization) class _RSAPublicKey(object): - def __init__(self, backend, rsa_cdata): + def __init__(self, backend, rsa_cdata, evp_pkey): self._backend = backend self._rsa_cdata = rsa_cdata - - evp_pkey = self._backend._lib.EVP_PKEY_new() - assert evp_pkey != self._backend._ffi.NULL - evp_pkey = self._backend._ffi.gc( - evp_pkey, self._backend._lib.EVP_PKEY_free - ) - res = self._backend._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) - assert res == 1 self._evp_pkey = evp_pkey self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) - @property - def key_size(self): - return self._key_size + key_size = utils.read_only_property("_key_size") def verifier(self, signature, padding, algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + return _RSAVerificationContext( self._backend, self, signature, padding, algorithm ) @@ -601,3 +594,11 @@ class _RSAPublicKey(object): e=self._backend._bn_to_int(self._rsa_cdata.e), n=self._backend._bn_to_int(self._rsa_cdata.n), ) + + def public_bytes(self, encoding, format): + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + self._rsa_cdata + ) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/utils.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/utils.py new file mode 100644 index 0000000..001121f --- /dev/null +++ 
b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + + +def _truncate_digest(digest, order_bits): + digest_len = len(digest) + + if 8 * digest_len > order_bits: + digest_len = (order_bits + 7) // 8 + digest = digest[:digest_len] + + if 8 * digest_len > order_bits: + rshift = 8 - (order_bits & 0x7) + assert 0 < rshift < 8 + + mask = 0xFF >> rshift << rshift + + # Set the bottom rshift bits to 0 + digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask) + + return digest diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/x509.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/x509.py new file mode 100644 index 0000000..1ba59b6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/backends/openssl/x509.py @@ -0,0 +1,940 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import datetime +import ipaddress + +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.x509.oid import ( + CRLExtensionOID, CertificatePoliciesOID, ExtensionOID +) + + +def _obj2txt(backend, obj): + # Set to 80 on the recommendation of + # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values + buf_len = 80 + buf = backend._ffi.new("char[]", buf_len) + res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1) + backend.openssl_assert(res > 0) + return backend._ffi.buffer(buf, res)[:].decode() + + +def _decode_x509_name_entry(backend, x509_name_entry): + obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry) + backend.openssl_assert(obj != backend._ffi.NULL) + data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry) + backend.openssl_assert(data != backend._ffi.NULL) + value = backend._asn1_string_to_utf8(data) + oid = _obj2txt(backend, obj) + + return x509.NameAttribute(x509.ObjectIdentifier(oid), value) + + +def _decode_x509_name(backend, x509_name): + count = backend._lib.X509_NAME_entry_count(x509_name) + attributes = [] + for x in range(count): + entry = backend._lib.X509_NAME_get_entry(x509_name, x) + attributes.append(_decode_x509_name_entry(backend, entry)) + + return x509.Name(attributes) + + +def _decode_general_names(backend, gns): + num = backend._lib.sk_GENERAL_NAME_num(gns) + names = [] + for i in range(num): + gn = backend._lib.sk_GENERAL_NAME_value(gns, i) + backend.openssl_assert(gn != backend._ffi.NULL) + names.append(_decode_general_name(backend, gn)) + + return names + + +def _decode_general_name(backend, gn): + if gn.type == backend._lib.GEN_DNS: + data = backend._asn1_string_to_bytes(gn.d.dNSName) + if not data: + decoded = u"" + elif data.startswith(b"*."): + # This is a wildcard name. We need to remove the leading wildcard, + # IDNA decode, then re-add the wildcard. Wildcard characters should + # always be left-most (RFC 2595 section 2.4). 
+ decoded = u"*." + idna.decode(data[2:]) + else: + # Not a wildcard, decode away. If the string has a * in it anywhere + # invalid this will raise an InvalidCodePoint + decoded = idna.decode(data) + if data.startswith(b"."): + # idna strips leading periods. Name constraints can have that + # so we need to re-add it. Sigh. + decoded = u"." + decoded + + return x509.DNSName(decoded) + elif gn.type == backend._lib.GEN_URI: + data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier) + parsed = urllib_parse.urlparse(data) + if parsed.hostname: + hostname = idna.decode(parsed.hostname) + else: + hostname = "" + if parsed.port: + netloc = hostname + u":" + six.text_type(parsed.port) + else: + netloc = hostname + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. + uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )) + return x509.UniformResourceIdentifier(uri) + elif gn.type == backend._lib.GEN_RID: + oid = _obj2txt(backend, gn.d.registeredID) + return x509.RegisteredID(x509.ObjectIdentifier(oid)) + elif gn.type == backend._lib.GEN_IPADD: + data = backend._asn1_string_to_bytes(gn.d.iPAddress) + data_len = len(data) + if data_len == 8 or data_len == 32: + # This is an IPv4 or IPv6 Network and not a single IP. This + # type of data appears in Name Constraints. Unfortunately, + # ipaddress doesn't support packed bytes + netmask. Additionally, + # IPv6Network can only handle CIDR rather than the full 16 byte + # netmask. To handle this we convert the netmask to integer, then + # find the first 0 bit, which will be the prefix. If another 1 + # bit is present after that the netmask is invalid. + base = ipaddress.ip_address(data[:data_len // 2]) + netmask = ipaddress.ip_address(data[data_len // 2:]) + bits = bin(int(netmask))[2:] + prefix = bits.find('0') + # If no 0 bits are found it is a /32 or /128 + if prefix == -1: + prefix = len(bits) + + if "1" in bits[prefix:]: + raise ValueError("Invalid netmask") + + ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix)) + else: + ip = ipaddress.ip_address(data) + + return x509.IPAddress(ip) + elif gn.type == backend._lib.GEN_DIRNAME: + return x509.DirectoryName( + _decode_x509_name(backend, gn.d.directoryName) + ) + elif gn.type == backend._lib.GEN_EMAIL: + data = backend._asn1_string_to_ascii(gn.d.rfc822Name) + name, address = parseaddr(data) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. No + # IDNA decoding can be done since there is no domain component. + return x509.RFC822Name(address) + else: + # A normal email of the form user@domain.com. Let's attempt to + # decode the domain component and return the entire address. 
+ return x509.RFC822Name( + parts[0] + u"@" + idna.decode(parts[1]) + ) + elif gn.type == backend._lib.GEN_OTHERNAME: + type_id = _obj2txt(backend, gn.d.otherName.type_id) + value = backend._asn1_to_der(gn.d.otherName.value) + return x509.OtherName(x509.ObjectIdentifier(type_id), value) + else: + # x400Address or ediPartyName + raise x509.UnsupportedGeneralNameType( + "{0} is not a supported type".format( + x509._GENERAL_NAMES.get(gn.type, gn.type) + ), + gn.type + ) + + +def _decode_ocsp_no_check(backend, ext): + return x509.OCSPNoCheck() + + +class _X509ExtensionParser(object): + def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None): + self.ext_count = ext_count + self.get_ext = get_ext + self.handlers = handlers + self.unsupported_exts = unsupported_exts + + def parse(self, backend, x509_obj): + extensions = [] + seen_oids = set() + for i in range(self.ext_count(backend, x509_obj)): + ext = self.get_ext(backend, x509_obj, i) + backend.openssl_assert(ext != backend._ffi.NULL) + crit = backend._lib.X509_EXTENSION_get_critical(ext) + critical = crit == 1 + oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object)) + if oid in seen_oids: + raise x509.DuplicateExtension( + "Duplicate {0} extension found".format(oid), oid + ) + try: + handler = self.handlers[oid] + except KeyError: + if critical: + raise x509.UnsupportedExtension( + "Critical extension {0} is not currently supported" + .format(oid), oid + ) + else: + # For extensions which are not supported by OpenSSL we pass the + # extension object directly to the parsing routine so it can + # be decoded manually. + if self.unsupported_exts and oid in self.unsupported_exts: + ext_data = ext + else: + ext_data = backend._lib.X509V3_EXT_d2i(ext) + if ext_data == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is invalid and can't be " + "parsed".format(oid) + ) + + value = handler(backend, ext_data) + extensions.append(x509.Extension(oid, critical, value)) + + seen_oids.add(oid) + + return x509.Extensions(extensions) + + +@utils.register_interface(x509.Certificate) +class _Certificate(object): + def __init__(self, backend, x509): + self._backend = backend + self._x509 = x509 + + def __repr__(self): + return "".format(self.subject) + + def __eq__(self, other): + if not isinstance(other, x509.Certificate): + return NotImplemented + + res = self._backend._lib.X509_cmp(self._x509, other._x509) + return res == 0 + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + h.update(self.public_bytes(serialization.Encoding.DER)) + return h.finalize() + + @property + def version(self): + version = self._backend._lib.X509_get_version(self._x509) + if version == 0: + return x509.Version.v1 + elif version == 2: + return x509.Version.v3 + else: + raise x509.InvalidVersion( + "{0} is not a valid X509 version".format(version), version + ) + + @property + def serial(self): + asn1_int = self._backend._lib.X509_get_serialNumber(self._x509) + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return self._backend._asn1_integer_to_int(asn1_int) + + def public_key(self): + pkey = self._backend._lib.X509_get_pubkey(self._x509) + if pkey == self._backend._ffi.NULL: + # Remove errors from the stack. 
+ self._backend._consume_errors() + raise ValueError("Certificate public key is of an unknown type") + + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def not_valid_before(self): + asn1_time = self._backend._lib.X509_get_notBefore(self._x509) + return self._backend._parse_asn1_time(asn1_time) + + @property + def not_valid_after(self): + asn1_time = self._backend._lib.X509_get_notAfter(self._x509) + return self._backend._parse_asn1_time(asn1_time) + + @property + def issuer(self): + issuer = self._backend._lib.X509_get_issuer_name(self._x509) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def subject(self): + subject = self._backend._lib.X509_get_subject_name(self._x509) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def extensions(self): + return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509) + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509(bio, self._x509) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_bio(bio, self._x509) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +def _decode_certificate_policies(backend, cp): + cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + num = backend._lib.sk_POLICYINFO_num(cp) + certificate_policies = [] + for i in range(num): + qualifiers = None + pi = backend._lib.sk_POLICYINFO_value(cp, i) + oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid)) + if pi.qualifiers != backend._ffi.NULL: + qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers) + qualifiers = [] + for j in range(qnum): + pqi = backend._lib.sk_POLICYQUALINFO_value( + pi.qualifiers, j + ) + pqualid = x509.ObjectIdentifier( + _obj2txt(backend, pqi.pqualid) + ) + if pqualid == CertificatePoliciesOID.CPS_QUALIFIER: + cpsuri = backend._ffi.buffer( + pqi.d.cpsuri.data, pqi.d.cpsuri.length + )[:].decode('ascii') + qualifiers.append(cpsuri) + else: + assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE + user_notice = _decode_user_notice( + backend, pqi.d.usernotice + ) + qualifiers.append(user_notice) + + certificate_policies.append( + x509.PolicyInformation(oid, qualifiers) + ) + + return x509.CertificatePolicies(certificate_policies) + + +def _decode_user_notice(backend, un): + explicit_text = None + notice_reference = None + + if un.exptext != backend._ffi.NULL: + explicit_text = backend._asn1_string_to_utf8(un.exptext) + + if un.noticeref != backend._ffi.NULL: + organization = backend._asn1_string_to_utf8(un.noticeref.organization) + + num = backend._lib.sk_ASN1_INTEGER_num( + un.noticeref.noticenos + ) + notice_numbers = [] + for i in range(num): + asn1_int = backend._lib.sk_ASN1_INTEGER_value( + un.noticeref.noticenos, i + ) + notice_num = backend._asn1_integer_to_int(asn1_int) + 
notice_numbers.append(notice_num) + + notice_reference = x509.NoticeReference( + organization, notice_numbers + ) + + return x509.UserNotice(notice_reference, explicit_text) + + +def _decode_basic_constraints(backend, bc_st): + basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st) + basic_constraints = backend._ffi.gc( + basic_constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + # The byte representation of an ASN.1 boolean true is \xff. OpenSSL + # chooses to just map this to its ordinal value, so true is 255 and + # false is 0. + ca = basic_constraints.ca == 255 + if basic_constraints.pathlen == backend._ffi.NULL: + path_length = None + else: + path_length = backend._asn1_integer_to_int(basic_constraints.pathlen) + + return x509.BasicConstraints(ca, path_length) + + +def _decode_subject_key_identifier(backend, asn1_string): + asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string) + asn1_string = backend._ffi.gc( + asn1_string, backend._lib.ASN1_OCTET_STRING_free + ) + return x509.SubjectKeyIdentifier( + backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + ) + + +def _decode_authority_key_identifier(backend, akid): + akid = backend._ffi.cast("AUTHORITY_KEYID *", akid) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + key_identifier = None + authority_cert_issuer = None + authority_cert_serial_number = None + + if akid.keyid != backend._ffi.NULL: + key_identifier = backend._ffi.buffer( + akid.keyid.data, akid.keyid.length + )[:] + + if akid.issuer != backend._ffi.NULL: + authority_cert_issuer = _decode_general_names( + backend, akid.issuer + ) + + if akid.serial != backend._ffi.NULL: + authority_cert_serial_number = backend._asn1_integer_to_int( + akid.serial + ) + + return x509.AuthorityKeyIdentifier( + key_identifier, authority_cert_issuer, authority_cert_serial_number + ) + + +def _decode_authority_information_access(backend, aia): + aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia) + aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free) + num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia) + access_descriptions = [] + for i in range(num): + ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i) + backend.openssl_assert(ad.method != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method)) + backend.openssl_assert(ad.location != backend._ffi.NULL) + gn = _decode_general_name(backend, ad.location) + access_descriptions.append(x509.AccessDescription(oid, gn)) + + return x509.AuthorityInformationAccess(access_descriptions) + + +def _decode_key_usage(backend, bit_string): + bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string) + bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free) + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + digital_signature = get_bit(bit_string, 0) == 1 + content_commitment = get_bit(bit_string, 1) == 1 + key_encipherment = get_bit(bit_string, 2) == 1 + data_encipherment = get_bit(bit_string, 3) == 1 + key_agreement = get_bit(bit_string, 4) == 1 + key_cert_sign = get_bit(bit_string, 5) == 1 + crl_sign = get_bit(bit_string, 6) == 1 + encipher_only = get_bit(bit_string, 7) == 1 + decipher_only = get_bit(bit_string, 8) == 1 + return x509.KeyUsage( + digital_signature, + content_commitment, + key_encipherment, + data_encipherment, + key_agreement, + key_cert_sign, + crl_sign, + encipher_only, + decipher_only + ) + + +def _decode_general_names_extension(backend, gns): + gns = backend._ffi.cast("GENERAL_NAMES *", gns) + gns = 
backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + general_names = _decode_general_names(backend, gns) + return general_names + + +def _decode_subject_alt_name(backend, ext): + return x509.SubjectAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_issuer_alt_name(backend, ext): + return x509.IssuerAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_name_constraints(backend, nc): + nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _decode_general_subtrees(backend, nc.permittedSubtrees) + excluded = _decode_general_subtrees(backend, nc.excludedSubtrees) + return x509.NameConstraints( + permitted_subtrees=permitted, excluded_subtrees=excluded + ) + + +def _decode_general_subtrees(backend, stack_subtrees): + if stack_subtrees == backend._ffi.NULL: + return None + + num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees) + subtrees = [] + + for i in range(num): + obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i) + backend.openssl_assert(obj != backend._ffi.NULL) + name = _decode_general_name(backend, obj.base) + subtrees.append(name) + + return subtrees + + +def _decode_extended_key_usage(backend, sk): + sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk) + sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free) + num = backend._lib.sk_ASN1_OBJECT_num(sk) + ekus = [] + + for i in range(num): + obj = backend._lib.sk_ASN1_OBJECT_value(sk, i) + backend.openssl_assert(obj != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, obj)) + ekus.append(oid) + + return x509.ExtendedKeyUsage(ekus) + + +_DISTPOINT_TYPE_FULLNAME = 0 +_DISTPOINT_TYPE_RELATIVENAME = 1 + + +def _decode_crl_distribution_points(backend, cdps): + cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps) + cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free) + num = backend._lib.sk_DIST_POINT_num(cdps) + + dist_points = [] + for i in range(num): + full_name = None + relative_name = None + crl_issuer = None + reasons = None + cdp = backend._lib.sk_DIST_POINT_value(cdps, i) + if cdp.reasons != backend._ffi.NULL: + # We will check each bit from RFC 5280 + # ReasonFlags ::= BIT STRING { + # unused (0), + # keyCompromise (1), + # cACompromise (2), + # affiliationChanged (3), + # superseded (4), + # cessationOfOperation (5), + # certificateHold (6), + # privilegeWithdrawn (7), + # aACompromise (8) } + reasons = [] + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + if get_bit(cdp.reasons, 1): + reasons.append(x509.ReasonFlags.key_compromise) + + if get_bit(cdp.reasons, 2): + reasons.append(x509.ReasonFlags.ca_compromise) + + if get_bit(cdp.reasons, 3): + reasons.append(x509.ReasonFlags.affiliation_changed) + + if get_bit(cdp.reasons, 4): + reasons.append(x509.ReasonFlags.superseded) + + if get_bit(cdp.reasons, 5): + reasons.append(x509.ReasonFlags.cessation_of_operation) + + if get_bit(cdp.reasons, 6): + reasons.append(x509.ReasonFlags.certificate_hold) + + if get_bit(cdp.reasons, 7): + reasons.append(x509.ReasonFlags.privilege_withdrawn) + + if get_bit(cdp.reasons, 8): + reasons.append(x509.ReasonFlags.aa_compromise) + + reasons = frozenset(reasons) + + if cdp.CRLissuer != backend._ffi.NULL: + crl_issuer = _decode_general_names(backend, cdp.CRLissuer) + + # Certificates may have a crl_issuer/reasons and no distribution + # point so make sure it's not null. 
+ if cdp.distpoint != backend._ffi.NULL: + # Type 0 is fullName, there is no #define for it in the code. + if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME: + full_name = _decode_general_names( + backend, cdp.distpoint.name.fullname + ) + # OpenSSL code doesn't test for a specific type for + # relativename, everything that isn't fullname is considered + # relativename. + else: + rns = cdp.distpoint.name.relativename + rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns) + attributes = [] + for i in range(rnum): + rn = backend._lib.sk_X509_NAME_ENTRY_value( + rns, i + ) + backend.openssl_assert(rn != backend._ffi.NULL) + attributes.append( + _decode_x509_name_entry(backend, rn) + ) + + relative_name = x509.Name(attributes) + + dist_points.append( + x509.DistributionPoint( + full_name, relative_name, reasons, crl_issuer + ) + ) + + return x509.CRLDistributionPoints(dist_points) + + +def _decode_inhibit_any_policy(backend, asn1_int): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + skip_certs = backend._asn1_integer_to_int(asn1_int) + return x509.InhibitAnyPolicy(skip_certs) + + +_CRL_REASON_CODE_TO_ENUM = { + 0: x509.ReasonFlags.unspecified, + 1: x509.ReasonFlags.key_compromise, + 2: x509.ReasonFlags.ca_compromise, + 3: x509.ReasonFlags.affiliation_changed, + 4: x509.ReasonFlags.superseded, + 5: x509.ReasonFlags.cessation_of_operation, + 6: x509.ReasonFlags.certificate_hold, + 8: x509.ReasonFlags.remove_from_crl, + 9: x509.ReasonFlags.privilege_withdrawn, + 10: x509.ReasonFlags.aa_compromise, +} + + +def _decode_crl_reason(backend, enum): + enum = backend._ffi.cast("ASN1_ENUMERATED *", enum) + enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free) + code = backend._lib.ASN1_ENUMERATED_get(enum) + + try: + return _CRL_REASON_CODE_TO_ENUM[code] + except KeyError: + raise ValueError("Unsupported reason code: {0}".format(code)) + + +def _decode_invalidity_date(backend, inv_date): + generalized_time = backend._ffi.cast( + "ASN1_GENERALIZEDTIME *", inv_date + ) + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + time = backend._ffi.string( + backend._lib.ASN1_STRING_data( + backend._ffi.cast("ASN1_STRING *", generalized_time) + ) + ).decode("ascii") + return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ") + + +def _decode_cert_issuer(backend, ext): + """ + This handler decodes the CertificateIssuer entry extension directly + from the X509_EXTENSION object. This is necessary because this entry + extension is not directly supported by OpenSSL 0.9.8. + """ + + data_ptr_ptr = backend._ffi.new("const unsigned char **") + data_ptr_ptr[0] = ext.value.data + gns = backend._lib.d2i_GENERAL_NAMES( + backend._ffi.NULL, data_ptr_ptr, ext.value.length + ) + + # Check the result of d2i_GENERAL_NAMES() is valid. Usually this is covered + # in _X509ExtensionParser but since we are responsible for decoding this + # entry extension ourselves, we have to this here. 
+ if gns == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is corrupted and can't be parsed".format( + CRLExtensionOID.CERTIFICATE_ISSUER)) + + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + return x509.GeneralNames(_decode_general_names(backend, gns)) + + +@utils.register_interface(x509.RevokedCertificate) +class _RevokedCertificate(object): + def __init__(self, backend, x509_revoked): + self._backend = backend + self._x509_revoked = x509_revoked + + @property + def serial_number(self): + asn1_int = self._x509_revoked.serialNumber + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return self._backend._asn1_integer_to_int(asn1_int) + + @property + def revocation_date(self): + return self._backend._parse_asn1_time( + self._x509_revoked.revocationDate) + + @property + def extensions(self): + return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse( + self._backend, self._x509_revoked + ) + + +@utils.register_interface(x509.CertificateRevocationList) +class _CertificateRevocationList(object): + def __init__(self, backend, x509_crl): + self._backend = backend + self._x509_crl = x509_crl + + def __eq__(self, other): + if not isinstance(other, x509.CertificateRevocationList): + return NotImplemented + + res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl) + return res == 0 + + def __ne__(self, other): + return not self == other + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + bio = self._backend._create_mem_bio() + res = self._backend._lib.i2d_X509_CRL_bio( + bio, self._x509_crl + ) + self._backend.openssl_assert(res == 1) + der = self._backend._read_mem_bio(bio) + h.update(der) + return h.finalize() + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def issuer(self): + issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def next_update(self): + nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl) + self._backend.openssl_assert(nu != self._backend._ffi.NULL) + return self._backend._parse_asn1_time(nu) + + @property + def last_update(self): + lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl) + self._backend.openssl_assert(lu != self._backend._ffi.NULL) + return self._backend._parse_asn1_time(lu) + + def _revoked_certificates(self): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + self._backend.openssl_assert(revoked != self._backend._ffi.NULL) + + num = self._backend._lib.sk_X509_REVOKED_num(revoked) + revoked_list = [] + for i in range(num): + r = self._backend._lib.sk_X509_REVOKED_value(revoked, i) + self._backend.openssl_assert(r != self._backend._ffi.NULL) + revoked_list.append(_RevokedCertificate(self._backend, r)) + + return revoked_list + + def __iter__(self): + return iter(self._revoked_certificates()) + + def __getitem__(self, idx): + return self._revoked_certificates()[idx] + + def __len__(self): + return len(self._revoked_certificates()) + + @property + def extensions(self): + raise NotImplementedError() + + +@utils.register_interface(x509.CertificateSigningRequest) +class _CertificateSigningRequest(object): + def __init__(self, backend, 
x509_req): + self._backend = backend + self._x509_req = x509_req + + def __eq__(self, other): + if not isinstance(other, _CertificateSigningRequest): + return NotImplemented + + self_bytes = self.public_bytes(serialization.Encoding.DER) + other_bytes = other.public_bytes(serialization.Encoding.DER) + return self_bytes == other_bytes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def public_key(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def subject(self): + subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def extensions(self): + x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req) + return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts) + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_REQ( + bio, self._x509_req + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +_EXTENSION_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _decode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), + ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies, + ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points, + ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check, + ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints, +} + +_REVOKED_EXTENSION_HANDLERS = { + CRLExtensionOID.CRL_REASON: _decode_crl_reason, + CRLExtensionOID.INVALIDITY_DATE: _decode_invalidity_date, + CRLExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer, +} + +_REVOKED_UNSUPPORTED_EXTENSIONS = set([ + CRLExtensionOID.CERTIFICATE_ISSUER, +]) + +_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), + handlers=_EXTENSION_HANDLERS +) + +_CSR_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x), + get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i), + handlers=_EXTENSION_HANDLERS +) + 
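The _decode_* handlers above are not called directly; they are dispatched by the _X509ExtensionParser instances just defined and surface through the public extensions properties. A minimal sketch of that path, assuming the cryptography 1.x API vendored in this diff (the "cert.pem" path is a placeholder):

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import ExtensionOID

with open("cert.pem", "rb") as f:   # placeholder certificate file
    cert = x509.load_pem_x509_certificate(f.read(), default_backend())

# Reading .extensions walks _CERTIFICATE_EXTENSION_PARSER, which maps each
# extension OID to one of the _decode_* handlers defined above (here,
# _decode_basic_constraints).
bc = cert.extensions.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
print(bc.value.ca, bc.value.path_length)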
+_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i), + handlers=_REVOKED_EXTENSION_HANDLERS, + unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS +) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-35m-darwin.so new file mode 100755 index 0000000..f52fd03 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_constant_time.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_openssl.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_openssl.cpython-35m-darwin.so new file mode 100755 index 0000000..7b619f8 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_openssl.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_padding.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_padding.cpython-35m-darwin.so new file mode 100755 index 0000000..710eed2 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/_padding.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py new file mode 100644 index 0000000..dfe046b --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.bindings._commoncrypto import ffi, lib + + +class Binding(object): + """ + CommonCrypto API wrapper. 
+ """ + lib = lib + ffi = ffi diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..f0ad1d5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,423 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so +# we can loop over them and delete them at runtime. It will be removed when +# cffi supports #if in cdef + +CONDITIONAL_NAMES = { + "Cryptography_HAS_AES_WRAP": [ + "AES_wrap_key", + "AES_unwrap_key", + ], + "Cryptography_HAS_CMAC": [ + "CMAC_CTX_new", + "CMAC_Init", + "CMAC_Update", + "CMAC_Final", + "CMAC_CTX_copy", + "CMAC_CTX_free", + ], + "Cryptography_HAS_CMS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + "CMS_final", + "CMS_sign", + "CMS_verify", + "CMS_encrypt", + "CMS_decrypt", + "CMS_add1_signer", + "CMS_TEXT", + "CMS_NOCERTS", + "CMS_NO_CONTENT_VERIFY", + "CMS_NO_ATTR_VERIFY", + "CMS_NOSIGS", + "CMS_NOINTERN", + "CMS_NO_SIGNER_CERT_VERIFY", + "CMS_NOVERIFY", + "CMS_DETACHED", + "CMS_BINARY", + "CMS_NOATTR", + "CMS_NOSMIMECAP", + "CMS_NOOLDMIMETYPE", + "CMS_CRLFEOL", + "CMS_STREAM", + "CMS_NOCRL", + "CMS_PARTIAL", + "CMS_REUSE_DIGEST", + "CMS_USE_KEYID", + "CMS_DEBUG_DECRYPT", + ], + "Cryptography_HAS_CMS_BIO_FUNCTIONS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + ], + "Cryptography_HAS_EC": [ + "OPENSSL_EC_NAMED_CURVE", + "EC_GROUP_new", + "EC_GROUP_free", + "EC_GROUP_clear_free", + "EC_GROUP_new_curve_GFp", + "EC_GROUP_new_by_curve_name", + "EC_GROUP_set_curve_GFp", + "EC_GROUP_get_curve_GFp", + "EC_GROUP_method_of", + "EC_GROUP_get0_generator", + "EC_GROUP_get_curve_name", + "EC_GROUP_get_degree", + "EC_GROUP_set_asn1_flag", + "EC_GROUP_set_point_conversion_form", + "EC_KEY_new", + "EC_KEY_free", + "EC_get_builtin_curves", + "EC_KEY_new_by_curve_name", + "EC_KEY_copy", + "EC_KEY_dup", + "EC_KEY_up_ref", + "EC_KEY_set_group", + "EC_KEY_get0_private_key", + "EC_KEY_set_private_key", + "EC_KEY_set_public_key", + "EC_KEY_get_enc_flags", + "EC_KEY_set_enc_flags", + "EC_KEY_set_conv_form", + "EC_KEY_get_key_method_data", + "EC_KEY_insert_key_method_data", + "EC_KEY_set_asn1_flag", + "EC_KEY_precompute_mult", + "EC_KEY_generate_key", + "EC_KEY_check_key", + "EC_POINT_new", + "EC_POINT_free", + "EC_POINT_clear_free", + "EC_POINT_copy", + "EC_POINT_dup", + "EC_POINT_method_of", + "EC_POINT_set_to_infinity", + "EC_POINT_set_Jprojective_coordinates_GFp", + "EC_POINT_get_Jprojective_coordinates_GFp", + 
"EC_POINT_set_affine_coordinates_GFp", + "EC_POINT_get_affine_coordinates_GFp", + "EC_POINT_set_compressed_coordinates_GFp", + "EC_POINT_point2oct", + "EC_POINT_oct2point", + "EC_POINT_point2bn", + "EC_POINT_bn2point", + "EC_POINT_point2hex", + "EC_POINT_hex2point", + "EC_POINT_add", + "EC_POINT_dbl", + "EC_POINT_invert", + "EC_POINT_is_at_infinity", + "EC_POINT_is_on_curve", + "EC_POINT_cmp", + "EC_POINT_make_affine", + "EC_POINTs_make_affine", + "EC_POINTs_mul", + "EC_POINT_mul", + "EC_GROUP_precompute_mult", + "EC_GROUP_have_precompute_mult", + "EC_GFp_simple_method", + "EC_GFp_mont_method", + "EC_GFp_nist_method", + "EC_METHOD_get_field_type", + "EVP_PKEY_assign_EC_KEY", + "EVP_PKEY_get1_EC_KEY", + "EVP_PKEY_set1_EC_KEY", + "PEM_write_bio_ECPrivateKey", + "i2d_EC_PUBKEY", + "d2i_EC_PUBKEY", + "d2i_EC_PUBKEY_bio", + "i2d_EC_PUBKEY_bio", + "d2i_ECPrivateKey", + "d2i_ECPrivateKey_bio", + "i2d_ECPrivateKey", + "i2d_ECPrivateKey_bio", + "i2o_ECPublicKey", + "o2i_ECPublicKey", + "SSL_CTX_set_tmp_ecdh", + ], + + "Cryptography_HAS_EC_1_0_1": [ + "EC_KEY_get_flags", + "EC_KEY_set_flags", + "EC_KEY_clear_flags", + "EC_KEY_set_public_key_affine_coordinates", + ], + + "Cryptography_HAS_EC_NISTP_64_GCC_128": [ + "EC_GFp_nistp224_method", + "EC_GFp_nistp256_method", + "EC_GFp_nistp521_method", + ], + + "Cryptography_HAS_EC2M": [ + "EC_GF2m_simple_method", + "EC_POINT_set_affine_coordinates_GF2m", + "EC_POINT_get_affine_coordinates_GF2m", + "EC_POINT_set_compressed_coordinates_GF2m", + "EC_GROUP_set_curve_GF2m", + "EC_GROUP_get_curve_GF2m", + "EC_GROUP_new_curve_GF2m", + ], + + "Cryptography_HAS_EC_1_0_2": [ + "EC_curve_nid2nist", + ], + "Cryptography_HAS_ECDH": [ + "ECDH_compute_key", + "ECDH_get_ex_new_index", + "ECDH_set_ex_data", + "ECDH_get_ex_data", + ], + "Cryptography_HAS_ECDSA": [ + "ECDSA_SIG_new", + "ECDSA_SIG_free", + "i2d_ECDSA_SIG", + "d2i_ECDSA_SIG", + "ECDSA_do_sign", + "ECDSA_do_sign_ex", + "ECDSA_do_verify", + "ECDSA_sign_setup", + "ECDSA_sign", + "ECDSA_sign_ex", + "ECDSA_verify", + "ECDSA_size", + "ECDSA_OpenSSL", + "ECDSA_set_default_method", + "ECDSA_get_default_method", + "ECDSA_set_method", + "ECDSA_get_ex_new_index", + "ECDSA_set_ex_data", + "ECDSA_get_ex_data", + ], + "Cryptography_HAS_ENGINE_CRYPTODEV": [ + "ENGINE_load_cryptodev" + ], + "Cryptography_HAS_REMOVE_THREAD_STATE": [ + "ERR_remove_thread_state" + ], + "Cryptography_HAS_098H_ERROR_CODES": [ + "ASN1_F_B64_READ_ASN1", + "ASN1_F_B64_WRITE_ASN1", + "ASN1_F_SMIME_READ_ASN1", + "ASN1_F_SMIME_TEXT", + "ASN1_R_NO_CONTENT_TYPE", + "ASN1_R_NO_MULTIPART_BODY_FAILURE", + "ASN1_R_NO_MULTIPART_BOUNDARY", + ], + "Cryptography_HAS_098C_CAMELLIA_CODES": [ + "EVP_F_CAMELLIA_INIT_KEY", + "EVP_R_CAMELLIA_KEY_SETUP_FAILED" + ], + "Cryptography_HAS_EC_CODES": [ + "EC_R_UNKNOWN_GROUP", + "EC_F_EC_GROUP_NEW_BY_CURVE_NAME" + ], + "Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [ + "RSA_R_PKCS_DECODING_ERROR" + ], + "Cryptography_HAS_GCM": [ + "EVP_CTRL_GCM_GET_TAG", + "EVP_CTRL_GCM_SET_TAG", + "EVP_CTRL_GCM_SET_IVLEN", + ], + "Cryptography_HAS_PBKDF2_HMAC": [ + "PKCS5_PBKDF2_HMAC" + ], + "Cryptography_HAS_PKEY_CTX": [ + "EVP_PKEY_CTX_new", + "EVP_PKEY_CTX_new_id", + "EVP_PKEY_CTX_dup", + "EVP_PKEY_CTX_free", + "EVP_PKEY_sign", + "EVP_PKEY_sign_init", + "EVP_PKEY_verify", + "EVP_PKEY_verify_init", + "Cryptography_EVP_PKEY_encrypt", + "EVP_PKEY_encrypt_init", + "Cryptography_EVP_PKEY_decrypt", + "EVP_PKEY_decrypt_init", + "EVP_PKEY_CTX_set_signature_md", + "EVP_PKEY_id", + "EVP_PKEY_CTX_set_rsa_padding", + "EVP_PKEY_CTX_set_rsa_pss_saltlen", 
+ ], + "Cryptography_HAS_ECDSA_SHA2_NIDS": [ + "NID_ecdsa_with_SHA224", + "NID_ecdsa_with_SHA256", + "NID_ecdsa_with_SHA384", + "NID_ecdsa_with_SHA512", + ], + "Cryptography_HAS_EGD": [ + "RAND_egd", + "RAND_egd_bytes", + "RAND_query_egd_bytes", + ], + "Cryptography_HAS_PSS_PADDING": [ + "RSA_PKCS1_PSS_PADDING", + ], + "Cryptography_HAS_MGF1_MD": [ + "EVP_PKEY_CTX_set_rsa_mgf1_md", + ], + "Cryptography_HAS_TLSv1_1": [ + "SSL_OP_NO_TLSv1_1", + "TLSv1_1_method", + "TLSv1_1_server_method", + "TLSv1_1_client_method", + ], + + "Cryptography_HAS_TLSv1_2": [ + "SSL_OP_NO_TLSv1_2", + "TLSv1_2_method", + "TLSv1_2_server_method", + "TLSv1_2_client_method", + ], + + "Cryptography_HAS_SSL2": [ + "SSLv2_method", + "SSLv2_client_method", + "SSLv2_server_method", + ], + + "Cryptography_HAS_SSL3_METHOD": [ + "SSLv3_method", + "SSLv3_client_method", + "SSLv3_server_method", + ], + + "Cryptography_HAS_TLSEXT_HOSTNAME": [ + "SSL_set_tlsext_host_name", + "SSL_get_servername", + "SSL_CTX_set_tlsext_servername_callback", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [ + "SSL_CTX_set_tlsext_status_cb", + "SSL_CTX_set_tlsext_status_arg" + ], + + "Cryptography_HAS_STATUS_REQ_OCSP_RESP": [ + "SSL_set_tlsext_status_ocsp_resp", + "SSL_get_tlsext_status_ocsp_resp", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [ + "SSL_set_tlsext_status_type", + ], + + "Cryptography_HAS_RELEASE_BUFFERS": [ + "SSL_MODE_RELEASE_BUFFERS", + ], + + "Cryptography_HAS_OP_NO_COMPRESSION": [ + "SSL_OP_NO_COMPRESSION", + ], + + "Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [ + "SSL_OP_MSIE_SSLV2_RSA_PADDING", + ], + + "Cryptography_HAS_SSL_OP_NO_TICKET": [ + "SSL_OP_NO_TICKET", + ], + + "Cryptography_HAS_SSL_SET_SSL_CTX": [ + "SSL_set_SSL_CTX", + "TLSEXT_NAMETYPE_host_name", + ], + + "Cryptography_HAS_NETBSD_D1_METH": [ + "DTLSv1_method", + ], + + "Cryptography_HAS_NEXTPROTONEG": [ + "SSL_CTX_set_next_protos_advertised_cb", + "SSL_CTX_set_next_proto_select_cb", + "SSL_select_next_proto", + "SSL_get0_next_proto_negotiated", + ], + + "Cryptography_HAS_SECURE_RENEGOTIATION": [ + "SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION", + "SSL_OP_LEGACY_SERVER_CONNECT", + "SSL_get_secure_renegotiation_support", + ], + + "Cryptography_HAS_ALPN": [ + "SSL_CTX_set_alpn_protos", + "SSL_set_alpn_protos", + "SSL_CTX_set_alpn_select_cb", + "SSL_get0_alpn_selected", + ], + + "Cryptography_HAS_COMPRESSION": [ + "SSL_get_current_compression", + "SSL_get_current_expansion", + "SSL_COMP_get_name", + ], + + "Cryptography_HAS_GET_SERVER_TMP_KEY": [ + "SSL_get_server_tmp_key", + ], + + "Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [ + "SSL_CTX_set_client_cert_engine", + ], + "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_SUITE_B_INVALID_VERSION', + 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', + 'X509_V_ERR_SUITE_B_INVALID_CURVE', + 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', + 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', + 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', + 'X509_V_ERR_HOSTNAME_MISMATCH', + 'X509_V_ERR_EMAIL_MISMATCH', + 'X509_V_ERR_IP_ADDRESS_MISMATCH' + ], + "Cryptography_HAS_102_VERIFICATION_PARAMS": [ + "X509_V_FLAG_SUITEB_128_LOS_ONLY", + "X509_V_FLAG_SUITEB_192_LOS", + "X509_V_FLAG_SUITEB_128_LOS", + "X509_VERIFY_PARAM_set1_host", + "X509_VERIFY_PARAM_set1_email", + "X509_VERIFY_PARAM_set1_ip", + "X509_VERIFY_PARAM_set1_ip_asc", + "X509_VERIFY_PARAM_set_hostflags", + ], + "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [ + "X509_V_FLAG_TRUSTED_FIRST", + ], + "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [ + 
"X509_V_FLAG_PARTIAL_CHAIN", + ], + "Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_DIFFERENT_CRL_SCOPE', + 'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE', + 'X509_V_ERR_UNNESTED_RESOURCE', + 'X509_V_ERR_PERMITTED_VIOLATION', + 'X509_V_ERR_EXCLUDED_VIOLATION', + 'X509_V_ERR_SUBTREE_MINMAX', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX', + 'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX', + 'X509_V_ERR_CRL_PATH_VALIDATION_ERROR', + ], + "Cryptography_HAS_100_VERIFICATION_PARAMS": [ + "Cryptography_HAS_100_VERIFICATION_PARAMS", + "X509_V_FLAG_EXTENDED_CRL_SUPPORT", + "X509_V_FLAG_USE_DELTAS", + ], + "Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [ + "X509_V_FLAG_CHECK_SS_SIGNATURE", + ], + "Cryptography_HAS_SET_CERT_CB": [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ], +} diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..07b6b9a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,182 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import collections +import os +import threading +import types + +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._openssl import ffi, lib +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + + +_OpenSSLError = collections.namedtuple("_OpenSSLError", + ["code", "lib", "func", "reason"]) + + +def _consume_errors(lib): + errors = [] + while True: + code = lib.ERR_get_error() + if code == 0: + break + + err_lib = lib.ERR_GET_LIB(code) + err_func = lib.ERR_GET_FUNC(code) + err_reason = lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, err_lib, err_func, err_reason)) + return errors + + +def _openssl_assert(lib, ok): + if not ok: + errors = _consume_errors(lib) + raise InternalError( + "Unknown OpenSSL error. Please file an issue at https://github.com" + "/pyca/cryptography/issues with information on how to reproduce " + "this. ({0!r})".format(errors), + errors + ) + + +@ffi.callback("int (*)(unsigned char *, int)", error=-1) +def _osrandom_rand_bytes(buf, size): + signed = ffi.cast("char *", buf) + result = os.urandom(size) + signed[0:size] = result + return 1 + + +@ffi.callback("int (*)(void)") +def _osrandom_rand_status(): + return 1 + + +def build_conditional_library(lib, conditional_names): + conditional_lib = types.ModuleType("lib") + excluded_names = set() + for condition, names in conditional_names.items(): + if not getattr(lib, condition): + excluded_names |= set(names) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding(object): + """ + OpenSSL API wrapper. 
+ """ + lib = None + ffi = ffi + _lib_loaded = False + _locks = None + _lock_cb_handle = None + _init_lock = threading.Lock() + _lock_init_lock = threading.Lock() + + _osrandom_engine_id = ffi.new("const char[]", b"osrandom") + _osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine") + _osrandom_method = ffi.new( + "RAND_METHOD *", + dict(bytes=_osrandom_rand_bytes, pseudorand=_osrandom_rand_bytes, + status=_osrandom_rand_status) + ) + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _register_osrandom_engine(cls): + _openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0) + + engine = cls.lib.ENGINE_new() + _openssl_assert(cls.lib, engine != cls.ffi.NULL) + try: + result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_add(engine) + if result != 1: + errors = _consume_errors(cls.lib) + _openssl_assert( + cls.lib, + errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID + ) + + finally: + result = cls.lib.ENGINE_free(engine) + _openssl_assert(cls.lib, result == 1) + + @classmethod + def _ensure_ffi_initialized(cls): + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES) + cls._lib_loaded = True + # initialize the SSL library + cls.lib.SSL_library_init() + # adds all ciphers/digests for EVP + cls.lib.OpenSSL_add_all_algorithms() + # loads error strings for libcrypto and libssl functions + cls.lib.SSL_load_error_strings() + cls._register_osrandom_engine() + + @classmethod + def init_static_locks(cls): + with cls._lock_init_lock: + cls._ensure_ffi_initialized() + + if not cls._lock_cb_handle: + cls._lock_cb_handle = cls.ffi.callback( + "void(int, int, const char *, int)", + cls._lock_cb + ) + + # Use Python's implementation if available, importing _ssl triggers + # the setup for this. + __import__("_ssl") + + if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL: + return + + # If nothing else has setup a locking callback already, we set up + # our own + num_locks = cls.lib.CRYPTO_num_locks() + cls._locks = [threading.Lock() for n in range(num_locks)] + + cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle) + + @classmethod + def _lock_cb(cls, mode, n, file, line): + lock = cls._locks[n] + + if mode & cls.lib.CRYPTO_LOCK: + lock.acquire() + elif mode & cls.lib.CRYPTO_UNLOCK: + lock.release() + else: + raise RuntimeError( + "Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format( + mode, n, file, line + ) + ) + + +# OpenSSL is not thread safe until the locks are initialized. We call this +# method in module scope so that it executes with the import lock. On +# Pythons < 3.4 this import lock is a global lock, which can prevent a race +# condition registering the OpenSSL locks. On Python 3.4+ the import lock +# is per module so this approach will not work. 
+Binding.init_static_locks() diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..494a7a1 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricSignatureContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the signature as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricVerificationContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def verify(self): + """ + Raises an exception if the bytes provided to update do not match the + signature or the signature does not match the public key. + """ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..12d53ee --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,166 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
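The update()/finalize() and update()/verify() contracts declared in asymmetric/__init__.py above are what the concrete backends hand back from signer() and verifier(). A short sketch using RSA with the 1.x API vendored here (key size, padding, and message are chosen only for illustration):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(65537, 2048, default_backend())

signer = private_key.signer(padding.PKCS1v15(), hashes.SHA256())
signer.update(b"message")      # AsymmetricSignatureContext.update()
signature = signer.finalize()  # AsymmetricSignatureContext.finalize()

verifier = private_key.public_key().verifier(
    signature, padding.PKCS1v15(), hashes.SHA256()
)
verifier.update(b"message")    # AsymmetricVerificationContext.update()
verifier.verify()              # raises InvalidSignature if it does not match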
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +class DHPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("x must be an integer.") + + if not isinstance(public_numbers, DHPublicNumbers): + raise TypeError("public_numbers must be an instance of " + "DHPublicNumbers.") + + self._x = x + self._public_numbers = public_numbers + + def __eq__(self, other): + if not isinstance(other, DHPrivateNumbers): + return NotImplemented + + return ( + self._x == other._x and + self._public_numbers == other._public_numbers + ) + + def __ne__(self, other): + return not self == other + + public_numbers = utils.read_only_property("_public_numbers") + x = utils.read_only_property("_x") + + +class DHPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("y must be an integer.") + + if not isinstance(parameter_numbers, DHParameterNumbers): + raise TypeError( + "parameters must be an instance of DHParameterNumbers.") + + self._y = y + self._parameter_numbers = parameter_numbers + + def __eq__(self, other): + if not isinstance(other, DHPublicNumbers): + return NotImplemented + + return ( + self._y == other._y and + self._parameter_numbers == other._parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + +class DHParameterNumbers(object): + def __init__(self, p, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError("p and g must be integers") + + self._p = p + self._g = g + + def __eq__(self, other): + if not isinstance(other, DHParameterNumbers): + return NotImplemented + + return ( + self._p == other._p and + self._g == other._g + ) + + def __ne__(self, other): + return not self == other + + p = utils.read_only_property("_p") + g = utils.read_only_property("_g") + + +@six.add_metaclass(abc.ABCMeta) +class DHParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DHPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHParametersWithSerialization(DHParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DHParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this private key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKeyWithSerialization(DHPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DHPrivateNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKeyWithSerialization(DHPublicKey): + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DHPublicNumbers. 
+ """ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..184177e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,229 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class DSAParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DSAPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAParametersWithNumbers(DSAParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DSAParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKeyWithSerialization(DSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +DSAPublicKeyWithSerialization = DSAPublicKey + + +def generate_parameters(key_size, backend): + return backend.generate_dsa_parameters(key_size) + + +def generate_private_key(key_size, backend): + return backend.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters): + if utils.bit_length(parameters.p) not in [1024, 2048, 3072]: + raise ValueError("p must be exactly 1024, 2048, or 3072 bits long") + if utils.bit_length(parameters.q) not in [160, 256]: + raise ValueError("q must be exactly 160 or 256 bits long") + + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers): + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") + + +class DSAParameterNumbers(object): + def __init__(self, p, q, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + g = utils.read_only_property("_g") + + def parameters(self, backend): + return backend.load_dsa_parameter_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAParameterNumbers): + return NotImplemented + + return self.p == other.p and self.q == other.q and self.g == other.g + + def __ne__(self, other): + return not self == other + + +class DSAPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + def public_key(self, backend): + return backend.load_dsa_public_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPublicNumbers): + return NotImplemented + + return ( + self.y == other.y and + self.parameter_numbers == other.parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + +class DSAPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." 
+ ) + self._public_numbers = public_numbers + self._x = x + + x = utils.read_only_property("_x") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_dsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPrivateNumbers): + return NotImplemented + + return ( + self.x == other.x and self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..eda7df0 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,346 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurve(object): + @abc.abstractproperty + def name(self): + """ + The name of the curve. e.g. secp256r1. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the base point of the curve. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveSignatureAlgorithm(object): + @abc.abstractproperty + def algorithm(self): + """ + The digest algorithm used with this signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKey(object): + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def exchange(self, algorithm, peer_public_key): + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self): + """ + The EllipticCurvePublicKey for this private key. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePublicKey(object): + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey + + +@utils.register_interface(EllipticCurve) +class SECT571R1(object): + name = "sect571r1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409R1(object): + name = "sect409r1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283R1(object): + name = "sect283r1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233R1(object): + name = "sect233r1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163R2(object): + name = "sect163r2" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECT571K1(object): + name = "sect571k1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409K1(object): + name = "sect409k1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283K1(object): + name = "sect283k1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233K1(object): + name = "sect233k1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163K1(object): + name = "sect163k1" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECP521R1(object): + name = "secp521r1" + key_size = 521 + + +@utils.register_interface(EllipticCurve) +class SECP384R1(object): + name = "secp384r1" + key_size = 384 + + +@utils.register_interface(EllipticCurve) +class SECP256R1(object): + name = "secp256r1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP256K1(object): + name = "secp256k1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP224R1(object): + name = "secp224r1" + key_size = 224 + + +@utils.register_interface(EllipticCurve) +class SECP192R1(object): + name = "secp192r1" + key_size = 192 + + +_CURVE_TYPES = { + "prime192v1": SECP192R1, + "prime256v1": SECP256R1, + + "secp192r1": SECP192R1, + "secp224r1": SECP224R1, + "secp256r1": SECP256R1, + "secp384r1": SECP384R1, + "secp521r1": SECP521R1, + "secp256k1": SECP256K1, + + "sect163k1": SECT163K1, + "sect233k1": SECT233K1, + "sect283k1": SECT283K1, + "sect409k1": SECT409K1, + "sect571k1": SECT571K1, + + "sect163r2": SECT163R2, + "sect233r1": SECT233R1, + "sect283r1": SECT283R1, + "sect409r1": SECT409R1, + "sect571r1": SECT571R1, +} + + +@utils.register_interface(EllipticCurveSignatureAlgorithm) +class ECDSA(object): + def __init__(self, algorithm): + self._algorithm = algorithm + + algorithm = utils.read_only_property("_algorithm") + + +def generate_private_key(curve, backend): + return backend.generate_elliptic_curve_private_key(curve) + + +class EllipticCurvePublicNumbers(object): + def __init__(self, x, y, curve): + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend): + return backend.load_elliptic_curve_public_numbers(self) + + def encode_point(self): + # key_size is in bits. 
Convert to bytes and round up + byte_length = (self.curve.key_size + 7) // 8 + return ( + b'\x04' + utils.int_to_bytes(self.x, byte_length) + + utils.int_to_bytes(self.y, byte_length) + ) + + @classmethod + def from_encoded_point(cls, curve, data): + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + if data.startswith(b'\x04'): + # key_size is in bits. Convert to bytes and round up + byte_length = (curve.key_size + 7) // 8 + if len(data) == 2 * byte_length + 1: + x = utils.int_from_bytes(data[1:byte_length + 1], 'big') + y = utils.int_from_bytes(data[byte_length + 1:], 'big') + return cls(x, y, curve) + else: + raise ValueError('Invalid elliptic curve point data length') + else: + raise ValueError('Unsupported elliptic curve point type') + + curve = utils.read_only_property("_curve") + x = utils.read_only_property("_x") + y = utils.read_only_property("_y") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePublicNumbers): + return NotImplemented + + return ( + self.x == other.x and + self.y == other.y and + self.curve.name == other.curve.name and + self.curve.key_size == other.curve.key_size + ) + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format(self) + ) + + +class EllipticCurvePrivateNumbers(object): + def __init__(self, private_value, public_numbers): + if not isinstance(private_value, six.integer_types): + raise TypeError("private_value must be an integer.") + + if not isinstance(public_numbers, EllipticCurvePublicNumbers): + raise TypeError( + "public_numbers must be an EllipticCurvePublicNumbers " + "instance." + ) + + self._private_value = private_value + self._public_numbers = public_numbers + + def private_key(self, backend): + return backend.load_elliptic_curve_private_numbers(self) + + private_value = utils.read_only_property("_private_value") + public_numbers = utils.read_only_property("_public_numbers") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePrivateNumbers): + return NotImplemented + + return ( + self.private_value == other.private_value and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + +class ECDH(object): + pass diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..c796d8e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,67 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import hashes + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricPadding(object): + @abc.abstractproperty + def name(self): + """ + A string naming this padding (e.g. "PSS", "PKCS1"). 
+ """ + + +@utils.register_interface(AsymmetricPadding) +class PKCS1v15(object): + name = "EMSA-PKCS1-v1_5" + + +@utils.register_interface(AsymmetricPadding) +class PSS(object): + MAX_LENGTH = object() + name = "EMSA-PSS" + + def __init__(self, mgf, salt_length): + self._mgf = mgf + + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an integer.") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + +@utils.register_interface(AsymmetricPadding) +class OAEP(object): + name = "EME-OAEP" + + def __init__(self, mgf, algorithm, label): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF1(object): + MAX_LENGTH = object() + + def __init__(self, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..41b0089 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,352 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +from fractions import gcd + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import RSABackend + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKey(object): + @abc.abstractmethod + def signer(self, padding, algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def decrypt(self, ciphertext, padding): + """ + Decrypts the provided ciphertext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The RSAPublicKey associated with this private key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKeyWithSerialization(RSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKey(object): + @abc.abstractmethod + def verifier(self, signature, padding, algorithm): + """ + Returns an AsymmetricVerificationContext used for verifying signatures. + """ + + @abc.abstractmethod + def encrypt(self, plaintext, padding): + """ + Encrypts the given plaintext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +RSAPublicKeyWithSerialization = RSAPublicKey + + +def generate_private_key(public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + _verify_rsa_parameters(public_exponent, key_size) + return backend.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e, n): + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + +def _modinv(e, m): + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, y1, x2, y2 = 1, 0, 0, 1 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn, yn = x1 - q * x2, y1 - q * y2 + a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + return x1 % m + + +def rsa_crt_iqmp(p, q): + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent, p): + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent, q): + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent and q. + """ + return private_exponent % (q - 1) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. 
+ # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + + return (p, q) + + +class RSAPrivateNumbers(object): + def __init__(self, p, q, d, dmp1, dmq1, iqmp, + public_numbers): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(d, six.integer_types) or + not isinstance(dmp1, six.integer_types) or + not isinstance(dmq1, six.integer_types) or + not isinstance(iqmp, six.integer_types) + ): + raise TypeError( + "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" + " all be an integers." + ) + + if not isinstance(public_numbers, RSAPublicNumbers): + raise TypeError( + "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" + " instance." + ) + + self._p = p + self._q = q + self._d = d + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._public_numbers = public_numbers + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + d = utils.read_only_property("_d") + dmp1 = utils.read_only_property("_dmp1") + dmq1 = utils.read_only_property("_dmq1") + iqmp = utils.read_only_property("_iqmp") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_rsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, RSAPrivateNumbers): + return NotImplemented + + return ( + self.p == other.p and + self.q == other.q and + self.d == other.d and + self.dmp1 == other.dmp1 and + self.dmq1 == other.dmq1 and + self.iqmp == other.iqmp and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.p, + self.q, + self.d, + self.dmp1, + self.dmq1, + self.iqmp, + self.public_numbers, + )) + + +class RSAPublicNumbers(object): + def __init__(self, e, n): + if ( + not isinstance(e, six.integer_types) or + not isinstance(n, six.integer_types) + ): + raise TypeError("RSAPublicNumbers arguments must be integers.") + + self._e = e + self._n = n + + e = utils.read_only_property("_e") + n = utils.read_only_property("_n") + + def public_key(self, backend): + return backend.load_rsa_public_numbers(self) + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, RSAPublicNumbers): + return NotImplemented + + return self.e == other.e and self.n == other.n + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.e, self.n)) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..bad9ab7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,73 @@ +# 
This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import warnings + +from pyasn1.codec.der import decoder, encoder +from pyasn1.error import PyAsn1Error +from pyasn1.type import namedtype, univ + +import six + +from cryptography import utils + + +class _DSSSigValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('r', univ.Integer()), + namedtype.NamedType('s', univ.Integer()) + ) + + +def decode_rfc6979_signature(signature): + warnings.warn( + "decode_rfc6979_signature is deprecated and will " + "be removed in a future version, use decode_dss_signature instead " + "instead.", + utils.DeprecatedIn10, + stacklevel=2 + ) + return decode_dss_signature(signature) + + +def decode_dss_signature(signature): + try: + data, remaining = decoder.decode(signature, asn1Spec=_DSSSigValue()) + except PyAsn1Error: + raise ValueError("Invalid signature data. Unable to decode ASN.1") + + if remaining: + raise ValueError( + "The signature contains bytes after the end of the ASN.1 sequence." + ) + + r = int(data.getComponentByName('r')) + s = int(data.getComponentByName('s')) + return (r, s) + + +def encode_rfc6979_signature(r, s): + warnings.warn( + "encode_rfc6979_signature is deprecated and will " + "be removed in a future version, use encode_dss_signature instead " + "instead.", + utils.DeprecatedIn10, + stacklevel=2 + ) + return encode_dss_signature(r, s) + + +def encode_dss_signature(r, s): + if ( + not isinstance(r, six.integer_types) or + not isinstance(s, six.integer_types) + ): + raise ValueError("Both r and s must be integers") + + sig = _DSSSigValue() + sig.setComponentByName('r', r) + sig.setComponentByName('s', s) + return encoder.encode(sig) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..b5dd0ed --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,20 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
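Note: a quick round-trip sketch of the encode_dss_signature / decode_dss_signature helpers added in asymmetric/utils.py above; the r and s values are illustrative only and do not come from a real signature.

    from cryptography.hazmat.primitives.asymmetric.utils import (
        decode_dss_signature, encode_dss_signature
    )

    r, s = 65537, 42  # arbitrary demo values for the two signature components
    der = encode_dss_signature(r, s)            # DER-encoded ASN.1 SEQUENCE of r and s
    assert decode_dss_signature(der) == (r, s)  # decoding recovers the original pair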
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher, + CipherAlgorithm, CipherContext +) + + +__all__ = [ + "Cipher", + "CipherAlgorithm", + "BlockCipherAlgorithm", + "CipherContext", + "AEADCipherContext", + "AEADEncryptionContext", +] diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py similarity index 64% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py index bd8437c..b71dddb 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -1,20 +1,13 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils -from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, CipherAlgorithm +) def _verify_key_size(algorithm, key): @@ -26,8 +19,8 @@ def _verify_key_size(algorithm, key): return key -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class AES(object): name = "AES" block_size = 128 @@ -41,8 +34,8 @@ class AES(object): return len(self.key) * 8 -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class Camellia(object): name = "camellia" block_size = 128 @@ -56,8 +49,8 @@ class Camellia(object): return len(self.key) * 8 -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class TripleDES(object): name = "3DES" block_size = 64 @@ -75,8 +68,8 @@ class TripleDES(object): return len(self.key) * 8 -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class Blowfish(object): name = "Blowfish" block_size = 64 @@ -90,8 +83,8 @@ class Blowfish(object): return len(self.key) * 8 -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) 
+@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class CAST5(object): name = "CAST5" block_size = 64 @@ -105,7 +98,7 @@ class CAST5(object): return len(self.key) * 8 -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class ARC4(object): name = "RC4" key_sizes = frozenset([40, 56, 64, 80, 128, 192, 256]) @@ -118,7 +111,7 @@ class ARC4(object): return len(self.key) * 8 -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class IDEA(object): name = "IDEA" block_size = 64 @@ -132,8 +125,8 @@ class IDEA(object): return len(self.key) * 8 -@utils.register_interface(interfaces.BlockCipherAlgorithm) -@utils.register_interface(interfaces.CipherAlgorithm) +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) class SEED(object): name = "SEED" block_size = 128 diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/base.py similarity index 53% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/base.py index e3fe5ad..dae9365 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/ciphers/base.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -1,25 +1,79 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function +import abc + +import six + from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import CipherBackend -from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives.ciphers import modes + + +@six.add_metaclass(abc.ABCMeta) +class CipherAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @abc.abstractproperty + def key_size(self): + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +@six.add_metaclass(abc.ABCMeta) +class BlockCipherAlgorithm(object): + @abc.abstractproperty + def block_size(self): + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +@six.add_metaclass(abc.ABCMeta) +class CipherContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the results of processing the final block as bytes. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADCipherContext(object): + @abc.abstractmethod + def authenticate_additional_data(self, data): + """ + Authenticates the provided bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADEncryptionContext(object): + @abc.abstractproperty + def tag(self): + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ class Cipher(object): @@ -30,10 +84,8 @@ class Cipher(object): _Reasons.BACKEND_MISSING_INTERFACE ) - if not isinstance(algorithm, interfaces.CipherAlgorithm): - raise TypeError( - "Expected interface of interfaces.CipherAlgorithm." - ) + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") if mode is not None: mode.validate_for_algorithm(algorithm) @@ -43,7 +95,7 @@ class Cipher(object): self._backend = backend def encryptor(self): - if isinstance(self.mode, interfaces.ModeWithAuthenticationTag): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): if self.mode.tag is not None: raise ValueError( "Authentication tag must be None when encrypting." @@ -54,7 +106,7 @@ class Cipher(object): return self._wrap_ctx(ctx, encrypt=True) def decryptor(self): - if isinstance(self.mode, interfaces.ModeWithAuthenticationTag): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): if self.mode.tag is None: raise ValueError( "Authentication tag must be provided when decrypting." @@ -65,7 +117,7 @@ class Cipher(object): return self._wrap_ctx(ctx, encrypt=False) def _wrap_ctx(self, ctx, encrypt): - if isinstance(self.mode, interfaces.ModeWithAuthenticationTag): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): if encrypt: return _AEADEncryptionContext(ctx) else: @@ -74,7 +126,7 @@ class Cipher(object): return _CipherContext(ctx) -@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(CipherContext) class _CipherContext(object): def __init__(self, ctx): self._ctx = ctx @@ -92,11 +144,13 @@ class _CipherContext(object): return data -@utils.register_interface(interfaces.AEADCipherContext) -@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(AEADCipherContext) +@utils.register_interface(CipherContext) class _AEADCipherContext(object): def __init__(self, ctx): self._ctx = ctx + self._bytes_processed = 0 + self._aad_bytes_processed = 0 self._tag = None self._updated = False @@ -104,6 +158,14 @@ class _AEADCipherContext(object): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") self._updated = True + self._bytes_processed += len(data) + if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: + raise ValueError( + "{0} has a maximum encrypted byte limit of {1}".format( + self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES + ) + ) + return self._ctx.update(data) def finalize(self): @@ -119,10 +181,19 @@ class _AEADCipherContext(object): raise AlreadyFinalized("Context was already finalized.") if self._updated: raise AlreadyUpdated("Update has been called on this context.") + + self._aad_bytes_processed += len(data) + if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: + raise ValueError( + "{0} has a maximum AAD byte limit of {0}".format( + self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES + ) + ) + self._ctx.authenticate_additional_data(data) -@utils.register_interface(interfaces.AEADEncryptionContext) +@utils.register_interface(AEADEncryptionContext) class _AEADEncryptionContext(_AEADCipherContext): @property def tag(self): diff --git 
a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..4284042 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,164 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class Mode(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm): + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithInitializationVector(object): + @abc.abstractproperty + def initialization_vector(self): + """ + The value of the initialization vector for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithNonce(object): + @abc.abstractproperty + def nonce(self): + """ + The value of the nonce for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithAuthenticationTag(object): + @abc.abstractproperty + def tag(self): + """ + The value of the tag supplied to the constructor of this mode. + """ + + +def _check_iv_length(self, algorithm): + if len(self.initialization_vector) * 8 != algorithm.block_size: + raise ValueError("Invalid IV size ({0}) for {1}.".format( + len(self.initialization_vector), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CBC(object): + name = "CBC" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +class ECB(object): + name = "ECB" + + def validate_for_algorithm(self, algorithm): + pass + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class OFB(object): + name = "OFB" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB(object): + name = "CFB" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB8(object): + name = "CFB8" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithNonce) +class CTR(object): + name = "CTR" + + def __init__(self, nonce): + self._nonce = nonce + + nonce = utils.read_only_property("_nonce") + + def 
validate_for_algorithm(self, algorithm): + if len(self.nonce) * 8 != algorithm.block_size: + raise ValueError("Invalid nonce size ({0}) for {1}.".format( + len(self.nonce), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +@utils.register_interface(ModeWithAuthenticationTag) +class GCM(object): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8 + _MAX_AAD_BYTES = (2 ** 64) // 8 + + def __init__(self, initialization_vector, tag=None, min_tag_length=16): + # len(initialization_vector) must in [1, 2 ** 64), but it's impossible + # to actually construct a bytes object that large, so we don't check + # for it + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if tag is not None and len(tag) < min_tag_length: + raise ValueError( + "Authentication tag must be {0} bytes or longer.".format( + min_tag_length) + ) + + self._initialization_vector = initialization_vector + self._tag = tag + + tag = utils.read_only_property("_tag") + initialization_vector = utils.read_only_property("_initialization_vector") + + def validate_for_algorithm(self, algorithm): + pass diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/cmac.py similarity index 60% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/cmac.py index fa463ae..c2038a3 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/cmac.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/cmac.py @@ -1,27 +1,18 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import ( - AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons + AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import CMACBackend -from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives import ciphers, interfaces -@utils.register_interface(interfaces.CMACContext) +@utils.register_interface(interfaces.MACContext) class CMAC(object): def __init__(self, algorithm, backend, ctx=None): if not isinstance(backend, CMACBackend): @@ -30,9 +21,9 @@ class CMAC(object): _Reasons.BACKEND_MISSING_INTERFACE ) - if not isinstance(algorithm, interfaces.BlockCipherAlgorithm): + if not isinstance(algorithm, ciphers.BlockCipherAlgorithm): raise TypeError( - "Expected instance of interfaces.BlockCipherAlgorithm." + "Expected instance of BlockCipherAlgorithm." 
) self._algorithm = algorithm @@ -59,9 +50,11 @@ class CMAC(object): def verify(self, signature): if not isinstance(signature, bytes): raise TypeError("signature must be bytes.") - digest = self.finalize() - if not constant_time.bytes_eq(digest, signature): - raise InvalidSignature("Signature did not match digest.") + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) def copy(self): if self._ctx is None: diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/constant_time.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..5a682ca --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import hmac + +from cryptography.hazmat.bindings._constant_time import lib + + +if hasattr(hmac, "compare_digest"): + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) + +else: + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return lib.Cryptography_constant_time_bytes_eq( + a, len(a), b, len(b) + ) == 1 diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hashes.py similarity index 52% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hashes.py index 04f7620..6bc8500 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hashes.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hashes.py @@ -1,27 +1,69 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function +import abc + +import six + from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HashBackend -from cryptography.hazmat.primitives import interfaces -@utils.register_interface(interfaces.HashContext) +@six.add_metaclass(abc.ABCMeta) +class HashAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this algorithm (e.g. "sha256", "md5"). 
+ """ + + @abc.abstractproperty + def digest_size(self): + """ + The size of the resulting digest in bytes. + """ + + @abc.abstractproperty + def block_size(self): + """ + The internal block size of the hash algorithm in bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashContext(object): + @abc.abstractproperty + def algorithm(self): + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a HashContext that is a copy of the current context. + """ + + +@utils.register_interface(HashContext) class Hash(object): def __init__(self, algorithm, backend, ctx=None): if not isinstance(backend, HashBackend): @@ -30,9 +72,9 @@ class Hash(object): _Reasons.BACKEND_MISSING_INTERFACE ) - if not isinstance(algorithm, interfaces.HashAlgorithm): - raise TypeError("Expected instance of interfaces.HashAlgorithm.") - self.algorithm = algorithm + if not isinstance(algorithm, HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm self._backend = backend @@ -41,6 +83,8 @@ class Hash(object): else: self._ctx = ctx + algorithm = utils.read_only_property("_algorithm") + def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") @@ -63,56 +107,56 @@ class Hash(object): return digest -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class SHA1(object): name = "sha1" digest_size = 20 block_size = 64 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class SHA224(object): name = "sha224" digest_size = 28 block_size = 64 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class SHA256(object): name = "sha256" digest_size = 32 block_size = 64 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class SHA384(object): name = "sha384" digest_size = 48 block_size = 128 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class SHA512(object): name = "sha512" digest_size = 64 block_size = 128 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class RIPEMD160(object): name = "ripemd160" digest_size = 20 block_size = 64 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class Whirlpool(object): name = "whirlpool" digest_size = 64 block_size = 64 -@utils.register_interface(interfaces.HashAlgorithm) +@utils.register_interface(HashAlgorithm) class MD5(object): name = "md5" digest_size = 16 diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hmac.py similarity index 53% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hmac.py index 026ad3b..15b9ee6 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/hmac.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/hmac.py @@ -1,27 +1,19 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this 
file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import ( - AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons + AlreadyFinalized, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend -from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives import hashes, interfaces -@utils.register_interface(interfaces.HashContext) +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) class HMAC(object): def __init__(self, key, algorithm, backend, ctx=None): if not isinstance(backend, HMACBackend): @@ -30,9 +22,9 @@ class HMAC(object): _Reasons.BACKEND_MISSING_INTERFACE ) - if not isinstance(algorithm, interfaces.HashAlgorithm): - raise TypeError("Expected instance of interfaces.HashAlgorithm.") - self.algorithm = algorithm + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm self._backend = backend self._key = key @@ -41,12 +33,14 @@ class HMAC(object): else: self._ctx = ctx - def update(self, msg): + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): if self._ctx is None: raise AlreadyFinalized("Context was already finalized.") - if not isinstance(msg, bytes): - raise TypeError("msg must be bytes.") - self._ctx.update(msg) + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) def copy(self): if self._ctx is None: @@ -68,6 +62,8 @@ class HMAC(object): def verify(self, signature): if not isinstance(signature, bytes): raise TypeError("signature must be bytes.") - digest = self.finalize() - if not constant_time.bytes_eq(digest, signature): - raise InvalidSignature("Signature did not match digest.") + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py new file mode 100644 index 0000000..4c95190 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class MACContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes. 
+ """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the message authentication code as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a MACContext that is a copy of the current context. + """ + + @abc.abstractmethod + def verify(self, signature): + """ + Checks if the generated message authentication code matches the + signature. + """ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..2d0724e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class KeyDerivationFunction(object): + @abc.abstractmethod + def derive(self, key_material): + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material, expected_key): + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..c6399e4 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +def _common_args_checks(algorithm, length, otherinfo): + max_length = algorithm.digest_size * (2 ** 32 - 1) + if length > max_length: + raise ValueError( + "Can not derive keys larger than {0} bits.".format( + max_length + )) + if not (otherinfo is None or isinstance(otherinfo, bytes)): + raise TypeError("otherinfo must be bytes.") + + +def _concatkdf_derive(key_material, length, auxfn, otherinfo): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while (length > outlen): + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHash(object): + def __init__(self, algorithm, length, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hash(self): + return hashes.Hash(self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hash, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHMAC(object): + def __init__(self, algorithm, length, salt, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not (salt is None or isinstance(salt, bytes)): + raise TypeError("salt must be bytes.") + if salt is None: + salt = b"\x00" * algorithm.block_size + self._salt = salt + + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hmac(self): + return hmac.HMAC(self._salt, self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hmac, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py 
b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py similarity index 78% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py index 04d02b2..f738bbd 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function @@ -20,10 +11,11 @@ from cryptography.exceptions import ( AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend -from cryptography.hazmat.primitives import constant_time, hmac, interfaces +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction -@utils.register_interface(interfaces.KeyDerivationFunction) +@utils.register_interface(KeyDerivationFunction) class HKDF(object): def __init__(self, algorithm, length, salt, info, backend): if not isinstance(backend, HMACBackend): @@ -34,7 +26,7 @@ class HKDF(object): self._algorithm = algorithm - if not isinstance(salt, bytes) and salt is not None: + if not (salt is None or isinstance(salt, bytes)): raise TypeError("salt must be bytes.") if salt is None: @@ -62,7 +54,7 @@ class HKDF(object): raise InvalidKey -@utils.register_interface(interfaces.KeyDerivationFunction) +@utils.register_interface(KeyDerivationFunction) class HKDFExpand(object): def __init__(self, algorithm, length, info, backend): if not isinstance(backend, HMACBackend): @@ -85,7 +77,7 @@ class HKDFExpand(object): self._length = length - if not isinstance(info, bytes) and info is not None: + if not (info is None or isinstance(info, bytes)): raise TypeError("info must be bytes.") if info is None: diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py similarity index 73% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py index 97b6408..f8ce7a3 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function @@ -18,10 +9,11 @@ from cryptography.exceptions import ( AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend -from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction -@utils.register_interface(interfaces.KeyDerivationFunction) +@utils.register_interface(KeyDerivationFunction) class PBKDF2HMAC(object): def __init__(self, algorithm, length, salt, iterations, backend): if not isinstance(backend, PBKDF2HMACBackend): diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..83789b3 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,70 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
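Note: a short usage sketch of the KeyDerivationFunction contract as implemented by PBKDF2HMAC above; derive() is one-shot, so verification uses a fresh instance. default_backend is assumed from cryptography.hazmat.backends.

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    salt = b"\x00" * 16  # demo salt only; use os.urandom(16) in practice
    kdf = PBKDF2HMAC(hashes.SHA256(), length=32, salt=salt,
                     iterations=100000, backend=default_backend())
    key = kdf.derive(b"my passphrase")

    # verify() re-derives from the key material and raises InvalidKey on mismatch
    PBKDF2HMAC(hashes.SHA256(), length=32, salt=salt,
               iterations=100000, backend=default_backend()).verify(b"my passphrase", key)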
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +@utils.register_interface(KeyDerivationFunction) +class X963KDF(object): + def __init__(self, algorithm, length, sharedinfo, backend): + + max_len = algorithm.digest_size * (2 ** 32 - 1) + if length > max_len: + raise ValueError( + "Can not derive keys larger than {0} bits.".format(max_len)) + if not (sharedinfo is None or isinstance(sharedinfo, bytes)): + raise TypeError("sharedinfo must be bytes.") + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm, self._backend) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:self._length] + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/keywrap.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..6e79ab6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,85 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
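Note: a minimal sketch of the new X963KDF class defined above; the sharedinfo value is illustrative. default_backend is assumed from cryptography.hazmat.backends.

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

    xkdf = X963KDF(algorithm=hashes.SHA256(), length=32,
                   sharedinfo=b"ANSI X9.63 demo info", backend=default_backend())
    key = xkdf.derive(b"input keying material")  # 32 derived bytes; derive() is one-shot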
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def aes_key_wrap(wrapping_key, key_to_wrap, backend): + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor() + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)] + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + # pack/unpack are safe as these are always 64-bit chunks + a = struct.pack( + ">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1) + ) + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_unwrap(wrapping_key, wrapped_key, backend): + if len(wrapped_key) < 24: + raise ValueError("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise ValueError("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor() + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + + r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + # pack/unpack are safe as these are always 64-bit chunks + atr = struct.pack( + ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1) + ) + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/padding.py similarity index 54% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/padding.py index 74f1ef2..f6491eb 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/padding.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/padding.py @@ -1,79 +1,31 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. from __future__ import absolute_import, division, print_function -import sys - -import cffi +import abc import six from cryptography import utils from cryptography.exceptions import AlreadyFinalized -from cryptography.hazmat.bindings.utils import _create_modulename -from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.bindings._padding import lib -TYPES = """ -uint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t); -""" +@six.add_metaclass(abc.ABCMeta) +class PaddingContext(object): + @abc.abstractmethod + def update(self, data): + """ + Pads the provided bytes and returns any available data as bytes. + """ -FUNCTIONS = """ -/* Returns the value of the input with the most-significant-bit copied to all - of the bits. */ -static uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) { - return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1; -} - -/* This returns 0xFF if a < b else 0x00, but does so in a constant time - fashion */ -static uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) { - a -= b; - return Cryptography_DUPLICATE_MSB_TO_ALL(a); -} - -uint8_t Cryptography_check_pkcs7_padding(const uint8_t *data, - uint8_t block_len) { - uint8_t i; - uint8_t pad_size = data[block_len - 1]; - uint8_t mismatch = 0; - for (i = 0; i < block_len; i++) { - unsigned int mask = Cryptography_constant_time_lt(i, pad_size); - uint8_t b = data[block_len - 1 - i]; - mismatch |= (mask & (pad_size ^ b)); - } - - /* Check to make sure the pad_size was within the valid range. */ - mismatch |= ~Cryptography_constant_time_lt(0, pad_size); - mismatch |= Cryptography_constant_time_lt(block_len, pad_size); - - /* Make sure any bits set are copied to the lowest bit */ - mismatch |= mismatch >> 4; - mismatch |= mismatch >> 2; - mismatch |= mismatch >> 1; - /* Now check the low bit to see if it's set */ - return (mismatch & 1) == 0; -} -""" - -_ffi = cffi.FFI() -_ffi.cdef(TYPES) -_lib = _ffi.verify( - source=FUNCTIONS, - modulename=_create_modulename([TYPES], FUNCTIONS, sys.version), - ext_package="cryptography", -) + @abc.abstractmethod + def finalize(self): + """ + Finalize the padding, returns bytes. 
+ """ class PKCS7(object): @@ -93,7 +45,7 @@ class PKCS7(object): return _PKCS7UnpaddingContext(self.block_size) -@utils.register_interface(interfaces.PaddingContext) +@utils.register_interface(PaddingContext) class _PKCS7PaddingContext(object): def __init__(self, block_size): self.block_size = block_size @@ -126,7 +78,7 @@ class _PKCS7PaddingContext(object): return result -@utils.register_interface(interfaces.PaddingContext) +@utils.register_interface(PaddingContext) class _PKCS7UnpaddingContext(object): def __init__(self, block_size): self.block_size = block_size @@ -159,7 +111,7 @@ class _PKCS7UnpaddingContext(object): if len(self._buffer) != self.block_size // 8: raise ValueError("Invalid padding bytes.") - valid = _lib.Cryptography_check_pkcs7_padding( + valid = lib.Cryptography_check_pkcs7_padding( self._buffer, self.block_size // 8 ) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/serialization.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/serialization.py new file mode 100644 index 0000000..fc50456 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/serialization.py @@ -0,0 +1,188 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import base64 +import struct +from enum import Enum + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa + + +def load_pem_private_key(data, password, backend): + return backend.load_pem_private_key(data, password) + + +def load_pem_public_key(data, backend): + return backend.load_pem_public_key(data) + + +def load_der_private_key(data, password, backend): + return backend.load_der_private_key(data, password) + + +def load_der_public_key(data, backend): + return backend.load_der_public_key(data) + + +def load_ssh_public_key(data, backend): + key_parts = data.split(b' ', 2) + + if len(key_parts) < 2: + raise ValueError( + 'Key is not in the proper format or contains extra data.') + + key_type = key_parts[0] + + if key_type == b'ssh-rsa': + loader = _load_ssh_rsa_public_key + elif key_type == b'ssh-dss': + loader = _load_ssh_dss_public_key + elif key_type in [ + b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521', + ]: + loader = _load_ssh_ecdsa_public_key + else: + raise UnsupportedAlgorithm('Key type is not supported.') + + key_body = key_parts[1] + + try: + decoded_data = base64.b64decode(key_body) + except TypeError: + raise ValueError('Key is not in the proper format.') + + inner_key_type, rest = _read_next_string(decoded_data) + + if inner_key_type != key_type: + raise ValueError( + 'Key header and key body contain different key type values.' 
+ ) + + return loader(key_type, rest, backend) + + +def _load_ssh_rsa_public_key(key_type, decoded_data, backend): + e, rest = _read_next_mpint(decoded_data) + n, rest = _read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return rsa.RSAPublicNumbers(e, n).public_key(backend) + + +def _load_ssh_dss_public_key(key_type, decoded_data, backend): + p, rest = _read_next_mpint(decoded_data) + q, rest = _read_next_mpint(rest) + g, rest = _read_next_mpint(rest) + y, rest = _read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + + return public_numbers.public_key(backend) + + +def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend): + curve_name, rest = _read_next_string(decoded_data) + data, rest = _read_next_string(rest) + + if expected_key_type != b"ecdsa-sha2-" + curve_name: + raise ValueError( + 'Key header and key body contain different key type values.' + ) + + if rest: + raise ValueError('Key body contains extra bytes.') + + curve = { + b"nistp256": ec.SECP256R1, + b"nistp384": ec.SECP384R1, + b"nistp521": ec.SECP521R1, + }[curve_name]() + + if six.indexbytes(data, 0) != 4: + raise NotImplementedError( + "Compressed elliptic curve points are not supported" + ) + + # key_size is in bits, and sometimes it's not evenly divisible by 8, so we + # add 7 to round up the number of bytes. + if len(data) != 1 + 2 * ((curve.key_size + 7) // 8): + raise ValueError("Malformed key bytes") + + x = utils.int_from_bytes( + data[1:1 + (curve.key_size + 7) // 8], byteorder='big' + ) + y = utils.int_from_bytes( + data[1 + (curve.key_size + 7) // 8:], byteorder='big' + ) + return ec.EllipticCurvePublicNumbers(x, y, curve).public_key(backend) + + +def _read_next_string(data): + """ + Retrieves the next RFC 4251 string value from the data. + + While the RFC calls these strings, in Python they are bytes objects. + """ + str_len, = struct.unpack('>I', data[:4]) + return data[4:4 + str_len], data[4 + str_len:] + + +def _read_next_mpint(data): + """ + Reads the next mpint from the data. + + Currently, all mpints are interpreted as unsigned. 
+ """ + mpint_data, rest = _read_next_string(data) + + return ( + utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest + ) + + +class Encoding(Enum): + PEM = "PEM" + DER = "DER" + + +class PrivateFormat(Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + + +class PublicFormat(Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + + +@six.add_metaclass(abc.ABCMeta) +class KeySerializationEncryption(object): + pass + + +@utils.register_interface(KeySerializationEncryption) +class BestAvailableEncryption(object): + def __init__(self, password): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +@utils.register_interface(KeySerializationEncryption) +class NoEncryption(object): + pass diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..e71f9e6 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +class InvalidToken(Exception): + pass diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py similarity index 76% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py index d0b476a..12bc766 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -1,15 +1,6 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function @@ -18,11 +9,13 @@ import struct import six from cryptography.exceptions import ( - InvalidToken, UnsupportedAlgorithm, _Reasons + UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives import constant_time, hmac from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri class HOTP(object): @@ -67,3 +60,8 @@ class HOTP(object): offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 p = hmac_value[offset:offset + 4] return struct.unpack(">I", p)[0] & 0x7fffffff + + def get_provisioning_uri(self, account_name, counter, issuer): + return _generate_uri(self, "hotp", account_name, issuer, [ + ("counter", int(counter)), + ]) diff --git a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/totp.py similarity index 62% rename from Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py rename to Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/totp.py index 854c516..6070590 100644 --- a/Darwin/lib/python3.4/site-packages/cryptography/hazmat/primitives/twofactor/totp.py +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -1,24 +1,17 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. -# See the License for the specific language governing permissions and -# limitations under the License. +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
from __future__ import absolute_import, division, print_function from cryptography.exceptions import ( - InvalidToken, UnsupportedAlgorithm, _Reasons + UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import HMACBackend from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken from cryptography.hazmat.primitives.twofactor.hotp import HOTP +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri class TOTP(object): @@ -39,3 +32,8 @@ class TOTP(object): def verify(self, totp, time): if not constant_time.bytes_eq(self.generate(time), totp): raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri(self, account_name, issuer): + return _generate_uri(self._hotp, "totp", account_name, issuer, [ + ("period", int(self._time_step)), + ]) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/utils.py b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/utils.py new file mode 100644 index 0000000..0ed8c4c --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/hazmat/primitives/twofactor/utils.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 + +from six.moves.urllib.parse import quote, urlencode + + +def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters): + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + uriparts = { + "type": type_name, + "label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer + else quote(account_name)), + "parameters": urlencode(parameters), + } + return "otpauth://{type}/{label}?{parameters}".format(**uriparts) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/utils.py b/Darwin/lib/python3.5/site-packages/cryptography/utils.py new file mode 100644 index 0000000..dbd961f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/utils.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
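# Illustrative sketch, not taken from the patched files: the provisioning-URI
# methods added to HOTP/TOTP above delegate to twofactor/utils._generate_uri.
# The constructor signatures and the default_backend import are assumptions
# (they are not shown in this hunk); the key is a throwaway example secret.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.hotp import HOTP
from cryptography.hazmat.primitives.twofactor.totp import TOTP

key = os.urandom(20)
totp = TOTP(key, 6, SHA1(), 30, default_backend())
print(totp.get_provisioning_uri(u"alice@example.com", u"Example Corp"))
# otpauth://totp/... with digits, secret, algorithm, issuer and period params

hotp = HOTP(key, 6, SHA1(), default_backend())
print(hotp.get_provisioning_uri(u"alice@example.com", 0, u"Example Corp"))
# otpauth://hotp/... with a counter parameter instead of period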
+ +from __future__ import absolute_import, division, print_function + +import abc +import binascii +import inspect +import struct +import sys +import warnings + + +DeprecatedIn09 = DeprecationWarning +DeprecatedIn10 = PendingDeprecationWarning + + +def read_only_property(name): + return property(lambda self: getattr(self, name)) + + +def register_interface(iface): + def register_decorator(klass): + verify_interface(iface, klass) + iface.register(klass) + return klass + return register_decorator + + +if hasattr(int, "from_bytes"): + int_from_bytes = int.from_bytes +else: + def int_from_bytes(data, byteorder, signed=False): + assert byteorder == 'big' + assert not signed + + if len(data) % 4 != 0: + data = (b'\x00' * (4 - (len(data) % 4))) + data + + result = 0 + + while len(data) > 0: + digit, = struct.unpack('>I', data[:4]) + result = (result << 32) + digit + data = data[4:] + + return result + + +def int_to_bytes(integer, length=None): + hex_string = '%x' % integer + if length is None: + n = len(hex_string) + else: + n = length * 2 + return binascii.unhexlify(hex_string.zfill(n + (n & 1))) + + +class InterfaceNotImplemented(Exception): + pass + + +if hasattr(inspect, "signature"): + signature = inspect.signature +else: + signature = inspect.getargspec + + +def verify_interface(iface, klass): + for method in iface.__abstractmethods__: + if not hasattr(klass, method): + raise InterfaceNotImplemented( + "{0} is missing a {1!r} method".format(klass, method) + ) + if isinstance(getattr(iface, method), abc.abstractproperty): + # Can't properly verify these yet. + continue + sig = signature(getattr(iface, method)) + actual = signature(getattr(klass, method)) + if sig != actual: + raise InterfaceNotImplemented( + "{0}.{1}'s signature differs from the expected. Expected: " + "{2!r}. Received: {3!r}".format( + klass, method, sig, actual + ) + ) + + +if sys.version_info >= (2, 7): + def bit_length(x): + return x.bit_length() +else: + def bit_length(x): + return len(bin(x)) - (2 + (x <= 0)) + + +class _DeprecatedValue(object): + def __init__(self, value, message, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(object): + def __init__(self, module): + self.__dict__["_module"] = module + + def __getattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr, value): + setattr(self._module, attr, value) + + def __dir__(self): + return ["_module"] + dir(self._module) + + +def deprecated(value, module_name, message, warning_class): + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + return _DeprecatedValue(value, message, warning_class) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/__init__.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..70e1d3d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,163 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
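# Illustrative sketch, not taken from the patched files: round-tripping
# integers through the helpers defined in cryptography/utils.py above.
from cryptography import utils

assert utils.int_to_bytes(65537) == b'\x01\x00\x01'
assert utils.int_from_bytes(b'\x01\x00\x01', byteorder='big') == 65537
# An explicit length left-pads with zero bytes:
assert utils.int_to_bytes(65537, length=4) == b'\x00\x01\x00\x01'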
+ +from __future__ import absolute_import, division, print_function + +from cryptography.x509.base import ( + Certificate, CertificateBuilder, CertificateRevocationList, + CertificateSigningRequest, CertificateSigningRequestBuilder, + InvalidVersion, RevokedCertificate, + Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr, + load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr, +) +from cryptography.x509.extensions import ( + AccessDescription, AuthorityInformationAccess, + AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints, + CertificatePolicies, DistributionPoint, DuplicateExtension, + ExtendedKeyUsage, Extension, ExtensionNotFound, ExtensionType, Extensions, + GeneralNames, InhibitAnyPolicy, IssuerAlternativeName, KeyUsage, + NameConstraints, NoticeReference, OCSPNoCheck, PolicyInformation, + ReasonFlags, SubjectAlternativeName, SubjectKeyIdentifier, + UnsupportedExtension, UserNotice +) +from cryptography.x509.general_name import ( + DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name, + RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType, + _GENERAL_NAMES +) +from cryptography.x509.name import Name, NameAttribute +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, CRLExtensionOID, CertificatePoliciesOID, + ExtendedKeyUsageOID, ExtensionOID, NameOID, ObjectIdentifier, + SignatureAlgorithmOID, _SIG_OIDS_TO_HASH +) + + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512 + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = 
NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + + +__all__ = [ + "load_pem_x509_certificate", + "load_der_x509_certificate", + "load_pem_x509_csr", + "load_der_x509_csr", + "load_pem_x509_crl", + "load_der_x509_crl", + "InvalidVersion", + "DuplicateExtension", + "UnsupportedExtension", + "ExtensionNotFound", + "UnsupportedGeneralNameType", + "NameAttribute", + "Name", + "ObjectIdentifier", + "ExtensionType", + "Extensions", + "Extension", + "ExtendedKeyUsage", + "OCSPNoCheck", + "BasicConstraints", + "KeyUsage", + "AuthorityInformationAccess", + "AccessDescription", + "CertificatePolicies", + "PolicyInformation", + "UserNotice", + "NoticeReference", + "SubjectKeyIdentifier", + "NameConstraints", + "CRLDistributionPoints", + "DistributionPoint", + "ReasonFlags", + "InhibitAnyPolicy", + "SubjectAlternativeName", + "IssuerAlternativeName", + "AuthorityKeyIdentifier", + "GeneralNames", + "GeneralName", + "RFC822Name", + "DNSName", + "UniformResourceIdentifier", + "RegisteredID", + "DirectoryName", + "IPAddress", + "OtherName", + "Certificate", + "CertificateRevocationList", + "CertificateSigningRequest", + "RevokedCertificate", + "CertificateSigningRequestBuilder", + "CertificateBuilder", + "Version", + "_SIG_OIDS_TO_HASH", + "OID_CA_ISSUERS", + "OID_OCSP", + "_GENERAL_NAMES", +] diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/base.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..01eadfc --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/base.py @@ -0,0 +1,467 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import abc +import datetime +from enum import Enum + +import six + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.x509.extensions import Extension, ExtensionType +from cryptography.x509.name import Name + + +_UNIX_EPOCH = datetime.datetime(1970, 1, 1) + + +class Version(Enum): + v1 = 0 + v3 = 2 + + +def load_pem_x509_certificate(data, backend): + return backend.load_pem_x509_certificate(data) + + +def load_der_x509_certificate(data, backend): + return backend.load_der_x509_certificate(data) + + +def load_pem_x509_csr(data, backend): + return backend.load_pem_x509_csr(data) + + +def load_der_x509_csr(data, backend): + return backend.load_der_x509_csr(data) + + +def load_pem_x509_crl(data, backend): + return backend.load_pem_x509_crl(data) + + +def load_der_x509_crl(data, backend): + return backend.load_der_x509_crl(data) + + +class InvalidVersion(Exception): + def __init__(self, msg, parsed_version): + super(InvalidVersion, self).__init__(msg) + self.parsed_version = parsed_version + + +@six.add_metaclass(abc.ABCMeta) +class Certificate(object): + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def serial(self): + """ + Returns certificate serial number + """ + + @abc.abstractproperty + def version(self): + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def not_valid_before(self): + """ + Not before time (represented as UTC datetime) + """ + + @abc.abstractproperty + def not_valid_after(self): + """ + Not after time (represented as UTC datetime) + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the issuer name object. + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the certificate to PEM or DER format. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateRevocationList(object): + + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the X509Name with the issuer of this CRL. + """ + + @abc.abstractproperty + def next_update(self): + """ + Returns the date of next update for this CRL. + """ + + @abc.abstractproperty + def last_update(self): + """ + Returns the date of last update for this CRL. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. 
+ """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateSigningRequest(object): + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns the extensions in the signing request. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Encodes the request to PEM or DER format. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RevokedCertificate(object): + @abc.abstractproperty + def serial_number(self): + """ + Returns the serial number of the revoked certificate. + """ + + @abc.abstractproperty + def revocation_date(self): + """ + Returns the date of when this certificate was revoked. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +class CertificateSigningRequestBuilder(object): + def __init__(self, subject_name=None, extensions=[]): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + + def subject_name(self, name): + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateSigningRequestBuilder(name, self._extensions) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return CertificateSigningRequestBuilder( + self._subject_name, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the request using the requestor's private key. + """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + return backend.create_x509_csr(self, private_key, algorithm) + + +class CertificateBuilder(object): + def __init__(self, issuer_name=None, subject_name=None, + public_key=None, serial_number=None, not_valid_before=None, + not_valid_after=None, extensions=[]): + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name): + """ + Sets the CA's distinguished name. 
+ """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateBuilder( + name, self._subject_name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def subject_name(self, name): + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateBuilder( + self._issuer_name, name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def public_key(self, key): + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey)): + raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' + ' or EllipticCurvePublicKey.') + if self._public_key is not None: + raise ValueError('The public key may only be set once.') + return CertificateBuilder( + self._issuer_name, self._subject_name, key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def serial_number(self, number): + """ + Sets the certificate serial number. + """ + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number < 0: + raise ValueError('The serial number should be non-negative.') + if utils.bit_length(number) > 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 160 ' + 'bits.') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def not_valid_before(self, time): + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_before is not None: + raise ValueError('The not valid before may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid before date must be after the unix' + ' epoch (1970 January 1).') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, time, + self._not_valid_after, self._extensions + ) + + def not_valid_after(self, time): + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_after is not None: + raise ValueError('The not valid after may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid after date must be after the unix' + ' epoch (1970 January 1).') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + time, self._extensions + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate. 
+ """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the certificate using the CA's private key. + """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + return backend.create_x509_certificate(self, private_key, algorithm) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/extensions.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..46ba5a2 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,924 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import hashlib +import ipaddress +from enum import Enum + +from pyasn1.codec.der import decoder +from pyasn1.type import namedtype, univ + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.x509.general_name import GeneralName, IPAddress, OtherName +from cryptography.x509.name import Name +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, ExtensionOID, ObjectIdentifier +) + + +class _SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.Sequence()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + + +def _key_identifier_from_public_key(public_key): + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo + ) + spki, remaining = decoder.decode( + serialized, asn1Spec=_SubjectPublicKeyInfo() + ) + assert not remaining + # the univ.BitString object is a tuple of bits. We need bytes and + # pyasn1 really doesn't want to give them to us. To get it we'll + # build an integer and convert that to bytes. 
+ bits = 0 + for bit in spki.getComponentByName("subjectPublicKey"): + bits = bits << 1 | bit + + data = utils.int_to_bytes(bits) + return hashlib.sha1(data).digest() + + +class DuplicateExtension(Exception): + def __init__(self, msg, oid): + super(DuplicateExtension, self).__init__(msg) + self.oid = oid + + +class UnsupportedExtension(Exception): + def __init__(self, msg, oid): + super(UnsupportedExtension, self).__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg, oid): + super(ExtensionNotFound, self).__init__(msg) + self.oid = oid + + +@six.add_metaclass(abc.ABCMeta) +class ExtensionType(object): + @abc.abstractproperty + def oid(self): + """ + Returns the oid associated with the given extension type. + """ + + +class Extensions(object): + def __init__(self, extensions): + self._extensions = extensions + + def get_extension_for_oid(self, oid): + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound("No {0} extension was found".format(oid), oid) + + def get_extension_for_class(self, extclass): + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + "No {0} extension was found".format(extclass), extclass.oid + ) + + def __iter__(self): + return iter(self._extensions) + + def __len__(self): + return len(self._extensions) + + def __repr__(self): + return ( + "".format(self._extensions) + ) + + +@utils.register_interface(ExtensionType) +class AuthorityKeyIdentifier(object): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__(self, key_identifier, authority_cert_issuer, + authority_cert_serial_number): + if authority_cert_issuer or authority_cert_serial_number: + if not authority_cert_issuer or not authority_cert_serial_number: + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if not isinstance(authority_cert_serial_number, six.integer_types): + raise TypeError( + "authority_cert_serial_number must be an integer" + ) + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + @classmethod + def from_issuer_public_key(cls, public_key): + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier and + self.authority_cert_issuer == other.authority_cert_issuer and + self.authority_cert_serial_number == + other.authority_cert_serial_number + ) + + def __ne__(self, other): + return not self == other + + key_identifier = utils.read_only_property("_key_identifier") + authority_cert_issuer = utils.read_only_property("_authority_cert_issuer") + authority_cert_serial_number = utils.read_only_property( + "_authority_cert_serial_number" + ) + + +@utils.register_interface(ExtensionType) +class SubjectKeyIdentifier(object): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest): + self._digest = digest + + @classmethod + def from_public_key(cls, public_key): + return 
cls(_key_identifier_from_public_key(public_key)) + + digest = utils.read_only_property("_digest") + + def __repr__(self): + return "".format(self.digest) + + def __eq__(self, other): + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class AuthorityInformationAccess(object): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions): + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + def __iter__(self): + return iter(self._descriptions) + + def __len__(self): + return len(self._descriptions) + + def __repr__(self): + return "".format(self._descriptions) + + def __eq__(self, other): + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __ne__(self, other): + return not self == other + + +class AccessDescription(object): + def __init__(self, access_method, access_location): + if not (access_method == AuthorityInformationAccessOID.OCSP or + access_method == AuthorityInformationAccessOID.CA_ISSUERS): + raise ValueError( + "access_method must be OID_OCSP or OID_CA_ISSUERS" + ) + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method and + self.access_location == other.access_location + ) + + def __ne__(self, other): + return not self == other + + access_method = utils.read_only_property("_access_method") + access_location = utils.read_only_property("_access_location") + + +@utils.register_interface(ExtensionType) +class BasicConstraints(object): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca, path_length): + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if ( + path_length is not None and + (not isinstance(path_length, six.integer_types) or path_length < 0) + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + ca = utils.read_only_property("_ca") + path_length = utils.read_only_property("_path_length") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class CRLDistributionPoints(object): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__(self, distribution_points): + if not all( + isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + def __iter__(self): + return iter(self._distribution_points) + + def __len__(self): 
+ return len(self._distribution_points) + + def __repr__(self): + return "".format(self._distribution_points) + + def __eq__(self, other): + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __ne__(self, other): + return not self == other + + +class DistributionPoint(object): + def __init__(self, full_name, relative_name, reasons, crl_issuer): + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + + if full_name and not all( + isinstance(x, GeneralName) for x in full_name + ): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name and not isinstance(relative_name, Name): + raise TypeError("relative_name must be a Name") + + if crl_issuer and not all( + isinstance(x, GeneralName) for x in crl_issuer + ): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and (not isinstance(reasons, frozenset) or not all( + isinstance(x, ReasonFlags) for x in reasons + )): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons or + ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + if reasons and not crl_issuer and not (full_name or relative_name): + raise ValueError( + "You must supply crl_issuer, full_name, or relative_name when " + "reasons is not None" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.reasons == other.reasons and + self.crl_issuer == other.crl_issuer + ) + + def __ne__(self, other): + return not self == other + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + reasons = utils.read_only_property("_reasons") + crl_issuer = utils.read_only_property("_crl_issuer") + + +class ReasonFlags(Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +@utils.register_interface(ExtensionType) +class CertificatePolicies(object): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies): + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + def __iter__(self): + return iter(self._policies) + + def __len__(self): + return len(self._policies) + + def __repr__(self): + return "".format(self._policies) + + def __eq__(self, other): + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __ne__(self, other): + return not self == other + + +class PolicyInformation(object): + def __init__(self, policy_identifier, 
policy_qualifiers): + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + if policy_qualifiers and not all( + isinstance( + x, (six.text_type, UserNotice) + ) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or UserNotice" + " objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier and + self.policy_qualifiers == other.policy_qualifiers + ) + + def __ne__(self, other): + return not self == other + + policy_identifier = utils.read_only_property("_policy_identifier") + policy_qualifiers = utils.read_only_property("_policy_qualifiers") + + +class UserNotice(object): + def __init__(self, notice_reference, explicit_text): + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference and + self.explicit_text == other.explicit_text + ) + + def __ne__(self, other): + return not self == other + + notice_reference = utils.read_only_property("_notice_reference") + explicit_text = utils.read_only_property("_explicit_text") + + +class NoticeReference(object): + def __init__(self, organization, notice_numbers): + self._organization = organization + if not isinstance(notice_numbers, list) or not all( + isinstance(x, int) for x in notice_numbers + ): + raise TypeError( + "notice_numbers must be a list of integers" + ) + + self._notice_numbers = notice_numbers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization and + self.notice_numbers == other.notice_numbers + ) + + def __ne__(self, other): + return not self == other + + organization = utils.read_only_property("_organization") + notice_numbers = utils.read_only_property("_notice_numbers") + + +@utils.register_interface(ExtensionType) +class ExtendedKeyUsage(object): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages): + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + def __iter__(self): + return iter(self._usages) + + def __len__(self): + return len(self._usages) + + def __repr__(self): + return "".format(self._usages) + + def __eq__(self, other): + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return self._usages == other._usages + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class OCSPNoCheck(object): + oid = ExtensionOID.OCSP_NO_CHECK + + +@utils.register_interface(ExtensionType) +class InhibitAnyPolicy(object): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs): + if not isinstance(skip_certs, six.integer_types): + raise TypeError("skip_certs must be an 
integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __ne__(self, other): + return not self == other + + skip_certs = utils.read_only_property("_skip_certs") + + +@utils.register_interface(ExtensionType) +class KeyUsage(object): + oid = ExtensionOID.KEY_USAGE + + def __init__(self, digital_signature, content_commitment, key_encipherment, + data_encipherment, key_agreement, key_cert_sign, crl_sign, + encipher_only, decipher_only): + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + digital_signature = utils.read_only_property("_digital_signature") + content_commitment = utils.read_only_property("_content_commitment") + key_encipherment = utils.read_only_property("_key_encipherment") + data_encipherment = utils.read_only_property("_data_encipherment") + key_agreement = utils.read_only_property("_key_agreement") + key_cert_sign = utils.read_only_property("_key_cert_sign") + crl_sign = utils.read_only_property("_crl_sign") + + @property + def encipher_only(self): + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self): + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self): + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + encipher_only = None + decipher_only = None + + return ("").format( + self, encipher_only, decipher_only) + + def __eq__(self, other): + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature and + self.content_commitment == other.content_commitment and + self.key_encipherment == other.key_encipherment and + self.data_encipherment == other.data_encipherment and + self.key_agreement == other.key_agreement and + self.key_cert_sign == other.key_cert_sign and + self.crl_sign == other.crl_sign and + self._encipher_only == other._encipher_only and + self._decipher_only == other._decipher_only + ) + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class NameConstraints(object): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__(self, permitted_subtrees, excluded_subtrees): + if permitted_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in permitted_subtrees + ): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(permitted_subtrees) + + if excluded_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in excluded_subtrees + ): + raise TypeError( + "excluded_subtrees must be a list of GeneralName 
objects " + "or None" + ) + + self._validate_ip_name(excluded_subtrees) + + if permitted_subtrees is None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other): + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees and + self.permitted_subtrees == other.permitted_subtrees + ) + + def __ne__(self, other): + return not self == other + + def _validate_ip_name(self, tree): + if any(isinstance(name, IPAddress) and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) for name in tree): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def __repr__(self): + return ( + u"".format(self) + ) + + permitted_subtrees = utils.read_only_property("_permitted_subtrees") + excluded_subtrees = utils.read_only_property("_excluded_subtrees") + + +class Extension(object): + def __init__(self, oid, critical, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + oid = utils.read_only_property("_oid") + critical = utils.read_only_property("_critical") + value = utils.read_only_property("_value") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid and + self.critical == other.critical and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + +class GeneralNames(object): + def __init__(self, general_names): + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. 
+ objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + objs = (i.value for i in objs) + return list(objs) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class SubjectAlternativeName(object): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class IssuerAlternativeName(object): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/general_name.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/general_name.py new file mode 100644 index 0000000..f5bd30f --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/general_name.py @@ -0,0 +1,265 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import ipaddress +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import utils +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + + +_GENERAL_NAMES = { + 0: "otherName", + 1: "rfc822Name", + 2: "dNSName", + 3: "x400Address", + 4: "directoryName", + 5: "ediPartyName", + 6: "uniformResourceIdentifier", + 7: "iPAddress", + 8: "registeredID", +} + + +class UnsupportedGeneralNameType(Exception): + def __init__(self, msg, type): + super(UnsupportedGeneralNameType, self).__init__(msg) + self.type = type + + +@six.add_metaclass(abc.ABCMeta) +class GeneralName(object): + @abc.abstractproperty + def value(self): + """ + Return the value of the object + """ + + +@utils.register_interface(GeneralName) +class RFC822Name(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + name, address = parseaddr(value) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. 
+ raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. + # No IDNA encoding needed since there is no domain component. + encoded = address.encode("ascii") + else: + # A normal email of the form user@domain.com. Let's attempt to + # encode the domain component and reconstruct the address. + encoded = parts[0].encode("ascii") + b"@" + idna.encode(parts[1]) + + self._value = value + self._encoded = encoded + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class DNSName(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class UniformResourceIdentifier(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + parsed = urllib_parse.urlparse(value) + if not parsed.hostname: + netloc = "" + elif parsed.port: + netloc = ( + idna.encode(parsed.hostname) + + ":{0}".format(parsed.port).encode("ascii") + ).decode("ascii") + else: + netloc = idna.encode(parsed.hostname).decode("ascii") + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. 
+ uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )).encode("ascii") + + self._value = value + self._encoded = uri + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class DirectoryName(object): + def __init__(self, value): + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class RegisteredID(object): + def __init__(self, value): + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class IPAddress(object): + def __init__(self, value): + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network + ) + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + "ipaddress.IPv6Network" + ) + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class OtherName(object): + def __init__(self, type_id, value): + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + type_id = utils.read_only_property("_type_id") + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format( + self.type_id, self.value) + + def __eq__(self, other): + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __ne__(self, other): + return not self == other diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/name.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..9d93ece --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/name.py @@ -0,0 +1,78 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
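# Illustrative sketch, not taken from the patched files: combining the
# GeneralName implementations from general_name.py with the
# SubjectAlternativeName extension defined in extensions.py above.
from cryptography import x509

san = x509.SubjectAlternativeName([
    x509.DNSName(u"example.com"),
    x509.DNSName(u"www.example.com"),
    x509.RFC822Name(u"admin@example.com"),
])
assert san.get_values_for_type(x509.DNSName) == [u"example.com",
                                                 u"www.example.com"]
# On a parsed certificate the same data is reached through
# cert.extensions.get_extension_for_oid(x509.OID_SUBJECT_ALTERNATIVE_NAME).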
+ +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.x509.oid import ObjectIdentifier + + +class NameAttribute(object): + def __init__(self, oid, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(value, six.text_type): + raise TypeError( + "value argument must be a text type." + ) + + self._oid = oid + self._value = value + + oid = utils.read_only_property("_oid") + value = utils.read_only_property("_value") + + def __eq__(self, other): + if not isinstance(other, NameAttribute): + return NotImplemented + + return ( + self.oid == other.oid and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.value)) + + def __repr__(self): + return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self) + + +class Name(object): + def __init__(self, attributes): + self._attributes = attributes + + def get_attributes_for_oid(self, oid): + return [i for i in self if i.oid == oid] + + def __eq__(self, other): + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! + return hash(tuple(self._attributes)) + + def __iter__(self): + return iter(self._attributes) + + def __len__(self): + return len(self._attributes) + + def __repr__(self): + return "<Name({0!r})>".format(self._attributes) diff --git a/Darwin/lib/python3.5/site-packages/cryptography/x509/oid.py b/Darwin/lib/python3.5/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..ead4016 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/cryptography/x509/oid.py @@ -0,0 +1,199 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details.
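The name.py hunk above adds the pure-Python NameAttribute and Name containers, and the oid.py file that follows supplies the NameOID constants they are typically combined with. A minimal usage sketch against the module paths vendored in this diff (an illustration, not part of the patch itself):

    # Sketch only: assembling an X.509 subject from the classes added above.
    from cryptography.x509.name import Name, NameAttribute
    from cryptography.x509.oid import NameOID

    subject = Name([
        NameAttribute(NameOID.COUNTRY_NAME, u"US"),
        NameAttribute(NameOID.COMMON_NAME, u"example.com"),
    ])

    # get_attributes_for_oid filters the attribute list by OID equality.
    common_names = subject.get_attributes_for_oid(NameOID.COMMON_NAME)
    assert common_names[0].value == u"example.com"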
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.primitives import hashes + + +class ObjectIdentifier(object): + def __init__(self, dotted_string): + self._dotted_string = dotted_string + + def __eq__(self, other): + if not isinstance(other, ObjectIdentifier): + return NotImplemented + + return self.dotted_string == other.dotted_string + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return "".format( + self.dotted_string, + self._name + ) + + def __hash__(self): + return hash(self.dotted_string) + + @property + def _name(self): + return _OID_NAMES.get(self, "Unknown OID") + + dotted_string = utils.read_only_property("_dotted_string") + + +class ExtensionOID(object): + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + + +class CRLExtensionOID(object): + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +class NameOID(object): + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + + +class SignatureAlgorithmOID(object): + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = 
ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + +_SIG_OIDS_TO_HASH = { + SignatureAlgorithmOID.RSA_WITH_MD5.dotted_string: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.DSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256.dotted_string: hashes.SHA256() +} + + +class ExtendedKeyUsageOID(object): + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + + +class AuthorityInformationAccessOID(object): + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class CertificatePoliciesOID(object): + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: "surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + 
ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + CRLExtensionOID.CRL_REASON: "cRLReason", + CRLExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", +} diff --git a/Darwin/lib/python3.4/site-packages/easy_install.py b/Darwin/lib/python3.5/site-packages/easy_install.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/easy_install.py rename to Darwin/lib/python3.5/site-packages/easy_install.py diff --git a/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..2d8b087 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,11 @@ +Python bindings to the Ed25519 public-key signature system. + +This offers a comfortable python interface to a C implementation of the +Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the +portable 'ref' code from the 'SUPERCOP' benchmarking suite. + +This system provides high (128-bit) security, short (32-byte) keys, short +(64-byte) signatures, and fast (2-6ms) operation. Please see the README for +more details. + + diff --git a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/PKG-INFO b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/METADATA similarity index 53% rename from Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/PKG-INFO rename to Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/METADATA index 5d6abf0..10628e3 100644 --- a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/PKG-INFO +++ b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/METADATA @@ -1,21 +1,11 @@ -Metadata-Version: 1.1 +Metadata-Version: 2.0 Name: ed25519 -Version: 1.3 +Version: 1.4 Summary: Ed25519 public-key signatures Home-page: https://github.com/warner/python-ed25519 Author: Brian Warner Author-email: warner-python-ed25519@lothar.com License: MIT -Description: Python bindings to the Ed25519 public-key signature system. 
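The ed25519 DESCRIPTION.rst above summarizes key and signature sizes but shows no calling sequence. A minimal sketch, assuming the create_keypair/sign/verify helpers of the vendored python-ed25519 1.4 module (not part of the patch):

    import os
    import ed25519

    # create_keypair returns a (SigningKey, VerifyingKey) pair; the entropy
    # source is passed explicitly here for clarity.
    signing_key, verifying_key = ed25519.create_keypair(entropy=os.urandom)

    sig = signing_key.sign(b"message", encoding="base64")
    try:
        verifying_key.verify(sig, b"message", encoding="base64")
    except ed25519.BadSignatureError:
        print("signature did not verify")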
- - This offers a comfortable python interface to a C implementation of the - Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the - portable 'ref' code from the 'SUPERCOP' benchmarking suite. - - This system provides high (128-bit) security, short (32-byte) keys, short - (64-byte) signatures, and fast (2-6ms) operation. Please see the README for - more details. - Platform: UNKNOWN Classifier: Development Status :: 5 - Production/Stable Classifier: Intended Audience :: Developers @@ -26,3 +16,15 @@ Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3.3 Classifier: Programming Language :: Python :: 3.4 Classifier: Topic :: Security :: Cryptography + +Python bindings to the Ed25519 public-key signature system. + +This offers a comfortable python interface to a C implementation of the +Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the +portable 'ref' code from the 'SUPERCOP' benchmarking suite. + +This system provides high (128-bit) security, short (32-byte) keys, short +(64-byte) signatures, and fast (2-6ms) operation. Please see the README for +more details. + + diff --git a/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/RECORD new file mode 100644 index 0000000..8cd10a7 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/RECORD @@ -0,0 +1,16 @@ +ed25519/__init__.py,sha256=0AicD1xQAforRdrUWwmmURJkZ3Gi1lqaifukwZNYJos,401 +ed25519/_ed25519.cpython-35m-darwin.so,sha256=BV_EG_nX6bp0ZCMagYbdJzSMdMKaOVPpQiLnDimlfis,172776 +ed25519/_version.py,sha256=yb119RosJrH_RO02_o3o12GWQvkxx3xD4X7UrJW9vTY,469 +ed25519/keys.py,sha256=AbMFsbxn0qbwmQ6HntpNURsOGq_y4puwFxs6U7Of2eo,7123 +ed25519/test_ed25519.py,sha256=IG8ot-yARHi6PoyJY6ixS1l2L23hE1lCXbSH-XQPCCM,12389 +../../../bin/edsig,sha256=SA1mUUWCjAAaSEe6MKSpVWg-2qXwuiuK3PodCAUwCN0,2853 +ed25519-1.4.dist-info/DESCRIPTION.rst,sha256=8UWGEqjPrB7zPyxLA5Ep6JL58ANbe0Wybqth188exdc,434 +ed25519-1.4.dist-info/METADATA,sha256=5SfqBgerND9vMg8dq81VUTwDclNFkXr30DkD9K95gZU,1114 +ed25519-1.4.dist-info/RECORD,, +ed25519-1.4.dist-info/WHEEL,sha256=Er7DBTU_C2g_rTGCxcwhCKegQSKoYLj1ncusWiwlKwM,111 +ed25519-1.4.dist-info/metadata.json,sha256=y0EzmBUVRGjYZw7dNoEHGy5gJoCC2FdO-5uJL7NXLfk,802 +ed25519-1.4.dist-info/top_level.txt,sha256=U3-N9ZJMBO9MUuZLwoiMbsWSkxsd0TfkNSuzO6O_gYY,8 +ed25519/__pycache__/_version.cpython-35.pyc,, +ed25519/__pycache__/__init__.cpython-35.pyc,, +ed25519/__pycache__/test_ed25519.cpython-35.pyc,, +ed25519/__pycache__/keys.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/WHEEL new file mode 100644 index 0000000..b37ab74 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-cp35m-macosx_10_11_x86_64 + diff --git a/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/metadata.json new file mode 100644 index 0000000..5046511 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Ed25519 public-key signatures", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming 
Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Security :: Cryptography"], "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/warner/python-ed25519"}, "contacts": [{"email": "warner-python-ed25519@lothar.com", "name": "Brian Warner", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT", "metadata_version": "2.0", "name": "ed25519", "version": "1.4"} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/top_level.txt similarity index 100% rename from Darwin/lib/python3.4/site-packages/ed25519-1.3-py3.4.egg-info/top_level.txt rename to Darwin/lib/python3.5/site-packages/ed25519-1.4.dist-info/top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/ed25519/__init__.py b/Darwin/lib/python3.5/site-packages/ed25519/__init__.py similarity index 88% rename from Darwin/lib/python3.4/site-packages/ed25519/__init__.py rename to Darwin/lib/python3.5/site-packages/ed25519/__init__.py index 565093c..7ad9cd8 100644 --- a/Darwin/lib/python3.4/site-packages/ed25519/__init__.py +++ b/Darwin/lib/python3.5/site-packages/ed25519/__init__.py @@ -7,5 +7,5 @@ from .keys import (BadSignatureError, BadPrefixError, remove_prefix, to_ascii, from_ascii) # hush pyflakes from ._version import get_versions -__version__ = get_versions()['version'] +__version__ = str(get_versions()['version']) del get_versions diff --git a/Darwin/lib/python3.5/site-packages/ed25519/_ed25519.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/ed25519/_ed25519.cpython-35m-darwin.so new file mode 100755 index 0000000..3f00552 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/ed25519/_ed25519.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.5/site-packages/ed25519/_version.py b/Darwin/lib/python3.5/site-packages/ed25519/_version.py new file mode 100644 index 0000000..7c19428 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/ed25519/_version.py @@ -0,0 +1,21 @@ + +# This file was generated by 'versioneer.py' (0.15) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+ +import json +import sys + +version_json = ''' +{ + "dirty": false, + "error": null, + "full-revisionid": "a8732e8b6ba4e04e83c7ef05f86c565a2b2fc278", + "version": "1.4" +} +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) diff --git a/Darwin/lib/python3.4/site-packages/ed25519/keys.py b/Darwin/lib/python3.5/site-packages/ed25519/keys.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/ed25519/keys.py rename to Darwin/lib/python3.5/site-packages/ed25519/keys.py index 8bd8e2d..2803174 100644 --- a/Darwin/lib/python3.4/site-packages/ed25519/keys.py +++ b/Darwin/lib/python3.5/site-packages/ed25519/keys.py @@ -60,13 +60,13 @@ def from_ascii(s_ascii, prefix="", encoding="base64"): prefix = prefix.decode('ascii') s_ascii = remove_prefix(s_ascii.strip(), prefix) if encoding == "base64": - s_ascii += "="*((4 - len(s_ascii)%4)%4) - s_bytes = base64.b64decode(s_ascii) + s_ascii += "=" * ((4 - len(s_ascii) % 4) % 4) + s_bytes = base64.b64decode(s_ascii.encode('ascii')) elif encoding == "base32": - s_ascii += "="*((8 - len(s_ascii)%8)%8) - s_bytes = base64.b32decode(s_ascii.upper()) + s_ascii += "=" * ((8 - len(s_ascii) % 8) % 8) + s_bytes = base64.b32decode(s_ascii.upper().encode('ascii')) elif encoding in ("base16", "hex"): - s_bytes = base64.b16decode(s_ascii.upper()) + s_bytes = base64.b16decode(s_ascii.upper().encode('ascii')) else: raise NotImplementedError return s_bytes diff --git a/Darwin/lib/python3.4/site-packages/ed25519/test_ed25519.py b/Darwin/lib/python3.5/site-packages/ed25519/test_ed25519.py similarity index 85% rename from Darwin/lib/python3.4/site-packages/ed25519/test_ed25519.py rename to Darwin/lib/python3.5/site-packages/ed25519/test_ed25519.py index c5830c9..8dea618 100644 --- a/Darwin/lib/python3.4/site-packages/ed25519/test_ed25519.py +++ b/Darwin/lib/python3.5/site-packages/ed25519/test_ed25519.py @@ -54,8 +54,8 @@ class Basic(unittest.TestCase): sk_s = b"\x00" * 32 # usually urandom(32) vk_s, skvk_s = raw.publickey(sk_s) self.failUnlessEqual(len(vk_s), 32) - exp_vks = unhexlify("3b6a27bcceb6a42d62a3a8d02a6f0d73" - "653215771de243a63ac048a18b59da29") + exp_vks = unhexlify(b"3b6a27bcceb6a42d62a3a8d02a6f0d73" + b"653215771de243a63ac048a18b59da29") self.failUnlessEqual(vk_s, exp_vks) self.failUnlessEqual(skvk_s[:32], sk_s) self.failUnlessEqual(skvk_s[32:], vk_s) @@ -63,10 +63,10 @@ class Basic(unittest.TestCase): msg_and_sig = raw.sign(msg, skvk_s) sig = msg_and_sig[:-len(msg)] self.failUnlessEqual(len(sig), 64) - exp_sig = unhexlify("b0b47780f096ae60bfff8d8e7b19c36b" - "321ae6e69cca972f2ff987ef30f20d29" - "774b53bae404485c4391ddf1b3f37aaa" - "8a9747f984eb0884e8aa533386e73305") + exp_sig = unhexlify(b"b0b47780f096ae60bfff8d8e7b19c36b" + b"321ae6e69cca972f2ff987ef30f20d29" + b"774b53bae404485c4391ddf1b3f37aaa" + b"8a9747f984eb0884e8aa533386e73305") self.failUnlessEqual(sig, exp_sig) ret = raw.open(sig+msg, vk_s) # don't raise exception self.failUnlessEqual(ret, msg) @@ -112,8 +112,8 @@ class Basic(unittest.TestCase): def test_publickey(self): - seed = unhexlify("4ba96b0b5303328c7405220598a587c4" - "acb06ed9a9601d149f85400195f1ec3d") + seed = unhexlify(b"4ba96b0b5303328c7405220598a587c4" + b"acb06ed9a9601d149f85400195f1ec3d") sk = ed25519.SigningKey(seed) self.failUnlessEqual(hexlify(sk.to_bytes()), (b"4ba96b0b5303328c7405220598a587c4" @@ -129,46 +129,46 @@ class Basic(unittest.TestCase): self.failUnlessEqual(sk, sk2) def test_OOP(self): - sk_s = unhexlify("4ba96b0b5303328c7405220598a587c4" - "acb06ed9a9601d149f85400195f1ec3d" - 
"a66d161e090652b054740748f059f92a" - "5b731f1c27b05571f6d942e4f8b7b264") + sk_s = unhexlify(b"4ba96b0b5303328c7405220598a587c4" + b"acb06ed9a9601d149f85400195f1ec3d" + b"a66d161e090652b054740748f059f92a" + b"5b731f1c27b05571f6d942e4f8b7b264") sk = ed25519.SigningKey(sk_s) self.failUnlessEqual(len(sk.to_bytes()), 64) self.failUnlessEqual(sk.to_bytes(), sk_s) - sk2_seed = unhexlify("4ba96b0b5303328c7405220598a587c4" - "acb06ed9a9601d149f85400195f1ec3d") + sk2_seed = unhexlify(b"4ba96b0b5303328c7405220598a587c4" + b"acb06ed9a9601d149f85400195f1ec3d") sk2 = ed25519.SigningKey(sk2_seed) self.failUnlessEqual(sk2.to_bytes(), sk.to_bytes()) vk = sk.get_verifying_key() self.failUnlessEqual(len(vk.to_bytes()), 32) - exp_vks = unhexlify("a66d161e090652b054740748f059f92a" - "5b731f1c27b05571f6d942e4f8b7b264") + exp_vks = unhexlify(b"a66d161e090652b054740748f059f92a" + b"5b731f1c27b05571f6d942e4f8b7b264") self.failUnlessEqual(vk.to_bytes(), exp_vks) self.failUnlessEqual(ed25519.VerifyingKey(vk.to_bytes()), vk) msg = b"hello world" sig = sk.sign(msg) self.failUnlessEqual(len(sig), 64) - exp_sig = unhexlify("6eaffe94f2972b35158b6aaa9b69c1da" - "97f0896aca29c41b1dd7b32e6c9e2ff6" - "76fc8d8b034709cdcc37d8aeb86bebfb" - "173ace3c319e211ea1d7e8d8884c1808") + exp_sig = unhexlify(b"6eaffe94f2972b35158b6aaa9b69c1da" + b"97f0896aca29c41b1dd7b32e6c9e2ff6" + b"76fc8d8b034709cdcc37d8aeb86bebfb" + b"173ace3c319e211ea1d7e8d8884c1808") self.failUnlessEqual(sig, exp_sig) self.failUnlessEqual(vk.verify(sig, msg), None) # also, don't throw self.failUnlessRaises(ed25519.BadSignatureError, vk.verify, sig, msg+b".. NOT!") def test_object_identity(self): - sk1_s = unhexlify("ef32972ae3f1252a5aa1395347ea008c" - "bd2fed0773a4ea45e2d2d06c8cf8fbd4" - "c024601a9c5b854fb100ff3116cf4f22" - "a311565f027391cb49d3bbe11c44399d") - sk2_s = unhexlify("3d550c158900b4c2922b6656d2f80572" - "89de4ee65043745179685ae7d29b944d" - "672b8a2cb23f9e75e1d46ce249cd9c04" - "68f816f1c734a102822b60e18b41eacd") + sk1_s = unhexlify(b"ef32972ae3f1252a5aa1395347ea008c" + b"bd2fed0773a4ea45e2d2d06c8cf8fbd4" + b"c024601a9c5b854fb100ff3116cf4f22" + b"a311565f027391cb49d3bbe11c44399d") + sk2_s = unhexlify(b"3d550c158900b4c2922b6656d2f80572" + b"89de4ee65043745179685ae7d29b944d" + b"672b8a2cb23f9e75e1d46ce249cd9c04" + b"68f816f1c734a102822b60e18b41eacd") sk1a = ed25519.SigningKey(sk1_s) sk1b = ed25519.SigningKey(sk1_s) vk1a = sk1a.get_verifying_key() diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..84c7681 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,144 @@ +Internationalized Domain Names in Applications (IDNA) +===================================================== + +A library to support the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This version of the protocol is often referred to as “IDNA2008†and can +produce different results from the earlier standard from 2003. + +The library is also intended to act as a suitable drop-in replacement for +the “encodings.idna†module that comes with the Python standard library +but currently only supports the older 2003 specification. + +Its basic functions are simply executed: + +.. 
code-block:: pycon + + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use PIP: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library should work with Python 2.7, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to an A-label or U-label respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module. + +.. code-block:: pycon + + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in RFC 5895, the IDNA specification no longer including mappings +from different forms of input that a user may enter, to the form that is provided +to the IDNA functions. This functionality is now a local user-interface issue +distinct from the IDNA functionality. + +The Unicode Consortium has developed one such user-level mapping, known as +`Unicode IDNA Compatibility Processing `_. +It provides for both transitional mapping and non-transitional mapping described +in this document. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed + >>> idna.encode(u'Königsgäßchen', uts46=True) + 'xn--knigsgchen-b4a3dun' + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Note that implementors should use transitional processing with caution as the outputs +of the functions may differ from what is expected, as noted in the example. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapping to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.)
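The two paragraphs above (the ``idna.compat`` shims and the exception hierarchy) carry no accompanying example in the description. A minimal sketch, assuming the vendored idna 2.0 API added later in this patch (an illustration, not part of DESCRIPTION.rst):

    import idna
    from idna.compat import ToASCII, ToUnicode

    # ToASCII/ToUnicode wrap idna.encode/idna.decode for code that was written
    # against the standard library's encodings.idna helpers.
    assert ToASCII(u'\u30c6\u30b9\u30c8') == b'xn--zckzah'    # katakana "tesuto"
    assert ToUnicode(b'xn--zckzah') == u'\u30c6\u30b9\u30c8'

    try:
        idna.encode(u'*.example.com')    # '*' is not a permitted codepoint
    except idna.IDNAError as exc:        # InvalidCodepoint derives from IDNAError
        print('rejected:', exc)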
+ +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as test that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit to the master branch of the +idna git repository at Travis CI: + +.. image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/METADATA b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/METADATA new file mode 100644 index 0000000..0dbccca --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/METADATA @@ -0,0 +1,168 @@ +Metadata-Version: 2.0 +Name: idna +Version: 2.0 +Summary: Internationalized Domain Names in Applications (IDNA) +Home-page: https://github.com/kjd/idna +Author: Kim Davies +Author-email: kim@cynosure.com.au +License: BSD-like +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: System Administrators +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Topic :: Internet :: Name Service (DNS) +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Utilities + +Internationalized Domain Names in Applications (IDNA) +===================================================== + +A library to support the Internationalised Domain Names in Applications +(IDNA) protocol as specified in `RFC 5891 `_. +This version of the protocol is often referred to as “IDNA2008†and can +produce different results from the earlier standard from 2003. + +The library is also intended to act as a suitable drop-in replacement for +the “encodings.idna†module that comes with the Python standard library +but currently only supports the older 2003 specification. + +Its basic functions are simply executed: + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +Packages +-------- + +The latest tagged release version is published in the PyPI repository: + +.. image:: https://badge.fury.io/py/idna.svg + :target: http://badge.fury.io/py/idna + + +Installation +------------ + +To install this library, you can use PIP: + +.. code-block:: bash + + $ pip install idna + +Alternatively, you can install the package using the bundled setup script: + +.. code-block:: bash + + $ python setup.py install + +This library should work with Python 2.7, and Python 3.3 or later. + + +Usage +----- + +For typical usage, the ``encode`` and ``decode`` functions will take a domain +name argument and perform a conversion to an A-label or U-label respectively. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'ドメイン.テスト') + 'xn--eckwd4c7c.xn--zckzah' + >>> print idna.decode('xn--eckwd4c7c.xn--zckzah') + ドメイン.テスト + +You may use the codec encoding and decoding methods using the +``idna.codec`` module. + +.. 
code-block:: pycon + + >>> import idna.codec + >>> print u'домена.испытание'.encode('idna') + xn--80ahd1agd.xn--80akhbyknj4f + >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna') + домена.испытание + +Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel`` +functions if necessary: + +.. code-block:: pycon + + >>> idna.alabel(u'测试') + 'xn--0zwm56d' + +Compatibility Mapping (UTS #46) ++++++++++++++++++++++++++++++++ + +As described in RFC 5895, the IDNA specification no longer including mappings +from different forms of input that a user may enter, to the form that is provided +to the IDNA functions. This functionality is now a local user-interface issue +distinct from the IDNA functionality. + +The Unicode Consortium has developed one such user-level mapping, known as +`Unicode IDNA Compatibility Processing `_. +It provides for both transitional mapping and non-transitional mapping described +in this document. + +.. code-block:: pycon + + >>> import idna + >>> idna.encode(u'Königsgäßchen') + ... + idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed + >>> idna.encode(u'Königsgäßchen', uts46=True) + 'xn--knigsgchen-b4a3dun' + >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True) + 'xn--knigsgsschen-lcb0w' + +Note that implementors should use transitional processing with caution as the outputs +of the functions may differ from what is expected, as noted in the example. + +``encodings.idna`` Compatibility +++++++++++++++++++++++++++++++++ + +Function calls from the Python built-in ``encodings.idna`` module are +mapping to their IDNA 2008 equivalents using the ``idna.compat`` module. +Simply substitute the ``import`` clause in your code to refer to the +new module name. + +Exceptions +---------- + +All errors raised during the conversion following the specification should +raise an exception derived from the ``idna.IDNAError`` base class. + +More specific exceptions that may be generated as ``idna.IDNABidiError`` +when the error reflects an illegal combination of left-to-right and right-to-left +characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is +an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext`` +when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO +or CONTEXTJ but the contextual requirements are not satisfied.) + +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as test that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit to the master branch of the +idna git repository at Travis CI: + +..
image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/RECORD new file mode 100644 index 0000000..0a4249d --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/RECORD @@ -0,0 +1,19 @@ +idna/__init__.py,sha256=K0kNy26Vm6A-1V5lST3ily6yVsNLUbiqk6AZDFm2nJI,20 +idna/codec.py,sha256=fHb4zE1NzJR6aZmcpcaRd3tq4e8mYaETS1UogAU2IfM,3303 +idna/compat.py,sha256=LXKc5WEVdXNWv5A0FpETd5T15qZyQTKGBeoHF-GbdkM,240 +idna/core.py,sha256=zL5Do1c7hChvSJI6tUWnRB6RZayruc7JUeFJn2M9OTc,11243 +idna/idnadata.py,sha256=UVUBp9zbkxFBtZgNzq7sdQ9kYMPJseVXCC02PsHYn-E,90754 +idna/uts46data.py,sha256=RAmrTTPJKgSNXPICpBZ0f6Bp8_9R2OHQysKDGPymYlw,181224 +idna-2.0.dist-info/DESCRIPTION.rst,sha256=kSltH00qpJdoxPQkrllcXZXbgbQJy8uTam97W_RVjyM,4774 +idna-2.0.dist-info/METADATA,sha256=zc-FgzaRTcjWPBQf5XDnbZfZ7SFrBOZKUL8J29e0imA,5712 +idna-2.0.dist-info/metadata.json,sha256=9LK-dlPf8HPLMPefULPipZdGCVmIkahoVFeM4UvAc9A,1015 +idna-2.0.dist-info/pbr.json,sha256=G17oY0YosBv1cEKLCgflO_-APtB0l9i46H4DKOvoLew,46 +idna-2.0.dist-info/RECORD,, +idna-2.0.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna-2.0.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +idna/__pycache__/idnadata.cpython-35.pyc,, +idna/__pycache__/compat.cpython-35.pyc,, +idna/__pycache__/core.cpython-35.pyc,, +idna/__pycache__/__init__.cpython-35.pyc,, +idna/__pycache__/codec.cpython-35.pyc,, +idna/__pycache__/uts46data.cpython-35.pyc,, diff --git a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/WHEEL similarity index 70% rename from Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL rename to Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/WHEEL index f19235c..9dff69d 100644 --- a/Darwin/lib/python3.4/site-packages/pip-1.5.6.dist-info/WHEEL +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.23.0) +Generator: bdist_wheel (0.24.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/metadata.json new file mode 100644 index 0000000..85f9fb5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "BSD-like", "name": "idna", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "Internationalized Domain Names in Applications (IDNA)", "version": "2.0", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/kjd/idna"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "kim@cynosure.com.au", "name": "Kim Davies"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Internet :: Name Service (DNS)", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: 
Utilities"]} \ No newline at end of file diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/pbr.json b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/pbr.json new file mode 100644 index 0000000..086d6e8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": true, "git_version": "bb6e94c"} \ No newline at end of file diff --git a/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/top_level.txt b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna-2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/Darwin/lib/python3.5/site-packages/idna/__init__.py b/Darwin/lib/python3.5/site-packages/idna/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/Darwin/lib/python3.5/site-packages/idna/codec.py b/Darwin/lib/python3.5/site-packages/idna/codec.py new file mode 100644 index 0000000..cdd9675 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from idna.core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/Darwin/lib/python3.5/site-packages/idna/compat.py b/Darwin/lib/python3.5/site-packages/idna/compat.py new file mode 100644 index 0000000..ef9bcbd --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from idna.core import * +from idna.codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/Darwin/lib/python3.5/site-packages/idna/core.py b/Darwin/lib/python3.5/site-packages/idna/core.py new file mode 100644 index 0000000..35b2803 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/core.py @@ -0,0 +1,386 @@ +from . import idnadata +import bisect +import unicodedata +import re +import sys + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + return unicodedata.combining(unichr(cp)) + +def _is_script(cp, script): + return ord(cp) in idnadata.scripts[script] + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + break + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 
'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['L', 'D']: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['R', 'D']: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if not _is_script(cp, 'Hiragana') and not _is_script(cp, 'Katakana') and not _is_script(cp, 'Han'): + return False + return True + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if cp_value in 
idnadata.codepoint_classes['PVALID']: + continue + elif cp_value in idnadata.codepoint_classes['CONTEXTJ']: + if not valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + elif cp_value in idnadata.codepoint_classes['CONTEXTO']: + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + result.append(alabel(label)) + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = 
_unicode_dots_re.split(s) + else: + labels = s.split(u'.') + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + result.append(ulabel(label)) + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/Darwin/lib/python3.5/site-packages/idna/idnadata.py b/Darwin/lib/python3.5/site-packages/idna/idnadata.py new file mode 100644 index 0000000..e2867e5 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/idnadata.py @@ -0,0 +1,3573 @@ +# This file is automatically generated by build-idnadata.py + +scripts = { + 'Arabic': frozenset( + list(range(0x600,0x605)) + + list(range(0x606,0x60c)) + + list(range(0x60d,0x61b)) + + [0x61e] + + list(range(0x620,0x640)) + + list(range(0x641,0x64b)) + + list(range(0x656,0x660)) + + list(range(0x66a,0x670)) + + list(range(0x671,0x6dd)) + + list(range(0x6de,0x700)) + + list(range(0x750,0x780)) + + list(range(0x8a0,0x8b3)) + + list(range(0x8e4,0x900)) + + list(range(0xfb50,0xfbc2)) + + list(range(0xfbd3,0xfd3e)) + + list(range(0xfd50,0xfd90)) + + list(range(0xfd92,0xfdc8)) + + list(range(0xfdf0,0xfdfe)) + + list(range(0xfe70,0xfe75)) + + list(range(0xfe76,0xfefd)) + + list(range(0x10e60,0x10e7f)) + + list(range(0x1ee00,0x1ee04)) + + list(range(0x1ee05,0x1ee20)) + + list(range(0x1ee21,0x1ee23)) + + [0x1ee24] + + [0x1ee27] + + list(range(0x1ee29,0x1ee33)) + + list(range(0x1ee34,0x1ee38)) + + [0x1ee39] + + [0x1ee3b] + + [0x1ee42] + + [0x1ee47] + + [0x1ee49] + + [0x1ee4b] + + list(range(0x1ee4d,0x1ee50)) + + list(range(0x1ee51,0x1ee53)) + + [0x1ee54] + + [0x1ee57] + + [0x1ee59] + + [0x1ee5b] + + [0x1ee5d] + + [0x1ee5f] + + list(range(0x1ee61,0x1ee63)) + + [0x1ee64] + + list(range(0x1ee67,0x1ee6b)) + + list(range(0x1ee6c,0x1ee73)) + + list(range(0x1ee74,0x1ee78)) + + list(range(0x1ee79,0x1ee7d)) + + [0x1ee7e] + + list(range(0x1ee80,0x1ee8a)) + + list(range(0x1ee8b,0x1ee9c)) + + list(range(0x1eea1,0x1eea4)) + + list(range(0x1eea5,0x1eeaa)) + + list(range(0x1eeab,0x1eebc)) + + list(range(0x1eef0,0x1eef2)) + ), + 'Armenian': frozenset( + list(range(0x531,0x557)) + + list(range(0x559,0x560)) + + list(range(0x561,0x588)) + + [0x58a] + + list(range(0x58d,0x590)) + + list(range(0xfb13,0xfb18)) + ), + 'Avestan': frozenset( + list(range(0x10b00,0x10b36)) + + list(range(0x10b39,0x10b40)) + ), + 'Balinese': frozenset( + list(range(0x1b00,0x1b4c)) + + list(range(0x1b50,0x1b7d)) + ), + 'Bamum': frozenset( + list(range(0xa6a0,0xa6f8)) + + list(range(0x16800,0x16a39)) + ), + 'Bassa_Vah': frozenset( + list(range(0x16ad0,0x16aee)) + + list(range(0x16af0,0x16af6)) + ), + 'Batak': frozenset( + list(range(0x1bc0,0x1bf4)) + + list(range(0x1bfc,0x1c00)) + ), + 'Bengali': frozenset( + list(range(0x980,0x984)) + + list(range(0x985,0x98d)) + + list(range(0x98f,0x991)) + + list(range(0x993,0x9a9)) + + list(range(0x9aa,0x9b1)) + + [0x9b2] + + list(range(0x9b6,0x9ba)) + + list(range(0x9bc,0x9c5)) + + list(range(0x9c7,0x9c9)) + + list(range(0x9cb,0x9cf)) + + [0x9d7] + + list(range(0x9dc,0x9de)) + + list(range(0x9df,0x9e4)) + + list(range(0x9e6,0x9fc)) + ), + 'Bopomofo': frozenset( + list(range(0x2ea,0x2ec)) + + list(range(0x3105,0x312e)) + + list(range(0x31a0,0x31bb)) + ), + 'Brahmi': frozenset( + list(range(0x11000,0x1104e)) + + list(range(0x11052,0x11070)) + + [0x1107f] + ), + 'Braille': frozenset( + list(range(0x2800,0x2900)) + ), + 'Buginese': frozenset( + list(range(0x1a00,0x1a1c)) + + list(range(0x1a1e,0x1a20)) + ), + 'Buhid': 
frozenset( + list(range(0x1740,0x1754)) + ), + 'Canadian_Aboriginal': frozenset( + list(range(0x1400,0x1680)) + + list(range(0x18b0,0x18f6)) + ), + 'Carian': frozenset( + list(range(0x102a0,0x102d1)) + ), + 'Caucasian_Albanian': frozenset( + list(range(0x10530,0x10564)) + + [0x1056f] + ), + 'Chakma': frozenset( + list(range(0x11100,0x11135)) + + list(range(0x11136,0x11144)) + ), + 'Cham': frozenset( + list(range(0xaa00,0xaa37)) + + list(range(0xaa40,0xaa4e)) + + list(range(0xaa50,0xaa5a)) + + list(range(0xaa5c,0xaa60)) + ), + 'Cherokee': frozenset( + list(range(0x13a0,0x13f5)) + ), + 'Common': frozenset( + list(range(0x0,0x41)) + + list(range(0x5b,0x61)) + + list(range(0x7b,0xaa)) + + list(range(0xab,0xba)) + + list(range(0xbb,0xc0)) + + [0xd7] + + [0xf7] + + list(range(0x2b9,0x2e0)) + + list(range(0x2e5,0x2ea)) + + list(range(0x2ec,0x300)) + + [0x374] + + [0x37e] + + [0x385] + + [0x387] + + [0x589] + + [0x605] + + [0x60c] + + list(range(0x61b,0x61d)) + + [0x61f] + + [0x640] + + list(range(0x660,0x66a)) + + [0x6dd] + + list(range(0x964,0x966)) + + [0xe3f] + + list(range(0xfd5,0xfd9)) + + [0x10fb] + + list(range(0x16eb,0x16ee)) + + list(range(0x1735,0x1737)) + + list(range(0x1802,0x1804)) + + [0x1805] + + [0x1cd3] + + [0x1ce1] + + list(range(0x1ce9,0x1ced)) + + list(range(0x1cee,0x1cf4)) + + list(range(0x1cf5,0x1cf7)) + + list(range(0x2000,0x200c)) + + list(range(0x200e,0x2065)) + + list(range(0x2066,0x2071)) + + list(range(0x2074,0x207f)) + + list(range(0x2080,0x208f)) + + list(range(0x20a0,0x20be)) + + list(range(0x2100,0x2126)) + + list(range(0x2127,0x212a)) + + list(range(0x212c,0x2132)) + + list(range(0x2133,0x214e)) + + list(range(0x214f,0x2160)) + + [0x2189] + + list(range(0x2190,0x23fb)) + + list(range(0x2400,0x2427)) + + list(range(0x2440,0x244b)) + + list(range(0x2460,0x2800)) + + list(range(0x2900,0x2b74)) + + list(range(0x2b76,0x2b96)) + + list(range(0x2b98,0x2bba)) + + list(range(0x2bbd,0x2bc9)) + + list(range(0x2bca,0x2bd2)) + + list(range(0x2e00,0x2e43)) + + list(range(0x2ff0,0x2ffc)) + + list(range(0x3000,0x3005)) + + [0x3006] + + list(range(0x3008,0x3021)) + + list(range(0x3030,0x3038)) + + list(range(0x303c,0x3040)) + + list(range(0x309b,0x309d)) + + [0x30a0] + + list(range(0x30fb,0x30fd)) + + list(range(0x3190,0x31a0)) + + list(range(0x31c0,0x31e4)) + + list(range(0x3220,0x3260)) + + list(range(0x327f,0x32d0)) + + list(range(0x3358,0x3400)) + + list(range(0x4dc0,0x4e00)) + + list(range(0xa700,0xa722)) + + list(range(0xa788,0xa78b)) + + list(range(0xa830,0xa83a)) + + [0xa92e] + + [0xa9cf] + + [0xab5b] + + list(range(0xfd3e,0xfd40)) + + list(range(0xfe10,0xfe1a)) + + list(range(0xfe30,0xfe53)) + + list(range(0xfe54,0xfe67)) + + list(range(0xfe68,0xfe6c)) + + [0xfeff] + + list(range(0xff01,0xff21)) + + list(range(0xff3b,0xff41)) + + list(range(0xff5b,0xff66)) + + [0xff70] + + list(range(0xff9e,0xffa0)) + + list(range(0xffe0,0xffe7)) + + list(range(0xffe8,0xffef)) + + list(range(0xfff9,0xfffe)) + + list(range(0x10100,0x10103)) + + list(range(0x10107,0x10134)) + + list(range(0x10137,0x10140)) + + list(range(0x10190,0x1019c)) + + list(range(0x101d0,0x101fd)) + + list(range(0x102e1,0x102fc)) + + list(range(0x1bca0,0x1bca4)) + + list(range(0x1d000,0x1d0f6)) + + list(range(0x1d100,0x1d127)) + + list(range(0x1d129,0x1d167)) + + list(range(0x1d16a,0x1d17b)) + + list(range(0x1d183,0x1d185)) + + list(range(0x1d18c,0x1d1aa)) + + list(range(0x1d1ae,0x1d1de)) + + list(range(0x1d300,0x1d357)) + + list(range(0x1d360,0x1d372)) + + list(range(0x1d400,0x1d455)) + + 
list(range(0x1d456,0x1d49d)) + + list(range(0x1d49e,0x1d4a0)) + + [0x1d4a2] + + list(range(0x1d4a5,0x1d4a7)) + + list(range(0x1d4a9,0x1d4ad)) + + list(range(0x1d4ae,0x1d4ba)) + + [0x1d4bb] + + list(range(0x1d4bd,0x1d4c4)) + + list(range(0x1d4c5,0x1d506)) + + list(range(0x1d507,0x1d50b)) + + list(range(0x1d50d,0x1d515)) + + list(range(0x1d516,0x1d51d)) + + list(range(0x1d51e,0x1d53a)) + + list(range(0x1d53b,0x1d53f)) + + list(range(0x1d540,0x1d545)) + + [0x1d546] + + list(range(0x1d54a,0x1d551)) + + list(range(0x1d552,0x1d6a6)) + + list(range(0x1d6a8,0x1d7cc)) + + list(range(0x1d7ce,0x1d800)) + + list(range(0x1f000,0x1f02c)) + + list(range(0x1f030,0x1f094)) + + list(range(0x1f0a0,0x1f0af)) + + list(range(0x1f0b1,0x1f0c0)) + + list(range(0x1f0c1,0x1f0d0)) + + list(range(0x1f0d1,0x1f0f6)) + + list(range(0x1f100,0x1f10d)) + + list(range(0x1f110,0x1f12f)) + + list(range(0x1f130,0x1f16c)) + + list(range(0x1f170,0x1f19b)) + + list(range(0x1f1e6,0x1f200)) + + list(range(0x1f201,0x1f203)) + + list(range(0x1f210,0x1f23b)) + + list(range(0x1f240,0x1f249)) + + list(range(0x1f250,0x1f252)) + + list(range(0x1f300,0x1f32d)) + + list(range(0x1f330,0x1f37e)) + + list(range(0x1f380,0x1f3cf)) + + list(range(0x1f3d4,0x1f3f8)) + + list(range(0x1f400,0x1f4ff)) + + list(range(0x1f500,0x1f54b)) + + list(range(0x1f550,0x1f57a)) + + list(range(0x1f57b,0x1f5a4)) + + list(range(0x1f5a5,0x1f643)) + + list(range(0x1f645,0x1f6d0)) + + list(range(0x1f6e0,0x1f6ed)) + + list(range(0x1f6f0,0x1f6f4)) + + list(range(0x1f700,0x1f774)) + + list(range(0x1f780,0x1f7d5)) + + list(range(0x1f800,0x1f80c)) + + list(range(0x1f810,0x1f848)) + + list(range(0x1f850,0x1f85a)) + + list(range(0x1f860,0x1f888)) + + list(range(0x1f890,0x1f8ae)) + + [0xe0001] + + list(range(0xe0020,0xe0080)) + ), + 'Coptic': frozenset( + list(range(0x3e2,0x3f0)) + + list(range(0x2c80,0x2cf4)) + + list(range(0x2cf9,0x2d00)) + ), + 'Cuneiform': frozenset( + list(range(0x12000,0x12399)) + + list(range(0x12400,0x1246f)) + + list(range(0x12470,0x12475)) + ), + 'Cypriot': frozenset( + list(range(0x10800,0x10806)) + + [0x10808] + + list(range(0x1080a,0x10836)) + + list(range(0x10837,0x10839)) + + [0x1083c] + + [0x1083f] + ), + 'Cyrillic': frozenset( + list(range(0x400,0x485)) + + list(range(0x487,0x530)) + + [0x1d2b] + + [0x1d78] + + list(range(0x2de0,0x2e00)) + + list(range(0xa640,0xa69e)) + + [0xa69f] + ), + 'Deseret': frozenset( + list(range(0x10400,0x10450)) + ), + 'Devanagari': frozenset( + list(range(0x900,0x951)) + + list(range(0x953,0x964)) + + list(range(0x966,0x980)) + + list(range(0xa8e0,0xa8fc)) + ), + 'Duployan': frozenset( + list(range(0x1bc00,0x1bc6b)) + + list(range(0x1bc70,0x1bc7d)) + + list(range(0x1bc80,0x1bc89)) + + list(range(0x1bc90,0x1bc9a)) + + list(range(0x1bc9c,0x1bca0)) + ), + 'Egyptian_Hieroglyphs': frozenset( + list(range(0x13000,0x1342f)) + ), + 'Elbasan': frozenset( + list(range(0x10500,0x10528)) + ), + 'Ethiopic': frozenset( + list(range(0x1200,0x1249)) + + list(range(0x124a,0x124e)) + + list(range(0x1250,0x1257)) + + [0x1258] + + list(range(0x125a,0x125e)) + + list(range(0x1260,0x1289)) + + list(range(0x128a,0x128e)) + + list(range(0x1290,0x12b1)) + + list(range(0x12b2,0x12b6)) + + list(range(0x12b8,0x12bf)) + + [0x12c0] + + list(range(0x12c2,0x12c6)) + + list(range(0x12c8,0x12d7)) + + list(range(0x12d8,0x1311)) + + list(range(0x1312,0x1316)) + + list(range(0x1318,0x135b)) + + list(range(0x135d,0x137d)) + + list(range(0x1380,0x139a)) + + list(range(0x2d80,0x2d97)) + + list(range(0x2da0,0x2da7)) + + list(range(0x2da8,0x2daf)) + + 
list(range(0x2db0,0x2db7)) + + list(range(0x2db8,0x2dbf)) + + list(range(0x2dc0,0x2dc7)) + + list(range(0x2dc8,0x2dcf)) + + list(range(0x2dd0,0x2dd7)) + + list(range(0x2dd8,0x2ddf)) + + list(range(0xab01,0xab07)) + + list(range(0xab09,0xab0f)) + + list(range(0xab11,0xab17)) + + list(range(0xab20,0xab27)) + + list(range(0xab28,0xab2f)) + ), + 'Georgian': frozenset( + list(range(0x10a0,0x10c6)) + + [0x10c7] + + [0x10cd] + + list(range(0x10d0,0x10fb)) + + list(range(0x10fc,0x1100)) + + list(range(0x2d00,0x2d26)) + + [0x2d27] + + [0x2d2d] + ), + 'Glagolitic': frozenset( + list(range(0x2c00,0x2c2f)) + + list(range(0x2c30,0x2c5f)) + ), + 'Gothic': frozenset( + list(range(0x10330,0x1034b)) + ), + 'Grantha': frozenset( + list(range(0x11301,0x11304)) + + list(range(0x11305,0x1130d)) + + list(range(0x1130f,0x11311)) + + list(range(0x11313,0x11329)) + + list(range(0x1132a,0x11331)) + + list(range(0x11332,0x11334)) + + list(range(0x11335,0x1133a)) + + list(range(0x1133c,0x11345)) + + list(range(0x11347,0x11349)) + + list(range(0x1134b,0x1134e)) + + [0x11357] + + list(range(0x1135d,0x11364)) + + list(range(0x11366,0x1136d)) + + list(range(0x11370,0x11375)) + ), + 'Greek': frozenset( + list(range(0x370,0x374)) + + list(range(0x375,0x378)) + + list(range(0x37a,0x37e)) + + [0x37f] + + [0x384] + + [0x386] + + list(range(0x388,0x38b)) + + [0x38c] + + list(range(0x38e,0x3a2)) + + list(range(0x3a3,0x3e2)) + + list(range(0x3f0,0x400)) + + list(range(0x1d26,0x1d2b)) + + list(range(0x1d5d,0x1d62)) + + list(range(0x1d66,0x1d6b)) + + [0x1dbf] + + list(range(0x1f00,0x1f16)) + + list(range(0x1f18,0x1f1e)) + + list(range(0x1f20,0x1f46)) + + list(range(0x1f48,0x1f4e)) + + list(range(0x1f50,0x1f58)) + + [0x1f59] + + [0x1f5b] + + [0x1f5d] + + list(range(0x1f5f,0x1f7e)) + + list(range(0x1f80,0x1fb5)) + + list(range(0x1fb6,0x1fc5)) + + list(range(0x1fc6,0x1fd4)) + + list(range(0x1fd6,0x1fdc)) + + list(range(0x1fdd,0x1ff0)) + + list(range(0x1ff2,0x1ff5)) + + list(range(0x1ff6,0x1fff)) + + [0x2126] + + [0xab65] + + list(range(0x10140,0x1018d)) + + [0x101a0] + + list(range(0x1d200,0x1d246)) + ), + 'Gujarati': frozenset( + list(range(0xa81,0xa84)) + + list(range(0xa85,0xa8e)) + + list(range(0xa8f,0xa92)) + + list(range(0xa93,0xaa9)) + + list(range(0xaaa,0xab1)) + + list(range(0xab2,0xab4)) + + list(range(0xab5,0xaba)) + + list(range(0xabc,0xac6)) + + list(range(0xac7,0xaca)) + + list(range(0xacb,0xace)) + + [0xad0] + + list(range(0xae0,0xae4)) + + list(range(0xae6,0xaf2)) + ), + 'Gurmukhi': frozenset( + list(range(0xa01,0xa04)) + + list(range(0xa05,0xa0b)) + + list(range(0xa0f,0xa11)) + + list(range(0xa13,0xa29)) + + list(range(0xa2a,0xa31)) + + list(range(0xa32,0xa34)) + + list(range(0xa35,0xa37)) + + list(range(0xa38,0xa3a)) + + [0xa3c] + + list(range(0xa3e,0xa43)) + + list(range(0xa47,0xa49)) + + list(range(0xa4b,0xa4e)) + + [0xa51] + + list(range(0xa59,0xa5d)) + + [0xa5e] + + list(range(0xa66,0xa76)) + ), + 'Han': frozenset( + list(range(0x2e80,0x2e9a)) + + list(range(0x2e9b,0x2ef4)) + + list(range(0x2f00,0x2fd6)) + + [0x3005] + + [0x3007] + + list(range(0x3021,0x302a)) + + list(range(0x3038,0x303c)) + + list(range(0x3400,0x4db6)) + + list(range(0x4e00,0x9fcd)) + + list(range(0xf900,0xfa6e)) + + list(range(0xfa70,0xfada)) + + list(range(0x20000,0x2a6d7)) + + list(range(0x2a700,0x2b735)) + + list(range(0x2b740,0x2b81e)) + + list(range(0x2f800,0x2fa1e)) + ), + 'Hangul': frozenset( + list(range(0x1100,0x1200)) + + list(range(0x302e,0x3030)) + + list(range(0x3131,0x318f)) + + list(range(0x3200,0x321f)) + + 
list(range(0x3260,0x327f)) + + list(range(0xa960,0xa97d)) + + list(range(0xac00,0xd7a4)) + + list(range(0xd7b0,0xd7c7)) + + list(range(0xd7cb,0xd7fc)) + + list(range(0xffa0,0xffbf)) + + list(range(0xffc2,0xffc8)) + + list(range(0xffca,0xffd0)) + + list(range(0xffd2,0xffd8)) + + list(range(0xffda,0xffdd)) + ), + 'Hanunoo': frozenset( + list(range(0x1720,0x1735)) + ), + 'Hebrew': frozenset( + list(range(0x591,0x5c8)) + + list(range(0x5d0,0x5eb)) + + list(range(0x5f0,0x5f5)) + + list(range(0xfb1d,0xfb37)) + + list(range(0xfb38,0xfb3d)) + + [0xfb3e] + + list(range(0xfb40,0xfb42)) + + list(range(0xfb43,0xfb45)) + + list(range(0xfb46,0xfb50)) + ), + 'Hiragana': frozenset( + list(range(0x3041,0x3097)) + + list(range(0x309d,0x30a0)) + + [0x1b001] + + [0x1f200] + ), + 'Imperial_Aramaic': frozenset( + list(range(0x10840,0x10856)) + + list(range(0x10857,0x10860)) + ), + 'Inherited': frozenset( + list(range(0x300,0x370)) + + list(range(0x485,0x487)) + + list(range(0x64b,0x656)) + + [0x670] + + list(range(0x951,0x953)) + + list(range(0x1ab0,0x1abf)) + + list(range(0x1cd0,0x1cd3)) + + list(range(0x1cd4,0x1ce1)) + + list(range(0x1ce2,0x1ce9)) + + [0x1ced] + + [0x1cf4] + + list(range(0x1cf8,0x1cfa)) + + list(range(0x1dc0,0x1df6)) + + list(range(0x1dfc,0x1e00)) + + list(range(0x200c,0x200e)) + + list(range(0x20d0,0x20f1)) + + list(range(0x302a,0x302e)) + + list(range(0x3099,0x309b)) + + list(range(0xfe00,0xfe10)) + + list(range(0xfe20,0xfe2e)) + + [0x101fd] + + [0x102e0] + + list(range(0x1d167,0x1d16a)) + + list(range(0x1d17b,0x1d183)) + + list(range(0x1d185,0x1d18c)) + + list(range(0x1d1aa,0x1d1ae)) + + list(range(0xe0100,0xe01f0)) + ), + 'Inscriptional_Pahlavi': frozenset( + list(range(0x10b60,0x10b73)) + + list(range(0x10b78,0x10b80)) + ), + 'Inscriptional_Parthian': frozenset( + list(range(0x10b40,0x10b56)) + + list(range(0x10b58,0x10b60)) + ), + 'Javanese': frozenset( + list(range(0xa980,0xa9ce)) + + list(range(0xa9d0,0xa9da)) + + list(range(0xa9de,0xa9e0)) + ), + 'Kaithi': frozenset( + list(range(0x11080,0x110c2)) + ), + 'Kannada': frozenset( + list(range(0xc81,0xc84)) + + list(range(0xc85,0xc8d)) + + list(range(0xc8e,0xc91)) + + list(range(0xc92,0xca9)) + + list(range(0xcaa,0xcb4)) + + list(range(0xcb5,0xcba)) + + list(range(0xcbc,0xcc5)) + + list(range(0xcc6,0xcc9)) + + list(range(0xcca,0xcce)) + + list(range(0xcd5,0xcd7)) + + [0xcde] + + list(range(0xce0,0xce4)) + + list(range(0xce6,0xcf0)) + + list(range(0xcf1,0xcf3)) + ), + 'Katakana': frozenset( + list(range(0x30a1,0x30fb)) + + list(range(0x30fd,0x3100)) + + list(range(0x31f0,0x3200)) + + list(range(0x32d0,0x32ff)) + + list(range(0x3300,0x3358)) + + list(range(0xff66,0xff70)) + + list(range(0xff71,0xff9e)) + + [0x1b000] + ), + 'Kayah_Li': frozenset( + list(range(0xa900,0xa92e)) + + [0xa92f] + ), + 'Kharoshthi': frozenset( + list(range(0x10a00,0x10a04)) + + list(range(0x10a05,0x10a07)) + + list(range(0x10a0c,0x10a14)) + + list(range(0x10a15,0x10a18)) + + list(range(0x10a19,0x10a34)) + + list(range(0x10a38,0x10a3b)) + + list(range(0x10a3f,0x10a48)) + + list(range(0x10a50,0x10a59)) + ), + 'Khmer': frozenset( + list(range(0x1780,0x17de)) + + list(range(0x17e0,0x17ea)) + + list(range(0x17f0,0x17fa)) + + list(range(0x19e0,0x1a00)) + ), + 'Khojki': frozenset( + list(range(0x11200,0x11212)) + + list(range(0x11213,0x1123e)) + ), + 'Khudawadi': frozenset( + list(range(0x112b0,0x112eb)) + + list(range(0x112f0,0x112fa)) + ), + 'Lao': frozenset( + list(range(0xe81,0xe83)) + + [0xe84] + + list(range(0xe87,0xe89)) + + [0xe8a] + + [0xe8d] + + 
list(range(0xe94,0xe98)) + + list(range(0xe99,0xea0)) + + list(range(0xea1,0xea4)) + + [0xea5] + + [0xea7] + + list(range(0xeaa,0xeac)) + + list(range(0xead,0xeba)) + + list(range(0xebb,0xebe)) + + list(range(0xec0,0xec5)) + + [0xec6] + + list(range(0xec8,0xece)) + + list(range(0xed0,0xeda)) + + list(range(0xedc,0xee0)) + ), + 'Latin': frozenset( + list(range(0x41,0x5b)) + + list(range(0x61,0x7b)) + + [0xaa] + + [0xba] + + list(range(0xc0,0xd7)) + + list(range(0xd8,0xf7)) + + list(range(0xf8,0x2b9)) + + list(range(0x2e0,0x2e5)) + + list(range(0x1d00,0x1d26)) + + list(range(0x1d2c,0x1d5d)) + + list(range(0x1d62,0x1d66)) + + list(range(0x1d6b,0x1d78)) + + list(range(0x1d79,0x1dbf)) + + list(range(0x1e00,0x1f00)) + + [0x2071] + + [0x207f] + + list(range(0x2090,0x209d)) + + list(range(0x212a,0x212c)) + + [0x2132] + + [0x214e] + + list(range(0x2160,0x2189)) + + list(range(0x2c60,0x2c80)) + + list(range(0xa722,0xa788)) + + list(range(0xa78b,0xa78f)) + + list(range(0xa790,0xa7ae)) + + list(range(0xa7b0,0xa7b2)) + + list(range(0xa7f7,0xa800)) + + list(range(0xab30,0xab5b)) + + list(range(0xab5c,0xab60)) + + [0xab64] + + list(range(0xfb00,0xfb07)) + + list(range(0xff21,0xff3b)) + + list(range(0xff41,0xff5b)) + ), + 'Lepcha': frozenset( + list(range(0x1c00,0x1c38)) + + list(range(0x1c3b,0x1c4a)) + + list(range(0x1c4d,0x1c50)) + ), + 'Limbu': frozenset( + list(range(0x1900,0x191f)) + + list(range(0x1920,0x192c)) + + list(range(0x1930,0x193c)) + + [0x1940] + + list(range(0x1944,0x1950)) + ), + 'Linear_A': frozenset( + list(range(0x10600,0x10737)) + + list(range(0x10740,0x10756)) + + list(range(0x10760,0x10768)) + ), + 'Linear_B': frozenset( + list(range(0x10000,0x1000c)) + + list(range(0x1000d,0x10027)) + + list(range(0x10028,0x1003b)) + + list(range(0x1003c,0x1003e)) + + list(range(0x1003f,0x1004e)) + + list(range(0x10050,0x1005e)) + + list(range(0x10080,0x100fb)) + ), + 'Lisu': frozenset( + list(range(0xa4d0,0xa500)) + ), + 'Lycian': frozenset( + list(range(0x10280,0x1029d)) + ), + 'Lydian': frozenset( + list(range(0x10920,0x1093a)) + + [0x1093f] + ), + 'Mahajani': frozenset( + list(range(0x11150,0x11177)) + ), + 'Malayalam': frozenset( + list(range(0xd01,0xd04)) + + list(range(0xd05,0xd0d)) + + list(range(0xd0e,0xd11)) + + list(range(0xd12,0xd3b)) + + list(range(0xd3d,0xd45)) + + list(range(0xd46,0xd49)) + + list(range(0xd4a,0xd4f)) + + [0xd57] + + list(range(0xd60,0xd64)) + + list(range(0xd66,0xd76)) + + list(range(0xd79,0xd80)) + ), + 'Mandaic': frozenset( + list(range(0x840,0x85c)) + + [0x85e] + ), + 'Manichaean': frozenset( + list(range(0x10ac0,0x10ae7)) + + list(range(0x10aeb,0x10af7)) + ), + 'Meetei_Mayek': frozenset( + list(range(0xaae0,0xaaf7)) + + list(range(0xabc0,0xabee)) + + list(range(0xabf0,0xabfa)) + ), + 'Mende_Kikakui': frozenset( + list(range(0x1e800,0x1e8c5)) + + list(range(0x1e8c7,0x1e8d7)) + ), + 'Meroitic_Cursive': frozenset( + list(range(0x109a0,0x109b8)) + + list(range(0x109be,0x109c0)) + ), + 'Meroitic_Hieroglyphs': frozenset( + list(range(0x10980,0x109a0)) + ), + 'Miao': frozenset( + list(range(0x16f00,0x16f45)) + + list(range(0x16f50,0x16f7f)) + + list(range(0x16f8f,0x16fa0)) + ), + 'Modi': frozenset( + list(range(0x11600,0x11645)) + + list(range(0x11650,0x1165a)) + ), + 'Mongolian': frozenset( + list(range(0x1800,0x1802)) + + [0x1804] + + list(range(0x1806,0x180f)) + + list(range(0x1810,0x181a)) + + list(range(0x1820,0x1878)) + + list(range(0x1880,0x18ab)) + ), + 'Mro': frozenset( + list(range(0x16a40,0x16a5f)) + + list(range(0x16a60,0x16a6a)) + + 
list(range(0x16a6e,0x16a70)) + ), + 'Myanmar': frozenset( + list(range(0x1000,0x10a0)) + + list(range(0xa9e0,0xa9ff)) + + list(range(0xaa60,0xaa80)) + ), + 'Nabataean': frozenset( + list(range(0x10880,0x1089f)) + + list(range(0x108a7,0x108b0)) + ), + 'New_Tai_Lue': frozenset( + list(range(0x1980,0x19ac)) + + list(range(0x19b0,0x19ca)) + + list(range(0x19d0,0x19db)) + + list(range(0x19de,0x19e0)) + ), + 'Nko': frozenset( + list(range(0x7c0,0x7fb)) + ), + 'Ogham': frozenset( + list(range(0x1680,0x169d)) + ), + 'Ol_Chiki': frozenset( + list(range(0x1c50,0x1c80)) + ), + 'Old_Italic': frozenset( + list(range(0x10300,0x10324)) + ), + 'Old_North_Arabian': frozenset( + list(range(0x10a80,0x10aa0)) + ), + 'Old_Permic': frozenset( + list(range(0x10350,0x1037b)) + ), + 'Old_Persian': frozenset( + list(range(0x103a0,0x103c4)) + + list(range(0x103c8,0x103d6)) + ), + 'Old_South_Arabian': frozenset( + list(range(0x10a60,0x10a80)) + ), + 'Old_Turkic': frozenset( + list(range(0x10c00,0x10c49)) + ), + 'Oriya': frozenset( + list(range(0xb01,0xb04)) + + list(range(0xb05,0xb0d)) + + list(range(0xb0f,0xb11)) + + list(range(0xb13,0xb29)) + + list(range(0xb2a,0xb31)) + + list(range(0xb32,0xb34)) + + list(range(0xb35,0xb3a)) + + list(range(0xb3c,0xb45)) + + list(range(0xb47,0xb49)) + + list(range(0xb4b,0xb4e)) + + list(range(0xb56,0xb58)) + + list(range(0xb5c,0xb5e)) + + list(range(0xb5f,0xb64)) + + list(range(0xb66,0xb78)) + ), + 'Osmanya': frozenset( + list(range(0x10480,0x1049e)) + + list(range(0x104a0,0x104aa)) + ), + 'Pahawh_Hmong': frozenset( + list(range(0x16b00,0x16b46)) + + list(range(0x16b50,0x16b5a)) + + list(range(0x16b5b,0x16b62)) + + list(range(0x16b63,0x16b78)) + + list(range(0x16b7d,0x16b90)) + ), + 'Palmyrene': frozenset( + list(range(0x10860,0x10880)) + ), + 'Pau_Cin_Hau': frozenset( + list(range(0x11ac0,0x11af9)) + ), + 'Phags_Pa': frozenset( + list(range(0xa840,0xa878)) + ), + 'Phoenician': frozenset( + list(range(0x10900,0x1091c)) + + [0x1091f] + ), + 'Psalter_Pahlavi': frozenset( + list(range(0x10b80,0x10b92)) + + list(range(0x10b99,0x10b9d)) + + list(range(0x10ba9,0x10bb0)) + ), + 'Rejang': frozenset( + list(range(0xa930,0xa954)) + + [0xa95f] + ), + 'Runic': frozenset( + list(range(0x16a0,0x16eb)) + + list(range(0x16ee,0x16f9)) + ), + 'Samaritan': frozenset( + list(range(0x800,0x82e)) + + list(range(0x830,0x83f)) + ), + 'Saurashtra': frozenset( + list(range(0xa880,0xa8c5)) + + list(range(0xa8ce,0xa8da)) + ), + 'Sharada': frozenset( + list(range(0x11180,0x111c9)) + + [0x111cd] + + list(range(0x111d0,0x111db)) + ), + 'Shavian': frozenset( + list(range(0x10450,0x10480)) + ), + 'Siddham': frozenset( + list(range(0x11580,0x115b6)) + + list(range(0x115b8,0x115ca)) + ), + 'Sinhala': frozenset( + list(range(0xd82,0xd84)) + + list(range(0xd85,0xd97)) + + list(range(0xd9a,0xdb2)) + + list(range(0xdb3,0xdbc)) + + [0xdbd] + + list(range(0xdc0,0xdc7)) + + [0xdca] + + list(range(0xdcf,0xdd5)) + + [0xdd6] + + list(range(0xdd8,0xde0)) + + list(range(0xde6,0xdf0)) + + list(range(0xdf2,0xdf5)) + + list(range(0x111e1,0x111f5)) + ), + 'Sora_Sompeng': frozenset( + list(range(0x110d0,0x110e9)) + + list(range(0x110f0,0x110fa)) + ), + 'Sundanese': frozenset( + list(range(0x1b80,0x1bc0)) + + list(range(0x1cc0,0x1cc8)) + ), + 'Syloti_Nagri': frozenset( + list(range(0xa800,0xa82c)) + ), + 'Syriac': frozenset( + list(range(0x700,0x70e)) + + list(range(0x70f,0x74b)) + + list(range(0x74d,0x750)) + ), + 'Tagalog': frozenset( + list(range(0x1700,0x170d)) + + list(range(0x170e,0x1715)) + ), + 'Tagbanwa': frozenset( + 
list(range(0x1760,0x176d)) + + list(range(0x176e,0x1771)) + + list(range(0x1772,0x1774)) + ), + 'Tai_Le': frozenset( + list(range(0x1950,0x196e)) + + list(range(0x1970,0x1975)) + ), + 'Tai_Tham': frozenset( + list(range(0x1a20,0x1a5f)) + + list(range(0x1a60,0x1a7d)) + + list(range(0x1a7f,0x1a8a)) + + list(range(0x1a90,0x1a9a)) + + list(range(0x1aa0,0x1aae)) + ), + 'Tai_Viet': frozenset( + list(range(0xaa80,0xaac3)) + + list(range(0xaadb,0xaae0)) + ), + 'Takri': frozenset( + list(range(0x11680,0x116b8)) + + list(range(0x116c0,0x116ca)) + ), + 'Tamil': frozenset( + list(range(0xb82,0xb84)) + + list(range(0xb85,0xb8b)) + + list(range(0xb8e,0xb91)) + + list(range(0xb92,0xb96)) + + list(range(0xb99,0xb9b)) + + [0xb9c] + + list(range(0xb9e,0xba0)) + + list(range(0xba3,0xba5)) + + list(range(0xba8,0xbab)) + + list(range(0xbae,0xbba)) + + list(range(0xbbe,0xbc3)) + + list(range(0xbc6,0xbc9)) + + list(range(0xbca,0xbce)) + + [0xbd0] + + [0xbd7] + + list(range(0xbe6,0xbfb)) + ), + 'Telugu': frozenset( + list(range(0xc00,0xc04)) + + list(range(0xc05,0xc0d)) + + list(range(0xc0e,0xc11)) + + list(range(0xc12,0xc29)) + + list(range(0xc2a,0xc3a)) + + list(range(0xc3d,0xc45)) + + list(range(0xc46,0xc49)) + + list(range(0xc4a,0xc4e)) + + list(range(0xc55,0xc57)) + + list(range(0xc58,0xc5a)) + + list(range(0xc60,0xc64)) + + list(range(0xc66,0xc70)) + + list(range(0xc78,0xc80)) + ), + 'Thaana': frozenset( + list(range(0x780,0x7b2)) + ), + 'Thai': frozenset( + list(range(0xe01,0xe3b)) + + list(range(0xe40,0xe5c)) + ), + 'Tibetan': frozenset( + list(range(0xf00,0xf48)) + + list(range(0xf49,0xf6d)) + + list(range(0xf71,0xf98)) + + list(range(0xf99,0xfbd)) + + list(range(0xfbe,0xfcd)) + + list(range(0xfce,0xfd5)) + + list(range(0xfd9,0xfdb)) + ), + 'Tifinagh': frozenset( + list(range(0x2d30,0x2d68)) + + list(range(0x2d6f,0x2d71)) + + [0x2d7f] + ), + 'Tirhuta': frozenset( + list(range(0x11480,0x114c8)) + + list(range(0x114d0,0x114da)) + ), + 'Ugaritic': frozenset( + list(range(0x10380,0x1039e)) + + [0x1039f] + ), + 'Vai': frozenset( + list(range(0xa500,0xa62c)) + ), + 'Warang_Citi': frozenset( + list(range(0x118a0,0x118f3)) + + [0x118ff] + ), + 'Yi': frozenset( + list(range(0xa000,0xa48d)) + + list(range(0xa490,0xa4c7)) + ), +} +joining_types = { + 0x600: 'U', + 0x601: 'U', + 0x602: 'U', + 0x603: 'U', + 0x604: 'U', + 0x605: 'U', + 0x608: 'U', + 0x60b: 'U', + 0x620: 'D', + 0x621: 'U', + 0x622: 'R', + 0x623: 'R', + 0x624: 'R', + 0x625: 'R', + 0x626: 'D', + 0x627: 'R', + 0x628: 'D', + 0x629: 'R', + 0x62a: 'D', + 0x62b: 'D', + 0x62c: 'D', + 0x62d: 'D', + 0x62e: 'D', + 0x62f: 'R', + 0x630: 'R', + 0x631: 'R', + 0x632: 'R', + 0x633: 'D', + 0x634: 'D', + 0x635: 'D', + 0x636: 'D', + 0x637: 'D', + 0x638: 'D', + 0x639: 'D', + 0x63a: 'D', + 0x63b: 'D', + 0x63c: 'D', + 0x63d: 'D', + 0x63e: 'D', + 0x63f: 'D', + 0x640: 'C', + 0x641: 'D', + 0x642: 'D', + 0x643: 'D', + 0x644: 'D', + 0x645: 'D', + 0x646: 'D', + 0x647: 'D', + 0x648: 'R', + 0x649: 'D', + 0x64a: 'D', + 0x66e: 'D', + 0x66f: 'D', + 0x671: 'R', + 0x672: 'R', + 0x673: 'R', + 0x674: 'U', + 0x675: 'R', + 0x676: 'R', + 0x677: 'R', + 0x678: 'D', + 0x679: 'D', + 0x67a: 'D', + 0x67b: 'D', + 0x67c: 'D', + 0x67d: 'D', + 0x67e: 'D', + 0x67f: 'D', + 0x680: 'D', + 0x681: 'D', + 0x682: 'D', + 0x683: 'D', + 0x684: 'D', + 0x685: 'D', + 0x686: 'D', + 0x687: 'D', + 0x688: 'R', + 0x689: 'R', + 0x68a: 'R', + 0x68b: 'R', + 0x68c: 'R', + 0x68d: 'R', + 0x68e: 'R', + 0x68f: 'R', + 0x690: 'R', + 0x691: 'R', + 0x692: 'R', + 0x693: 'R', + 0x694: 'R', + 0x695: 'R', + 0x696: 'R', + 0x697: 'R', + 
0x698: 'R', + 0x699: 'R', + 0x69a: 'D', + 0x69b: 'D', + 0x69c: 'D', + 0x69d: 'D', + 0x69e: 'D', + 0x69f: 'D', + 0x6a0: 'D', + 0x6a1: 'D', + 0x6a2: 'D', + 0x6a3: 'D', + 0x6a4: 'D', + 0x6a5: 'D', + 0x6a6: 'D', + 0x6a7: 'D', + 0x6a8: 'D', + 0x6a9: 'D', + 0x6aa: 'D', + 0x6ab: 'D', + 0x6ac: 'D', + 0x6ad: 'D', + 0x6ae: 'D', + 0x6af: 'D', + 0x6b0: 'D', + 0x6b1: 'D', + 0x6b2: 'D', + 0x6b3: 'D', + 0x6b4: 'D', + 0x6b5: 'D', + 0x6b6: 'D', + 0x6b7: 'D', + 0x6b8: 'D', + 0x6b9: 'D', + 0x6ba: 'D', + 0x6bb: 'D', + 0x6bc: 'D', + 0x6bd: 'D', + 0x6be: 'D', + 0x6bf: 'D', + 0x6c0: 'R', + 0x6c1: 'D', + 0x6c2: 'D', + 0x6c3: 'R', + 0x6c4: 'R', + 0x6c5: 'R', + 0x6c6: 'R', + 0x6c7: 'R', + 0x6c8: 'R', + 0x6c9: 'R', + 0x6ca: 'R', + 0x6cb: 'R', + 0x6cc: 'D', + 0x6cd: 'R', + 0x6ce: 'D', + 0x6cf: 'R', + 0x6d0: 'D', + 0x6d1: 'D', + 0x6d2: 'R', + 0x6d3: 'R', + 0x6d5: 'R', + 0x6dd: 'U', + 0x6ee: 'R', + 0x6ef: 'R', + 0x6fa: 'D', + 0x6fb: 'D', + 0x6fc: 'D', + 0x6ff: 'D', + 0x710: 'R', + 0x712: 'D', + 0x713: 'D', + 0x714: 'D', + 0x715: 'R', + 0x716: 'R', + 0x717: 'R', + 0x718: 'R', + 0x719: 'R', + 0x71a: 'D', + 0x71b: 'D', + 0x71c: 'D', + 0x71d: 'D', + 0x71e: 'R', + 0x71f: 'D', + 0x720: 'D', + 0x721: 'D', + 0x722: 'D', + 0x723: 'D', + 0x724: 'D', + 0x725: 'D', + 0x726: 'D', + 0x727: 'D', + 0x728: 'R', + 0x729: 'D', + 0x72a: 'R', + 0x72b: 'D', + 0x72c: 'R', + 0x72d: 'D', + 0x72e: 'D', + 0x72f: 'R', + 0x74d: 'R', + 0x74e: 'D', + 0x74f: 'D', + 0x750: 'D', + 0x751: 'D', + 0x752: 'D', + 0x753: 'D', + 0x754: 'D', + 0x755: 'D', + 0x756: 'D', + 0x757: 'D', + 0x758: 'D', + 0x759: 'R', + 0x75a: 'R', + 0x75b: 'R', + 0x75c: 'D', + 0x75d: 'D', + 0x75e: 'D', + 0x75f: 'D', + 0x760: 'D', + 0x761: 'D', + 0x762: 'D', + 0x763: 'D', + 0x764: 'D', + 0x765: 'D', + 0x766: 'D', + 0x767: 'D', + 0x768: 'D', + 0x769: 'D', + 0x76a: 'D', + 0x76b: 'R', + 0x76c: 'R', + 0x76d: 'D', + 0x76e: 'D', + 0x76f: 'D', + 0x770: 'D', + 0x771: 'R', + 0x772: 'D', + 0x773: 'R', + 0x774: 'R', + 0x775: 'D', + 0x776: 'D', + 0x777: 'D', + 0x778: 'R', + 0x779: 'R', + 0x77a: 'D', + 0x77b: 'D', + 0x77c: 'D', + 0x77d: 'D', + 0x77e: 'D', + 0x77f: 'D', + 0x7ca: 'D', + 0x7cb: 'D', + 0x7cc: 'D', + 0x7cd: 'D', + 0x7ce: 'D', + 0x7cf: 'D', + 0x7d0: 'D', + 0x7d1: 'D', + 0x7d2: 'D', + 0x7d3: 'D', + 0x7d4: 'D', + 0x7d5: 'D', + 0x7d6: 'D', + 0x7d7: 'D', + 0x7d8: 'D', + 0x7d9: 'D', + 0x7da: 'D', + 0x7db: 'D', + 0x7dc: 'D', + 0x7dd: 'D', + 0x7de: 'D', + 0x7df: 'D', + 0x7e0: 'D', + 0x7e1: 'D', + 0x7e2: 'D', + 0x7e3: 'D', + 0x7e4: 'D', + 0x7e5: 'D', + 0x7e6: 'D', + 0x7e7: 'D', + 0x7e8: 'D', + 0x7e9: 'D', + 0x7ea: 'D', + 0x7fa: 'C', + 0x840: 'R', + 0x841: 'D', + 0x842: 'D', + 0x843: 'D', + 0x844: 'D', + 0x845: 'D', + 0x846: 'R', + 0x847: 'D', + 0x848: 'D', + 0x849: 'R', + 0x84a: 'D', + 0x84b: 'D', + 0x84c: 'D', + 0x84d: 'D', + 0x84e: 'D', + 0x84f: 'R', + 0x850: 'D', + 0x851: 'D', + 0x852: 'D', + 0x853: 'D', + 0x854: 'R', + 0x855: 'D', + 0x856: 'U', + 0x857: 'U', + 0x858: 'U', + 0x8a0: 'D', + 0x8a1: 'D', + 0x8a2: 'D', + 0x8a3: 'D', + 0x8a4: 'D', + 0x8a5: 'D', + 0x8a6: 'D', + 0x8a7: 'D', + 0x8a8: 'D', + 0x8a9: 'D', + 0x8aa: 'R', + 0x8ab: 'R', + 0x8ac: 'R', + 0x8ad: 'U', + 0x8ae: 'R', + 0x8af: 'D', + 0x8b0: 'D', + 0x8b1: 'R', + 0x8b2: 'R', + 0x1806: 'U', + 0x1807: 'D', + 0x180a: 'C', + 0x180e: 'U', + 0x1820: 'D', + 0x1821: 'D', + 0x1822: 'D', + 0x1823: 'D', + 0x1824: 'D', + 0x1825: 'D', + 0x1826: 'D', + 0x1827: 'D', + 0x1828: 'D', + 0x1829: 'D', + 0x182a: 'D', + 0x182b: 'D', + 0x182c: 'D', + 0x182d: 'D', + 0x182e: 'D', + 0x182f: 'D', + 0x1830: 'D', + 0x1831: 'D', + 0x1832: 'D', + 0x1833: 'D', + 
0x1834: 'D', + 0x1835: 'D', + 0x1836: 'D', + 0x1837: 'D', + 0x1838: 'D', + 0x1839: 'D', + 0x183a: 'D', + 0x183b: 'D', + 0x183c: 'D', + 0x183d: 'D', + 0x183e: 'D', + 0x183f: 'D', + 0x1840: 'D', + 0x1841: 'D', + 0x1842: 'D', + 0x1843: 'D', + 0x1844: 'D', + 0x1845: 'D', + 0x1846: 'D', + 0x1847: 'D', + 0x1848: 'D', + 0x1849: 'D', + 0x184a: 'D', + 0x184b: 'D', + 0x184c: 'D', + 0x184d: 'D', + 0x184e: 'D', + 0x184f: 'D', + 0x1850: 'D', + 0x1851: 'D', + 0x1852: 'D', + 0x1853: 'D', + 0x1854: 'D', + 0x1855: 'D', + 0x1856: 'D', + 0x1857: 'D', + 0x1858: 'D', + 0x1859: 'D', + 0x185a: 'D', + 0x185b: 'D', + 0x185c: 'D', + 0x185d: 'D', + 0x185e: 'D', + 0x185f: 'D', + 0x1860: 'D', + 0x1861: 'D', + 0x1862: 'D', + 0x1863: 'D', + 0x1864: 'D', + 0x1865: 'D', + 0x1866: 'D', + 0x1867: 'D', + 0x1868: 'D', + 0x1869: 'D', + 0x186a: 'D', + 0x186b: 'D', + 0x186c: 'D', + 0x186d: 'D', + 0x186e: 'D', + 0x186f: 'D', + 0x1870: 'D', + 0x1871: 'D', + 0x1872: 'D', + 0x1873: 'D', + 0x1874: 'D', + 0x1875: 'D', + 0x1876: 'D', + 0x1877: 'D', + 0x1880: 'U', + 0x1881: 'U', + 0x1882: 'U', + 0x1883: 'U', + 0x1884: 'U', + 0x1885: 'U', + 0x1886: 'U', + 0x1887: 'D', + 0x1888: 'D', + 0x1889: 'D', + 0x188a: 'D', + 0x188b: 'D', + 0x188c: 'D', + 0x188d: 'D', + 0x188e: 'D', + 0x188f: 'D', + 0x1890: 'D', + 0x1891: 'D', + 0x1892: 'D', + 0x1893: 'D', + 0x1894: 'D', + 0x1895: 'D', + 0x1896: 'D', + 0x1897: 'D', + 0x1898: 'D', + 0x1899: 'D', + 0x189a: 'D', + 0x189b: 'D', + 0x189c: 'D', + 0x189d: 'D', + 0x189e: 'D', + 0x189f: 'D', + 0x18a0: 'D', + 0x18a1: 'D', + 0x18a2: 'D', + 0x18a3: 'D', + 0x18a4: 'D', + 0x18a5: 'D', + 0x18a6: 'D', + 0x18a7: 'D', + 0x18a8: 'D', + 0x18aa: 'D', + 0x200c: 'U', + 0x200d: 'C', + 0x2066: 'U', + 0x2067: 'U', + 0x2068: 'U', + 0x2069: 'U', + 0xa840: 'D', + 0xa841: 'D', + 0xa842: 'D', + 0xa843: 'D', + 0xa844: 'D', + 0xa845: 'D', + 0xa846: 'D', + 0xa847: 'D', + 0xa848: 'D', + 0xa849: 'D', + 0xa84a: 'D', + 0xa84b: 'D', + 0xa84c: 'D', + 0xa84d: 'D', + 0xa84e: 'D', + 0xa84f: 'D', + 0xa850: 'D', + 0xa851: 'D', + 0xa852: 'D', + 0xa853: 'D', + 0xa854: 'D', + 0xa855: 'D', + 0xa856: 'D', + 0xa857: 'D', + 0xa858: 'D', + 0xa859: 'D', + 0xa85a: 'D', + 0xa85b: 'D', + 0xa85c: 'D', + 0xa85d: 'D', + 0xa85e: 'D', + 0xa85f: 'D', + 0xa860: 'D', + 0xa861: 'D', + 0xa862: 'D', + 0xa863: 'D', + 0xa864: 'D', + 0xa865: 'D', + 0xa866: 'D', + 0xa867: 'D', + 0xa868: 'D', + 0xa869: 'D', + 0xa86a: 'D', + 0xa86b: 'D', + 0xa86c: 'D', + 0xa86d: 'D', + 0xa86e: 'D', + 0xa86f: 'D', + 0xa870: 'D', + 0xa871: 'D', + 0xa872: 'L', + 0xa873: 'U', + 0x10ac0: 'D', + 0x10ac1: 'D', + 0x10ac2: 'D', + 0x10ac3: 'D', + 0x10ac4: 'D', + 0x10ac5: 'R', + 0x10ac6: 'U', + 0x10ac7: 'R', + 0x10ac8: 'U', + 0x10ac9: 'R', + 0x10aca: 'R', + 0x10acb: 'U', + 0x10acc: 'U', + 0x10acd: 'L', + 0x10ace: 'R', + 0x10acf: 'R', + 0x10ad0: 'R', + 0x10ad1: 'R', + 0x10ad2: 'R', + 0x10ad3: 'D', + 0x10ad4: 'D', + 0x10ad5: 'D', + 0x10ad6: 'D', + 0x10ad7: 'L', + 0x10ad8: 'D', + 0x10ad9: 'D', + 0x10ada: 'D', + 0x10adb: 'D', + 0x10adc: 'D', + 0x10add: 'R', + 0x10ade: 'D', + 0x10adf: 'D', + 0x10ae0: 'D', + 0x10ae1: 'R', + 0x10ae2: 'U', + 0x10ae3: 'U', + 0x10ae4: 'R', + 0x10aeb: 'D', + 0x10aec: 'D', + 0x10aed: 'D', + 0x10aee: 'D', + 0x10aef: 'R', + 0x10b80: 'D', + 0x10b81: 'R', + 0x10b82: 'D', + 0x10b83: 'R', + 0x10b84: 'R', + 0x10b85: 'R', + 0x10b86: 'D', + 0x10b87: 'D', + 0x10b88: 'D', + 0x10b89: 'R', + 0x10b8a: 'D', + 0x10b8b: 'D', + 0x10b8c: 'R', + 0x10b8d: 'D', + 0x10b8e: 'R', + 0x10b8f: 'R', + 0x10b90: 'D', + 0x10b91: 'R', + 0x10ba9: 'R', + 0x10baa: 'R', + 0x10bab: 'R', + 0x10bac: 'R', + 0x10bad: 
'D', + 0x10bae: 'D', + 0x10baf: 'U', +} +codepoint_classes = { + 'PVALID': frozenset( + [0x2d] + + list(range(0x30,0x3a)) + + list(range(0x61,0x7b)) + + list(range(0xdf,0xf7)) + + list(range(0xf8,0x100)) + + [0x101] + + [0x103] + + [0x105] + + [0x107] + + [0x109] + + [0x10b] + + [0x10d] + + [0x10f] + + [0x111] + + [0x113] + + [0x115] + + [0x117] + + [0x119] + + [0x11b] + + [0x11d] + + [0x11f] + + [0x121] + + [0x123] + + [0x125] + + [0x127] + + [0x129] + + [0x12b] + + [0x12d] + + [0x12f] + + [0x131] + + [0x135] + + list(range(0x137,0x139)) + + [0x13a] + + [0x13c] + + [0x13e] + + [0x142] + + [0x144] + + [0x146] + + [0x148] + + [0x14b] + + [0x14d] + + [0x14f] + + [0x151] + + [0x153] + + [0x155] + + [0x157] + + [0x159] + + [0x15b] + + [0x15d] + + [0x15f] + + [0x161] + + [0x163] + + [0x165] + + [0x167] + + [0x169] + + [0x16b] + + [0x16d] + + [0x16f] + + [0x171] + + [0x173] + + [0x175] + + [0x177] + + [0x17a] + + [0x17c] + + [0x17e] + + [0x180] + + [0x183] + + [0x185] + + [0x188] + + list(range(0x18c,0x18e)) + + [0x192] + + [0x195] + + list(range(0x199,0x19c)) + + [0x19e] + + [0x1a1] + + [0x1a3] + + [0x1a5] + + [0x1a8] + + list(range(0x1aa,0x1ac)) + + [0x1ad] + + [0x1b0] + + [0x1b4] + + [0x1b6] + + list(range(0x1b9,0x1bc)) + + list(range(0x1bd,0x1c4)) + + [0x1ce] + + [0x1d0] + + [0x1d2] + + [0x1d4] + + [0x1d6] + + [0x1d8] + + [0x1da] + + list(range(0x1dc,0x1de)) + + [0x1df] + + [0x1e1] + + [0x1e3] + + [0x1e5] + + [0x1e7] + + [0x1e9] + + [0x1eb] + + [0x1ed] + + list(range(0x1ef,0x1f1)) + + [0x1f5] + + [0x1f9] + + [0x1fb] + + [0x1fd] + + [0x1ff] + + [0x201] + + [0x203] + + [0x205] + + [0x207] + + [0x209] + + [0x20b] + + [0x20d] + + [0x20f] + + [0x211] + + [0x213] + + [0x215] + + [0x217] + + [0x219] + + [0x21b] + + [0x21d] + + [0x21f] + + [0x221] + + [0x223] + + [0x225] + + [0x227] + + [0x229] + + [0x22b] + + [0x22d] + + [0x22f] + + [0x231] + + list(range(0x233,0x23a)) + + [0x23c] + + list(range(0x23f,0x241)) + + [0x242] + + [0x247] + + [0x249] + + [0x24b] + + [0x24d] + + list(range(0x24f,0x2b0)) + + list(range(0x2b9,0x2c2)) + + list(range(0x2c6,0x2d2)) + + [0x2ec] + + [0x2ee] + + list(range(0x300,0x340)) + + [0x342] + + list(range(0x346,0x34f)) + + list(range(0x350,0x370)) + + [0x371] + + [0x373] + + [0x377] + + list(range(0x37b,0x37e)) + + [0x390] + + list(range(0x3ac,0x3cf)) + + [0x3d7] + + [0x3d9] + + [0x3db] + + [0x3dd] + + [0x3df] + + [0x3e1] + + [0x3e3] + + [0x3e5] + + [0x3e7] + + [0x3e9] + + [0x3eb] + + [0x3ed] + + [0x3ef] + + [0x3f3] + + [0x3f8] + + list(range(0x3fb,0x3fd)) + + list(range(0x430,0x460)) + + [0x461] + + [0x463] + + [0x465] + + [0x467] + + [0x469] + + [0x46b] + + [0x46d] + + [0x46f] + + [0x471] + + [0x473] + + [0x475] + + [0x477] + + [0x479] + + [0x47b] + + [0x47d] + + [0x47f] + + [0x481] + + list(range(0x483,0x488)) + + [0x48b] + + [0x48d] + + [0x48f] + + [0x491] + + [0x493] + + [0x495] + + [0x497] + + [0x499] + + [0x49b] + + [0x49d] + + [0x49f] + + [0x4a1] + + [0x4a3] + + [0x4a5] + + [0x4a7] + + [0x4a9] + + [0x4ab] + + [0x4ad] + + [0x4af] + + [0x4b1] + + [0x4b3] + + [0x4b5] + + [0x4b7] + + [0x4b9] + + [0x4bb] + + [0x4bd] + + [0x4bf] + + [0x4c2] + + [0x4c4] + + [0x4c6] + + [0x4c8] + + [0x4ca] + + [0x4cc] + + list(range(0x4ce,0x4d0)) + + [0x4d1] + + [0x4d3] + + [0x4d5] + + [0x4d7] + + [0x4d9] + + [0x4db] + + [0x4dd] + + [0x4df] + + [0x4e1] + + [0x4e3] + + [0x4e5] + + [0x4e7] + + [0x4e9] + + [0x4eb] + + [0x4ed] + + [0x4ef] + + [0x4f1] + + [0x4f3] + + [0x4f5] + + [0x4f7] + + [0x4f9] + + [0x4fb] + + [0x4fd] + + [0x4ff] + + [0x501] + + [0x503] + + [0x505] + + [0x507] + + [0x509] 
+ + [0x50b] + + [0x50d] + + [0x50f] + + [0x511] + + [0x513] + + [0x515] + + [0x517] + + [0x519] + + [0x51b] + + [0x51d] + + [0x51f] + + [0x521] + + [0x523] + + [0x525] + + [0x527] + + [0x559] + + list(range(0x561,0x587)) + + list(range(0x591,0x5be)) + + [0x5bf] + + list(range(0x5c1,0x5c3)) + + list(range(0x5c4,0x5c6)) + + [0x5c7] + + list(range(0x5d0,0x5eb)) + + list(range(0x5f0,0x5f3)) + + list(range(0x610,0x61b)) + + list(range(0x620,0x640)) + + list(range(0x641,0x660)) + + list(range(0x66e,0x675)) + + list(range(0x679,0x6d4)) + + list(range(0x6d5,0x6dd)) + + list(range(0x6df,0x6e9)) + + list(range(0x6ea,0x6f0)) + + list(range(0x6fa,0x700)) + + list(range(0x710,0x74b)) + + list(range(0x74d,0x7b2)) + + list(range(0x7c0,0x7f6)) + + list(range(0x800,0x82e)) + + list(range(0x840,0x85c)) + + [0x8a0] + + list(range(0x8a2,0x8ad)) + + list(range(0x8e4,0x8ff)) + + list(range(0x900,0x958)) + + list(range(0x960,0x964)) + + list(range(0x966,0x970)) + + list(range(0x971,0x978)) + + list(range(0x979,0x980)) + + list(range(0x981,0x984)) + + list(range(0x985,0x98d)) + + list(range(0x98f,0x991)) + + list(range(0x993,0x9a9)) + + list(range(0x9aa,0x9b1)) + + [0x9b2] + + list(range(0x9b6,0x9ba)) + + list(range(0x9bc,0x9c5)) + + list(range(0x9c7,0x9c9)) + + list(range(0x9cb,0x9cf)) + + [0x9d7] + + list(range(0x9e0,0x9e4)) + + list(range(0x9e6,0x9f2)) + + list(range(0xa01,0xa04)) + + list(range(0xa05,0xa0b)) + + list(range(0xa0f,0xa11)) + + list(range(0xa13,0xa29)) + + list(range(0xa2a,0xa31)) + + [0xa32] + + [0xa35] + + list(range(0xa38,0xa3a)) + + [0xa3c] + + list(range(0xa3e,0xa43)) + + list(range(0xa47,0xa49)) + + list(range(0xa4b,0xa4e)) + + [0xa51] + + [0xa5c] + + list(range(0xa66,0xa76)) + + list(range(0xa81,0xa84)) + + list(range(0xa85,0xa8e)) + + list(range(0xa8f,0xa92)) + + list(range(0xa93,0xaa9)) + + list(range(0xaaa,0xab1)) + + list(range(0xab2,0xab4)) + + list(range(0xab5,0xaba)) + + list(range(0xabc,0xac6)) + + list(range(0xac7,0xaca)) + + list(range(0xacb,0xace)) + + [0xad0] + + list(range(0xae0,0xae4)) + + list(range(0xae6,0xaf0)) + + list(range(0xb01,0xb04)) + + list(range(0xb05,0xb0d)) + + list(range(0xb0f,0xb11)) + + list(range(0xb13,0xb29)) + + list(range(0xb2a,0xb31)) + + list(range(0xb32,0xb34)) + + list(range(0xb35,0xb3a)) + + list(range(0xb3c,0xb45)) + + list(range(0xb47,0xb49)) + + list(range(0xb4b,0xb4e)) + + list(range(0xb56,0xb58)) + + list(range(0xb5f,0xb64)) + + list(range(0xb66,0xb70)) + + [0xb71] + + list(range(0xb82,0xb84)) + + list(range(0xb85,0xb8b)) + + list(range(0xb8e,0xb91)) + + list(range(0xb92,0xb96)) + + list(range(0xb99,0xb9b)) + + [0xb9c] + + list(range(0xb9e,0xba0)) + + list(range(0xba3,0xba5)) + + list(range(0xba8,0xbab)) + + list(range(0xbae,0xbba)) + + list(range(0xbbe,0xbc3)) + + list(range(0xbc6,0xbc9)) + + list(range(0xbca,0xbce)) + + [0xbd0] + + [0xbd7] + + list(range(0xbe6,0xbf0)) + + list(range(0xc01,0xc04)) + + list(range(0xc05,0xc0d)) + + list(range(0xc0e,0xc11)) + + list(range(0xc12,0xc29)) + + list(range(0xc2a,0xc34)) + + list(range(0xc35,0xc3a)) + + list(range(0xc3d,0xc45)) + + list(range(0xc46,0xc49)) + + list(range(0xc4a,0xc4e)) + + list(range(0xc55,0xc57)) + + list(range(0xc58,0xc5a)) + + list(range(0xc60,0xc64)) + + list(range(0xc66,0xc70)) + + list(range(0xc82,0xc84)) + + list(range(0xc85,0xc8d)) + + list(range(0xc8e,0xc91)) + + list(range(0xc92,0xca9)) + + list(range(0xcaa,0xcb4)) + + list(range(0xcb5,0xcba)) + + list(range(0xcbc,0xcc5)) + + list(range(0xcc6,0xcc9)) + + list(range(0xcca,0xcce)) + + list(range(0xcd5,0xcd7)) + + [0xcde] + + 
list(range(0xce0,0xce4)) + + list(range(0xce6,0xcf0)) + + list(range(0xcf1,0xcf3)) + + list(range(0xd02,0xd04)) + + list(range(0xd05,0xd0d)) + + list(range(0xd0e,0xd11)) + + list(range(0xd12,0xd3b)) + + list(range(0xd3d,0xd45)) + + list(range(0xd46,0xd49)) + + list(range(0xd4a,0xd4f)) + + [0xd57] + + list(range(0xd60,0xd64)) + + list(range(0xd66,0xd70)) + + list(range(0xd7a,0xd80)) + + list(range(0xd82,0xd84)) + + list(range(0xd85,0xd97)) + + list(range(0xd9a,0xdb2)) + + list(range(0xdb3,0xdbc)) + + [0xdbd] + + list(range(0xdc0,0xdc7)) + + [0xdca] + + list(range(0xdcf,0xdd5)) + + [0xdd6] + + list(range(0xdd8,0xde0)) + + list(range(0xdf2,0xdf4)) + + list(range(0xe01,0xe33)) + + list(range(0xe34,0xe3b)) + + list(range(0xe40,0xe4f)) + + list(range(0xe50,0xe5a)) + + list(range(0xe81,0xe83)) + + [0xe84] + + list(range(0xe87,0xe89)) + + [0xe8a] + + [0xe8d] + + list(range(0xe94,0xe98)) + + list(range(0xe99,0xea0)) + + list(range(0xea1,0xea4)) + + [0xea5] + + [0xea7] + + list(range(0xeaa,0xeac)) + + list(range(0xead,0xeb3)) + + list(range(0xeb4,0xeba)) + + list(range(0xebb,0xebe)) + + list(range(0xec0,0xec5)) + + [0xec6] + + list(range(0xec8,0xece)) + + list(range(0xed0,0xeda)) + + list(range(0xede,0xee0)) + + [0xf00] + + [0xf0b] + + list(range(0xf18,0xf1a)) + + list(range(0xf20,0xf2a)) + + [0xf35] + + [0xf37] + + [0xf39] + + list(range(0xf3e,0xf43)) + + list(range(0xf44,0xf48)) + + list(range(0xf49,0xf4d)) + + list(range(0xf4e,0xf52)) + + list(range(0xf53,0xf57)) + + list(range(0xf58,0xf5c)) + + list(range(0xf5d,0xf69)) + + list(range(0xf6a,0xf6d)) + + list(range(0xf71,0xf73)) + + [0xf74] + + list(range(0xf7a,0xf81)) + + list(range(0xf82,0xf85)) + + list(range(0xf86,0xf93)) + + list(range(0xf94,0xf98)) + + list(range(0xf99,0xf9d)) + + list(range(0xf9e,0xfa2)) + + list(range(0xfa3,0xfa7)) + + list(range(0xfa8,0xfac)) + + list(range(0xfad,0xfb9)) + + list(range(0xfba,0xfbd)) + + [0xfc6] + + list(range(0x1000,0x104a)) + + list(range(0x1050,0x109e)) + + list(range(0x10d0,0x10fb)) + + list(range(0x10fd,0x1100)) + + list(range(0x1200,0x1249)) + + list(range(0x124a,0x124e)) + + list(range(0x1250,0x1257)) + + [0x1258] + + list(range(0x125a,0x125e)) + + list(range(0x1260,0x1289)) + + list(range(0x128a,0x128e)) + + list(range(0x1290,0x12b1)) + + list(range(0x12b2,0x12b6)) + + list(range(0x12b8,0x12bf)) + + [0x12c0] + + list(range(0x12c2,0x12c6)) + + list(range(0x12c8,0x12d7)) + + list(range(0x12d8,0x1311)) + + list(range(0x1312,0x1316)) + + list(range(0x1318,0x135b)) + + list(range(0x135d,0x1360)) + + list(range(0x1380,0x1390)) + + list(range(0x13a0,0x13f5)) + + list(range(0x1401,0x166d)) + + list(range(0x166f,0x1680)) + + list(range(0x1681,0x169b)) + + list(range(0x16a0,0x16eb)) + + list(range(0x1700,0x170d)) + + list(range(0x170e,0x1715)) + + list(range(0x1720,0x1735)) + + list(range(0x1740,0x1754)) + + list(range(0x1760,0x176d)) + + list(range(0x176e,0x1771)) + + list(range(0x1772,0x1774)) + + list(range(0x1780,0x17b4)) + + list(range(0x17b6,0x17d4)) + + [0x17d7] + + list(range(0x17dc,0x17de)) + + list(range(0x17e0,0x17ea)) + + list(range(0x1810,0x181a)) + + list(range(0x1820,0x1878)) + + list(range(0x1880,0x18ab)) + + list(range(0x18b0,0x18f6)) + + list(range(0x1900,0x191d)) + + list(range(0x1920,0x192c)) + + list(range(0x1930,0x193c)) + + list(range(0x1946,0x196e)) + + list(range(0x1970,0x1975)) + + list(range(0x1980,0x19ac)) + + list(range(0x19b0,0x19ca)) + + list(range(0x19d0,0x19da)) + + list(range(0x1a00,0x1a1c)) + + list(range(0x1a20,0x1a5f)) + + list(range(0x1a60,0x1a7d)) + + 
list(range(0x1a7f,0x1a8a)) + + list(range(0x1a90,0x1a9a)) + + [0x1aa7] + + list(range(0x1b00,0x1b4c)) + + list(range(0x1b50,0x1b5a)) + + list(range(0x1b6b,0x1b74)) + + list(range(0x1b80,0x1bf4)) + + list(range(0x1c00,0x1c38)) + + list(range(0x1c40,0x1c4a)) + + list(range(0x1c4d,0x1c7e)) + + list(range(0x1cd0,0x1cd3)) + + list(range(0x1cd4,0x1cf7)) + + list(range(0x1d00,0x1d2c)) + + [0x1d2f] + + [0x1d3b] + + [0x1d4e] + + list(range(0x1d6b,0x1d78)) + + list(range(0x1d79,0x1d9b)) + + list(range(0x1dc0,0x1de7)) + + list(range(0x1dfc,0x1e00)) + + [0x1e01] + + [0x1e03] + + [0x1e05] + + [0x1e07] + + [0x1e09] + + [0x1e0b] + + [0x1e0d] + + [0x1e0f] + + [0x1e11] + + [0x1e13] + + [0x1e15] + + [0x1e17] + + [0x1e19] + + [0x1e1b] + + [0x1e1d] + + [0x1e1f] + + [0x1e21] + + [0x1e23] + + [0x1e25] + + [0x1e27] + + [0x1e29] + + [0x1e2b] + + [0x1e2d] + + [0x1e2f] + + [0x1e31] + + [0x1e33] + + [0x1e35] + + [0x1e37] + + [0x1e39] + + [0x1e3b] + + [0x1e3d] + + [0x1e3f] + + [0x1e41] + + [0x1e43] + + [0x1e45] + + [0x1e47] + + [0x1e49] + + [0x1e4b] + + [0x1e4d] + + [0x1e4f] + + [0x1e51] + + [0x1e53] + + [0x1e55] + + [0x1e57] + + [0x1e59] + + [0x1e5b] + + [0x1e5d] + + [0x1e5f] + + [0x1e61] + + [0x1e63] + + [0x1e65] + + [0x1e67] + + [0x1e69] + + [0x1e6b] + + [0x1e6d] + + [0x1e6f] + + [0x1e71] + + [0x1e73] + + [0x1e75] + + [0x1e77] + + [0x1e79] + + [0x1e7b] + + [0x1e7d] + + [0x1e7f] + + [0x1e81] + + [0x1e83] + + [0x1e85] + + [0x1e87] + + [0x1e89] + + [0x1e8b] + + [0x1e8d] + + [0x1e8f] + + [0x1e91] + + [0x1e93] + + list(range(0x1e95,0x1e9a)) + + list(range(0x1e9c,0x1e9e)) + + [0x1e9f] + + [0x1ea1] + + [0x1ea3] + + [0x1ea5] + + [0x1ea7] + + [0x1ea9] + + [0x1eab] + + [0x1ead] + + [0x1eaf] + + [0x1eb1] + + [0x1eb3] + + [0x1eb5] + + [0x1eb7] + + [0x1eb9] + + [0x1ebb] + + [0x1ebd] + + [0x1ebf] + + [0x1ec1] + + [0x1ec3] + + [0x1ec5] + + [0x1ec7] + + [0x1ec9] + + [0x1ecb] + + [0x1ecd] + + [0x1ecf] + + [0x1ed1] + + [0x1ed3] + + [0x1ed5] + + [0x1ed7] + + [0x1ed9] + + [0x1edb] + + [0x1edd] + + [0x1edf] + + [0x1ee1] + + [0x1ee3] + + [0x1ee5] + + [0x1ee7] + + [0x1ee9] + + [0x1eeb] + + [0x1eed] + + [0x1eef] + + [0x1ef1] + + [0x1ef3] + + [0x1ef5] + + [0x1ef7] + + [0x1ef9] + + [0x1efb] + + [0x1efd] + + list(range(0x1eff,0x1f08)) + + list(range(0x1f10,0x1f16)) + + list(range(0x1f20,0x1f28)) + + list(range(0x1f30,0x1f38)) + + list(range(0x1f40,0x1f46)) + + list(range(0x1f50,0x1f58)) + + list(range(0x1f60,0x1f68)) + + [0x1f70] + + [0x1f72] + + [0x1f74] + + [0x1f76] + + [0x1f78] + + [0x1f7a] + + [0x1f7c] + + list(range(0x1fb0,0x1fb2)) + + [0x1fb6] + + [0x1fc6] + + list(range(0x1fd0,0x1fd3)) + + list(range(0x1fd6,0x1fd8)) + + list(range(0x1fe0,0x1fe3)) + + list(range(0x1fe4,0x1fe8)) + + [0x1ff6] + + [0x214e] + + [0x2184] + + list(range(0x2c30,0x2c5f)) + + [0x2c61] + + list(range(0x2c65,0x2c67)) + + [0x2c68] + + [0x2c6a] + + [0x2c6c] + + [0x2c71] + + list(range(0x2c73,0x2c75)) + + list(range(0x2c76,0x2c7c)) + + [0x2c81] + + [0x2c83] + + [0x2c85] + + [0x2c87] + + [0x2c89] + + [0x2c8b] + + [0x2c8d] + + [0x2c8f] + + [0x2c91] + + [0x2c93] + + [0x2c95] + + [0x2c97] + + [0x2c99] + + [0x2c9b] + + [0x2c9d] + + [0x2c9f] + + [0x2ca1] + + [0x2ca3] + + [0x2ca5] + + [0x2ca7] + + [0x2ca9] + + [0x2cab] + + [0x2cad] + + [0x2caf] + + [0x2cb1] + + [0x2cb3] + + [0x2cb5] + + [0x2cb7] + + [0x2cb9] + + [0x2cbb] + + [0x2cbd] + + [0x2cbf] + + [0x2cc1] + + [0x2cc3] + + [0x2cc5] + + [0x2cc7] + + [0x2cc9] + + [0x2ccb] + + [0x2ccd] + + [0x2ccf] + + [0x2cd1] + + [0x2cd3] + + [0x2cd5] + + [0x2cd7] + + [0x2cd9] + + [0x2cdb] + + [0x2cdd] + + [0x2cdf] + + [0x2ce1] + + 
list(range(0x2ce3,0x2ce5)) + + [0x2cec] + + list(range(0x2cee,0x2cf2)) + + [0x2cf3] + + list(range(0x2d00,0x2d26)) + + [0x2d27] + + [0x2d2d] + + list(range(0x2d30,0x2d68)) + + list(range(0x2d7f,0x2d97)) + + list(range(0x2da0,0x2da7)) + + list(range(0x2da8,0x2daf)) + + list(range(0x2db0,0x2db7)) + + list(range(0x2db8,0x2dbf)) + + list(range(0x2dc0,0x2dc7)) + + list(range(0x2dc8,0x2dcf)) + + list(range(0x2dd0,0x2dd7)) + + list(range(0x2dd8,0x2ddf)) + + list(range(0x2de0,0x2e00)) + + [0x2e2f] + + list(range(0x3005,0x3008)) + + list(range(0x302a,0x302e)) + + [0x303c] + + list(range(0x3041,0x3097)) + + list(range(0x3099,0x309b)) + + list(range(0x309d,0x309f)) + + list(range(0x30a1,0x30fb)) + + list(range(0x30fc,0x30ff)) + + list(range(0x3105,0x312e)) + + list(range(0x31a0,0x31bb)) + + list(range(0x31f0,0x3200)) + + list(range(0x3400,0x4db6)) + + list(range(0x4e00,0x9fcd)) + + list(range(0xa000,0xa48d)) + + list(range(0xa4d0,0xa4fe)) + + list(range(0xa500,0xa60d)) + + list(range(0xa610,0xa62c)) + + [0xa641] + + [0xa643] + + [0xa645] + + [0xa647] + + [0xa649] + + [0xa64b] + + [0xa64d] + + [0xa64f] + + [0xa651] + + [0xa653] + + [0xa655] + + [0xa657] + + [0xa659] + + [0xa65b] + + [0xa65d] + + [0xa65f] + + [0xa661] + + [0xa663] + + [0xa665] + + [0xa667] + + [0xa669] + + [0xa66b] + + list(range(0xa66d,0xa670)) + + list(range(0xa674,0xa67e)) + + [0xa67f] + + [0xa681] + + [0xa683] + + [0xa685] + + [0xa687] + + [0xa689] + + [0xa68b] + + [0xa68d] + + [0xa68f] + + [0xa691] + + [0xa693] + + [0xa695] + + [0xa697] + + list(range(0xa69f,0xa6e6)) + + list(range(0xa6f0,0xa6f2)) + + list(range(0xa717,0xa720)) + + [0xa723] + + [0xa725] + + [0xa727] + + [0xa729] + + [0xa72b] + + [0xa72d] + + list(range(0xa72f,0xa732)) + + [0xa733] + + [0xa735] + + [0xa737] + + [0xa739] + + [0xa73b] + + [0xa73d] + + [0xa73f] + + [0xa741] + + [0xa743] + + [0xa745] + + [0xa747] + + [0xa749] + + [0xa74b] + + [0xa74d] + + [0xa74f] + + [0xa751] + + [0xa753] + + [0xa755] + + [0xa757] + + [0xa759] + + [0xa75b] + + [0xa75d] + + [0xa75f] + + [0xa761] + + [0xa763] + + [0xa765] + + [0xa767] + + [0xa769] + + [0xa76b] + + [0xa76d] + + [0xa76f] + + list(range(0xa771,0xa779)) + + [0xa77a] + + [0xa77c] + + [0xa77f] + + [0xa781] + + [0xa783] + + [0xa785] + + list(range(0xa787,0xa789)) + + [0xa78c] + + [0xa78e] + + [0xa791] + + [0xa793] + + [0xa7a1] + + [0xa7a3] + + [0xa7a5] + + [0xa7a7] + + [0xa7a9] + + list(range(0xa7fa,0xa828)) + + list(range(0xa840,0xa874)) + + list(range(0xa880,0xa8c5)) + + list(range(0xa8d0,0xa8da)) + + list(range(0xa8e0,0xa8f8)) + + [0xa8fb] + + list(range(0xa900,0xa92e)) + + list(range(0xa930,0xa954)) + + list(range(0xa980,0xa9c1)) + + list(range(0xa9cf,0xa9da)) + + list(range(0xaa00,0xaa37)) + + list(range(0xaa40,0xaa4e)) + + list(range(0xaa50,0xaa5a)) + + list(range(0xaa60,0xaa77)) + + list(range(0xaa7a,0xaa7c)) + + list(range(0xaa80,0xaac3)) + + list(range(0xaadb,0xaade)) + + list(range(0xaae0,0xaaf0)) + + list(range(0xaaf2,0xaaf7)) + + list(range(0xab01,0xab07)) + + list(range(0xab09,0xab0f)) + + list(range(0xab11,0xab17)) + + list(range(0xab20,0xab27)) + + list(range(0xab28,0xab2f)) + + list(range(0xabc0,0xabeb)) + + list(range(0xabec,0xabee)) + + list(range(0xabf0,0xabfa)) + + list(range(0xac00,0xd7a4)) + + list(range(0xfa0e,0xfa10)) + + [0xfa11] + + list(range(0xfa13,0xfa15)) + + [0xfa1f] + + [0xfa21] + + list(range(0xfa23,0xfa25)) + + list(range(0xfa27,0xfa2a)) + + [0xfb1e] + + list(range(0xfe20,0xfe27)) + + [0xfe73] + + list(range(0x10000,0x1000c)) + + list(range(0x1000d,0x10027)) + + list(range(0x10028,0x1003b)) + 
+ list(range(0x1003c,0x1003e)) + + list(range(0x1003f,0x1004e)) + + list(range(0x10050,0x1005e)) + + list(range(0x10080,0x100fb)) + + [0x101fd] + + list(range(0x10280,0x1029d)) + + list(range(0x102a0,0x102d1)) + + list(range(0x10300,0x1031f)) + + list(range(0x10330,0x10341)) + + list(range(0x10342,0x1034a)) + + list(range(0x10380,0x1039e)) + + list(range(0x103a0,0x103c4)) + + list(range(0x103c8,0x103d0)) + + list(range(0x10428,0x1049e)) + + list(range(0x104a0,0x104aa)) + + list(range(0x10800,0x10806)) + + [0x10808] + + list(range(0x1080a,0x10836)) + + list(range(0x10837,0x10839)) + + [0x1083c] + + list(range(0x1083f,0x10856)) + + list(range(0x10900,0x10916)) + + list(range(0x10920,0x1093a)) + + list(range(0x10980,0x109b8)) + + list(range(0x109be,0x109c0)) + + list(range(0x10a00,0x10a04)) + + list(range(0x10a05,0x10a07)) + + list(range(0x10a0c,0x10a14)) + + list(range(0x10a15,0x10a18)) + + list(range(0x10a19,0x10a34)) + + list(range(0x10a38,0x10a3b)) + + [0x10a3f] + + list(range(0x10a60,0x10a7d)) + + list(range(0x10b00,0x10b36)) + + list(range(0x10b40,0x10b56)) + + list(range(0x10b60,0x10b73)) + + list(range(0x10c00,0x10c49)) + + list(range(0x11000,0x11047)) + + list(range(0x11066,0x11070)) + + list(range(0x11080,0x110bb)) + + list(range(0x110d0,0x110e9)) + + list(range(0x110f0,0x110fa)) + + list(range(0x11100,0x11135)) + + list(range(0x11136,0x11140)) + + list(range(0x11180,0x111c5)) + + list(range(0x111d0,0x111da)) + + list(range(0x11680,0x116b8)) + + list(range(0x116c0,0x116ca)) + + list(range(0x12000,0x1236f)) + + list(range(0x13000,0x1342f)) + + list(range(0x16800,0x16a39)) + + list(range(0x16f00,0x16f45)) + + list(range(0x16f50,0x16f7f)) + + list(range(0x16f8f,0x16fa0)) + + list(range(0x1b000,0x1b002)) + + list(range(0x20000,0x2a6d7)) + + list(range(0x2a700,0x2b735)) + + list(range(0x2b740,0x2b81e)) + ), + 'CONTEXTJ': frozenset( + list(range(0x200c,0x200e)) + ), + 'CONTEXTO': frozenset( + [0xb7] + + [0x375] + + list(range(0x5f3,0x5f5)) + + list(range(0x660,0x66a)) + + list(range(0x6f0,0x6fa)) + + [0x30fb] + ), + 'DISALLOWED': frozenset( + list(range(0x0,0x2d)) + + list(range(0x2e,0x30)) + + list(range(0x3a,0x61)) + + list(range(0x7b,0xb7)) + + list(range(0xb8,0xdf)) + + [0xf7] + + [0x100] + + [0x102] + + [0x104] + + [0x106] + + [0x108] + + [0x10a] + + [0x10c] + + [0x10e] + + [0x110] + + [0x112] + + [0x114] + + [0x116] + + [0x118] + + [0x11a] + + [0x11c] + + [0x11e] + + [0x120] + + [0x122] + + [0x124] + + [0x126] + + [0x128] + + [0x12a] + + [0x12c] + + [0x12e] + + [0x130] + + list(range(0x132,0x135)) + + [0x136] + + [0x139] + + [0x13b] + + [0x13d] + + list(range(0x13f,0x142)) + + [0x143] + + [0x145] + + [0x147] + + list(range(0x149,0x14b)) + + [0x14c] + + [0x14e] + + [0x150] + + [0x152] + + [0x154] + + [0x156] + + [0x158] + + [0x15a] + + [0x15c] + + [0x15e] + + [0x160] + + [0x162] + + [0x164] + + [0x166] + + [0x168] + + [0x16a] + + [0x16c] + + [0x16e] + + [0x170] + + [0x172] + + [0x174] + + [0x176] + + list(range(0x178,0x17a)) + + [0x17b] + + [0x17d] + + [0x17f] + + list(range(0x181,0x183)) + + [0x184] + + list(range(0x186,0x188)) + + list(range(0x189,0x18c)) + + list(range(0x18e,0x192)) + + list(range(0x193,0x195)) + + list(range(0x196,0x199)) + + list(range(0x19c,0x19e)) + + list(range(0x19f,0x1a1)) + + [0x1a2] + + [0x1a4] + + list(range(0x1a6,0x1a8)) + + [0x1a9] + + [0x1ac] + + list(range(0x1ae,0x1b0)) + + list(range(0x1b1,0x1b4)) + + [0x1b5] + + list(range(0x1b7,0x1b9)) + + [0x1bc] + + list(range(0x1c4,0x1ce)) + + [0x1cf] + + [0x1d1] + + [0x1d3] + + [0x1d5] + + [0x1d7] + + [0x1d9] 
+ + [0x1db] + + [0x1de] + + [0x1e0] + + [0x1e2] + + [0x1e4] + + [0x1e6] + + [0x1e8] + + [0x1ea] + + [0x1ec] + + [0x1ee] + + list(range(0x1f1,0x1f5)) + + list(range(0x1f6,0x1f9)) + + [0x1fa] + + [0x1fc] + + [0x1fe] + + [0x200] + + [0x202] + + [0x204] + + [0x206] + + [0x208] + + [0x20a] + + [0x20c] + + [0x20e] + + [0x210] + + [0x212] + + [0x214] + + [0x216] + + [0x218] + + [0x21a] + + [0x21c] + + [0x21e] + + [0x220] + + [0x222] + + [0x224] + + [0x226] + + [0x228] + + [0x22a] + + [0x22c] + + [0x22e] + + [0x230] + + [0x232] + + list(range(0x23a,0x23c)) + + list(range(0x23d,0x23f)) + + [0x241] + + list(range(0x243,0x247)) + + [0x248] + + [0x24a] + + [0x24c] + + [0x24e] + + list(range(0x2b0,0x2b9)) + + list(range(0x2c2,0x2c6)) + + list(range(0x2d2,0x2ec)) + + [0x2ed] + + list(range(0x2ef,0x300)) + + list(range(0x340,0x342)) + + list(range(0x343,0x346)) + + [0x34f] + + [0x370] + + [0x372] + + [0x374] + + [0x376] + + [0x37a] + + [0x37e] + + list(range(0x384,0x38b)) + + [0x38c] + + list(range(0x38e,0x390)) + + list(range(0x391,0x3a2)) + + list(range(0x3a3,0x3ac)) + + list(range(0x3cf,0x3d7)) + + [0x3d8] + + [0x3da] + + [0x3dc] + + [0x3de] + + [0x3e0] + + [0x3e2] + + [0x3e4] + + [0x3e6] + + [0x3e8] + + [0x3ea] + + [0x3ec] + + [0x3ee] + + list(range(0x3f0,0x3f3)) + + list(range(0x3f4,0x3f8)) + + list(range(0x3f9,0x3fb)) + + list(range(0x3fd,0x430)) + + [0x460] + + [0x462] + + [0x464] + + [0x466] + + [0x468] + + [0x46a] + + [0x46c] + + [0x46e] + + [0x470] + + [0x472] + + [0x474] + + [0x476] + + [0x478] + + [0x47a] + + [0x47c] + + [0x47e] + + [0x480] + + [0x482] + + list(range(0x488,0x48b)) + + [0x48c] + + [0x48e] + + [0x490] + + [0x492] + + [0x494] + + [0x496] + + [0x498] + + [0x49a] + + [0x49c] + + [0x49e] + + [0x4a0] + + [0x4a2] + + [0x4a4] + + [0x4a6] + + [0x4a8] + + [0x4aa] + + [0x4ac] + + [0x4ae] + + [0x4b0] + + [0x4b2] + + [0x4b4] + + [0x4b6] + + [0x4b8] + + [0x4ba] + + [0x4bc] + + [0x4be] + + list(range(0x4c0,0x4c2)) + + [0x4c3] + + [0x4c5] + + [0x4c7] + + [0x4c9] + + [0x4cb] + + [0x4cd] + + [0x4d0] + + [0x4d2] + + [0x4d4] + + [0x4d6] + + [0x4d8] + + [0x4da] + + [0x4dc] + + [0x4de] + + [0x4e0] + + [0x4e2] + + [0x4e4] + + [0x4e6] + + [0x4e8] + + [0x4ea] + + [0x4ec] + + [0x4ee] + + [0x4f0] + + [0x4f2] + + [0x4f4] + + [0x4f6] + + [0x4f8] + + [0x4fa] + + [0x4fc] + + [0x4fe] + + [0x500] + + [0x502] + + [0x504] + + [0x506] + + [0x508] + + [0x50a] + + [0x50c] + + [0x50e] + + [0x510] + + [0x512] + + [0x514] + + [0x516] + + [0x518] + + [0x51a] + + [0x51c] + + [0x51e] + + [0x520] + + [0x522] + + [0x524] + + [0x526] + + list(range(0x531,0x557)) + + list(range(0x55a,0x560)) + + [0x587] + + list(range(0x589,0x58b)) + + [0x58f] + + [0x5be] + + [0x5c0] + + [0x5c3] + + [0x5c6] + + list(range(0x600,0x605)) + + list(range(0x606,0x610)) + + list(range(0x61b,0x61d)) + + list(range(0x61e,0x620)) + + [0x640] + + list(range(0x66a,0x66e)) + + list(range(0x675,0x679)) + + [0x6d4] + + list(range(0x6dd,0x6df)) + + [0x6e9] + + list(range(0x700,0x70e)) + + [0x70f] + + list(range(0x7f6,0x7fb)) + + list(range(0x830,0x83f)) + + [0x85e] + + list(range(0x958,0x960)) + + list(range(0x964,0x966)) + + [0x970] + + list(range(0x9dc,0x9de)) + + [0x9df] + + list(range(0x9f2,0x9fc)) + + [0xa33] + + [0xa36] + + list(range(0xa59,0xa5c)) + + [0xa5e] + + list(range(0xaf0,0xaf2)) + + list(range(0xb5c,0xb5e)) + + [0xb70] + + list(range(0xb72,0xb78)) + + list(range(0xbf0,0xbfb)) + + list(range(0xc78,0xc80)) + + list(range(0xd70,0xd76)) + + [0xd79] + + [0xdf4] + + [0xe33] + + [0xe3f] + + [0xe4f] + + list(range(0xe5a,0xe5c)) + + [0xeb3] + + 
list(range(0xedc,0xede)) + + list(range(0xf01,0xf0b)) + + list(range(0xf0c,0xf18)) + + list(range(0xf1a,0xf20)) + + list(range(0xf2a,0xf35)) + + [0xf36] + + [0xf38] + + list(range(0xf3a,0xf3e)) + + [0xf43] + + [0xf4d] + + [0xf52] + + [0xf57] + + [0xf5c] + + [0xf69] + + [0xf73] + + list(range(0xf75,0xf7a)) + + [0xf81] + + [0xf85] + + [0xf93] + + [0xf9d] + + [0xfa2] + + [0xfa7] + + [0xfac] + + [0xfb9] + + list(range(0xfbe,0xfc6)) + + list(range(0xfc7,0xfcd)) + + list(range(0xfce,0xfdb)) + + list(range(0x104a,0x1050)) + + list(range(0x109e,0x10c6)) + + [0x10c7] + + [0x10cd] + + list(range(0x10fb,0x10fd)) + + list(range(0x1100,0x1200)) + + list(range(0x1360,0x137d)) + + list(range(0x1390,0x139a)) + + [0x1400] + + list(range(0x166d,0x166f)) + + [0x1680] + + list(range(0x169b,0x169d)) + + list(range(0x16eb,0x16f1)) + + list(range(0x1735,0x1737)) + + list(range(0x17b4,0x17b6)) + + list(range(0x17d4,0x17d7)) + + list(range(0x17d8,0x17dc)) + + list(range(0x17f0,0x17fa)) + + list(range(0x1800,0x180f)) + + [0x1940] + + list(range(0x1944,0x1946)) + + [0x19da] + + list(range(0x19de,0x1a00)) + + list(range(0x1a1e,0x1a20)) + + list(range(0x1aa0,0x1aa7)) + + list(range(0x1aa8,0x1aae)) + + list(range(0x1b5a,0x1b6b)) + + list(range(0x1b74,0x1b7d)) + + list(range(0x1bfc,0x1c00)) + + list(range(0x1c3b,0x1c40)) + + list(range(0x1c7e,0x1c80)) + + list(range(0x1cc0,0x1cc8)) + + [0x1cd3] + + list(range(0x1d2c,0x1d2f)) + + list(range(0x1d30,0x1d3b)) + + list(range(0x1d3c,0x1d4e)) + + list(range(0x1d4f,0x1d6b)) + + [0x1d78] + + list(range(0x1d9b,0x1dc0)) + + [0x1e00] + + [0x1e02] + + [0x1e04] + + [0x1e06] + + [0x1e08] + + [0x1e0a] + + [0x1e0c] + + [0x1e0e] + + [0x1e10] + + [0x1e12] + + [0x1e14] + + [0x1e16] + + [0x1e18] + + [0x1e1a] + + [0x1e1c] + + [0x1e1e] + + [0x1e20] + + [0x1e22] + + [0x1e24] + + [0x1e26] + + [0x1e28] + + [0x1e2a] + + [0x1e2c] + + [0x1e2e] + + [0x1e30] + + [0x1e32] + + [0x1e34] + + [0x1e36] + + [0x1e38] + + [0x1e3a] + + [0x1e3c] + + [0x1e3e] + + [0x1e40] + + [0x1e42] + + [0x1e44] + + [0x1e46] + + [0x1e48] + + [0x1e4a] + + [0x1e4c] + + [0x1e4e] + + [0x1e50] + + [0x1e52] + + [0x1e54] + + [0x1e56] + + [0x1e58] + + [0x1e5a] + + [0x1e5c] + + [0x1e5e] + + [0x1e60] + + [0x1e62] + + [0x1e64] + + [0x1e66] + + [0x1e68] + + [0x1e6a] + + [0x1e6c] + + [0x1e6e] + + [0x1e70] + + [0x1e72] + + [0x1e74] + + [0x1e76] + + [0x1e78] + + [0x1e7a] + + [0x1e7c] + + [0x1e7e] + + [0x1e80] + + [0x1e82] + + [0x1e84] + + [0x1e86] + + [0x1e88] + + [0x1e8a] + + [0x1e8c] + + [0x1e8e] + + [0x1e90] + + [0x1e92] + + [0x1e94] + + list(range(0x1e9a,0x1e9c)) + + [0x1e9e] + + [0x1ea0] + + [0x1ea2] + + [0x1ea4] + + [0x1ea6] + + [0x1ea8] + + [0x1eaa] + + [0x1eac] + + [0x1eae] + + [0x1eb0] + + [0x1eb2] + + [0x1eb4] + + [0x1eb6] + + [0x1eb8] + + [0x1eba] + + [0x1ebc] + + [0x1ebe] + + [0x1ec0] + + [0x1ec2] + + [0x1ec4] + + [0x1ec6] + + [0x1ec8] + + [0x1eca] + + [0x1ecc] + + [0x1ece] + + [0x1ed0] + + [0x1ed2] + + [0x1ed4] + + [0x1ed6] + + [0x1ed8] + + [0x1eda] + + [0x1edc] + + [0x1ede] + + [0x1ee0] + + [0x1ee2] + + [0x1ee4] + + [0x1ee6] + + [0x1ee8] + + [0x1eea] + + [0x1eec] + + [0x1eee] + + [0x1ef0] + + [0x1ef2] + + [0x1ef4] + + [0x1ef6] + + [0x1ef8] + + [0x1efa] + + [0x1efc] + + [0x1efe] + + list(range(0x1f08,0x1f10)) + + list(range(0x1f18,0x1f1e)) + + list(range(0x1f28,0x1f30)) + + list(range(0x1f38,0x1f40)) + + list(range(0x1f48,0x1f4e)) + + [0x1f59] + + [0x1f5b] + + [0x1f5d] + + [0x1f5f] + + list(range(0x1f68,0x1f70)) + + [0x1f71] + + [0x1f73] + + [0x1f75] + + [0x1f77] + + [0x1f79] + + [0x1f7b] + + [0x1f7d] + + 
list(range(0x1f80,0x1fb0)) + + list(range(0x1fb2,0x1fb5)) + + list(range(0x1fb7,0x1fc5)) + + list(range(0x1fc7,0x1fd0)) + + [0x1fd3] + + list(range(0x1fd8,0x1fdc)) + + list(range(0x1fdd,0x1fe0)) + + [0x1fe3] + + list(range(0x1fe8,0x1ff0)) + + list(range(0x1ff2,0x1ff5)) + + list(range(0x1ff7,0x1fff)) + + list(range(0x2000,0x200c)) + + list(range(0x200e,0x2065)) + + list(range(0x2066,0x2072)) + + list(range(0x2074,0x208f)) + + list(range(0x2090,0x209d)) + + list(range(0x20a0,0x20bb)) + + list(range(0x20d0,0x20f1)) + + list(range(0x2100,0x214e)) + + list(range(0x214f,0x2184)) + + list(range(0x2185,0x218a)) + + list(range(0x2190,0x23f4)) + + list(range(0x2400,0x2427)) + + list(range(0x2440,0x244b)) + + list(range(0x2460,0x2700)) + + list(range(0x2701,0x2b4d)) + + list(range(0x2b50,0x2b5a)) + + list(range(0x2c00,0x2c2f)) + + [0x2c60] + + list(range(0x2c62,0x2c65)) + + [0x2c67] + + [0x2c69] + + [0x2c6b] + + list(range(0x2c6d,0x2c71)) + + [0x2c72] + + [0x2c75] + + list(range(0x2c7c,0x2c81)) + + [0x2c82] + + [0x2c84] + + [0x2c86] + + [0x2c88] + + [0x2c8a] + + [0x2c8c] + + [0x2c8e] + + [0x2c90] + + [0x2c92] + + [0x2c94] + + [0x2c96] + + [0x2c98] + + [0x2c9a] + + [0x2c9c] + + [0x2c9e] + + [0x2ca0] + + [0x2ca2] + + [0x2ca4] + + [0x2ca6] + + [0x2ca8] + + [0x2caa] + + [0x2cac] + + [0x2cae] + + [0x2cb0] + + [0x2cb2] + + [0x2cb4] + + [0x2cb6] + + [0x2cb8] + + [0x2cba] + + [0x2cbc] + + [0x2cbe] + + [0x2cc0] + + [0x2cc2] + + [0x2cc4] + + [0x2cc6] + + [0x2cc8] + + [0x2cca] + + [0x2ccc] + + [0x2cce] + + [0x2cd0] + + [0x2cd2] + + [0x2cd4] + + [0x2cd6] + + [0x2cd8] + + [0x2cda] + + [0x2cdc] + + [0x2cde] + + [0x2ce0] + + [0x2ce2] + + list(range(0x2ce5,0x2cec)) + + [0x2ced] + + [0x2cf2] + + list(range(0x2cf9,0x2d00)) + + list(range(0x2d6f,0x2d71)) + + list(range(0x2e00,0x2e2f)) + + list(range(0x2e30,0x2e3c)) + + list(range(0x2e80,0x2e9a)) + + list(range(0x2e9b,0x2ef4)) + + list(range(0x2f00,0x2fd6)) + + list(range(0x2ff0,0x2ffc)) + + list(range(0x3000,0x3005)) + + list(range(0x3008,0x302a)) + + list(range(0x302e,0x303c)) + + list(range(0x303d,0x3040)) + + list(range(0x309b,0x309d)) + + list(range(0x309f,0x30a1)) + + [0x30ff] + + list(range(0x3131,0x318f)) + + list(range(0x3190,0x31a0)) + + list(range(0x31c0,0x31e4)) + + list(range(0x3200,0x321f)) + + list(range(0x3220,0x32ff)) + + list(range(0x3300,0x3400)) + + list(range(0x4dc0,0x4e00)) + + list(range(0xa490,0xa4c7)) + + list(range(0xa4fe,0xa500)) + + list(range(0xa60d,0xa610)) + + [0xa640] + + [0xa642] + + [0xa644] + + [0xa646] + + [0xa648] + + [0xa64a] + + [0xa64c] + + [0xa64e] + + [0xa650] + + [0xa652] + + [0xa654] + + [0xa656] + + [0xa658] + + [0xa65a] + + [0xa65c] + + [0xa65e] + + [0xa660] + + [0xa662] + + [0xa664] + + [0xa666] + + [0xa668] + + [0xa66a] + + [0xa66c] + + list(range(0xa670,0xa674)) + + [0xa67e] + + [0xa680] + + [0xa682] + + [0xa684] + + [0xa686] + + [0xa688] + + [0xa68a] + + [0xa68c] + + [0xa68e] + + [0xa690] + + [0xa692] + + [0xa694] + + [0xa696] + + list(range(0xa6e6,0xa6f0)) + + list(range(0xa6f2,0xa6f8)) + + list(range(0xa700,0xa717)) + + list(range(0xa720,0xa723)) + + [0xa724] + + [0xa726] + + [0xa728] + + [0xa72a] + + [0xa72c] + + [0xa72e] + + [0xa732] + + [0xa734] + + [0xa736] + + [0xa738] + + [0xa73a] + + [0xa73c] + + [0xa73e] + + [0xa740] + + [0xa742] + + [0xa744] + + [0xa746] + + [0xa748] + + [0xa74a] + + [0xa74c] + + [0xa74e] + + [0xa750] + + [0xa752] + + [0xa754] + + [0xa756] + + [0xa758] + + [0xa75a] + + [0xa75c] + + [0xa75e] + + [0xa760] + + [0xa762] + + [0xa764] + + [0xa766] + + [0xa768] + + [0xa76a] + + [0xa76c] + + [0xa76e] 
+ + [0xa770] + + [0xa779] + + [0xa77b] + + list(range(0xa77d,0xa77f)) + + [0xa780] + + [0xa782] + + [0xa784] + + [0xa786] + + list(range(0xa789,0xa78c)) + + [0xa78d] + + [0xa790] + + [0xa792] + + [0xa7a0] + + [0xa7a2] + + [0xa7a4] + + [0xa7a6] + + [0xa7a8] + + [0xa7aa] + + list(range(0xa7f8,0xa7fa)) + + list(range(0xa828,0xa82c)) + + list(range(0xa830,0xa83a)) + + list(range(0xa874,0xa878)) + + list(range(0xa8ce,0xa8d0)) + + list(range(0xa8f8,0xa8fb)) + + list(range(0xa92e,0xa930)) + + list(range(0xa95f,0xa97d)) + + list(range(0xa9c1,0xa9ce)) + + list(range(0xa9de,0xa9e0)) + + list(range(0xaa5c,0xaa60)) + + list(range(0xaa77,0xaa7a)) + + list(range(0xaade,0xaae0)) + + list(range(0xaaf0,0xaaf2)) + + [0xabeb] + + list(range(0xd7b0,0xd7c7)) + + list(range(0xd7cb,0xd7fc)) + + list(range(0xd800,0xfa0e)) + + [0xfa10] + + [0xfa12] + + list(range(0xfa15,0xfa1f)) + + [0xfa20] + + [0xfa22] + + list(range(0xfa25,0xfa27)) + + list(range(0xfa2a,0xfa6e)) + + list(range(0xfa70,0xfada)) + + list(range(0xfb00,0xfb07)) + + list(range(0xfb13,0xfb18)) + + [0xfb1d] + + list(range(0xfb1f,0xfb37)) + + list(range(0xfb38,0xfb3d)) + + [0xfb3e] + + list(range(0xfb40,0xfb42)) + + list(range(0xfb43,0xfb45)) + + list(range(0xfb46,0xfbc2)) + + list(range(0xfbd3,0xfd40)) + + list(range(0xfd50,0xfd90)) + + list(range(0xfd92,0xfdc8)) + + list(range(0xfdd0,0xfdfe)) + + list(range(0xfe00,0xfe1a)) + + list(range(0xfe30,0xfe53)) + + list(range(0xfe54,0xfe67)) + + list(range(0xfe68,0xfe6c)) + + list(range(0xfe70,0xfe73)) + + [0xfe74] + + list(range(0xfe76,0xfefd)) + + [0xfeff] + + list(range(0xff01,0xffbf)) + + list(range(0xffc2,0xffc8)) + + list(range(0xffca,0xffd0)) + + list(range(0xffd2,0xffd8)) + + list(range(0xffda,0xffdd)) + + list(range(0xffe0,0xffe7)) + + list(range(0xffe8,0xffef)) + + list(range(0xfff9,0x10000)) + + list(range(0x10100,0x10103)) + + list(range(0x10107,0x10134)) + + list(range(0x10137,0x1018b)) + + list(range(0x10190,0x1019c)) + + list(range(0x101d0,0x101fd)) + + list(range(0x10320,0x10324)) + + [0x10341] + + [0x1034a] + + [0x1039f] + + list(range(0x103d0,0x103d6)) + + list(range(0x10400,0x10428)) + + list(range(0x10857,0x10860)) + + list(range(0x10916,0x1091c)) + + [0x1091f] + + [0x1093f] + + list(range(0x10a40,0x10a48)) + + list(range(0x10a50,0x10a59)) + + list(range(0x10a7d,0x10a80)) + + list(range(0x10b39,0x10b40)) + + list(range(0x10b58,0x10b60)) + + list(range(0x10b78,0x10b80)) + + list(range(0x10e60,0x10e7f)) + + list(range(0x11047,0x1104e)) + + list(range(0x11052,0x11066)) + + list(range(0x110bb,0x110c2)) + + list(range(0x11140,0x11144)) + + list(range(0x111c5,0x111c9)) + + list(range(0x12400,0x12463)) + + list(range(0x12470,0x12474)) + + list(range(0x1d000,0x1d0f6)) + + list(range(0x1d100,0x1d127)) + + list(range(0x1d129,0x1d1de)) + + list(range(0x1d200,0x1d246)) + + list(range(0x1d300,0x1d357)) + + list(range(0x1d360,0x1d372)) + + list(range(0x1d400,0x1d455)) + + list(range(0x1d456,0x1d49d)) + + list(range(0x1d49e,0x1d4a0)) + + [0x1d4a2] + + list(range(0x1d4a5,0x1d4a7)) + + list(range(0x1d4a9,0x1d4ad)) + + list(range(0x1d4ae,0x1d4ba)) + + [0x1d4bb] + + list(range(0x1d4bd,0x1d4c4)) + + list(range(0x1d4c5,0x1d506)) + + list(range(0x1d507,0x1d50b)) + + list(range(0x1d50d,0x1d515)) + + list(range(0x1d516,0x1d51d)) + + list(range(0x1d51e,0x1d53a)) + + list(range(0x1d53b,0x1d53f)) + + list(range(0x1d540,0x1d545)) + + [0x1d546] + + list(range(0x1d54a,0x1d551)) + + list(range(0x1d552,0x1d6a6)) + + list(range(0x1d6a8,0x1d7cc)) + + list(range(0x1d7ce,0x1d800)) + + list(range(0x1ee00,0x1ee04)) + + 
list(range(0x1ee05,0x1ee20)) + + list(range(0x1ee21,0x1ee23)) + + [0x1ee24] + + [0x1ee27] + + list(range(0x1ee29,0x1ee33)) + + list(range(0x1ee34,0x1ee38)) + + [0x1ee39] + + [0x1ee3b] + + [0x1ee42] + + [0x1ee47] + + [0x1ee49] + + [0x1ee4b] + + list(range(0x1ee4d,0x1ee50)) + + list(range(0x1ee51,0x1ee53)) + + [0x1ee54] + + [0x1ee57] + + [0x1ee59] + + [0x1ee5b] + + [0x1ee5d] + + [0x1ee5f] + + list(range(0x1ee61,0x1ee63)) + + [0x1ee64] + + list(range(0x1ee67,0x1ee6b)) + + list(range(0x1ee6c,0x1ee73)) + + list(range(0x1ee74,0x1ee78)) + + list(range(0x1ee79,0x1ee7d)) + + [0x1ee7e] + + list(range(0x1ee80,0x1ee8a)) + + list(range(0x1ee8b,0x1ee9c)) + + list(range(0x1eea1,0x1eea4)) + + list(range(0x1eea5,0x1eeaa)) + + list(range(0x1eeab,0x1eebc)) + + list(range(0x1eef0,0x1eef2)) + + list(range(0x1f000,0x1f02c)) + + list(range(0x1f030,0x1f094)) + + list(range(0x1f0a0,0x1f0af)) + + list(range(0x1f0b1,0x1f0bf)) + + list(range(0x1f0c1,0x1f0d0)) + + list(range(0x1f0d1,0x1f0e0)) + + list(range(0x1f100,0x1f10b)) + + list(range(0x1f110,0x1f12f)) + + list(range(0x1f130,0x1f16c)) + + list(range(0x1f170,0x1f19b)) + + list(range(0x1f1e6,0x1f203)) + + list(range(0x1f210,0x1f23b)) + + list(range(0x1f240,0x1f249)) + + list(range(0x1f250,0x1f252)) + + list(range(0x1f300,0x1f321)) + + list(range(0x1f330,0x1f336)) + + list(range(0x1f337,0x1f37d)) + + list(range(0x1f380,0x1f394)) + + list(range(0x1f3a0,0x1f3c5)) + + list(range(0x1f3c6,0x1f3cb)) + + list(range(0x1f3e0,0x1f3f1)) + + list(range(0x1f400,0x1f43f)) + + [0x1f440] + + list(range(0x1f442,0x1f4f8)) + + list(range(0x1f4f9,0x1f4fd)) + + list(range(0x1f500,0x1f53e)) + + list(range(0x1f540,0x1f544)) + + list(range(0x1f550,0x1f568)) + + list(range(0x1f5fb,0x1f641)) + + list(range(0x1f645,0x1f650)) + + list(range(0x1f680,0x1f6c6)) + + list(range(0x1f700,0x1f774)) + + list(range(0x1fffe,0x20000)) + + list(range(0x2f800,0x2fa1e)) + + list(range(0x2fffe,0x30000)) + + list(range(0x3fffe,0x40000)) + + list(range(0x4fffe,0x50000)) + + list(range(0x5fffe,0x60000)) + + list(range(0x6fffe,0x70000)) + + list(range(0x7fffe,0x80000)) + + list(range(0x8fffe,0x90000)) + + list(range(0x9fffe,0xa0000)) + + list(range(0xafffe,0xb0000)) + + list(range(0xbfffe,0xc0000)) + + list(range(0xcfffe,0xd0000)) + + list(range(0xdfffe,0xe0000)) + + [0xe0001] + + list(range(0xe0020,0xe0080)) + + list(range(0xe0100,0xe01f0)) + + list(range(0xefffe,0x110000)) + ), +} diff --git a/Darwin/lib/python3.5/site-packages/idna/uts46data.py b/Darwin/lib/python3.5/site-packages/idna/uts46data.py new file mode 100644 index 0000000..64e2c68 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/idna/uts46data.py @@ -0,0 +1,7267 @@ +# This file is automatically generated by tools/build-uts46data.py +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + +uts46data = ( + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + 
(0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + (0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' Ì„'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' Ì'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1â„4'), + (0xBD, 'M', u'1â„2'), + (0xBE, 'M', u'3â„4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'Ã¥'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'Ä'), + 
(0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'Ä…'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'Ä‹'), + (0x10B, 'V'), + (0x10C, 'M', u'Ä'), + (0x10D, 'V'), + (0x10E, 'M', u'Ä'), + (0x10F, 'V'), + (0x110, 'M', u'Ä‘'), + (0x111, 'V'), + (0x112, 'M', u'Ä“'), + (0x113, 'V'), + (0x114, 'M', u'Ä•'), + (0x115, 'V'), + (0x116, 'M', u'Ä—'), + (0x117, 'V'), + (0x118, 'M', u'Ä™'), + (0x119, 'V'), + (0x11A, 'M', u'Ä›'), + (0x11B, 'V'), + (0x11C, 'M', u'Ä'), + (0x11D, 'V'), + (0x11E, 'M', u'ÄŸ'), + (0x11F, 'V'), + (0x120, 'M', u'Ä¡'), + (0x121, 'V'), + (0x122, 'M', u'Ä£'), + (0x123, 'V'), + (0x124, 'M', u'Ä¥'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'Ä©'), + (0x129, 'V'), + (0x12A, 'M', u'Ä«'), + (0x12B, 'V'), + (0x12C, 'M', u'Ä­'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'Ä·'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'Å‚'), + (0x142, 'V'), + (0x143, 'M', u'Å„'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'Å‹'), + (0x14B, 'V'), + (0x14C, 'M', u'Å'), + (0x14D, 'V'), + (0x14E, 'M', u'Å'), + (0x14F, 'V'), + (0x150, 'M', u'Å‘'), + (0x151, 'V'), + (0x152, 'M', u'Å“'), + (0x153, 'V'), + (0x154, 'M', u'Å•'), + (0x155, 'V'), + (0x156, 'M', u'Å—'), + (0x157, 'V'), + (0x158, 'M', u'Å™'), + (0x159, 'V'), + (0x15A, 'M', u'Å›'), + (0x15B, 'V'), + (0x15C, 'M', u'Å'), + (0x15D, 'V'), + (0x15E, 'M', u'ÅŸ'), + (0x15F, 'V'), + (0x160, 'M', u'Å¡'), + (0x161, 'V'), + (0x162, 'M', u'Å£'), + (0x163, 'V'), + (0x164, 'M', u'Å¥'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'Å©'), + (0x169, 'V'), + (0x16A, 'M', u'Å«'), + (0x16B, 'V'), + (0x16C, 'M', u'Å­'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'Å·'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'É“'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'Æ…'), + (0x185, 'V'), + (0x186, 'M', u'É”'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'É–'), + (0x18A, 'M', u'É—'), + (0x18B, 'M', u'ÆŒ'), + (0x18C, 'V'), + (0x18E, 'M', u'Ç'), + (0x18F, 'M', u'É™'), + (0x190, 'M', u'É›'), + (0x191, 'M', u'Æ’'), + (0x192, 'V'), + (0x193, 'M', u'É '), + (0x194, 'M', u'É£'), + (0x195, 'V'), + (0x196, 'M', u'É©'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'Æ™'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'Æ¡'), + (0x1A1, 'V'), + (0x1A2, 'M', u'Æ£'), + (0x1A3, 'V'), + (0x1A4, 'M', u'Æ¥'), + (0x1A5, 'V'), + (0x1A6, 'M', u'Ê€'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'Æ­'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'Æ°'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ÊŠ'), + (0x1B2, 'M', u'Ê‹'), + (0x1B3, 'M', u'Æ´'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'Ê’'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', 
u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ÇŽ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'Ç'), + (0x1D0, 'V'), + (0x1D1, 'M', u'Ç’'), + (0x1D2, 'V'), + (0x1D3, 'M', u'Ç”'), + (0x1D4, 'V'), + (0x1D5, 'M', u'Ç–'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'Çš'), + (0x1DA, 'V'), + (0x1DB, 'M', u'Çœ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ÇŸ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'Ç¡'), + (0x1E1, 'V'), + (0x1E2, 'M', u'Ç£'), + (0x1E3, 'V'), + (0x1E4, 'M', u'Ç¥'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'Ç©'), + (0x1E9, 'V'), + (0x1EA, 'M', u'Ç«'), + (0x1EB, 'V'), + (0x1EC, 'M', u'Ç­'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'Æ•'), + (0x1F7, 'M', u'Æ¿'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'Ç»'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'Ç¿'), + (0x1FF, 'V'), + (0x200, 'M', u'È'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'È…'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'È‹'), + (0x20B, 'V'), + (0x20C, 'M', u'È'), + (0x20D, 'V'), + (0x20E, 'M', u'È'), + (0x20F, 'V'), + (0x210, 'M', u'È‘'), + (0x211, 'V'), + (0x212, 'M', u'È“'), + (0x213, 'V'), + (0x214, 'M', u'È•'), + (0x215, 'V'), + (0x216, 'M', u'È—'), + (0x217, 'V'), + (0x218, 'M', u'È™'), + (0x219, 'V'), + (0x21A, 'M', u'È›'), + (0x21B, 'V'), + (0x21C, 'M', u'È'), + (0x21D, 'V'), + (0x21E, 'M', u'ÈŸ'), + (0x21F, 'V'), + (0x220, 'M', u'Æž'), + (0x221, 'V'), + (0x222, 'M', u'È£'), + (0x223, 'V'), + (0x224, 'M', u'È¥'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'È©'), + (0x229, 'V'), + (0x22A, 'M', u'È«'), + (0x22B, 'V'), + (0x22C, 'M', u'È­'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'â±¥'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'Æš'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'É‚'), + (0x242, 'V'), + (0x243, 'M', u'Æ€'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ÊŒ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'É‹'), + (0x24B, 'V'), + (0x24C, 'M', u'É'), + (0x24D, 'V'), + (0x24E, 'M', u'É'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'É»'), + (0x2B6, 'M', u'Ê'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ÌŠ'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' Ì‹'), + (0x2DE, 'V'), + (0x2E0, 'M', u'É£'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'Ê•'), + (0x2E5, 'V'), + (0x340, 'M', u'Ì€'), + (0x341, 'M', u'Ì'), + (0x342, 'V'), + (0x343, 'M', u'Ì“'), + (0x344, 'M', u'̈Ì'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'Í·'), + (0x377, 'V'), + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'X'), + (0x384, '3', u' Ì'), + (0x385, '3', u' ̈Ì'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ÏŒ'), + (0x38D, 'X'), + (0x38E, 'M', u'Ï'), 
+ (0x38F, 'M', u'ÏŽ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + (0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'Ï€'), + (0x3A1, 'M', u'Ï'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'Ï„'), + (0x3A5, 'M', u'Ï…'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ÏŠ'), + (0x3AB, 'M', u'Ï‹'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'Ï—'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'Ï…'), + (0x3D3, 'M', u'Ï'), + (0x3D4, 'M', u'Ï‹'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'Ï€'), + (0x3D7, 'V'), + (0x3D8, 'M', u'Ï™'), + (0x3D9, 'V'), + (0x3DA, 'M', u'Ï›'), + (0x3DB, 'V'), + (0x3DC, 'M', u'Ï'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ÏŸ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'Ï¡'), + (0x3E1, 'V'), + (0x3E2, 'M', u'Ï£'), + (0x3E3, 'V'), + (0x3E4, 'M', u'Ï¥'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'Ï©'), + (0x3E9, 'V'), + (0x3EA, 'M', u'Ï«'), + (0x3EB, 'V'), + (0x3EC, 'M', u'Ï­'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'Ï'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'Ï»'), + (0x3FB, 'V'), + (0x3FD, 'M', u'Í»'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'Ñ'), + (0x401, 'M', u'Ñ‘'), + (0x402, 'M', u'Ñ’'), + (0x403, 'M', u'Ñ“'), + (0x404, 'M', u'Ñ”'), + (0x405, 'M', u'Ñ•'), + (0x406, 'M', u'Ñ–'), + (0x407, 'M', u'Ñ—'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'Ñ™'), + (0x40A, 'M', u'Ñš'), + (0x40B, 'M', u'Ñ›'), + (0x40C, 'M', u'Ñœ'), + (0x40D, 'M', u'Ñ'), + (0x40E, 'M', u'Ñž'), + (0x40F, 'M', u'ÑŸ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'Ñ€'), + (0x421, 'M', u'Ñ'), + (0x422, 'M', u'Ñ‚'), + (0x423, 'M', u'у'), + (0x424, 'M', u'Ñ„'), + (0x425, 'M', u'Ñ…'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ÑŠ'), + (0x42B, 'M', u'Ñ‹'), + (0x42C, 'M', u'ÑŒ'), + (0x42D, 'M', u'Ñ'), + (0x42E, 'M', u'ÑŽ'), + (0x42F, 'M', u'Ñ'), + (0x430, 'V'), + (0x460, 'M', u'Ñ¡'), + (0x461, 'V'), + (0x462, 'M', u'Ñ£'), + (0x463, 'V'), + (0x464, 'M', u'Ñ¥'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'Ñ©'), + (0x469, 'V'), + (0x46A, 'M', u'Ñ«'), + (0x46B, 'V'), + (0x46C, 'M', u'Ñ­'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'Ñ·'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'Ñ»'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'Ñ¿'), + (0x47F, 'V'), + (0x480, 'M', u'Ò'), + (0x481, 'V'), + (0x48A, 'M', u'Ò‹'), + (0x48B, 'V'), + (0x48C, 'M', u'Ò'), + (0x48D, 'V'), + (0x48E, 'M', u'Ò'), + (0x48F, 'V'), + (0x490, 'M', u'Ò‘'), + (0x491, 
'V'), + (0x492, 'M', u'Ò“'), + (0x493, 'V'), + (0x494, 'M', u'Ò•'), + (0x495, 'V'), + (0x496, 'M', u'Ò—'), + (0x497, 'V'), + (0x498, 'M', u'Ò™'), + (0x499, 'V'), + (0x49A, 'M', u'Ò›'), + (0x49B, 'V'), + (0x49C, 'M', u'Ò'), + (0x49D, 'V'), + (0x49E, 'M', u'ÒŸ'), + (0x49F, 'V'), + (0x4A0, 'M', u'Ò¡'), + (0x4A1, 'V'), + (0x4A2, 'M', u'Ò£'), + (0x4A3, 'V'), + (0x4A4, 'M', u'Ò¥'), + (0x4A5, 'V'), + (0x4A6, 'M', u'Ò§'), + (0x4A7, 'V'), + (0x4A8, 'M', u'Ò©'), + (0x4A9, 'V'), + (0x4AA, 'M', u'Ò«'), + (0x4AB, 'V'), + (0x4AC, 'M', u'Ò­'), + (0x4AD, 'V'), + (0x4AE, 'M', u'Ò¯'), + (0x4AF, 'V'), + (0x4B0, 'M', u'Ò±'), + (0x4B1, 'V'), + (0x4B2, 'M', u'Ò³'), + (0x4B3, 'V'), + (0x4B4, 'M', u'Òµ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'Ò·'), + (0x4B7, 'V'), + (0x4B8, 'M', u'Ò¹'), + (0x4B9, 'V'), + (0x4BA, 'M', u'Ò»'), + (0x4BB, 'V'), + (0x4BC, 'M', u'Ò½'), + (0x4BD, 'V'), + (0x4BE, 'M', u'Ò¿'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'Ó‚'), + (0x4C2, 'V'), + (0x4C3, 'M', u'Ó„'), + (0x4C4, 'V'), + (0x4C5, 'M', u'Ó†'), + (0x4C6, 'V'), + (0x4C7, 'M', u'Óˆ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ÓŠ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ÓŒ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ÓŽ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'Ó‘'), + (0x4D1, 'V'), + (0x4D2, 'M', u'Ó“'), + (0x4D3, 'V'), + (0x4D4, 'M', u'Ó•'), + (0x4D5, 'V'), + (0x4D6, 'M', u'Ó—'), + (0x4D7, 'V'), + (0x4D8, 'M', u'Ó™'), + (0x4D9, 'V'), + (0x4DA, 'M', u'Ó›'), + (0x4DB, 'V'), + (0x4DC, 'M', u'Ó'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ÓŸ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'Ó¡'), + (0x4E1, 'V'), + (0x4E2, 'M', u'Ó£'), + (0x4E3, 'V'), + (0x4E4, 'M', u'Ó¥'), + (0x4E5, 'V'), + (0x4E6, 'M', u'Ó§'), + (0x4E7, 'V'), + (0x4E8, 'M', u'Ó©'), + (0x4E9, 'V'), + (0x4EA, 'M', u'Ó«'), + (0x4EB, 'V'), + (0x4EC, 'M', u'Ó­'), + (0x4ED, 'V'), + (0x4EE, 'M', u'Ó¯'), + (0x4EF, 'V'), + (0x4F0, 'M', u'Ó±'), + (0x4F1, 'V'), + (0x4F2, 'M', u'Ó³'), + (0x4F3, 'V'), + (0x4F4, 'M', u'Óµ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'Ó·'), + (0x4F7, 'V'), + (0x4F8, 'M', u'Ó¹'), + (0x4F9, 'V'), + (0x4FA, 'M', u'Ó»'), + (0x4FB, 'V'), + (0x4FC, 'M', u'Ó½'), + (0x4FD, 'V'), + (0x4FE, 'M', u'Ó¿'), + (0x4FF, 'V'), + (0x500, 'M', u'Ô'), + (0x501, 'V'), + (0x502, 'M', u'Ôƒ'), + (0x503, 'V'), + (0x504, 'M', u'Ô…'), + (0x505, 'V'), + (0x506, 'M', u'Ô‡'), + (0x507, 'V'), + (0x508, 'M', u'Ô‰'), + (0x509, 'V'), + (0x50A, 'M', u'Ô‹'), + (0x50B, 'V'), + (0x50C, 'M', u'Ô'), + (0x50D, 'V'), + (0x50E, 'M', u'Ô'), + (0x50F, 'V'), + (0x510, 'M', u'Ô‘'), + (0x511, 'V'), + (0x512, 'M', u'Ô“'), + (0x513, 'V'), + (0x514, 'M', u'Ô•'), + (0x515, 'V'), + (0x516, 'M', u'Ô—'), + (0x517, 'V'), + (0x518, 'M', u'Ô™'), + (0x519, 'V'), + (0x51A, 'M', u'Ô›'), + (0x51B, 'V'), + (0x51C, 'M', u'Ô'), + (0x51D, 'V'), + (0x51E, 'M', u'ÔŸ'), + (0x51F, 'V'), + (0x520, 'M', u'Ô¡'), + (0x521, 'V'), + (0x522, 'M', u'Ô£'), + (0x523, 'V'), + (0x524, 'M', u'Ô¥'), + (0x525, 'V'), + (0x526, 'M', u'Ô§'), + (0x527, 'V'), + (0x528, 'X'), + (0x531, 'M', u'Õ¡'), + (0x532, 'M', u'Õ¢'), + (0x533, 'M', u'Õ£'), + (0x534, 'M', u'Õ¤'), + (0x535, 'M', u'Õ¥'), + (0x536, 'M', u'Õ¦'), + (0x537, 'M', u'Õ§'), + (0x538, 'M', u'Õ¨'), + (0x539, 'M', u'Õ©'), + (0x53A, 'M', u'Õª'), + (0x53B, 'M', u'Õ«'), + (0x53C, 'M', u'Õ¬'), + (0x53D, 'M', u'Õ­'), + (0x53E, 'M', u'Õ®'), + (0x53F, 'M', u'Õ¯'), + (0x540, 'M', u'Õ°'), + (0x541, 'M', u'Õ±'), + (0x542, 'M', u'Õ²'), + (0x543, 'M', u'Õ³'), + (0x544, 'M', u'Õ´'), + (0x545, 'M', u'Õµ'), + (0x546, 'M', u'Õ¶'), + (0x547, 'M', u'Õ·'), + (0x548, 'M', u'Õ¸'), + (0x549, 'M', u'Õ¹'), + (0x54A, 'M', u'Õº'), + (0x54B, 'M', u'Õ»'), + (0x54C, 'M', 
u'Õ¼'), + (0x54D, 'M', u'Õ½'), + (0x54E, 'M', u'Õ¾'), + (0x54F, 'M', u'Õ¿'), + (0x550, 'M', u'Ö€'), + (0x551, 'M', u'Ö'), + (0x552, 'M', u'Ö‚'), + (0x553, 'M', u'Öƒ'), + (0x554, 'M', u'Ö„'), + (0x555, 'M', u'Ö…'), + (0x556, 'M', u'Ö†'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'Õ¥Ö‚'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58F, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'Û‡Ù´'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x8A0, 'V'), + (0x8A1, 'X'), + (0x8A2, 'V'), + (0x8AD, 'X'), + (0x8E4, 'V'), + (0x8FF, 'X'), + (0x900, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x978, 'X'), + (0x979, 'V'), + (0x980, 'X'), + (0x981, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FC, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 
'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + (0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC01, 'V'), + (0xC04, 'X'), + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC34, 'X'), + (0xC35, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5A, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC80, 'X'), + (0xC82, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD02, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD3B, 'X'), + (0xD3D, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD4F, 'X'), + (0xD57, 'V'), + (0xD58, 'X'), + (0xD60, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD76, 'X'), + (0xD79, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'à¹à¸²'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'à»àº²'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'à¾à¾µ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 
'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'â´§'), + (0x10C8, 'X'), + (0x10CD, 'M', u'â´­'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + (0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F5, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F1, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191D, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CF7, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'Ç'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'È£'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'É'), + (0x1D45, 'M', u'É‘'), + (0x1D46, 'M', u'á´‚'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'É™'), + (0x1D4B, 'M', u'É›'), + (0x1D4C, 'M', u'Éœ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'Å‹'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'É”'), + (0x1D54, 'M', u'á´–'), + (0x1D55, 'M', u'á´—'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'á´'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'á´¥'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', 
u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'Ï'), + (0x1D69, 'M', u'φ'), + (0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'É’'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'É•'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'Éœ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ÉŸ'), + (0x1DA2, 'M', u'É¡'), + (0x1DA3, 'M', u'É¥'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'É©'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'áµ»'), + (0x1DA8, 'M', u'Ê'), + (0x1DA9, 'M', u'É­'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ÊŸ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'É°'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'É´'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'Ê‚'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'Æ«'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ÊŠ'), + (0x1DB8, 'M', u'á´œ'), + (0x1DB9, 'M', u'Ê‹'), + (0x1DBA, 'M', u'ÊŒ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'Ê'), + (0x1DBD, 'M', u'Ê‘'), + (0x1DBE, 'M', u'Ê’'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DE7, 'X'), + (0x1DFC, 'V'), + (0x1E00, 'M', u'á¸'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'á¸'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'á¸'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'á¸'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'á¹'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'á¹…'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'á¹'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'á¹'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'á¹—'), + (0x1E57, 'V'), + (0x1E58, 'M', u'á¹™'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'á¹›'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'á¹'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'á¹£'), + (0x1E63, 'V'), + (0x1E64, 'M', u'á¹¥'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'á¹­'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'á¹±'), + (0x1E71, 'V'), + (0x1E72, 'M', u'á¹³'), + 
(0x1E73, 'V'), + (0x1E74, 'M', u'á¹µ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'á¹·'), + (0x1E77, 'V'), + (0x1E78, 'M', u'á¹¹'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'á¹»'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'á¹½'), + (0x1E7D, 'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'áº'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'áº'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'áº'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'á»'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'á»…'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'á»'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'á»'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'á»—'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'á»™'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'á»›'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'á»'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'á»­'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'á»±'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'á»·'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'á»»'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'á¼€'), + (0x1F09, 'M', u'á¼'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'á¼…'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'á¼'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'á¼’'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'á¼”'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'á¼ '), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'á¼¢'), + (0x1F2B, 'M', u'á¼£'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'á¼¥'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'á¼°'), + (0x1F39, 'M', u'á¼±'), + (0x1F3A, 'M', u'á¼²'), + (0x1F3B, 'M', u'á¼³'), + (0x1F3C, 'M', u'á¼´'), + (0x1F3D, 'M', u'á¼µ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', 
u'á¼·'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'á½€'), + (0x1F49, 'M', u'á½'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'á½…'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + (0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'á½—'), + (0x1F60, 'V'), + (0x1F68, 'M', u'á½ '), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'á½¢'), + (0x1F6B, 'M', u'á½£'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'á½¥'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ÏŒ'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'Ï'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ÏŽ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'á¼Î¹'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'á¼Î¹'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'á¾°'), + (0x1FB9, 'M', u'á¾±'), + (0x1FBA, 'M', u'á½°'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' Ì“'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' Ì“'), + (0x1FC0, '3', u' Í‚'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'á½²'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'á½´'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' Ì“Ì€'), + (0x1FCE, '3', u' Ì“Ì'), + (0x1FCF, '3', u' Ì“Í‚'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'Î'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'á¿'), + (0x1FD9, 'M', u'á¿‘'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' Ì”Ì'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'á¿ '), + (0x1FE9, 'M', u'á¿¡'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'Ï'), + (0x1FEC, 'M', u'á¿¥'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈Ì'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ÏŒ'), + (0x1FFA, 'M', u'á½¼'), + (0x1FFB, 'M', u'ÏŽ'), + (0x1FFC, 'M', u'ωι'), + 
(0x1FFD, '3', u' Ì'), + (0x1FFE, '3', u' Ì”'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'â€'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + (0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' Ì…'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'É™'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20BB, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'É›'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'Ã¥'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'×'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'×’'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'Ï€'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'Ï€'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1â„7'), + (0x2151, 'M', u'1â„9'), + (0x2152, 'M', u'1â„10'), + (0x2153, 'M', u'1â„3'), + (0x2154, 'M', u'2â„3'), + (0x2155, 'M', u'1â„5'), + (0x2156, 'M', u'2â„5'), + (0x2157, 'M', u'3â„5'), + (0x2158, 'M', u'4â„5'), + (0x2159, 'M', u'1â„6'), + (0x215A, 'M', u'5â„6'), + (0x215B, 'M', u'1â„8'), + (0x215C, 'M', u'3â„8'), 
+ (0x215D, 'M', u'5â„8'), + (0x215E, 'M', u'7â„8'), + (0x215F, 'M', u'1â„'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + (0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0â„3'), + (0x218A, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x23F4, 'X'), + (0x2400, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + 
(0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), + (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2700, 'X'), + (0x2701, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'â«Ì¸'), + (0x2ADD, 'V'), + (0x2B4D, 'X'), + (0x2B50, 'V'), + (0x2B5A, 'X'), + (0x2C00, 'M', u'â°°'), + (0x2C01, 'M', u'â°±'), + (0x2C02, 'M', u'â°²'), + (0x2C03, 'M', u'â°³'), + (0x2C04, 'M', u'â°´'), + (0x2C05, 'M', u'â°µ'), + (0x2C06, 'M', u'â°¶'), + (0x2C07, 'M', u'â°·'), + (0x2C08, 'M', u'â°¸'), + (0x2C09, 'M', u'â°¹'), + (0x2C0A, 'M', u'â°º'), + (0x2C0B, 'M', u'â°»'), + (0x2C0C, 'M', u'â°¼'), + (0x2C0D, 'M', u'â°½'), + (0x2C0E, 'M', u'â°¾'), + (0x2C0F, 'M', u'â°¿'), + (0x2C10, 'M', u'â±€'), + (0x2C11, 'M', u'â±'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'â±…'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'â±'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'â±'), + (0x2C20, 'M', u'â±'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'â±’'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'â±”'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'â±–'), + (0x2C27, 'M', u'â±—'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'â±™'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'â±›'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'â±'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'É«'), + (0x2C63, 'M', u'áµ½'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'É‘'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'É'), + (0x2C70, 'M', u'É’'), + (0x2C71, 'V'), + (0x2C72, 'M', u'â±³'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'È¿'), + (0x2C7F, 'M', u'É€'), + (0x2C80, 'M', u'â²'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'â²…'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'â²'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'â²'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'â²—'), + (0x2C97, 'V'), + (0x2C98, 'M', u'â²™'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'â²›'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'â²'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'â²£'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'â²¥'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'â²­'), + (0x2CAD, 
'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'â²±'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'â²³'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'â²µ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'â²·'), + (0x2CB7, 'V'), + (0x2CB8, 'M', u'â²¹'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'â²»'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'â²½'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'â³'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'â³…'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'â³'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'â³'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'â³—'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'â³™'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'â³›'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'â³'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'â³£'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'â³®'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'â³³'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E3C, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'æ¯'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'ä¹™'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'å„¿'), + (0x2F0A, 'M', u'å…¥'), + (0x2F0B, 'M', u'å…«'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'å'), + (0x2F18, 'M', u'åœ'), + (0x2F19, 'M', u'å©'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'åˆ'), + (0x2F1D, 'M', u'å£'), + (0x2F1E, 'M', u'å›—'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'å­'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'å°'), + (0x2F2A, 'M', u'å°¢'), + (0x2F2B, 'M', u'å°¸'), + (0x2F2C, 'M', u'å±®'), + (0x2F2D, 'M', u'å±±'), + (0x2F2E, 'M', u'å·›'), + (0x2F2F, 'M', u'å·¥'), + (0x2F30, 'M', u'å·±'), + (0x2F31, 'M', u'å·¾'), + (0x2F32, 'M', u'å¹²'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'å»´'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'å½'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'å½³'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'æ”´'), + (0x2F42, 'M', u'æ–‡'), + (0x2F43, 'M', u'æ–—'), + (0x2F44, 'M', u'æ–¤'), 
+ (0x2F45, 'M', u'æ–¹'), + (0x2F46, 'M', u'æ— '), + (0x2F47, 'M', u'æ—¥'), + (0x2F48, 'M', u'æ›°'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'æ­¢'), + (0x2F4D, 'M', u'æ­¹'), + (0x2F4E, 'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'æ°'), + (0x2F53, 'M', u'æ°”'), + (0x2F54, 'M', u'æ°´'), + (0x2F55, 'M', u'ç«'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'ç“œ'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'ç”°'), + (0x2F66, 'M', u'ç–‹'), + (0x2F67, 'M', u'ç–’'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'çš®'), + (0x2F6B, 'M', u'çš¿'), + (0x2F6C, 'M', u'ç›®'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'ç©´'), + (0x2F74, 'M', u'ç«‹'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'ç±³'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'ç¾½'), + (0x2F7C, 'M', u'è€'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'è¿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'è™'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'è¡€'), + (0x2F8F, 'M', u'è¡Œ'), + (0x2F90, 'M', u'è¡£'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'è°·'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'è²'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'èµ°'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'è¾›'), + (0x2FA0, 'M', u'è¾°'), + (0x2FA1, 'M', u'è¾µ'), + (0x2FA2, 'M', u'é‚‘'), + (0x2FA3, 'M', u'é…‰'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'é•·'), + (0x2FA8, 'M', u'é–€'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'éš¹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'é‘'), + (0x2FAE, 'M', u'éž'), + (0x2FAF, 'M', u'é¢'), + (0x2FB0, 'M', u'é©'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'é '), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'é«Ÿ'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'é­š'), + (0x2FC3, 'M', u'é³¥'), + (0x2FC4, 'M', u'é¹µ'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'é»'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'é¼ '), + (0x2FD0, 'M', u'é¼»'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'é½’'), + (0x2FD3, 'M', u'é¾'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'é¾ '), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + 
(0x3037, 'V'), + (0x3038, 'M', u'å'), + (0x3039, 'M', u'å„'), + (0x303A, 'M', u'å…'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ã‚™'), + (0x309C, '3', u' ã‚š'), + (0x309D, 'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x312E, 'X'), + (0x3131, 'M', u'á„€'), + (0x3132, 'M', u'á„'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'á„‚'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'á„„'), + (0x3139, 'M', u'á„…'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'á„š'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'á„¡'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'á„Š'), + (0x3147, 'M', u'á„‹'), + (0x3148, 'M', u'á„Œ'), + (0x3149, 'M', u'á„'), + (0x314A, 'M', u'á„Ž'), + (0x314B, 'M', u'á„'), + (0x314C, 'M', u'á„'), + (0x314D, 'M', u'á„‘'), + (0x314E, 'M', u'á„’'), + (0x314F, 'M', u'á…¡'), + (0x3150, 'M', u'á…¢'), + (0x3151, 'M', u'á…£'), + (0x3152, 'M', u'á…¤'), + (0x3153, 'M', u'á…¥'), + (0x3154, 'M', u'á…¦'), + (0x3155, 'M', u'á…§'), + (0x3156, 'M', u'á…¨'), + (0x3157, 'M', u'á…©'), + (0x3158, 'M', u'á…ª'), + (0x3159, 'M', u'á…«'), + (0x315A, 'M', u'á…¬'), + (0x315B, 'M', u'á…­'), + (0x315C, 'M', u'á…®'), + (0x315D, 'M', u'á…¯'), + (0x315E, 'M', u'á…°'), + (0x315F, 'M', u'á…±'), + (0x3160, 'M', u'á…²'), + (0x3161, 'M', u'á…³'), + (0x3162, 'M', u'á…´'), + (0x3163, 'M', u'á…µ'), + (0x3164, 'X'), + (0x3165, 'M', u'á„”'), + (0x3166, 'M', u'á„•'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'á„œ'), + (0x316F, 'M', u'á‡'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'á„'), + (0x3172, 'M', u'á„ž'), + (0x3173, 'M', u'á„ '), + (0x3174, 'M', u'á„¢'), + (0x3175, 'M', u'á„£'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'á„©'), + (0x3178, 'M', u'á„«'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'á„­'), + (0x317B, 'M', u'á„®'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'á…€'), + (0x3180, 'M', u'á…‡'), + (0x3181, 'M', u'á…Œ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'á…—'), + (0x3185, 'M', u'á…˜'), + (0x3186, 'M', u'á…™'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'å››'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'ä¹™'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'ä¸'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(á„€)'), + (0x3201, '3', u'(á„‚)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(á„…)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(á„‹)'), + (0x3208, '3', u'(á„Œ)'), + (0x3209, '3', u'(á„Ž)'), + (0x320A, '3', u'(á„)'), + (0x320B, '3', u'(á„)'), + (0x320C, '3', u'(á„‘)'), + (0x320D, '3', u'(á„’)'), + (0x320E, '3', u'(ê°€)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(ë¼)'), + (0x3212, '3', 
u'(마)'), + (0x3213, '3', u'(ë°”)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(ì•„)'), + (0x3216, '3', u'(ìž)'), + (0x3217, '3', u'(ì°¨)'), + (0x3218, '3', u'(ì¹´)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(å››)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(å…­)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(å…«)'), + (0x3228, '3', u'(ä¹)'), + (0x3229, '3', u'(å)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(ç«)'), + (0x322C, '3', u'(æ°´)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(æ—¥)'), + (0x3231, '3', u'(æ ª)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(å)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(ç¥)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(å­¦)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(ä¼)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(å”)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'å•'), + (0x3245, 'M', u'å¹¼'), + (0x3246, 'M', u'æ–‡'), + (0x3247, 'M', u'ç®'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'á„€'), + (0x3261, 'M', u'á„‚'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'á„…'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'á„‹'), + (0x3268, 'M', u'á„Œ'), + (0x3269, 'M', u'á„Ž'), + (0x326A, 'M', u'á„'), + (0x326B, 'M', u'á„'), + (0x326C, 'M', u'á„‘'), + (0x326D, 'M', u'á„’'), + (0x326E, 'M', u'ê°€'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'ë¼'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'ë°”'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'ì•„'), + (0x3276, 'M', u'ìž'), + (0x3277, 'M', u'ì°¨'), + (0x3278, 'M', u'ì¹´'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주ì˜'), + (0x327E, 'M', u'ìš°'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'å››'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'å…­'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'å…«'), + (0x3288, 'M', u'ä¹'), + (0x3289, 'M', u'å'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'ç«'), + (0x328C, 'M', u'æ°´'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'æ—¥'), + (0x3291, 'M', u'æ ª'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'å'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'ç¥'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'ç”·'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'é©'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'å°'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'é …'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'æ­£'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'å·¦'), + (0x32A8, 'M', u'å³'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'å®—'), + (0x32AB, 'M', 
u'å­¦'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'ä¼'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'å”'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ã‚¢'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'ã‚«'), + (0x32D6, 'M', u'ã‚­'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'ã‚·'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'ã‚»'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'ã‚¿'), + (0x32E0, 'M', u'ãƒ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ãƒ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ãƒ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インãƒ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローãƒ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーãƒ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンãƒãƒ¼ãƒ '), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センãƒ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ãƒã‚¤ãƒ„'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'ãƒãƒ¼ãƒ¬ãƒ«'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', 
u'ベータ'), + (0x333D, 'M', u'ãƒã‚¤ãƒ³ãƒˆ'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ãƒãƒ³ãƒ‰'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 'M', u'マッãƒ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリãƒãƒ¼ãƒ«'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'å¹³æˆ'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'æ ªå¼ä¼šç¤¾'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 
'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + (0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1æ—¥'), + (0x33E1, 'M', u'2æ—¥'), + (0x33E2, 'M', u'3æ—¥'), + (0x33E3, 'M', u'4æ—¥'), + (0x33E4, 'M', u'5æ—¥'), + (0x33E5, 'M', u'6æ—¥'), + (0x33E6, 'M', u'7æ—¥'), + (0x33E7, 'M', u'8æ—¥'), + (0x33E8, 'M', u'9æ—¥'), + (0x33E9, 'M', u'10æ—¥'), + (0x33EA, 'M', u'11æ—¥'), + (0x33EB, 'M', u'12æ—¥'), + (0x33EC, 'M', u'13æ—¥'), + (0x33ED, 'M', u'14æ—¥'), + (0x33EE, 'M', u'15æ—¥'), + (0x33EF, 'M', u'16æ—¥'), + (0x33F0, 'M', u'17æ—¥'), + (0x33F1, 'M', u'18æ—¥'), + (0x33F2, 'M', u'19æ—¥'), + (0x33F3, 'M', u'20æ—¥'), + (0x33F4, 'M', u'21æ—¥'), + (0x33F5, 'M', u'22æ—¥'), + (0x33F6, 'M', u'23æ—¥'), + (0x33F7, 'M', u'24æ—¥'), + (0x33F8, 'M', u'25æ—¥'), + (0x33F9, 'M', u'26æ—¥'), + (0x33FA, 'M', u'27æ—¥'), + (0x33FB, 'M', u'28æ—¥'), + (0x33FC, 'M', u'29æ—¥'), + (0x33FD, 'M', u'30æ—¥'), + (0x33FE, 'M', u'31æ—¥'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FCD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ê™'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ê™…'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ê™'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ê™'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ê™—'), + (0xA657, 'V'), + (0xA658, 'M', u'ê™™'), + (0xA659, 'V'), + (0xA65A, 'M', u'ê™›'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ê™'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ê™­'), + (0xA66D, 'V'), + (0xA680, 'M', u'êš'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'êš…'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'êš‹'), + (0xA68B, 'V'), + (0xA68C, 'M', u'êš'), + (0xA68D, 'V'), + (0xA68E, 'M', u'êš'), + (0xA68F, 'V'), + (0xA690, 'M', u'êš‘'), + (0xA691, 'V'), + (0xA692, 'M', u'êš“'), + (0xA693, 'V'), + (0xA694, 'M', u'êš•'), + (0xA695, 'V'), + (0xA696, 'M', u'êš—'), + (0xA697, 'V'), + (0xA698, 'X'), + (0xA69F, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + 
(0xA740, 'M', u'ê'), + (0xA741, 'V'), + (0xA742, 'M', u'êƒ'), + (0xA743, 'V'), + (0xA744, 'M', u'ê…'), + (0xA745, 'V'), + (0xA746, 'M', u'ê‡'), + (0xA747, 'V'), + (0xA748, 'M', u'ê‰'), + (0xA749, 'V'), + (0xA74A, 'M', u'ê‹'), + (0xA74B, 'V'), + (0xA74C, 'M', u'ê'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ê'), + (0xA74F, 'V'), + (0xA750, 'M', u'ê‘'), + (0xA751, 'V'), + (0xA752, 'M', u'ê“'), + (0xA753, 'V'), + (0xA754, 'M', u'ê•'), + (0xA755, 'V'), + (0xA756, 'M', u'ê—'), + (0xA757, 'V'), + (0xA758, 'M', u'ê™'), + (0xA759, 'V'), + (0xA75A, 'M', u'ê›'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ê'), + (0xA75D, 'V'), + (0xA75E, 'M', u'êŸ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ê¡'), + (0xA761, 'V'), + (0xA762, 'M', u'ê£'), + (0xA763, 'V'), + (0xA764, 'M', u'ê¥'), + (0xA765, 'V'), + (0xA766, 'M', u'ê§'), + (0xA767, 'V'), + (0xA768, 'M', u'ê©'), + (0xA769, 'V'), + (0xA76A, 'M', u'ê«'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ê­'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ê¯'), + (0xA76F, 'V'), + (0xA770, 'M', u'ê¯'), + (0xA771, 'V'), + (0xA779, 'M', u'êº'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ê¼'), + (0xA77C, 'V'), + (0xA77D, 'M', u'áµ¹'), + (0xA77E, 'M', u'ê¿'), + (0xA77F, 'V'), + (0xA780, 'M', u'êž'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'êž…'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'É¥'), + (0xA78E, 'V'), + (0xA78F, 'X'), + (0xA790, 'M', u'êž‘'), + (0xA791, 'V'), + (0xA792, 'M', u'êž“'), + (0xA793, 'V'), + (0xA794, 'X'), + (0xA7A0, 'M', u'êž¡'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'êž©'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'X'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'Å“'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C5, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FC, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9E0, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAA7C, 'X'), + (0xAA80, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'æ›´'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'å¥'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'å–‡'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'ç¾…'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'é‚'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'æ´›'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'çž'), + (0xF918, 'M', u'è½'), + (0xF919, 'M', u'é…ª'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'åµ'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'åµ'), + (0xF922, 'M', u'æ¿«'), + (0xF923, 'M', 
u'è—'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'è Ÿ'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'å‹ž'), + (0xF930, 'M', u'æ“„'), + (0xF931, 'M', u'æ«“'), + (0xF932, 'M', u'çˆ'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'è€'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'è·¯'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'é­¯'), + (0xF93A, 'M', u'é·º'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'è‰'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'è«–'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'ç± '), + (0xF945, 'M', u'è¾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'é›·'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'å±¢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'æ·š'), + (0xF94E, 'M', u'æ¼'), + (0xF94F, 'M', u'ç´¯'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'å‹’'), + (0xF953, 'M', u'è‚‹'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'è±'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'æ‹'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'ç•°'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'ä¸'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'ç´¢'), + (0xF96B, 'M', u'åƒ'), + (0xF96C, 'M', u'å¡ž'), + (0xF96D, 'M', u'çœ'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'è¾°'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'è‹¥'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'ç•¥'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'å…©'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'æ¢'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'è«’'), + (0xF97E, 'M', u'é‡'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'å‘‚'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'æ—…'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'é–­'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'æ­·'), + (0xF98D, 'M', u'è½¢'), + (0xF98E, 'M', u'å¹´'), + (0xF98F, 'M', u'æ†'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'æ’š'), + (0xF992, 'M', u'æ¼£'), + (0xF993, 'M', u'ç…‰'), + (0xF994, 'M', u'ç’‰'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'ç·´'), + (0xF997, 'M', u'è¯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'è“®'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'éŠ'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'å’½'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'æ»'), + (0xF9A5, 'M', u'æ®®'), + (0xF9A6, 'M', u'ç°¾'), + (0xF9A7, 'M', u'çµ'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'ç‘©'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'è†'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'éˆ'), + (0xF9B4, 'M', u'é ˜'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', 
u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'å°¿'), + (0xF9BE, 'M', u'æ–™'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', u'é¼'), + (0xF9C4, 'M', u'é¾'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'æ»'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'æµ'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'ç‰'), + (0xF9CD, 'M', u'ç•™'), + (0xF9CE, 'M', u'ç¡«'), + (0xF9CF, 'M', u'ç´'), + (0xF9D0, 'M', u'é¡ž'), + (0xF9D1, 'M', u'å…­'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'å´™'), + (0xF9D6, 'M', u'æ·ª'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'æ…„'), + (0xF9DA, 'M', u'æ —'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'å'), + (0xF9DF, 'M', u'å±¥'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'æŽ'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'æ³¥'), + (0xF9E4, 'M', u'ç†'), + (0xF9E5, 'M', u'ç—¢'), + (0xF9E6, 'M', u'ç½¹'), + (0xF9E7, 'M', u'è£'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'å'), + (0xF9EE, 'M', u'ç‡'), + (0xF9EF, 'M', u'ç’˜'), + (0xF9F0, 'M', u'è—º'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'é±—'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'æž—'), + (0xF9F5, 'M', u'æ·‹'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'ç«‹'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'ç²’'), + (0xF9FA, 'M', u'ç‹€'), + (0xF9FB, 'M', u'ç‚™'), + (0xF9FC, 'M', u'è­˜'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'æ‹“'), + (0xFA03, 'M', u'ç³–'), + (0xFA04, 'M', u'å®…'), + (0xFA05, 'M', u'æ´ž'), + (0xFA06, 'M', u'æš´'), + (0xFA07, 'M', u'è¼»'), + (0xFA08, 'M', u'è¡Œ'), + (0xFA09, 'M', u'é™'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'å…€'), + (0xFA0D, 'M', u'å—€'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'å¡š'), + (0xFA11, 'V'), + (0xFA12, 'M', u'æ™´'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'ç¦'), + (0xFA1C, 'M', u'é–'), + (0xFA1D, 'M', u'ç²¾'), + (0xFA1E, 'M', u'ç¾½'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'éš·'), + (0xFA30, 'M', u'ä¾®'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'å…'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'å‘'), + (0xFA36, 'M', u'å–'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'å¡€'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'å±®'), + (0xFA3D, 'M', u'æ‚”'), + (0xFA3E, 'M', u'æ…¨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'æ•'), + (0xFA42, 'M', u'æ—¢'), + (0xFA43, 'M', u'æš‘'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'æµ·'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'æ¼¢'), + (0xFA48, 'M', u'ç…®'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'ç¢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'ç¥'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'ç¥'), + (0xFA52, 'M', u'ç¦'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'ç©€'), + (0xFA55, 'M', 
u'çª'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'ç·´'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'ç¹'), + (0xFA5A, 'M', u'ç½²'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'è‘—'), + (0xFA60, 'M', u'è¤'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'è¬'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'è´ˆ'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'é »'), + (0xFA6B, 'M', u'æµ'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'å…¨'), + (0xFA73, 'M', u'ä¾€'), + (0xFA74, 'M', u'å……'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'å–'), + (0xFA79, 'M', u'å••'), + (0xFA7A, 'M', u'å–™'), + (0xFA7B, 'M', u'å—¢'), + (0xFA7C, 'M', u'å¡š'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'å©¢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'å»’'), + (0xFA83, 'M', u'å»™'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'å¾­'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'æ…Ž'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'æ… '), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'æ„'), + (0xFA8E, 'M', u'æœ'), + (0xFA8F, 'M', u'æ‘’'), + (0xFA90, 'M', u'æ•–'), + (0xFA91, 'M', u'æ™´'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'æ–'), + (0xFA95, 'M', u'æ­¹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'æµ'), + (0xFA98, 'M', u'æ»›'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'æ¼¢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'ç…®'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'ç”»'), + (0xFAA4, 'M', u'ç˜'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'ç››'), + (0xFAA8, 'M', u'ç›´'), + (0xFAA9, 'M', u'çŠ'), + (0xFAAA, 'M', u'ç€'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'ç±»'), + (0xFAAF, 'M', u'çµ›'), + (0xFAB0, 'M', u'ç·´'), + (0xFAB1, 'M', u'ç¼¾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'è’'), + (0xFAB4, 'M', u'è¯'), + (0xFAB5, 'M', u'è¹'), + (0xFAB6, 'M', u'è¥'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'è«‹'), + (0xFABC, 'M', u'è¬'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'è«­'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'è´ˆ'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'é²'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'é–'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'é ‹'), + (0xFACC, 'M', u'é »'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'ð£•'), + (0xFAD2, 'M', u'ã®'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'ð¥³'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'Õ´Õ¶'), + (0xFB14, 'M', u'Õ´Õ¥'), + (0xFB15, 'M', u'Õ´Õ«'), + (0xFB16, 'M', u'Õ¾Õ¶'), + (0xFB17, 'M', u'Õ´Õ­'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'×™Ö´'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'×¢'), + (0xFB21, 'M', u'×'), + (0xFB22, 'M', u'ד'), 
+ (0xFB23, 'M', u'×”'), + (0xFB24, 'M', u'×›'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'×'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'ש×'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּ×'), + (0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'×Ö·'), + (0xFB2F, 'M', u'×Ö¸'), + (0xFB30, 'M', u'×Ö¼'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'×’Ö¼'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'×”Ö¼'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'×–Ö¼'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'×™Ö¼'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'×›Ö¼'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'× Ö¼'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'×£Ö¼'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'×›Ö¿'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'×ל'), + (0xFB50, 'M', u'Ù±'), + (0xFB52, 'M', u'Ù»'), + (0xFB56, 'M', u'Ù¾'), + (0xFB5A, 'M', u'Ú€'), + (0xFB5E, 'M', u'Ùº'), + (0xFB62, 'M', u'Ù¿'), + (0xFB66, 'M', u'Ù¹'), + (0xFB6A, 'M', u'Ú¤'), + (0xFB6E, 'M', u'Ú¦'), + (0xFB72, 'M', u'Ú„'), + (0xFB76, 'M', u'Úƒ'), + (0xFB7A, 'M', u'Ú†'), + (0xFB7E, 'M', u'Ú‡'), + (0xFB82, 'M', u'Ú'), + (0xFB84, 'M', u'ÚŒ'), + (0xFB86, 'M', u'ÚŽ'), + (0xFB88, 'M', u'Úˆ'), + (0xFB8A, 'M', u'Ú˜'), + (0xFB8C, 'M', u'Ú‘'), + (0xFB8E, 'M', u'Ú©'), + (0xFB92, 'M', u'Ú¯'), + (0xFB96, 'M', u'Ú³'), + (0xFB9A, 'M', u'Ú±'), + (0xFB9E, 'M', u'Úº'), + (0xFBA0, 'M', u'Ú»'), + (0xFBA4, 'M', u'Û€'), + (0xFBA6, 'M', u'Û'), + (0xFBAA, 'M', u'Ú¾'), + (0xFBAE, 'M', u'Û’'), + (0xFBB0, 'M', u'Û“'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'Ú­'), + (0xFBD7, 'M', u'Û‡'), + (0xFBD9, 'M', u'Û†'), + (0xFBDB, 'M', u'Ûˆ'), + (0xFBDD, 'M', u'Û‡Ù´'), + (0xFBDE, 'M', u'Û‹'), + (0xFBE0, 'M', u'Û…'), + (0xFBE2, 'M', u'Û‰'), + (0xFBE4, 'M', u'Û'), + (0xFBE8, 'M', u'Ù‰'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئÛ'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ÛŒ'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'Ùج'), + (0xFC2E, 'M', u'ÙØ­'), + (0xFC2F, 'M', u'ÙØ®'), + (0xFC30, 'M', u'ÙÙ…'), + (0xFC31, 'M', u'ÙÙ‰'), + (0xFC32, 'M', u'ÙÙŠ'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + 
(0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + (0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ÙÙ‘'), + (0xFC60, '3', u' ÙŽÙ‘'), + (0xFC61, '3', u' ÙÙ‘'), + (0xFC62, '3', u' ÙÙ‘'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'ÙÙ‰'), + (0xFC7D, 'M', u'ÙÙŠ'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'Ùج'), + (0xFCBF, 'M', u'ÙØ­'), + (0xFCC0, 'M', u'ÙØ®'), + (0xFCC1, 'M', u'ÙÙ…'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 
'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'Ù€ÙŽÙ‘'), + (0xFCF3, 'M', u'Ù€ÙÙ‘'), + (0xFCF4, 'M', u'Ù€ÙÙ‘'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 
'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'Ùخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + (0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'Ùمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'ã€'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE27, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'ã€'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'ã€'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'ã€'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' Ì…'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'ã€'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', 
u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' Ù‹'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ÙŒ'), + (0xFE73, 'V'), + (0xFE74, '3', u' Ù'), + (0xFE75, 'X'), + (0xFE76, '3', u' ÙŽ'), + (0xFE77, 'M', u'Ù€ÙŽ'), + (0xFE78, '3', u' Ù'), + (0xFE79, 'M', u'Ù€Ù'), + (0xFE7A, '3', u' Ù'), + (0xFE7B, 'M', u'Ù€Ù'), + (0xFE7C, '3', u' Ù‘'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' Ù’'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'Ø¡'), + (0xFE81, 'M', u'Ø¢'), + (0xFE83, 'M', u'Ø£'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'Ø¥'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'Ø©'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'Ø«'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'Ø­'), + (0xFEA5, 'M', u'Ø®'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'Ø°'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'Ø´'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'Ø·'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'Ù'), + (0xFED5, 'M', u'Ù‚'), + (0xFED9, 'M', u'Ùƒ'), + (0xFEDD, 'M', u'Ù„'), + (0xFEE1, 'M', u'Ù…'), + (0xFEE5, 'M', u'Ù†'), + (0xFEE9, 'M', u'Ù‡'), + (0xFEED, 'M', u'Ùˆ'), + (0xFEEF, 'M', u'Ù‰'), + (0xFEF1, 'M', u'ÙŠ'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', 
u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'ã€'), + (0xFF64, 'M', u'ã€'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ã‚¡'), + (0xFF68, 'M', u'ã‚£'), + (0xFF69, 'M', u'ã‚¥'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ã‚©'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ã‚¢'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'ã‚«'), + (0xFF77, 'M', u'ã‚­'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'ã‚·'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'ã‚»'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'ã‚¿'), + (0xFF81, 'M', u'ãƒ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ãƒ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ãƒ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'ã‚™'), + (0xFF9F, 'M', u'ã‚š'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'á„€'), + (0xFFA2, 'M', u'á„'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'á„‚'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'á„„'), + (0xFFA9, 'M', u'á„…'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'á„š'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'á„¡'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'á„Š'), + (0xFFB7, 'M', u'á„‹'), + (0xFFB8, 'M', u'á„Œ'), + (0xFFB9, 'M', u'á„'), + (0xFFBA, 'M', u'á„Ž'), + (0xFFBB, 'M', u'á„'), + (0xFFBC, 'M', u'á„'), + (0xFFBD, 'M', u'á„‘'), + (0xFFBE, 'M', u'á„’'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'á…¡'), + (0xFFC3, 'M', u'á…¢'), + (0xFFC4, 'M', u'á…£'), + (0xFFC5, 'M', u'á…¤'), + (0xFFC6, 'M', u'á…¥'), + (0xFFC7, 'M', u'á…¦'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'á…§'), + (0xFFCB, 'M', u'á…¨'), + (0xFFCC, 'M', u'á…©'), + (0xFFCD, 'M', u'á…ª'), + (0xFFCE, 'M', u'á…«'), + (0xFFCF, 'M', u'á…¬'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'á…­'), + (0xFFD3, 'M', u'á…®'), + (0xFFD4, 'M', u'á…¯'), + (0xFFD5, 'M', u'á…°'), + (0xFFD6, 'M', u'á…±'), + (0xFFD7, 'M', u'á…²'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'á…³'), + (0xFFDB, 'M', u'á…´'), + (0xFFDC, 'M', u'á…µ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' Ì„'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'Â¥'), + (0xFFE6, 'M', u'â‚©'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'â†'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'â– '), + (0xFFEE, 'M', u'â—‹'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), 
+ (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018B, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x10300, 'V'), + (0x1031F, 'X'), + (0x10320, 'V'), + (0x10324, 'X'), + (0x10330, 'V'), + (0x1034B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'ð¨'), + (0x10401, 'M', u'ð©'), + (0x10402, 'M', u'ðª'), + (0x10403, 'M', u'ð«'), + (0x10404, 'M', u'ð¬'), + (0x10405, 'M', u'ð­'), + (0x10406, 'M', u'ð®'), + (0x10407, 'M', u'ð¯'), + (0x10408, 'M', u'ð°'), + (0x10409, 'M', u'ð±'), + (0x1040A, 'M', u'ð²'), + (0x1040B, 'M', u'ð³'), + (0x1040C, 'M', u'ð´'), + (0x1040D, 'M', u'ðµ'), + (0x1040E, 'M', u'ð¶'), + (0x1040F, 'M', u'ð·'), + (0x10410, 'M', u'ð¸'), + (0x10411, 'M', u'ð¹'), + (0x10412, 'M', u'ðº'), + (0x10413, 'M', u'ð»'), + (0x10414, 'M', u'ð¼'), + (0x10415, 'M', u'ð½'), + (0x10416, 'M', u'ð¾'), + (0x10417, 'M', u'ð¿'), + (0x10418, 'M', u'ð‘€'), + (0x10419, 'M', u'ð‘'), + (0x1041A, 'M', u'ð‘‚'), + (0x1041B, 'M', u'ð‘ƒ'), + (0x1041C, 'M', u'ð‘„'), + (0x1041D, 'M', u'ð‘…'), + (0x1041E, 'M', u'ð‘†'), + (0x1041F, 'M', u'ð‘‡'), + (0x10420, 'M', u'ð‘ˆ'), + (0x10421, 'M', u'ð‘‰'), + (0x10422, 'M', u'ð‘Š'), + (0x10423, 'M', u'ð‘‹'), + (0x10424, 'M', u'ð‘Œ'), + (0x10425, 'M', u'ð‘'), + (0x10426, 'M', u'ð‘Ž'), + (0x10427, 'M', u'ð‘'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x10860, 'X'), + (0x10900, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BE, 'V'), + (0x109C0, 'X'), + (0x10A00, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10A80, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B80, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x11080, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11180, 'V'), + (0x111C9, 'X'), + (0x111D0, 'V'), + (0x111DA, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x12000, 'V'), + (0x1236F, 'X'), + (0x12400, 'V'), + (0x12463, 'X'), + (0x12470, 'V'), + (0x12474, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x1B000, 'V'), + (0x1B002, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'ð…—ð…¥'), + (0x1D15F, 'M', u'ð…˜ð…¥'), + (0x1D160, 'M', u'ð…˜ð…¥ð…®'), + (0x1D161, 'M', u'ð…˜ð…¥ð…¯'), + 
(0x1D162, 'M', u'ð…˜ð…¥ð…°'), + (0x1D163, 'M', u'ð…˜ð…¥ð…±'), + (0x1D164, 'M', u'ð…˜ð…¥ð…²'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'ð†¹ð…¥'), + (0x1D1BC, 'M', u'ð†ºð…¥'), + (0x1D1BD, 'M', u'ð†¹ð…¥ð…®'), + (0x1D1BE, 'M', u'ð†ºð…¥ð…®'), + (0x1D1BF, 'M', u'ð†¹ð…¥ð…¯'), + (0x1D1C0, 'M', u'ð†ºð…¥ð…¯'), + (0x1D1C1, 'V'), + (0x1D1DE, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + 
(0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', 
u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + 
(0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + (0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + 
(0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + (0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'È·'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'Ï€'), + (0x1D6B8, 'M', u'Ï'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'Ï„'), + (0x1D6BC, 'M', u'Ï…'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'Ï€'), + (0x1D6D2, 'M', u'Ï'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'Ï„'), + (0x1D6D6, 'M', u'Ï…'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + 
(0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'Ï'), + (0x1D6E1, 'M', u'Ï€'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + (0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'Ï€'), + (0x1D6F2, 'M', u'Ï'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'Ï„'), + (0x1D6F6, 'M', u'Ï…'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'Ï€'), + (0x1D70C, 'M', u'Ï'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'Ï„'), + (0x1D710, 'M', u'Ï…'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'Ï'), + (0x1D71B, 'M', u'Ï€'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'Ï€'), + (0x1D72C, 'M', u'Ï'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'Ï„'), + (0x1D730, 'M', u'Ï…'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'Ï€'), + (0x1D746, 'M', u'Ï'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'Ï„'), + (0x1D74A, 'M', u'Ï…'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'Ï'), + (0x1D755, 'M', u'Ï€'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'Ï€'), + (0x1D766, 'M', u'Ï'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'Ï„'), + (0x1D76A, 'M', u'Ï…'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + 
(0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'Ï€'), + (0x1D780, 'M', u'Ï'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'Ï„'), + (0x1D784, 'M', u'Ï…'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'Ï'), + (0x1D78F, 'M', u'Ï€'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'Ï€'), + (0x1D7A0, 'M', u'Ï'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'Ï„'), + (0x1D7A4, 'M', u'Ï…'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'Ï€'), + (0x1D7BA, 'M', u'Ï'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'Ï„'), + (0x1D7BE, 'M', u'Ï…'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'Ï'), + (0x1D7C9, 'M', u'Ï€'), + (0x1D7CA, 'M', u'Ï'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'Ùˆ'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'Ø­'), + 
(0x1EE08, 'M', u'Ø·'), + (0x1EE09, 'M', u'ÙŠ'), + (0x1EE0A, 'M', u'Ùƒ'), + (0x1EE0B, 'M', u'Ù„'), + (0x1EE0C, 'M', u'Ù…'), + (0x1EE0D, 'M', u'Ù†'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'Ù'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'Ù‚'), + (0x1EE13, 'M', u'ر'), + (0x1EE14, 'M', u'Ø´'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'Ø«'), + (0x1EE17, 'M', u'Ø®'), + (0x1EE18, 'M', u'Ø°'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'Ù®'), + (0x1EE1D, 'M', u'Úº'), + (0x1EE1E, 'M', u'Ú¡'), + (0x1EE1F, 'M', u'Ù¯'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'Ù‡'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'Ø­'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ÙŠ'), + (0x1EE2A, 'M', u'Ùƒ'), + (0x1EE2B, 'M', u'Ù„'), + (0x1EE2C, 'M', u'Ù…'), + (0x1EE2D, 'M', u'Ù†'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'Ù'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'Ù‚'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'Ø´'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'Ø«'), + (0x1EE37, 'M', u'Ø®'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'Ø­'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ÙŠ'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'Ù„'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'Ù†'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'Ù‚'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'Ø´'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'Ø®'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'Úº'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'Ù¯'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'Ù‡'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'Ø­'), + (0x1EE68, 'M', u'Ø·'), + (0x1EE69, 'M', u'ÙŠ'), + (0x1EE6A, 'M', u'Ùƒ'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'Ù…'), + (0x1EE6D, 'M', u'Ù†'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'Ù'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'Ù‚'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'Ø´'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'Ø«'), + (0x1EE77, 'M', u'Ø®'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'Ù®'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'Ú¡'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'Ù‡'), + (0x1EE85, 'M', u'Ùˆ'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'Ø­'), + (0x1EE88, 'M', u'Ø·'), + (0x1EE89, 'M', u'ÙŠ'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'Ù„'), + (0x1EE8C, 'M', u'Ù…'), + (0x1EE8D, 'M', u'Ù†'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'Ù'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'Ù‚'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'Ø´'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'Ø«'), + (0x1EE97, 'M', u'Ø®'), + (0x1EE98, 'M', u'Ø°'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'Ùˆ'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'Ø­'), + (0x1EEA8, 'M', u'Ø·'), + (0x1EEA9, 'M', u'ÙŠ'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'Ù„'), + (0x1EEAC, 'M', u'Ù…'), + (0x1EEAD, 'M', u'Ù†'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + 
(0x1EEB0, 'M', u'Ù'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'Ù‚'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'Ø´'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'Ø«'), + (0x1EEB7, 'M', u'Ø®'), + (0x1EEB8, 'M', u'Ø°'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0BF, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0E0, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F19B, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ã»ã‹'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'å­—'), + (0x1F212, 'M', u'åŒ'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'ç„¡'), + (0x1F21B, 'M', u'æ–™'), + (0x1F21C, 'M', u'å‰'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'å†'), + (0x1F21F, 'M', u'æ–°'), + (0x1F220, 'M', u'åˆ'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'å¹'), + (0x1F226, 'M', u'æ¼”'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'æ•'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'éŠ'), + (0x1F22C, 'M', u'å·¦'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'å³'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'èµ°'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', 
u'ç¦'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'åˆ'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'å–¶'), + (0x1F23B, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + (0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔å‹ã€•'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'å¾—'), + (0x1F251, 'M', u'å¯'), + (0x1F252, 'X'), + (0x1F300, 'V'), + (0x1F321, 'X'), + (0x1F330, 'V'), + (0x1F336, 'X'), + (0x1F337, 'V'), + (0x1F37D, 'X'), + (0x1F380, 'V'), + (0x1F394, 'X'), + (0x1F3A0, 'V'), + (0x1F3C5, 'X'), + (0x1F3C6, 'V'), + (0x1F3CB, 'X'), + (0x1F3E0, 'V'), + (0x1F3F1, 'X'), + (0x1F400, 'V'), + (0x1F43F, 'X'), + (0x1F440, 'V'), + (0x1F441, 'X'), + (0x1F442, 'V'), + (0x1F4F8, 'X'), + (0x1F4F9, 'V'), + (0x1F4FD, 'X'), + (0x1F500, 'V'), + (0x1F53E, 'X'), + (0x1F540, 'V'), + (0x1F544, 'X'), + (0x1F550, 'V'), + (0x1F568, 'X'), + (0x1F5FB, 'V'), + (0x1F641, 'X'), + (0x1F645, 'V'), + (0x1F650, 'X'), + (0x1F680, 'V'), + (0x1F6C6, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'ä¹'), + (0x2F803, 'M', u'ð „¢'), + (0x2F804, 'M', u'ä½ '), + (0x2F805, 'M', u'ä¾®'), + (0x2F806, 'M', u'ä¾»'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'åº'), + (0x2F809, 'M', u'å‚™'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'åƒ'), + (0x2F80C, 'M', u'ã’ž'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'å…'), + (0x2F80F, 'M', u'å…”'), + (0x2F810, 'M', u'å…¤'), + (0x2F811, 'M', u'å…·'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'ã’¹'), + (0x2F814, 'M', u'å…§'), + (0x2F815, 'M', u'å†'), + (0x2F816, 'M', u'ð •‹'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'ã“Ÿ'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'å‰'), + (0x2F82D, 'M', u'å‘'), + (0x2F82E, 'M', u'åš'), + (0x2F82F, 'M', u'å³'), + (0x2F830, 'M', u'å½'), + (0x2F831, 'M', u'å¿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'ç°'), + (0x2F836, 'M', u'åŠ'), + (0x2F837, 'M', u'åŸ'), + (0x2F838, 'M', u'ð ­£'), + (0x2F839, 'M', u'å«'), + (0x2F83A, 'M', u'å±'), + (0x2F83B, 'M', u'å†'), + (0x2F83C, 'M', u'å’ž'), + (0x2F83D, 'M', u'å¸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'å’¢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'å”'), + (0x2F843, 'M', u'å•“'), + (0x2F844, 'M', u'å•£'), + (0x2F845, 'M', u'å–„'), + (0x2F847, 'M', u'å–™'), + (0x2F848, 'M', u'å–«'), + (0x2F849, 'M', u'å–³'), + (0x2F84A, 'M', u'å—‚'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'å™´'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'å '), + (0x2F855, 'M', u'åž‹'), + (0x2F856, 'M', u'å ²'), + (0x2F857, 'M', u'å ±'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', 
u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'ã›®'), + (0x2F868, 'X'), + (0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'å°†'), + (0x2F874, 'X'), + (0x2F875, 'M', u'å°¢'), + (0x2F876, 'M', u'ãž'), + (0x2F877, 'M', u'å± '), + (0x2F878, 'M', u'å±®'), + (0x2F879, 'M', u'å³€'), + (0x2F87A, 'M', u'å²'), + (0x2F87B, 'M', u'ð¡·¤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'ð¡·¦'), + (0x2F87E, 'M', u'åµ®'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'åµ¼'), + (0x2F881, 'M', u'å·¡'), + (0x2F882, 'M', u'å·¢'), + (0x2F883, 'M', u'ã ¯'), + (0x2F884, 'M', u'å·½'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'ã¡¢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'èˆ'), + (0x2F894, 'M', u'å¼¢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'å½¢'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'å¿'), + (0x2F89E, 'M', u'å¿—'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'æ‚'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'æ‚”'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'æ…ˆ'), + (0x2F8A7, 'M', u'æ…Œ'), + (0x2F8A8, 'M', u'æ…Ž'), + (0x2F8A9, 'M', u'æ…Œ'), + (0x2F8AA, 'M', u'æ…º'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'æˆ'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'æ‰'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'æ‹”'), + (0x2F8B7, 'M', u'æ'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'æ¨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'æ¤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'æ¢'), + (0x2F8C0, 'M', u'æ…'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'æ‘©'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'æ’'), + (0x2F8C6, 'M', u'æ‘·'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'æ•'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'æ—£'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'æš‘'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'æšœ'), + (0x2F8D6, 'M', u'è‚­'), + (0x2F8D7, 'M', u'ä™'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'æž'), + (0x2F8DC, 'M', u'æ“'), + (0x2F8DD, 'M', u'ð£ƒ'), + (0x2F8DE, 'M', u'ã­‰'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'æž…'), + (0x2F8E1, 'M', u'æ¡’'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'æ Ÿ'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'ã®'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'æ«›'), + (0x2F8EE, 'M', u'ã°˜'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + 
(0x2F8F1, 'M', u'æ­”'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'æ­²'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'æ®»'), + (0x2F8F7, 'M', u'ð£ª'), + (0x2F8F8, 'M', u'ð¡´‹'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + (0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'æ³'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'æ´–'), + (0x2F900, 'M', u'æ´¾'), + (0x2F901, 'M', u'æµ·'), + (0x2F902, 'M', u'æµ'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'æ´´'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'æ¹®'), + (0x2F90A, 'M', u'ã´³'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'æ·¹'), + (0x2F90F, 'M', u'æ½®'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'çŠ'), + (0x2F918, 'M', u'ç½'), + (0x2F919, 'M', u'ç·'), + (0x2F91A, 'M', u'ç‚­'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'ç……'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'ç‰'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'çº'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'ç‘œ'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'ç’…'), + (0x2F932, 'M', u'ç“Š'), + (0x2F933, 'M', u'ã¼›'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'ç•°'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'ç˜'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'ð¥„'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'ç›´'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'çŠ'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'çž‹'), + (0x2F94B, 'M', u'ä†'), + (0x2F94C, 'M', u'ä‚–'), + (0x2F94D, 'M', u'ð¥'), + (0x2F94E, 'M', u'ç¡Ž'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'ç¦'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'ç©€'), + (0x2F95A, 'M', u'ç©Š'), + (0x2F95B, 'M', u'ç©'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'ç³’'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'ç³£'), + (0x2F96A, 'M', u'ç´€'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'çµ£'), + (0x2F96D, 'M', u'äŒ'), + (0x2F96E, 'M', u'ç·‡'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'ç¹…'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'ä™'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'è '), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'è°'), + (0x2F980, 'M', u'ð£Ÿ'), + (0x2F981, 'M', u'ä•'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'ä‹'), 
+ (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'èˆ'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'ä‘«'), + (0x2F98F, 'M', u'芑'), + (0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'èŠ'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'è‹¥'), + (0x2F999, 'M', u'èŒ'), + (0x2F99A, 'M', u'è£'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'è§'), + (0x2F99F, 'M', u'è‘—'), + (0x2F9A0, 'M', u'è“'), + (0x2F9A1, 'M', u'èŠ'), + (0x2F9A2, 'M', u'èŒ'), + (0x2F9A3, 'M', u'èœ'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'è”–'), + (0x2F9AB, 'M', u'ð§Š'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'ä•'), + (0x2F9AF, 'M', u'ä•¡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'ä•«'), + (0x2F9B3, 'M', u'è™'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'èš©'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'è¹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'è«'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'è '), + (0x2F9C2, 'M', u'ä—¹'), + (0x2F9C3, 'M', u'è¡ '), + (0x2F9C4, 'M', u'è¡£'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'ã’»'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'äš¾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'è«­'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'è³'), + (0x2F9D6, 'M', u'è´›'), + (0x2F9D7, 'M', u'èµ·'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'ð  „'), + (0x2F9DA, 'M', u'è·‹'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'è·°'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'è»”'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'é‚”'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'é„‘'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'é„›'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'é‹—'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'é¹'), + (0x2F9EC, 'M', u'é•'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'é–‹'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'é–·'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'ð©……'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'ä©®'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'ð©Š'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'ð©’–'), + (0x2F9FE, 'M', u'é ‹'), + (0x2FA00, 'M', u'é ©'), + (0x2FA01, 'M', u'ð©–¶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'é±€'), + (0x2FA0C, 'M', u'é³½'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'ä³­'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 
'M', u'äµ–'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'é¼…'), + (0x2FA1A, 'M', u'é¼'), + (0x2FA1B, 'M', u'é¼–'), + (0x2FA1C, 'M', u'é¼»'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), +) diff --git a/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ca4775a --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,46 @@ +lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It +provides safe and convenient access to these libraries using the ElementTree +API. + +It extends the ElementTree API significantly to offer support for XPath, +RelaxNG, XML Schema, XSLT, C14N and much more. + +To contact the project, go to the `project home page +`_ or see our bug tracker at +https://launchpad.net/lxml + +In case you want to use the current in-development version of lxml, +you can get it from the github repository at +https://github.com/lxml/lxml . Note that this requires Cython to +build the sources, see the build instructions on the project home +page. To the same end, running ``easy_install lxml==dev`` will +install lxml from +https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have +an appropriate version of Cython installed. + + +After an official release of a new stable series, bug fixes may become +available at +https://github.com/lxml/lxml/tree/lxml-3.5 . +Running ``easy_install lxml==3.5bugfix`` will install +the unreleased branch state from +https://github.com/lxml/lxml/tarball/lxml-3.5#egg=lxml-3.5bugfix +as soon as a maintenance branch has been established. Note that this +requires Cython to be installed at an appropriate version for the build. + +3.5.0 (2015-11-13) +================== + +Bugs fixed +---------- + +* Unicode string results failed XPath queries in PyPy. + +* LP#1497051: HTML target parser failed to terminate on exceptions + and continued parsing instead. + +* Deprecated API usage in doctestcompare. + + + + diff --git a/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/METADATA b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/METADATA new file mode 100644 index 0000000..84053a8 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/METADATA @@ -0,0 +1,82 @@ +Metadata-Version: 2.0 +Name: lxml +Version: 3.5.0 +Summary: Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API. 
+Home-page: http://lxml.de/ +Author: lxml dev team +Author-email: lxml-dev@lxml.de +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: C +Classifier: Operating System :: OS Independent +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Topic :: Text Processing :: Markup :: XML +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: cssselect +Requires-Dist: cssselect (>=0.7); extra == 'cssselect' +Provides-Extra: html5 +Requires-Dist: html5lib; extra == 'html5' +Provides-Extra: htmlsoup +Requires-Dist: BeautifulSoup4; extra == 'htmlsoup' +Provides-Extra: source +Requires-Dist: Cython (>=0.20); extra == 'source' + +lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It +provides safe and convenient access to these libraries using the ElementTree +API. + +It extends the ElementTree API significantly to offer support for XPath, +RelaxNG, XML Schema, XSLT, C14N and much more. + +To contact the project, go to the `project home page +`_ or see our bug tracker at +https://launchpad.net/lxml + +In case you want to use the current in-development version of lxml, +you can get it from the github repository at +https://github.com/lxml/lxml . Note that this requires Cython to +build the sources, see the build instructions on the project home +page. To the same end, running ``easy_install lxml==dev`` will +install lxml from +https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have +an appropriate version of Cython installed. + + +After an official release of a new stable series, bug fixes may become +available at +https://github.com/lxml/lxml/tree/lxml-3.5 . +Running ``easy_install lxml==3.5bugfix`` will install +the unreleased branch state from +https://github.com/lxml/lxml/tarball/lxml-3.5#egg=lxml-3.5bugfix +as soon as a maintenance branch has been established. Note that this +requires Cython to be installed at an appropriate version for the build. + +3.5.0 (2015-11-13) +================== + +Bugs fixed +---------- + +* Unicode string results failed XPath queries in PyPy. + +* LP#1497051: HTML target parser failed to terminate on exceptions + and continued parsing instead. + +* Deprecated API usage in doctestcompare. 
+ + + + diff --git a/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/RECORD b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/RECORD new file mode 100644 index 0000000..bcc43fc --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/RECORD @@ -0,0 +1,84 @@ +lxml/ElementInclude.py,sha256=v2Zp-fy190Jvn2Qx7ArC3pjtFEqgQGIBr2J8H87nj04,7641 +lxml/__init__.py,sha256=lYLRn8M0_xdnN2bhEnZ2L7mlY5zM3gwz1kgLILLxC34,551 +lxml/_elementpath.py,sha256=FPpSgh-8KwZUzJQ0Ac2_L0l7i5HnVs5CaMriqDJAPZM,9490 +lxml/builder.py,sha256=2AuTtaN2F8noBWZ9eEfxOlVdztfLGYQGUtfK2uOu--g,8152 +lxml/cssselect.py,sha256=ADTqox2BUhZI_28K26Dnd-rPqvwL1A7KpXwDetXZLfA,3366 +lxml/doctestcompare.py,sha256=c1_02F7p5KE9epBcOJD0OA81v7ANrDyEr2P6A2irB5A,18387 +lxml/etree.cpython-35m-darwin.so,sha256=IocKvHJkuEGw-cExYguA82IQ8AG66v4jGPbPJ4u8zPY,2510084 +lxml/lxml.etree.h,sha256=t1SdZnhZroV_KadtXWt5LQhXg2PEV9pUx4zQ_hCf2pI,8902 +lxml/lxml.etree_api.h,sha256=qy4Y24d0Xi0D3CImAp5Rm2XrPyJW1IRgLa-5b4nPHHQ,17450 +lxml/objectify.cpython-35m-darwin.so,sha256=nBajYA1hh_bmAitZPCJlgLUkqKJ-D8Dxqv6uQmNfeuU,507108 +lxml/pyclasslookup.py,sha256=gLD1HM2HtITYYiGzjEOewSwbB7XkVx_NZv_quCt79Oc,92 +lxml/sax.py,sha256=LHiQ02sYvfKwecxcsRsoZjuH-O6FrQu2PvtK_b_12Mk,8531 +lxml/usedoctest.py,sha256=qRgZKQVcAZcl-zN0AIXVJnOsETUXz2nPXkxuzs1lGgk,230 +lxml/html/ElementSoup.py,sha256=9NQNksJmEr0M2DqzaKhXWdALLo0btwVwjgTHnkfGfvk,319 +lxml/html/__init__.py,sha256=UKxX64g9kNlHmpkS7sTo2tOWPMrhjF9wVqx_kRwiSLI,64571 +lxml/html/_diffcommand.py,sha256=FgOLayf6pz3qmN2EJM9aU6Zrvbi-ecGCPpKKd3jv4MI,2084 +lxml/html/_html5builder.py,sha256=cASxN0Tks3_vqCA_sXa1oCx_McyRL6VpuRLA1T-B58o,3246 +lxml/html/_setmixin.py,sha256=Rt1Pb_GFWp_5BD5GioZBaUPUZbVwPruDPJJMR7O7ntE,1111 +lxml/html/builder.py,sha256=1NuLqKPSaat75wCGufVuv1jIeBLuQCWsJVA_T2XjjUI,4310 +lxml/html/clean.py,sha256=2jHDo-j0eZXIJXbvQkog5XXDo_rtRDWHpeAsRHAsZzk,26084 +lxml/html/defs.py,sha256=fDYcHB4hJ8ncoGqfCIv9s6sZ9JYDEIRBqsNFBYEh2_g,4250 +lxml/html/diff.py,sha256=VwoIaJgvRWTouUmA7FYURwo_JlDce4MkkS0-g36-TVs,30500 +lxml/html/formfill.py,sha256=TcIlKqnGez5L6vfItobqiRlCS5bTaqglgDV5C_56VhM,9699 +lxml/html/html5parser.py,sha256=2MbS_4yTP-Tm8HvHiRm5xaGv9qLsmvS1RAD398--7Cs,6498 +lxml/html/soupparser.py,sha256=KMfefzgk5OLg_US0Rt2jHjJmWafj40I6OTDYV_WPvSE,10012 +lxml/html/usedoctest.py,sha256=tPlmVz4KK1GRKV5DJLrdVECeqsT9PlDzSqqTodVi5s0,249 +lxml/includes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lxml/includes/c14n.pxd,sha256=pGf910mVH9IdBb7r_aE-J59axIQcqFH4Sx_Tm0PA1m0,1123 +lxml/includes/config.pxd,sha256=H6Mrl8It21hzRI2hzMId9W48QqkYYkoLT4dniLNmdTw,96 +lxml/includes/dtdvalid.pxd,sha256=Rf2vRBbM4O1AOiIsUk_5M7pV3Dz309sS7Ccd2zGFHT0,671 +lxml/includes/etree_defs.h,sha256=cao5QR96gTItCTignEEoGoowC-sZS9vSl0IY7HnSMRk,13748 +lxml/includes/etreepublic.pxd,sha256=d6uN90usIgY_y00OYZqOn4xpk0yuLTmfxlgHlH0fI74,9985 +lxml/includes/htmlparser.pxd,sha256=Va2qbs5zVokERn57HbDY__CiBQOoCS4uI9wEfCnT6zk,2868 +lxml/includes/lxml-version.h,sha256=SUD-ow8oiO7gqnrbQ52G7GEQZrPrpfngegos-asSkjo,71 +lxml/includes/relaxng.pxd,sha256=12yapjqDZLF_HTlcuSXSoQpPGK1NU7fj7gzS1EF8kZw,2669 +lxml/includes/schematron.pxd,sha256=5_PUpLHTzzYZ_d-8d2OjKLdwtLIdOm7C20HFUAX8hD4,1640 +lxml/includes/tree.pxd,sha256=Ly-j3oXLo0jyS7nwtoDZKukjt5RydwnctGCFHamQjeA,19759 +lxml/includes/uri.pxd,sha256=5wPtpGU1JtdmpZMTzR8EswazihP3dxkns6Fgo9NWOt8,139 +lxml/includes/xinclude.pxd,sha256=onXD71LVdAbXjUj82_SDtSixNsNh8xbu6Nd9x0V3bmM,852 +lxml/includes/xmlerror.pxd,sha256=8OehYSACGPbrDoUp5arNf7QV_yZqIJOvX8K7wYgOibE,57916 
+lxml/includes/xmlparser.pxd,sha256=VG9WadjfcdHUZ3wcstG3iO_pnhDoA2OICAi_lg1C1MQ,10824 +lxml/includes/xmlschema.pxd,sha256=yYQFrIKAQ_feenENV24X2AZyBIYGBltRDm9qB7CYMww,1696 +lxml/includes/xpath.pxd,sha256=tKYAcwpbSRq8qrsZ2ISVYvEaLnCV9GadNC5o_f8Ua_g,5794 +lxml/includes/xslt.pxd,sha256=bAg5a44I0aqVcYZ6sBcsyXddTNiuMI7UjHWlO8FccyU,7631 +lxml/isoschematron/__init__.py,sha256=FzrpeBJets_w6BH3MW1VbwTfThS35XL_Hk0bGr-NcOQ,12408 +lxml/isoschematron/resources/rng/iso-schematron.rng,sha256=cFvf7ObbuHh52exAqMlTBlGu-5iuOH2pE0WERkdC1Tk,15572 +lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl,sha256=ObebsB8Wt-d3uIA_U5NU85TpnQ3PxPX38TdOAqosMac,3172 +lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl,sha256=QweRrIIM-zFcgg98GXA2CaWfIbgVE0XKEeYSfvv67A0,4563 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl,sha256=0SdZY7oqbDgbuIjB9NpUHLXbMmFHVkxIEUZwTa3iRMM,10917 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl,sha256=x42QJ-dxQ1waPzydsCoQnp2Xj15y53nW43O7BuoDRHk,39957 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl,sha256=Tr9BnO6pzjVWwhqJfm10UlvAy95EgfSCz2iMlrVGT6Q,2015 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl,sha256=ue8q_88X4e_jsJizo31GRNBxNhdxkEE9fY20oq0Iqwk,71764 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl,sha256=BBAdsVSi5zAzeGepuN6gS1saQINDqITXKplmmj4dTWg,20382 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt,sha256=AVTFPZpEKzuHr7OvQZmhaU3LvwKz06AJw8mT_pNh2yI,3144 +lxml-3.5.0.dist-info/DESCRIPTION.rst,sha256=EPfgejczVWfMqhdWp2Wuc8OMyGXAR6lrTYfmjuVWGUE,1541 +lxml-3.5.0.dist-info/METADATA,sha256=QGRDbFLBUsEeTGXmS_zcFC3iLI_dJfttkb8BIRYd590,3018 +lxml-3.5.0.dist-info/RECORD,, +lxml-3.5.0.dist-info/WHEEL,sha256=Er7DBTU_C2g_rTGCxcwhCKegQSKoYLj1ncusWiwlKwM,111 +lxml-3.5.0.dist-info/metadata.json,sha256=ByuBabP24VYY3OZKLqS-7huMMWdAoC6H2LUI9RuabHE,1490 +lxml-3.5.0.dist-info/top_level.txt,sha256=NjD988wqaKq512nshNdLt-uDxsjkp4Bh51m6N-dhUrk,5 +lxml/__pycache__/_elementpath.cpython-35.pyc,, +lxml/__pycache__/pyclasslookup.cpython-35.pyc,, +lxml/html/__pycache__/usedoctest.cpython-35.pyc,, +lxml/__pycache__/sax.cpython-35.pyc,, +lxml/html/__pycache__/_setmixin.cpython-35.pyc,, +lxml/html/__pycache__/defs.cpython-35.pyc,, +lxml/__pycache__/ElementInclude.cpython-35.pyc,, +lxml/__pycache__/builder.cpython-35.pyc,, +lxml/html/__pycache__/formfill.cpython-35.pyc,, +lxml/html/__pycache__/builder.cpython-35.pyc,, +lxml/html/__pycache__/html5parser.cpython-35.pyc,, +lxml/html/__pycache__/clean.cpython-35.pyc,, +lxml/html/__pycache__/soupparser.cpython-35.pyc,, +lxml/isoschematron/__pycache__/__init__.cpython-35.pyc,, +lxml/__pycache__/cssselect.cpython-35.pyc,, +lxml/__pycache__/usedoctest.cpython-35.pyc,, +lxml/__pycache__/doctestcompare.cpython-35.pyc,, +lxml/includes/__pycache__/__init__.cpython-35.pyc,, +lxml/html/__pycache__/_diffcommand.cpython-35.pyc,, +lxml/html/__pycache__/diff.cpython-35.pyc,, +lxml/html/__pycache__/ElementSoup.cpython-35.pyc,, +lxml/html/__pycache__/_html5builder.cpython-35.pyc,, +lxml/__pycache__/__init__.cpython-35.pyc,, +lxml/html/__pycache__/__init__.cpython-35.pyc,, diff --git a/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/WHEEL b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/WHEEL new file mode 100644 index 0000000..b37ab74 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: 
false +Tag: cp35-cp35m-macosx_10_11_x86_64 + diff --git a/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/metadata.json b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/metadata.json new file mode 100644 index 0000000..e00ea39 --- /dev/null +++ b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API.", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: BSD License", "Programming Language :: Cython", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: C", "Operating System :: OS Independent", "Topic :: Text Processing :: Markup :: HTML", "Topic :: Text Processing :: Markup :: XML", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"project_urls": {"Home": "http://lxml.de/"}, "contacts": [{"email": "lxml-dev@lxml.de", "name": "lxml dev team", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "metadata_version": "2.0", "name": "lxml", "extras": ["cssselect", "html5", "htmlsoup", "source"], "run_requires": [{"requires": ["BeautifulSoup4"], "extra": "htmlsoup"}, {"requires": ["Cython (>=0.20)"], "extra": "source"}, {"requires": ["cssselect (>=0.7)"], "extra": "cssselect"}, {"requires": ["html5lib"], "extra": "html5"}], "version": "3.5.0"} \ No newline at end of file diff --git a/Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/top_level.txt b/Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/top_level.txt similarity index 100% rename from Darwin/lib/python3.4/site-packages/lxml-3.3.6-py3.4.egg-info/top_level.txt rename to Darwin/lib/python3.5/site-packages/lxml-3.5.0.dist-info/top_level.txt diff --git a/Darwin/lib/python3.4/site-packages/lxml/ElementInclude.py b/Darwin/lib/python3.5/site-packages/lxml/ElementInclude.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/lxml/ElementInclude.py rename to Darwin/lib/python3.5/site-packages/lxml/ElementInclude.py diff --git a/Darwin/lib/python3.4/site-packages/lxml/__init__.py b/Darwin/lib/python3.5/site-packages/lxml/__init__.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/lxml/__init__.py rename to Darwin/lib/python3.5/site-packages/lxml/__init__.py diff --git a/Darwin/lib/python3.4/site-packages/lxml/_elementpath.py b/Darwin/lib/python3.5/site-packages/lxml/_elementpath.py similarity index 95% rename from Darwin/lib/python3.4/site-packages/lxml/_elementpath.py rename to Darwin/lib/python3.5/site-packages/lxml/_elementpath.py index bc9176e..e560de4 100644 --- a/Darwin/lib/python3.4/site-packages/lxml/_elementpath.py +++ b/Darwin/lib/python3.5/site-packages/lxml/_elementpath.py @@ -220,18 +220,23 @@ ops = { "..": prepare_parent, "//": prepare_descendant, "[": prepare_predicate, - } +} -_cache = {} # -------------------------------------------------------------------- +_cache = {} + + def _build_path_iterator(path, namespaces): - # compile selector pattern + """compile selector pattern""" + if namespaces and (None 
in namespaces or '' in namespaces): + raise ValueError("empty namespace prefix is not supported in ElementPath") if path[-1:] == "/": - path = path + "*" # implicit all (FIXME: keep this?) + path += "*" # implicit all (FIXME: keep this?) + cache_key = (path, namespaces and tuple(sorted(namespaces.items())) or None) try: - return _cache[(path, namespaces and tuple(sorted(namespaces.items())) or None)] + return _cache[cache_key] except KeyError: pass if len(_cache) > 100: @@ -261,9 +266,10 @@ def _build_path_iterator(path, namespaces): token = _next() except StopIteration: break - _cache[path] = selector + _cache[cache_key] = selector return selector + ## # Iterate over the matching nodes @@ -274,6 +280,7 @@ def iterfind(elem, path, namespaces=None): result = select(result) return result + ## # Find first matching object. @@ -289,12 +296,14 @@ def find(elem, path, namespaces=None): except StopIteration: return None + ## # Find all matching objects. def findall(elem, path, namespaces=None): return list(iterfind(elem, path, namespaces)) + ## # Find text for first matching object. diff --git a/Darwin/lib/python3.4/site-packages/lxml/builder.py b/Darwin/lib/python3.5/site-packages/lxml/builder.py similarity index 96% rename from Darwin/lib/python3.4/site-packages/lxml/builder.py rename to Darwin/lib/python3.5/site-packages/lxml/builder.py index ad61a80..0bb3eff 100644 --- a/Darwin/lib/python3.4/site-packages/lxml/builder.py +++ b/Darwin/lib/python3.5/site-packages/lxml/builder.py @@ -177,16 +177,24 @@ class ElementMaker(object): typemap = typemap.copy() else: typemap = {} - + def add_text(elem, item): try: elem[-1].tail = (elem[-1].tail or "") + item except IndexError: elem.text = (elem.text or "") + item + + def add_cdata(elem, cdata): + if elem.text: + raise ValueError("Can't add a CDATA section. Element already has some text: %r" % elem.text) + elem.text = cdata + if str not in typemap: typemap[str] = add_text if unicode not in typemap: typemap[unicode] = add_text + if ET.CDATA not in typemap: + typemap[ET.CDATA] = add_cdata def add_dict(elem, item): attrib = elem.attrib diff --git a/Darwin/lib/python3.4/site-packages/lxml/cssselect.py b/Darwin/lib/python3.5/site-packages/lxml/cssselect.py similarity index 90% rename from Darwin/lib/python3.4/site-packages/lxml/cssselect.py rename to Darwin/lib/python3.5/site-packages/lxml/cssselect.py index e8effaa..586a142 100644 --- a/Darwin/lib/python3.4/site-packages/lxml/cssselect.py +++ b/Darwin/lib/python3.5/site-packages/lxml/cssselect.py @@ -6,17 +6,16 @@ See the `CSSSelector` class for details. This is a thin wrapper around cssselect 0.7 or later. """ -import sys -from lxml import etree +from __future__ import absolute_import -## Work-around the lack of absolute import in Python 2.4 -#from __future__ import absolute_import -#from cssselect import ... +from . import etree try: - external_cssselect = __import__('cssselect') + import cssselect as external_cssselect except ImportError: - raise ImportError('cssselect seems not to be installed. ' - 'See http://packages.python.org/cssselect/') + raise ImportError( + 'cssselect does not seem to be installed. 
' + 'See http://packages.python.org/cssselect/') + SelectorSyntaxError = external_cssselect.SelectorSyntaxError ExpressionError = external_cssselect.ExpressionError diff --git a/Darwin/lib/python3.4/site-packages/lxml/doctestcompare.py b/Darwin/lib/python3.5/site-packages/lxml/doctestcompare.py similarity index 98% rename from Darwin/lib/python3.4/site-packages/lxml/doctestcompare.py rename to Darwin/lib/python3.5/site-packages/lxml/doctestcompare.py index 3cd5ce4..eb7c7f9 100644 --- a/Darwin/lib/python3.4/site-packages/lxml/doctestcompare.py +++ b/Darwin/lib/python3.5/site-packages/lxml/doctestcompare.py @@ -29,8 +29,8 @@ attribute matches any and all attributes. When a match fails, the reformatted example and gotten text is displayed (indented), and a rough diff-like output is given. Anything -marked with ``-`` is in the output but wasn't supposed to be, and -similarly ``+`` means its in the example but wasn't in the output. +marked with ``+`` is in the output but wasn't supposed to be, and +similarly ``-`` means its in the example but wasn't in the output. You can disable parsing on one line with ``# doctest:+NOPARSE_MARKUP`` """ @@ -39,7 +39,10 @@ from lxml import etree import sys import re import doctest -import cgi +try: + from html import escape as html_escape +except ImportError: + from cgi import escape as html_escape __all__ = ['PARSE_HTML', 'PARSE_XML', 'NOPARSE_MARKUP', 'LXMLOutputChecker', 'LHTMLOutputChecker', 'install', 'temp_install'] @@ -265,7 +268,7 @@ class LXMLOutputChecker(OutputChecker): return '' if strip: text = text.strip() - return cgi.escape(text, 1) + return html_escape(text, 1) def format_tag(self, el): attrs = [] @@ -306,10 +309,10 @@ class LXMLOutputChecker(OutputChecker): got_children = list(got) while want_children or got_children: if not want_children: - parts.append(self.format_doc(got_children.pop(0), html, indent+2, '-')) + parts.append(self.format_doc(got_children.pop(0), html, indent+2, '+')) continue if not got_children: - parts.append(self.format_doc(want_children.pop(0), html, indent+2, '+')) + parts.append(self.format_doc(want_children.pop(0), html, indent+2, '-')) continue parts.append(self.collect_diff( want_children.pop(0), got_children.pop(0), html, indent+2)) @@ -331,7 +334,7 @@ class LXMLOutputChecker(OutputChecker): any = want.tag == 'any' or 'any' in want.attrib for name, value in sorted(got.attrib.items()): if name not in want.attrib and not any: - attrs.append('-%s="%s"' % (name, self.format_text(value, False))) + attrs.append('+%s="%s"' % (name, self.format_text(value, False))) else: if name in want.attrib: text = self.collect_diff_text(want.attrib[name], value, False) @@ -342,7 +345,7 @@ class LXMLOutputChecker(OutputChecker): for name, value in sorted(want.attrib.items()): if name in got.attrib: continue - attrs.append('+%s="%s"' % (name, self.format_text(value, False))) + attrs.append('-%s="%s"' % (name, self.format_text(value, False))) if attrs: tag = '<%s %s>' % (tag, ' '.join(attrs)) else: diff --git a/Darwin/lib/python3.5/site-packages/lxml/etree.cpython-35m-darwin.so b/Darwin/lib/python3.5/site-packages/lxml/etree.cpython-35m-darwin.so new file mode 100755 index 0000000..5b061d4 Binary files /dev/null and b/Darwin/lib/python3.5/site-packages/lxml/etree.cpython-35m-darwin.so differ diff --git a/Darwin/lib/python3.4/site-packages/lxml/html/ElementSoup.py b/Darwin/lib/python3.5/site-packages/lxml/html/ElementSoup.py similarity index 100% rename from Darwin/lib/python3.4/site-packages/lxml/html/ElementSoup.py rename to 
Darwin/lib/python3.5/site-packages/lxml/html/ElementSoup.py diff --git a/Darwin/lib/python3.4/site-packages/lxml/html/__init__.py b/Darwin/lib/python3.5/site-packages/lxml/html/__init__.py similarity index 85% rename from Darwin/lib/python3.4/site-packages/lxml/html/__init__.py rename to Darwin/lib/python3.5/site-packages/lxml/html/__init__.py index fe28c3b..1a1d7d2 100644 --- a/Darwin/lib/python3.4/site-packages/lxml/html/__init__.py +++ b/Darwin/lib/python3.5/site-packages/lxml/html/__init__.py @@ -31,32 +31,36 @@ """The ``lxml.html`` tool set for HTML handling. """ +from __future__ import absolute_import + +__all__ = [ + 'document_fromstring', 'fragment_fromstring', 'fragments_fromstring', 'fromstring', + 'tostring', 'Element', 'defs', 'open_in_browser', 'submit_form', + 'find_rel_links', 'find_class', 'make_links_absolute', + 'resolve_base_href', 'iterlinks', 'rewrite_links', 'open_in_browser', 'parse'] + + +import copy import sys import re +from functools import partial + +try: + # while unnecessary, importing from 'collections.abc' is the right way to do it + from collections.abc import MutableMapping, MutableSet +except ImportError: + from collections import MutableMapping, MutableSet + +from .. import etree +from . import defs +from ._setmixin import SetMixin + try: from urlparse import urljoin except ImportError: # Python 3 from urllib.parse import urljoin -import copy -from lxml import etree -from lxml.html import defs -from lxml.html._setmixin import SetMixin -try: - from collections import MutableMapping as DictMixin -except ImportError: - # Python < 2.6 - from UserDict import DictMixin -try: - set -except NameError: - # Python 2.3 - from sets import Set as set -try: - bytes -except NameError: - # Python < 2.6 - bytes = str + try: unicode except NameError: @@ -68,21 +72,16 @@ except NameError: # Python 3 basestring = (str, bytes) + def __fix_docstring(s): if not s: return s - import sys if sys.version_info[0] >= 3: sub = re.compile(r"^(\s*)u'", re.M).sub else: sub = re.compile(r"^(\s*)b'", re.M).sub return sub(r"\1'", s) -__all__ = [ - 'document_fromstring', 'fragment_fromstring', 'fragments_fromstring', 'fromstring', - 'tostring', 'Element', 'defs', 'open_in_browser', 'submit_form', - 'find_rel_links', 'find_class', 'make_links_absolute', - 'resolve_base_href', 'iterlinks', 'rewrite_links', 'open_in_browser', 'parse'] XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml" @@ -96,11 +95,14 @@ _forms_xpath = etree.XPath("descendant-or-self::form|descendant-or-self::x:form" _class_xpath = etree.XPath("descendant-or-self::*[@class and contains(concat(' ', normalize-space(@class), ' '), concat(' ', $class_name, ' '))]") _id_xpath = etree.XPath("descendant-or-self::*[@id=$id]") _collect_string_content = etree.XPath("string()") -_css_url_re = re.compile(r'url\(('+'["][^"]*["]|'+"['][^']*[']|"+r'[^)]*)\)', re.I) -_css_import_re = re.compile(r'@import "(.*?)"') +_iter_css_urls = re.compile(r'url\(('+'["][^"]*["]|'+"['][^']*[']|"+r'[^)]*)\)', re.I).finditer +_iter_css_imports = re.compile(r'@import "(.*?)"').finditer _label_xpath = etree.XPath("//label[@for=$id]|//x:label[@for=$id]", namespaces={'x':XHTML_NAMESPACE}) _archive_re = re.compile(r'[^ ]+') +_parse_meta_refresh_url = re.compile( + r'[^;=]*;\s*(?:url\s*=\s*)?(?P.*)$', re.I).search + def _unquote_match(s, pos): if s[:1] == '"' and s[-1:] == '"' or s[:1] == "'" and s[-1:] == "'": @@ -108,6 +110,7 @@ def _unquote_match(s, pos): else: return s,pos + def _transform_result(typ, result): """Convert the result back into the input type. 
""" @@ -118,14 +121,141 @@ def _transform_result(typ, result): else: return result + def _nons(tag): if isinstance(tag, basestring): if tag[0] == '{' and tag[1:len(XHTML_NAMESPACE)+1] == XHTML_NAMESPACE: return tag.split('}')[-1] return tag + +class Classes(MutableSet): + """Provides access to an element's class attribute as a set-like collection. + Usage:: + + >>> el = fromstring('
<p class="hidden large">Text</p>
') + >>> classes = el.classes # or: classes = Classes(el.attrib) + >>> classes |= ['block', 'paragraph'] + >>> el.get('class') + 'hidden large block paragraph' + >>> classes.toggle('hidden') + False + >>> el.get('class') + 'large block paragraph' + >>> classes -= ('some', 'classes', 'block') + >>> el.get('class') + 'large paragraph' + """ + def __init__(self, attributes): + self._attributes = attributes + self._get_class_value = partial(attributes.get, 'class', '') + + def add(self, value): + """ + Add a class. + + This has no effect if the class is already present. + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = self._get_class_value().split() + if value in classes: + return + classes.append(value) + self._attributes['class'] = ' '.join(classes) + + def discard(self, value): + """ + Remove a class if it is currently present. + + If the class is not present, do nothing. + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = [name for name in self._get_class_value().split() + if name != value] + if classes: + self._attributes['class'] = ' '.join(classes) + elif 'class' in self._attributes: + del self._attributes['class'] + + def remove(self, value): + """ + Remove a class; it must currently be present. + + If the class is not present, raise a KeyError. + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + super(Classes, self).remove(value) + + def __contains__(self, name): + classes = self._get_class_value() + return name in classes and name in classes.split() + + def __iter__(self): + return iter(self._get_class_value().split()) + + def __len__(self): + return len(self._get_class_value().split()) + + # non-standard methods + + def update(self, values): + """ + Add all names from 'values'. + """ + classes = self._get_class_value().split() + extended = False + for value in values: + if value not in classes: + classes.append(value) + extended = True + if extended: + self._attributes['class'] = ' '.join(classes) + + def toggle(self, value): + """ + Add a class name if it isn't there yet, or remove it if it exists. + + Returns true if the class was added (and is now enabled) and + false if it was removed (and is now disabled). + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = self._get_class_value().split() + try: + classes.remove(value) + enabled = False + except ValueError: + classes.append(value) + enabled = True + if classes: + self._attributes['class'] = ' '.join(classes) + else: + del self._attributes['class'] + return enabled + + class HtmlMixin(object): + @property + def classes(self): + """ + A set-like wrapper around the 'class' attribute. + """ + return Classes(self.attrib) + + @classes.setter + def classes(self, classes): + assert isinstance(classes, Classes) # only allow "el.classes |= ..." etc. + value = classes._get_class_value() + if value: + self.set('class', value) + elif self.get('class') is not None: + del self.attrib['class'] + + @property def base_url(self): """ Returns the base URL, given when the page was parsed. @@ -134,32 +264,32 @@ class HtmlMixin(object): absolute URLs. 
""" return self.getroottree().docinfo.URL - base_url = property(base_url, doc=base_url.__doc__) + @property def forms(self): """ Return a list of all the forms """ return _forms_xpath(self) - forms = property(forms, doc=forms.__doc__) + @property def body(self): """ Return the element. Can be called from a child element to get the document's head. """ return self.xpath('//body|//x:body', namespaces={'x':XHTML_NAMESPACE})[0] - body = property(body, doc=body.__doc__) + @property def head(self): """ Returns the element. Can be called from a child element to get the document's head. """ return self.xpath('//head|//x:head', namespaces={'x':XHTML_NAMESPACE})[0] - head = property(head, doc=head.__doc__) - def _label__get(self): + @property + def label(self): """ Get or set any