Switch to python3

parent 531041e89a
commit 9ba4b6a91a
5286 changed files with 677347 additions and 576888 deletions
4
Shared/lib/python3.4/site-packages/tornado/test/README
Normal file
@@ -0,0 +1,4 @@
Test coverage is almost non-existent, but it's a start. Be sure to
set PYTHONPATH appropriately (generally to the root directory of your
tornado checkout) when running tests to make sure you're getting the
version of the tornado package that you expect.
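For example, from the root of a tornado checkout, an invocation along these lines works (the path is illustrative; tornado.test.runtests is the runner module imported by the shim in the next file):

    PYTHONPATH=/path/to/tornado python -m tornado.test.runtests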
14
Shared/lib/python3.4/site-packages/tornado/test/__main__.py
Normal file
@@ -0,0 +1,14 @@
"""Shim to allow python -m tornado.test.

This only works in python 2.7+.
"""
from __future__ import absolute_import, division, print_function, with_statement

from tornado.test.runtests import all, main

# tornado.testing.main autodiscovery relies on 'all' being present in
# the main module, so import it here even though it is not used directly.
# The following line prevents a pyflakes warning.
all = all

main()
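With this shim in place the suite can also be started as python -m tornado.test (on Python 2.7+, per the docstring), which simply calls the main() imported from tornado.test.runtests; for instance:

    python -m tornado.test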
451
Shared/lib/python3.4/site-packages/tornado/test/auth_test.py
Normal file
@@ -0,0 +1,451 @@
|
|||
# These tests do not currently do much to verify the correct implementation
|
||||
# of the openid/oauth protocols; they just exercise the major code paths
|
||||
# and ensure that it doesn't blow up (e.g. with unicode/bytes issues in
|
||||
# python 3)
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin, TwitterMixin, GoogleMixin, AuthError
|
||||
from tornado.concurrent import Future
|
||||
from tornado.escape import json_decode
|
||||
from tornado import gen
|
||||
from tornado.log import gen_log
|
||||
from tornado.testing import AsyncHTTPTestCase, ExpectLog
|
||||
from tornado.util import u
|
||||
from tornado.web import RequestHandler, Application, asynchronous, HTTPError
|
||||
|
||||
|
||||
class OpenIdClientLoginHandler(RequestHandler, OpenIdMixin):
|
||||
def initialize(self, test):
|
||||
self._OPENID_ENDPOINT = test.get_url('/openid/server/authenticate')
|
||||
|
||||
@asynchronous
|
||||
def get(self):
|
||||
if self.get_argument('openid.mode', None):
|
||||
self.get_authenticated_user(
|
||||
self.on_user, http_client=self.settings['http_client'])
|
||||
return
|
||||
res = self.authenticate_redirect()
|
||||
assert isinstance(res, Future)
|
||||
assert res.done()
|
||||
|
||||
def on_user(self, user):
|
||||
if user is None:
|
||||
raise Exception("user is None")
|
||||
self.finish(user)
|
||||
|
||||
|
||||
class OpenIdServerAuthenticateHandler(RequestHandler):
|
||||
def post(self):
|
||||
if self.get_argument('openid.mode') != 'check_authentication':
|
||||
raise Exception("incorrect openid.mode %r")
|
||||
self.write('is_valid:true')
|
||||
|
||||
|
||||
class OAuth1ClientLoginHandler(RequestHandler, OAuthMixin):
|
||||
def initialize(self, test, version):
|
||||
self._OAUTH_VERSION = version
|
||||
self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token')
|
||||
self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize')
|
||||
self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/oauth1/server/access_token')
|
||||
|
||||
def _oauth_consumer_token(self):
|
||||
return dict(key='asdf', secret='qwer')
|
||||
|
||||
@asynchronous
|
||||
def get(self):
|
||||
if self.get_argument('oauth_token', None):
|
||||
self.get_authenticated_user(
|
||||
self.on_user, http_client=self.settings['http_client'])
|
||||
return
|
||||
res = self.authorize_redirect(http_client=self.settings['http_client'])
|
||||
assert isinstance(res, Future)
|
||||
|
||||
def on_user(self, user):
|
||||
if user is None:
|
||||
raise Exception("user is None")
|
||||
self.finish(user)
|
||||
|
||||
def _oauth_get_user(self, access_token, callback):
|
||||
if self.get_argument('fail_in_get_user', None):
|
||||
raise Exception("failing in get_user")
|
||||
if access_token != dict(key='uiop', secret='5678'):
|
||||
raise Exception("incorrect access token %r" % access_token)
|
||||
callback(dict(email='foo@example.com'))
|
||||
|
||||
|
||||
class OAuth1ClientLoginCoroutineHandler(OAuth1ClientLoginHandler):
|
||||
"""Replaces OAuth1ClientLoginCoroutineHandler's get() with a coroutine."""
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
if self.get_argument('oauth_token', None):
|
||||
# Ensure that any exceptions are set on the returned Future,
|
||||
# not simply thrown into the surrounding StackContext.
|
||||
try:
|
||||
yield self.get_authenticated_user()
|
||||
except Exception as e:
|
||||
self.set_status(503)
|
||||
self.write("got exception: %s" % e)
|
||||
else:
|
||||
yield self.authorize_redirect()
|
||||
|
||||
|
||||
class OAuth1ClientRequestParametersHandler(RequestHandler, OAuthMixin):
|
||||
def initialize(self, version):
|
||||
self._OAUTH_VERSION = version
|
||||
|
||||
def _oauth_consumer_token(self):
|
||||
return dict(key='asdf', secret='qwer')
|
||||
|
||||
def get(self):
|
||||
params = self._oauth_request_parameters(
|
||||
'http://www.example.com/api/asdf',
|
||||
dict(key='uiop', secret='5678'),
|
||||
parameters=dict(foo='bar'))
|
||||
self.write(params)
|
||||
|
||||
|
||||
class OAuth1ServerRequestTokenHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write('oauth_token=zxcv&oauth_token_secret=1234')
|
||||
|
||||
|
||||
class OAuth1ServerAccessTokenHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write('oauth_token=uiop&oauth_token_secret=5678')
|
||||
|
||||
|
||||
class OAuth2ClientLoginHandler(RequestHandler, OAuth2Mixin):
|
||||
def initialize(self, test):
|
||||
self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth2/server/authorize')
|
||||
|
||||
def get(self):
|
||||
res = self.authorize_redirect()
|
||||
assert isinstance(res, Future)
|
||||
assert res.done()
|
||||
|
||||
|
||||
class TwitterClientHandler(RequestHandler, TwitterMixin):
|
||||
def initialize(self, test):
|
||||
self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token')
|
||||
self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/twitter/server/access_token')
|
||||
self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize')
|
||||
self._TWITTER_BASE_URL = test.get_url('/twitter/api')
|
||||
|
||||
def get_auth_http_client(self):
|
||||
return self.settings['http_client']
|
||||
|
||||
|
||||
class TwitterClientLoginHandler(TwitterClientHandler):
|
||||
@asynchronous
|
||||
def get(self):
|
||||
if self.get_argument("oauth_token", None):
|
||||
self.get_authenticated_user(self.on_user)
|
||||
return
|
||||
self.authorize_redirect()
|
||||
|
||||
def on_user(self, user):
|
||||
if user is None:
|
||||
raise Exception("user is None")
|
||||
self.finish(user)
|
||||
|
||||
|
||||
class TwitterClientLoginGenEngineHandler(TwitterClientHandler):
|
||||
@asynchronous
|
||||
@gen.engine
|
||||
def get(self):
|
||||
if self.get_argument("oauth_token", None):
|
||||
user = yield self.get_authenticated_user()
|
||||
self.finish(user)
|
||||
else:
|
||||
# Old style: with @gen.engine we can ignore the Future from
|
||||
# authorize_redirect.
|
||||
self.authorize_redirect()
|
||||
|
||||
|
||||
class TwitterClientLoginGenCoroutineHandler(TwitterClientHandler):
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
if self.get_argument("oauth_token", None):
|
||||
user = yield self.get_authenticated_user()
|
||||
self.finish(user)
|
||||
else:
|
||||
# New style: with @gen.coroutine the result must be yielded
|
||||
# or else the request will be auto-finished too soon.
|
||||
yield self.authorize_redirect()
|
||||
|
||||
|
||||
class TwitterClientShowUserHandler(TwitterClientHandler):
|
||||
@asynchronous
|
||||
@gen.engine
|
||||
def get(self):
|
||||
# TODO: would be nice to go through the login flow instead of
|
||||
# cheating with a hard-coded access token.
|
||||
response = yield gen.Task(self.twitter_request,
|
||||
'/users/show/%s' % self.get_argument('name'),
|
||||
access_token=dict(key='hjkl', secret='vbnm'))
|
||||
if response is None:
|
||||
self.set_status(500)
|
||||
self.finish('error from twitter request')
|
||||
else:
|
||||
self.finish(response)
|
||||
|
||||
|
||||
class TwitterClientShowUserFutureHandler(TwitterClientHandler):
|
||||
@asynchronous
|
||||
@gen.engine
|
||||
def get(self):
|
||||
try:
|
||||
response = yield self.twitter_request(
|
||||
'/users/show/%s' % self.get_argument('name'),
|
||||
access_token=dict(key='hjkl', secret='vbnm'))
|
||||
except AuthError as e:
|
||||
self.set_status(500)
|
||||
self.finish(str(e))
|
||||
return
|
||||
assert response is not None
|
||||
self.finish(response)
|
||||
|
||||
|
||||
class TwitterServerAccessTokenHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write('oauth_token=hjkl&oauth_token_secret=vbnm&screen_name=foo')
|
||||
|
||||
|
||||
class TwitterServerShowUserHandler(RequestHandler):
|
||||
def get(self, screen_name):
|
||||
if screen_name == 'error':
|
||||
raise HTTPError(500)
|
||||
assert 'oauth_nonce' in self.request.arguments
|
||||
assert 'oauth_timestamp' in self.request.arguments
|
||||
assert 'oauth_signature' in self.request.arguments
|
||||
assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key'
|
||||
assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1'
|
||||
assert self.get_argument('oauth_version') == '1.0'
|
||||
assert self.get_argument('oauth_token') == 'hjkl'
|
||||
self.write(dict(screen_name=screen_name, name=screen_name.capitalize()))
|
||||
|
||||
|
||||
class TwitterServerVerifyCredentialsHandler(RequestHandler):
|
||||
def get(self):
|
||||
assert 'oauth_nonce' in self.request.arguments
|
||||
assert 'oauth_timestamp' in self.request.arguments
|
||||
assert 'oauth_signature' in self.request.arguments
|
||||
assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key'
|
||||
assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1'
|
||||
assert self.get_argument('oauth_version') == '1.0'
|
||||
assert self.get_argument('oauth_token') == 'hjkl'
|
||||
self.write(dict(screen_name='foo', name='Foo'))
|
||||
|
||||
|
||||
class GoogleOpenIdClientLoginHandler(RequestHandler, GoogleMixin):
|
||||
def initialize(self, test):
|
||||
self._OPENID_ENDPOINT = test.get_url('/openid/server/authenticate')
|
||||
|
||||
@asynchronous
|
||||
def get(self):
|
||||
if self.get_argument("openid.mode", None):
|
||||
self.get_authenticated_user(self.on_user)
|
||||
return
|
||||
res = self.authenticate_redirect()
|
||||
assert isinstance(res, Future)
|
||||
assert res.done()
|
||||
|
||||
def on_user(self, user):
|
||||
if user is None:
|
||||
raise Exception("user is None")
|
||||
self.finish(user)
|
||||
|
||||
def get_auth_http_client(self):
|
||||
return self.settings['http_client']
|
||||
|
||||
|
||||
class AuthTest(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
return Application(
|
||||
[
|
||||
# test endpoints
|
||||
('/openid/client/login', OpenIdClientLoginHandler, dict(test=self)),
|
||||
('/oauth10/client/login', OAuth1ClientLoginHandler,
|
||||
dict(test=self, version='1.0')),
|
||||
('/oauth10/client/request_params',
|
||||
OAuth1ClientRequestParametersHandler,
|
||||
dict(version='1.0')),
|
||||
('/oauth10a/client/login', OAuth1ClientLoginHandler,
|
||||
dict(test=self, version='1.0a')),
|
||||
('/oauth10a/client/login_coroutine',
|
||||
OAuth1ClientLoginCoroutineHandler,
|
||||
dict(test=self, version='1.0a')),
|
||||
('/oauth10a/client/request_params',
|
||||
OAuth1ClientRequestParametersHandler,
|
||||
dict(version='1.0a')),
|
||||
('/oauth2/client/login', OAuth2ClientLoginHandler, dict(test=self)),
|
||||
|
||||
('/twitter/client/login', TwitterClientLoginHandler, dict(test=self)),
|
||||
('/twitter/client/login_gen_engine', TwitterClientLoginGenEngineHandler, dict(test=self)),
|
||||
('/twitter/client/login_gen_coroutine', TwitterClientLoginGenCoroutineHandler, dict(test=self)),
|
||||
('/twitter/client/show_user', TwitterClientShowUserHandler, dict(test=self)),
|
||||
('/twitter/client/show_user_future', TwitterClientShowUserFutureHandler, dict(test=self)),
|
||||
('/google/client/openid_login', GoogleOpenIdClientLoginHandler, dict(test=self)),
|
||||
|
||||
# simulated servers
|
||||
('/openid/server/authenticate', OpenIdServerAuthenticateHandler),
|
||||
('/oauth1/server/request_token', OAuth1ServerRequestTokenHandler),
|
||||
('/oauth1/server/access_token', OAuth1ServerAccessTokenHandler),
|
||||
|
||||
('/twitter/server/access_token', TwitterServerAccessTokenHandler),
|
||||
(r'/twitter/api/users/show/(.*)\.json', TwitterServerShowUserHandler),
|
||||
(r'/twitter/api/account/verify_credentials\.json', TwitterServerVerifyCredentialsHandler),
|
||||
],
|
||||
http_client=self.http_client,
|
||||
twitter_consumer_key='test_twitter_consumer_key',
|
||||
twitter_consumer_secret='test_twitter_consumer_secret')
|
||||
|
||||
def test_openid_redirect(self):
|
||||
response = self.fetch('/openid/client/login', follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue(
|
||||
'/openid/server/authenticate?' in response.headers['Location'])
|
||||
|
||||
def test_openid_get_user(self):
|
||||
response = self.fetch('/openid/client/login?openid.mode=blah&openid.ns.ax=http://openid.net/srv/ax/1.0&openid.ax.type.email=http://axschema.org/contact/email&openid.ax.value.email=foo@example.com')
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed["email"], "foo@example.com")
|
||||
|
||||
def test_oauth10_redirect(self):
|
||||
response = self.fetch('/oauth10/client/login', follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue(response.headers['Location'].endswith(
|
||||
'/oauth1/server/authorize?oauth_token=zxcv'))
|
||||
# the cookie is base64('zxcv')|base64('1234')
|
||||
self.assertTrue(
|
||||
'_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
|
||||
response.headers['Set-Cookie'])
|
||||
|
||||
def test_oauth10_get_user(self):
|
||||
response = self.fetch(
|
||||
'/oauth10/client/login?oauth_token=zxcv',
|
||||
headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed['email'], 'foo@example.com')
|
||||
self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678'))
|
||||
|
||||
def test_oauth10_request_parameters(self):
|
||||
response = self.fetch('/oauth10/client/request_params')
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed['oauth_consumer_key'], 'asdf')
|
||||
self.assertEqual(parsed['oauth_token'], 'uiop')
|
||||
self.assertTrue('oauth_nonce' in parsed)
|
||||
self.assertTrue('oauth_signature' in parsed)
|
||||
|
||||
def test_oauth10a_redirect(self):
|
||||
response = self.fetch('/oauth10a/client/login', follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue(response.headers['Location'].endswith(
|
||||
'/oauth1/server/authorize?oauth_token=zxcv'))
|
||||
# the cookie is base64('zxcv')|base64('1234')
|
||||
self.assertTrue(
|
||||
'_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
|
||||
response.headers['Set-Cookie'])
|
||||
|
||||
def test_oauth10a_get_user(self):
|
||||
response = self.fetch(
|
||||
'/oauth10a/client/login?oauth_token=zxcv',
|
||||
headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed['email'], 'foo@example.com')
|
||||
self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678'))
|
||||
|
||||
def test_oauth10a_request_parameters(self):
|
||||
response = self.fetch('/oauth10a/client/request_params')
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed['oauth_consumer_key'], 'asdf')
|
||||
self.assertEqual(parsed['oauth_token'], 'uiop')
|
||||
self.assertTrue('oauth_nonce' in parsed)
|
||||
self.assertTrue('oauth_signature' in parsed)
|
||||
|
||||
def test_oauth10a_get_user_coroutine_exception(self):
|
||||
response = self.fetch(
|
||||
'/oauth10a/client/login_coroutine?oauth_token=zxcv&fail_in_get_user=true',
|
||||
headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
|
||||
self.assertEqual(response.code, 503)
|
||||
|
||||
def test_oauth2_redirect(self):
|
||||
response = self.fetch('/oauth2/client/login', follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue('/oauth2/server/authorize?' in response.headers['Location'])
|
||||
|
||||
def base_twitter_redirect(self, url):
|
||||
# Same as test_oauth10a_redirect
|
||||
response = self.fetch(url, follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue(response.headers['Location'].endswith(
|
||||
'/oauth1/server/authorize?oauth_token=zxcv'))
|
||||
# the cookie is base64('zxcv')|base64('1234')
|
||||
self.assertTrue(
|
||||
'_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
|
||||
response.headers['Set-Cookie'])
|
||||
|
||||
def test_twitter_redirect(self):
|
||||
self.base_twitter_redirect('/twitter/client/login')
|
||||
|
||||
def test_twitter_redirect_gen_engine(self):
|
||||
self.base_twitter_redirect('/twitter/client/login_gen_engine')
|
||||
|
||||
def test_twitter_redirect_gen_coroutine(self):
|
||||
self.base_twitter_redirect('/twitter/client/login_gen_coroutine')
|
||||
|
||||
def test_twitter_get_user(self):
|
||||
response = self.fetch(
|
||||
'/twitter/client/login?oauth_token=zxcv',
|
||||
headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed,
|
||||
{u('access_token'): {u('key'): u('hjkl'),
|
||||
u('screen_name'): u('foo'),
|
||||
u('secret'): u('vbnm')},
|
||||
u('name'): u('Foo'),
|
||||
u('screen_name'): u('foo'),
|
||||
u('username'): u('foo')})
|
||||
|
||||
def test_twitter_show_user(self):
|
||||
response = self.fetch('/twitter/client/show_user?name=somebody')
|
||||
response.rethrow()
|
||||
self.assertEqual(json_decode(response.body),
|
||||
{'name': 'Somebody', 'screen_name': 'somebody'})
|
||||
|
||||
def test_twitter_show_user_error(self):
|
||||
with ExpectLog(gen_log, 'Error response HTTP 500'):
|
||||
response = self.fetch('/twitter/client/show_user?name=error')
|
||||
self.assertEqual(response.code, 500)
|
||||
self.assertEqual(response.body, b'error from twitter request')
|
||||
|
||||
def test_twitter_show_user_future(self):
|
||||
response = self.fetch('/twitter/client/show_user_future?name=somebody')
|
||||
response.rethrow()
|
||||
self.assertEqual(json_decode(response.body),
|
||||
{'name': 'Somebody', 'screen_name': 'somebody'})
|
||||
|
||||
def test_twitter_show_user_future_error(self):
|
||||
response = self.fetch('/twitter/client/show_user_future?name=error')
|
||||
self.assertEqual(response.code, 500)
|
||||
self.assertIn(b'Error response HTTP 500', response.body)
|
||||
|
||||
def test_google_redirect(self):
|
||||
# same as test_openid_redirect
|
||||
response = self.fetch('/google/client/openid_login', follow_redirects=False)
|
||||
self.assertEqual(response.code, 302)
|
||||
self.assertTrue(
|
||||
'/openid/server/authenticate?' in response.headers['Location'])
|
||||
|
||||
def test_google_get_user(self):
|
||||
response = self.fetch('/google/client/openid_login?openid.mode=blah&openid.ns.ax=http://openid.net/srv/ax/1.0&openid.ax.type.email=http://axschema.org/contact/email&openid.ax.value.email=foo@example.com', follow_redirects=False)
|
||||
response.rethrow()
|
||||
parsed = json_decode(response.body)
|
||||
self.assertEqual(parsed["email"], "foo@example.com")
|
||||
|
|
@@ -0,0 +1,336 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2012 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import logging
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from tornado.concurrent import Future, return_future, ReturnValueIgnoredError
|
||||
from tornado.escape import utf8, to_unicode
|
||||
from tornado import gen
|
||||
from tornado.iostream import IOStream
|
||||
from tornado import stack_context
|
||||
from tornado.tcpserver import TCPServer
|
||||
from tornado.testing import AsyncTestCase, LogTrapTestCase, bind_unused_port, gen_test
|
||||
|
||||
|
||||
try:
|
||||
from concurrent import futures
|
||||
except ImportError:
|
||||
futures = None
|
||||
|
||||
|
||||
class ReturnFutureTest(AsyncTestCase):
|
||||
@return_future
|
||||
def sync_future(self, callback):
|
||||
callback(42)
|
||||
|
||||
@return_future
|
||||
def async_future(self, callback):
|
||||
self.io_loop.add_callback(callback, 42)
|
||||
|
||||
@return_future
|
||||
def immediate_failure(self, callback):
|
||||
1 / 0
|
||||
|
||||
@return_future
|
||||
def delayed_failure(self, callback):
|
||||
self.io_loop.add_callback(lambda: 1 / 0)
|
||||
|
||||
@return_future
|
||||
def return_value(self, callback):
|
||||
# Note that the result of both running the callback and returning
|
||||
# a value (or raising an exception) is unspecified; with current
|
||||
# implementations the last event prior to callback resolution wins.
|
||||
return 42
|
||||
|
||||
@return_future
|
||||
def no_result_future(self, callback):
|
||||
callback()
|
||||
|
||||
def test_immediate_failure(self):
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
# The caller sees the error just like a normal function.
|
||||
self.immediate_failure(callback=self.stop)
|
||||
# The callback is not run because the function failed synchronously.
|
||||
self.io_loop.add_timeout(self.io_loop.time() + 0.05, self.stop)
|
||||
result = self.wait()
|
||||
self.assertIs(result, None)
|
||||
|
||||
def test_return_value(self):
|
||||
with self.assertRaises(ReturnValueIgnoredError):
|
||||
self.return_value(callback=self.stop)
|
||||
|
||||
def test_callback_kw(self):
|
||||
future = self.sync_future(callback=self.stop)
|
||||
result = self.wait()
|
||||
self.assertEqual(result, 42)
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_callback_positional(self):
|
||||
# When the callback is passed in positionally, future_wrap shouldn't
|
||||
# add another callback in the kwargs.
|
||||
future = self.sync_future(self.stop)
|
||||
result = self.wait()
|
||||
self.assertEqual(result, 42)
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_no_callback(self):
|
||||
future = self.sync_future()
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_none_callback_kw(self):
|
||||
# explicitly pass None as callback
|
||||
future = self.sync_future(callback=None)
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_none_callback_pos(self):
|
||||
future = self.sync_future(None)
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_async_future(self):
|
||||
future = self.async_future()
|
||||
self.assertFalse(future.done())
|
||||
self.io_loop.add_future(future, self.stop)
|
||||
future2 = self.wait()
|
||||
self.assertIs(future, future2)
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
@gen_test
|
||||
def test_async_future_gen(self):
|
||||
result = yield self.async_future()
|
||||
self.assertEqual(result, 42)
|
||||
|
||||
def test_delayed_failure(self):
|
||||
future = self.delayed_failure()
|
||||
self.io_loop.add_future(future, self.stop)
|
||||
future2 = self.wait()
|
||||
self.assertIs(future, future2)
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
future.result()
|
||||
|
||||
def test_kw_only_callback(self):
|
||||
@return_future
|
||||
def f(**kwargs):
|
||||
kwargs['callback'](42)
|
||||
future = f()
|
||||
self.assertEqual(future.result(), 42)
|
||||
|
||||
def test_error_in_callback(self):
|
||||
self.sync_future(callback=lambda future: 1 / 0)
|
||||
# The exception gets caught by our StackContext and will be re-raised
|
||||
# when we wait.
|
||||
self.assertRaises(ZeroDivisionError, self.wait)
|
||||
|
||||
def test_no_result_future(self):
|
||||
future = self.no_result_future(self.stop)
|
||||
result = self.wait()
|
||||
self.assertIs(result, None)
|
||||
# result of this future is undefined, but not an error
|
||||
future.result()
|
||||
|
||||
def test_no_result_future_callback(self):
|
||||
future = self.no_result_future(callback=lambda: self.stop())
|
||||
result = self.wait()
|
||||
self.assertIs(result, None)
|
||||
future.result()
|
||||
|
||||
@gen_test
|
||||
def test_future_traceback(self):
|
||||
@return_future
|
||||
@gen.engine
|
||||
def f(callback):
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
try:
|
||||
1 / 0
|
||||
except ZeroDivisionError:
|
||||
self.expected_frame = traceback.extract_tb(
|
||||
sys.exc_info()[2], limit=1)[0]
|
||||
raise
|
||||
try:
|
||||
yield f()
|
||||
self.fail("didn't get expected exception")
|
||||
except ZeroDivisionError:
|
||||
tb = traceback.extract_tb(sys.exc_info()[2])
|
||||
self.assertIn(self.expected_frame, tb)
|
||||
|
||||
# The following series of classes demonstrate and test various styles
|
||||
# of use, with and without generators and futures.
|
||||
|
||||
|
||||
class CapServer(TCPServer):
|
||||
def handle_stream(self, stream, address):
|
||||
logging.info("handle_stream")
|
||||
self.stream = stream
|
||||
self.stream.read_until(b"\n", self.handle_read)
|
||||
|
||||
def handle_read(self, data):
|
||||
logging.info("handle_read")
|
||||
data = to_unicode(data)
|
||||
if data == data.upper():
|
||||
self.stream.write(b"error\talready capitalized\n")
|
||||
else:
|
||||
# data already has \n
|
||||
self.stream.write(utf8("ok\t%s" % data.upper()))
|
||||
self.stream.close()
|
||||
|
||||
|
||||
class CapError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class BaseCapClient(object):
|
||||
def __init__(self, port, io_loop):
|
||||
self.port = port
|
||||
self.io_loop = io_loop
|
||||
|
||||
def process_response(self, data):
|
||||
status, message = re.match('(.*)\t(.*)\n', to_unicode(data)).groups()
|
||||
if status == 'ok':
|
||||
return message
|
||||
else:
|
||||
raise CapError(message)
|
||||
|
||||
|
||||
class ManualCapClient(BaseCapClient):
|
||||
def capitalize(self, request_data, callback=None):
|
||||
logging.info("capitalize")
|
||||
self.request_data = request_data
|
||||
self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
|
||||
self.stream.connect(('127.0.0.1', self.port),
|
||||
callback=self.handle_connect)
|
||||
self.future = Future()
|
||||
if callback is not None:
|
||||
self.future.add_done_callback(
|
||||
stack_context.wrap(lambda future: callback(future.result())))
|
||||
return self.future
|
||||
|
||||
def handle_connect(self):
|
||||
logging.info("handle_connect")
|
||||
self.stream.write(utf8(self.request_data + "\n"))
|
||||
self.stream.read_until(b'\n', callback=self.handle_read)
|
||||
|
||||
def handle_read(self, data):
|
||||
logging.info("handle_read")
|
||||
self.stream.close()
|
||||
try:
|
||||
self.future.set_result(self.process_response(data))
|
||||
except CapError as e:
|
||||
self.future.set_exception(e)
|
||||
|
||||
|
||||
class DecoratorCapClient(BaseCapClient):
|
||||
@return_future
|
||||
def capitalize(self, request_data, callback):
|
||||
logging.info("capitalize")
|
||||
self.request_data = request_data
|
||||
self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
|
||||
self.stream.connect(('127.0.0.1', self.port),
|
||||
callback=self.handle_connect)
|
||||
self.callback = callback
|
||||
|
||||
def handle_connect(self):
|
||||
logging.info("handle_connect")
|
||||
self.stream.write(utf8(self.request_data + "\n"))
|
||||
self.stream.read_until(b'\n', callback=self.handle_read)
|
||||
|
||||
def handle_read(self, data):
|
||||
logging.info("handle_read")
|
||||
self.stream.close()
|
||||
self.callback(self.process_response(data))
|
||||
|
||||
|
||||
class GeneratorCapClient(BaseCapClient):
|
||||
@return_future
|
||||
@gen.engine
|
||||
def capitalize(self, request_data, callback):
|
||||
logging.info('capitalize')
|
||||
stream = IOStream(socket.socket(), io_loop=self.io_loop)
|
||||
logging.info('connecting')
|
||||
yield gen.Task(stream.connect, ('127.0.0.1', self.port))
|
||||
stream.write(utf8(request_data + '\n'))
|
||||
logging.info('reading')
|
||||
data = yield gen.Task(stream.read_until, b'\n')
|
||||
logging.info('returning')
|
||||
stream.close()
|
||||
callback(self.process_response(data))
|
||||
|
||||
|
||||
class ClientTestMixin(object):
|
||||
def setUp(self):
|
||||
super(ClientTestMixin, self).setUp()
|
||||
self.server = CapServer(io_loop=self.io_loop)
|
||||
sock, port = bind_unused_port()
|
||||
self.server.add_sockets([sock])
|
||||
self.client = self.client_class(io_loop=self.io_loop, port=port)
|
||||
|
||||
def tearDown(self):
|
||||
self.server.stop()
|
||||
super(ClientTestMixin, self).tearDown()
|
||||
|
||||
def test_callback(self):
|
||||
self.client.capitalize("hello", callback=self.stop)
|
||||
result = self.wait()
|
||||
self.assertEqual(result, "HELLO")
|
||||
|
||||
def test_callback_error(self):
|
||||
self.client.capitalize("HELLO", callback=self.stop)
|
||||
self.assertRaisesRegexp(CapError, "already capitalized", self.wait)
|
||||
|
||||
def test_future(self):
|
||||
future = self.client.capitalize("hello")
|
||||
self.io_loop.add_future(future, self.stop)
|
||||
self.wait()
|
||||
self.assertEqual(future.result(), "HELLO")
|
||||
|
||||
def test_future_error(self):
|
||||
future = self.client.capitalize("HELLO")
|
||||
self.io_loop.add_future(future, self.stop)
|
||||
self.wait()
|
||||
self.assertRaisesRegexp(CapError, "already capitalized", future.result)
|
||||
|
||||
def test_generator(self):
|
||||
@gen.engine
|
||||
def f():
|
||||
result = yield self.client.capitalize("hello")
|
||||
self.assertEqual(result, "HELLO")
|
||||
self.stop()
|
||||
f()
|
||||
self.wait()
|
||||
|
||||
def test_generator_error(self):
|
||||
@gen.engine
|
||||
def f():
|
||||
with self.assertRaisesRegexp(CapError, "already capitalized"):
|
||||
yield self.client.capitalize("HELLO")
|
||||
self.stop()
|
||||
f()
|
||||
self.wait()
|
||||
|
||||
|
||||
class ManualClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
|
||||
client_class = ManualCapClient
|
||||
|
||||
|
||||
class DecoratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
|
||||
client_class = DecoratorCapClient
|
||||
|
||||
|
||||
class GeneratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
|
||||
client_class = GeneratorCapClient
|
||||
|
|
@@ -0,0 +1 @@
|
|||
"school","école"
|
||||
|
|
|
@@ -0,0 +1,122 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
from hashlib import md5
|
||||
|
||||
from tornado.escape import utf8
|
||||
from tornado.httpclient import HTTPRequest
|
||||
from tornado.stack_context import ExceptionStackContext
|
||||
from tornado.testing import AsyncHTTPTestCase
|
||||
from tornado.test import httpclient_test
|
||||
from tornado.test.util import unittest
|
||||
from tornado.web import Application, RequestHandler
|
||||
|
||||
try:
|
||||
import pycurl
|
||||
except ImportError:
|
||||
pycurl = None
|
||||
|
||||
if pycurl is not None:
|
||||
from tornado.curl_httpclient import CurlAsyncHTTPClient
|
||||
|
||||
|
||||
@unittest.skipIf(pycurl is None, "pycurl module not present")
|
||||
class CurlHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
|
||||
def get_http_client(self):
|
||||
client = CurlAsyncHTTPClient(io_loop=self.io_loop,
|
||||
defaults=dict(allow_ipv6=False))
|
||||
# make sure AsyncHTTPClient magic doesn't give us the wrong class
|
||||
self.assertTrue(isinstance(client, CurlAsyncHTTPClient))
|
||||
return client
|
||||
|
||||
|
||||
class DigestAuthHandler(RequestHandler):
|
||||
def get(self):
|
||||
realm = 'test'
|
||||
opaque = 'asdf'
|
||||
# Real implementations would use a random nonce.
|
||||
nonce = "1234"
|
||||
username = 'foo'
|
||||
password = 'bar'
|
||||
|
||||
auth_header = self.request.headers.get('Authorization', None)
|
||||
if auth_header is not None:
|
||||
auth_mode, params = auth_header.split(' ', 1)
|
||||
assert auth_mode == 'Digest'
|
||||
param_dict = {}
|
||||
for pair in params.split(','):
|
||||
k, v = pair.strip().split('=', 1)
|
||||
if v[0] == '"' and v[-1] == '"':
|
||||
v = v[1:-1]
|
||||
param_dict[k] = v
|
||||
assert param_dict['realm'] == realm
|
||||
assert param_dict['opaque'] == opaque
|
||||
assert param_dict['nonce'] == nonce
|
||||
assert param_dict['username'] == username
|
||||
assert param_dict['uri'] == self.request.path
|
||||
h1 = md5(utf8('%s:%s:%s' % (username, realm, password))).hexdigest()
|
||||
h2 = md5(utf8('%s:%s' % (self.request.method,
|
||||
self.request.path))).hexdigest()
|
||||
digest = md5(utf8('%s:%s:%s' % (h1, nonce, h2))).hexdigest()
|
||||
if digest == param_dict['response']:
|
||||
self.write('ok')
|
||||
else:
|
||||
self.write('fail')
|
||||
else:
|
||||
self.set_status(401)
|
||||
self.set_header('WWW-Authenticate',
|
||||
'Digest realm="%s", nonce="%s", opaque="%s"' %
|
||||
(realm, nonce, opaque))
|
||||
|
||||
|
||||
class CustomReasonHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.set_status(200, "Custom reason")
|
||||
|
||||
|
||||
class CustomFailReasonHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.set_status(400, "Custom reason")
|
||||
|
||||
|
||||
@unittest.skipIf(pycurl is None, "pycurl module not present")
|
||||
class CurlHTTPClientTestCase(AsyncHTTPTestCase):
|
||||
def setUp(self):
|
||||
super(CurlHTTPClientTestCase, self).setUp()
|
||||
self.http_client = CurlAsyncHTTPClient(self.io_loop,
|
||||
defaults=dict(allow_ipv6=False))
|
||||
|
||||
def get_app(self):
|
||||
return Application([
|
||||
('/digest', DigestAuthHandler),
|
||||
('/custom_reason', CustomReasonHandler),
|
||||
('/custom_fail_reason', CustomFailReasonHandler),
|
||||
])
|
||||
|
||||
def test_prepare_curl_callback_stack_context(self):
|
||||
exc_info = []
|
||||
|
||||
def error_handler(typ, value, tb):
|
||||
exc_info.append((typ, value, tb))
|
||||
self.stop()
|
||||
return True
|
||||
|
||||
with ExceptionStackContext(error_handler):
|
||||
request = HTTPRequest(self.get_url('/'),
|
||||
prepare_curl_callback=lambda curl: 1 / 0)
|
||||
self.http_client.fetch(request, callback=self.stop)
|
||||
self.wait()
|
||||
self.assertEqual(1, len(exc_info))
|
||||
self.assertIs(exc_info[0][0], ZeroDivisionError)
|
||||
|
||||
def test_digest_auth(self):
|
||||
response = self.fetch('/digest', auth_mode='digest',
|
||||
auth_username='foo', auth_password='bar')
|
||||
self.assertEqual(response.body, b'ok')
|
||||
|
||||
def test_custom_reason(self):
|
||||
response = self.fetch('/custom_reason')
|
||||
self.assertEqual(response.reason, "Custom reason")
|
||||
|
||||
def test_fail_custom_reason(self):
|
||||
response = self.fetch('/custom_fail_reason')
|
||||
self.assertEqual(str(response.error), "HTTP 400: Custom reason")
|
||||
217
Shared/lib/python3.4/site-packages/tornado/test/escape_test.py
Normal file
@@ -0,0 +1,217 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import tornado.escape
|
||||
|
||||
from tornado.escape import utf8, xhtml_escape, xhtml_unescape, url_escape, url_unescape, to_unicode, json_decode, json_encode
|
||||
from tornado.util import u, unicode_type, bytes_type
|
||||
from tornado.test.util import unittest
|
||||
|
||||
linkify_tests = [
|
||||
# (input, linkify_kwargs, expected_output)
|
||||
|
||||
("hello http://world.com/!", {},
|
||||
u('hello <a href="http://world.com/">http://world.com/</a>!')),
|
||||
|
||||
("hello http://world.com/with?param=true&stuff=yes", {},
|
||||
u('hello <a href="http://world.com/with?param=true&stuff=yes">http://world.com/with?param=true&stuff=yes</a>')),
|
||||
|
||||
# an opened paren followed by many chars killed Gruber's regex
|
||||
("http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", {},
|
||||
u('<a href="http://url.com/w">http://url.com/w</a>(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')),
|
||||
|
||||
# as did too many dots at the end
|
||||
("http://url.com/withmany.......................................", {},
|
||||
u('<a href="http://url.com/withmany">http://url.com/withmany</a>.......................................')),
|
||||
|
||||
("http://url.com/withmany((((((((((((((((((((((((((((((((((a)", {},
|
||||
u('<a href="http://url.com/withmany">http://url.com/withmany</a>((((((((((((((((((((((((((((((((((a)')),
|
||||
|
||||
# some examples from http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
|
||||
# plus a few extras (such as multiple parentheses).
|
||||
("http://foo.com/blah_blah", {},
|
||||
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>')),
|
||||
|
||||
("http://foo.com/blah_blah/", {},
|
||||
u('<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>')),
|
||||
|
||||
("(Something like http://foo.com/blah_blah)", {},
|
||||
u('(Something like <a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>)')),
|
||||
|
||||
("http://foo.com/blah_blah_(wikipedia)", {},
|
||||
u('<a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>')),
|
||||
|
||||
("http://foo.com/blah_(blah)_(wikipedia)_blah", {},
|
||||
u('<a href="http://foo.com/blah_(blah)_(wikipedia)_blah">http://foo.com/blah_(blah)_(wikipedia)_blah</a>')),
|
||||
|
||||
("(Something like http://foo.com/blah_blah_(wikipedia))", {},
|
||||
u('(Something like <a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>)')),
|
||||
|
||||
("http://foo.com/blah_blah.", {},
|
||||
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>.')),
|
||||
|
||||
("http://foo.com/blah_blah/.", {},
|
||||
u('<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>.')),
|
||||
|
||||
("<http://foo.com/blah_blah>", {},
|
||||
u('<<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>>')),
|
||||
|
||||
("<http://foo.com/blah_blah/>", {},
|
||||
u('<<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>>')),
|
||||
|
||||
("http://foo.com/blah_blah,", {},
|
||||
u('<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>,')),
|
||||
|
||||
("http://www.example.com/wpstyle/?p=364.", {},
|
||||
u('<a href="http://www.example.com/wpstyle/?p=364">http://www.example.com/wpstyle/?p=364</a>.')),
|
||||
|
||||
("rdar://1234",
|
||||
{"permitted_protocols": ["http", "rdar"]},
|
||||
u('<a href="rdar://1234">rdar://1234</a>')),
|
||||
|
||||
("rdar:/1234",
|
||||
{"permitted_protocols": ["rdar"]},
|
||||
u('<a href="rdar:/1234">rdar:/1234</a>')),
|
||||
|
||||
("http://userid:password@example.com:8080", {},
|
||||
u('<a href="http://userid:password@example.com:8080">http://userid:password@example.com:8080</a>')),
|
||||
|
||||
("http://userid@example.com", {},
|
||||
u('<a href="http://userid@example.com">http://userid@example.com</a>')),
|
||||
|
||||
("http://userid@example.com:8080", {},
|
||||
u('<a href="http://userid@example.com:8080">http://userid@example.com:8080</a>')),
|
||||
|
||||
("http://userid:password@example.com", {},
|
||||
u('<a href="http://userid:password@example.com">http://userid:password@example.com</a>')),
|
||||
|
||||
("message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e",
|
||||
{"permitted_protocols": ["http", "message"]},
|
||||
u('<a href="message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e">message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e</a>')),
|
||||
|
||||
(u("http://\u27a1.ws/\u4a39"), {},
|
||||
u('<a href="http://\u27a1.ws/\u4a39">http://\u27a1.ws/\u4a39</a>')),
|
||||
|
||||
("<tag>http://example.com</tag>", {},
|
||||
u('&lt;tag&gt;<a href="http://example.com">http://example.com</a>&lt;/tag&gt;')),
|
||||
|
||||
("Just a www.example.com link.", {},
|
||||
u('Just a <a href="http://www.example.com">www.example.com</a> link.')),
|
||||
|
||||
("Just a www.example.com link.",
|
||||
{"require_protocol": True},
|
||||
u('Just a www.example.com link.')),
|
||||
|
||||
("A http://reallylong.com/link/that/exceedsthelenglimit.html",
|
||||
{"require_protocol": True, "shorten": True},
|
||||
u('A <a href="http://reallylong.com/link/that/exceedsthelenglimit.html" title="http://reallylong.com/link/that/exceedsthelenglimit.html">http://reallylong.com/link...</a>')),
|
||||
|
||||
("A http://reallylongdomainnamethatwillbetoolong.com/hi!",
|
||||
{"shorten": True},
|
||||
u('A <a href="http://reallylongdomainnamethatwillbetoolong.com/hi" title="http://reallylongdomainnamethatwillbetoolong.com/hi">http://reallylongdomainnametha...</a>!')),
|
||||
|
||||
("A file:///passwords.txt and http://web.com link", {},
|
||||
u('A file:///passwords.txt and <a href="http://web.com">http://web.com</a> link')),
|
||||
|
||||
("A file:///passwords.txt and http://web.com link",
|
||||
{"permitted_protocols": ["file"]},
|
||||
u('A <a href="file:///passwords.txt">file:///passwords.txt</a> and http://web.com link')),
|
||||
|
||||
("www.external-link.com",
|
||||
{"extra_params": 'rel="nofollow" class="external"'},
|
||||
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>')),
|
||||
|
||||
("www.external-link.com and www.internal-link.com/blogs extra",
|
||||
{"extra_params": lambda href: 'class="internal"' if href.startswith("http://www.internal-link.com") else 'rel="nofollow" class="external"'},
|
||||
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a> and <a href="http://www.internal-link.com/blogs" class="internal">www.internal-link.com/blogs</a> extra')),
|
||||
|
||||
("www.external-link.com",
|
||||
{"extra_params": lambda href: ' rel="nofollow" class="external" '},
|
||||
u('<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>')),
|
||||
]
|
||||
|
||||
|
||||
class EscapeTestCase(unittest.TestCase):
|
||||
def test_linkify(self):
|
||||
for text, kwargs, html in linkify_tests:
|
||||
linked = tornado.escape.linkify(text, **kwargs)
|
||||
self.assertEqual(linked, html)
|
||||
|
||||
def test_xhtml_escape(self):
|
||||
tests = [
|
||||
("<foo>", "<foo>"),
|
||||
(u("<foo>"), u("<foo>")),
|
||||
(b"<foo>", b"<foo>"),
|
||||
|
||||
("<>&\"'", "<>&"'"),
|
||||
("&", "&amp;"),
|
||||
|
||||
(u("<\u00e9>"), u("<\u00e9>")),
|
||||
(b"<\xc3\xa9>", b"<\xc3\xa9>"),
|
||||
]
|
||||
for unescaped, escaped in tests:
|
||||
self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
|
||||
self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
|
||||
|
||||
def test_url_escape_unicode(self):
|
||||
tests = [
|
||||
# byte strings are passed through as-is
|
||||
(u('\u00e9').encode('utf8'), '%C3%A9'),
|
||||
(u('\u00e9').encode('latin1'), '%E9'),
|
||||
|
||||
# unicode strings become utf8
|
||||
(u('\u00e9'), '%C3%A9'),
|
||||
]
|
||||
for unescaped, escaped in tests:
|
||||
self.assertEqual(url_escape(unescaped), escaped)
|
||||
|
||||
def test_url_unescape_unicode(self):
|
||||
tests = [
|
||||
('%C3%A9', u('\u00e9'), 'utf8'),
|
||||
('%C3%A9', u('\u00c3\u00a9'), 'latin1'),
|
||||
('%C3%A9', utf8(u('\u00e9')), None),
|
||||
]
|
||||
for escaped, unescaped, encoding in tests:
|
||||
# input strings to url_unescape should only contain ascii
|
||||
# characters, but make sure the function accepts both byte
|
||||
# and unicode strings.
|
||||
self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped)
|
||||
self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped)
|
||||
|
||||
def test_url_escape_quote_plus(self):
|
||||
unescaped = '+ #%'
|
||||
plus_escaped = '%2B+%23%25'
|
||||
escaped = '%2B%20%23%25'
|
||||
self.assertEqual(url_escape(unescaped), plus_escaped)
|
||||
self.assertEqual(url_escape(unescaped, plus=False), escaped)
|
||||
self.assertEqual(url_unescape(plus_escaped), unescaped)
|
||||
self.assertEqual(url_unescape(escaped, plus=False), unescaped)
|
||||
self.assertEqual(url_unescape(plus_escaped, encoding=None),
|
||||
utf8(unescaped))
|
||||
self.assertEqual(url_unescape(escaped, encoding=None, plus=False),
|
||||
utf8(unescaped))
|
||||
|
||||
def test_escape_return_types(self):
|
||||
# On python2 the escape methods should generally return the same
|
||||
# type as their argument
|
||||
self.assertEqual(type(xhtml_escape("foo")), str)
|
||||
self.assertEqual(type(xhtml_escape(u("foo"))), unicode_type)
|
||||
|
||||
def test_json_decode(self):
|
||||
# json_decode accepts both bytes and unicode, but strings it returns
|
||||
# are always unicode.
|
||||
self.assertEqual(json_decode(b'"foo"'), u("foo"))
|
||||
self.assertEqual(json_decode(u('"foo"')), u("foo"))
|
||||
|
||||
# Non-ascii bytes are interpreted as utf8
|
||||
self.assertEqual(json_decode(utf8(u('"\u00e9"'))), u("\u00e9"))
|
||||
|
||||
def test_json_encode(self):
|
||||
# json deals with strings, not bytes. On python 2 byte strings will
|
||||
# convert automatically if they are utf8; on python 3 byte strings
|
||||
# are not allowed.
|
||||
self.assertEqual(json_decode(json_encode(u("\u00e9"))), u("\u00e9"))
|
||||
if bytes_type is str:
|
||||
self.assertEqual(json_decode(json_encode(utf8(u("\u00e9")))), u("\u00e9"))
|
||||
self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
|
||||
1071
Shared/lib/python3.4/site-packages/tornado/test/gen_test.py
Normal file
File diff suppressed because it is too large
Binary file not shown.
|
|
@@ -0,0 +1,22 @@
|
|||
# SOME DESCRIPTIVE TITLE.
|
||||
# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
|
||||
# This file is distributed under the same license as the PACKAGE package.
|
||||
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
|
||||
#
|
||||
#, fuzzy
|
||||
msgid ""
|
||||
msgstr ""
|
||||
"Project-Id-Version: PACKAGE VERSION\n"
|
||||
"Report-Msgid-Bugs-To: \n"
|
||||
"POT-Creation-Date: 2012-06-14 01:10-0700\n"
|
||||
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
|
||||
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
|
||||
"Language-Team: LANGUAGE <LL@li.org>\n"
|
||||
"Language: \n"
|
||||
"MIME-Version: 1.0\n"
|
||||
"Content-Type: text/plain; charset=utf-8\n"
|
||||
"Content-Transfer-Encoding: 8bit\n"
|
||||
|
||||
#: extract_me.py:1
|
||||
msgid "school"
|
||||
msgstr "école"
|
||||
|
|
@@ -0,0 +1,517 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
from contextlib import closing
|
||||
import functools
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from tornado.escape import utf8
|
||||
from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.iostream import IOStream
|
||||
from tornado.log import gen_log
|
||||
from tornado import netutil
|
||||
from tornado.stack_context import ExceptionStackContext, NullContext
|
||||
from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog
|
||||
from tornado.test.util import unittest, skipOnTravis
|
||||
from tornado.util import u, bytes_type
|
||||
from tornado.web import Application, RequestHandler, url
|
||||
|
||||
try:
|
||||
from io import BytesIO # python 3
|
||||
except ImportError:
|
||||
from cStringIO import StringIO as BytesIO
|
||||
|
||||
|
||||
class HelloWorldHandler(RequestHandler):
|
||||
def get(self):
|
||||
name = self.get_argument("name", "world")
|
||||
self.set_header("Content-Type", "text/plain")
|
||||
self.finish("Hello %s!" % name)
|
||||
|
||||
|
||||
class PostHandler(RequestHandler):
|
||||
def post(self):
|
||||
self.finish("Post arg1: %s, arg2: %s" % (
|
||||
self.get_argument("arg1"), self.get_argument("arg2")))
|
||||
|
||||
|
||||
class ChunkHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write("asdf")
|
||||
self.flush()
|
||||
self.write("qwer")
|
||||
|
||||
|
||||
class AuthHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.finish(self.request.headers["Authorization"])
|
||||
|
||||
|
||||
class CountdownHandler(RequestHandler):
|
||||
def get(self, count):
|
||||
count = int(count)
|
||||
if count > 0:
|
||||
self.redirect(self.reverse_url("countdown", count - 1))
|
||||
else:
|
||||
self.write("Zero")
|
||||
|
||||
|
||||
class EchoPostHandler(RequestHandler):
|
||||
def post(self):
|
||||
self.write(self.request.body)
|
||||
|
||||
|
||||
class UserAgentHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write(self.request.headers.get('User-Agent', 'User agent not set'))
|
||||
|
||||
|
||||
class ContentLength304Handler(RequestHandler):
|
||||
def get(self):
|
||||
self.set_status(304)
|
||||
self.set_header('Content-Length', 42)
|
||||
|
||||
def _clear_headers_for_304(self):
|
||||
# Tornado strips content-length from 304 responses, but here we
|
||||
# want to simulate servers that include the headers anyway.
|
||||
pass
|
||||
|
||||
|
||||
class AllMethodsHandler(RequestHandler):
|
||||
SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
|
||||
|
||||
def method(self):
|
||||
self.write(self.request.method)
|
||||
|
||||
get = post = put = delete = options = patch = other = method
|
||||
|
||||
# These tests end up getting run redundantly: once here with the default
|
||||
# HTTPClient implementation, and then again in each implementation's own
|
||||
# test suite.
|
||||
|
||||
|
||||
class HTTPClientCommonTestCase(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
return Application([
|
||||
url("/hello", HelloWorldHandler),
|
||||
url("/post", PostHandler),
|
||||
url("/chunk", ChunkHandler),
|
||||
url("/auth", AuthHandler),
|
||||
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
|
||||
url("/echopost", EchoPostHandler),
|
||||
url("/user_agent", UserAgentHandler),
|
||||
url("/304_with_content_length", ContentLength304Handler),
|
||||
url("/all_methods", AllMethodsHandler),
|
||||
], gzip=True)
|
||||
|
||||
@skipOnTravis
|
||||
def test_hello_world(self):
|
||||
response = self.fetch("/hello")
|
||||
self.assertEqual(response.code, 200)
|
||||
self.assertEqual(response.headers["Content-Type"], "text/plain")
|
||||
self.assertEqual(response.body, b"Hello world!")
|
||||
self.assertEqual(int(response.request_time), 0)
|
||||
|
||||
response = self.fetch("/hello?name=Ben")
|
||||
self.assertEqual(response.body, b"Hello Ben!")
|
||||
|
||||
def test_streaming_callback(self):
|
||||
# streaming_callback is also tested in test_chunked
|
||||
chunks = []
|
||||
response = self.fetch("/hello",
|
||||
streaming_callback=chunks.append)
|
||||
# with streaming_callback, data goes to the callback and not response.body
|
||||
self.assertEqual(chunks, [b"Hello world!"])
|
||||
self.assertFalse(response.body)
|
||||
|
||||
def test_post(self):
|
||||
response = self.fetch("/post", method="POST",
|
||||
body="arg1=foo&arg2=bar")
|
||||
self.assertEqual(response.code, 200)
|
||||
self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
|
||||
|
||||
def test_chunked(self):
|
||||
response = self.fetch("/chunk")
|
||||
self.assertEqual(response.body, b"asdfqwer")
|
||||
|
||||
chunks = []
|
||||
response = self.fetch("/chunk",
|
||||
streaming_callback=chunks.append)
|
||||
self.assertEqual(chunks, [b"asdf", b"qwer"])
|
||||
self.assertFalse(response.body)
|
||||
|
||||
def test_chunked_close(self):
|
||||
# test case in which chunks spread read-callback processing
|
||||
# over several ioloop iterations, but the connection is already closed.
|
||||
sock, port = bind_unused_port()
|
||||
with closing(sock):
|
||||
def write_response(stream, request_data):
|
||||
stream.write(b"""\
|
||||
HTTP/1.1 200 OK
|
||||
Transfer-Encoding: chunked
|
||||
|
||||
1
|
||||
1
|
||||
1
|
||||
2
|
||||
0
|
||||
|
||||
""".replace(b"\n", b"\r\n"), callback=stream.close)
|
||||
|
||||
def accept_callback(conn, address):
|
||||
# fake an HTTP server using chunked encoding where the final chunks
|
||||
# and connection close all happen at once
|
||||
stream = IOStream(conn, io_loop=self.io_loop)
|
||||
stream.read_until(b"\r\n\r\n",
|
||||
functools.partial(write_response, stream))
|
||||
netutil.add_accept_handler(sock, accept_callback, self.io_loop)
|
||||
self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
|
||||
resp = self.wait()
|
||||
resp.rethrow()
|
||||
self.assertEqual(resp.body, b"12")
|
||||
self.io_loop.remove_handler(sock.fileno())
|
||||
|
||||
def test_streaming_stack_context(self):
|
||||
chunks = []
|
||||
exc_info = []
|
||||
|
||||
def error_handler(typ, value, tb):
|
||||
exc_info.append((typ, value, tb))
|
||||
return True
|
||||
|
||||
def streaming_cb(chunk):
|
||||
chunks.append(chunk)
|
||||
if chunk == b'qwer':
|
||||
1 / 0
|
||||
|
||||
with ExceptionStackContext(error_handler):
|
||||
self.fetch('/chunk', streaming_callback=streaming_cb)
|
||||
|
||||
self.assertEqual(chunks, [b'asdf', b'qwer'])
|
||||
self.assertEqual(1, len(exc_info))
|
||||
self.assertIs(exc_info[0][0], ZeroDivisionError)
|
||||
|
||||
def test_basic_auth(self):
|
||||
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
|
||||
auth_password="open sesame").body,
|
||||
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
|
||||
|
||||
def test_basic_auth_explicit_mode(self):
|
||||
self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
|
||||
auth_password="open sesame",
|
||||
auth_mode="basic").body,
|
||||
b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
|
||||
|
||||
def test_unsupported_auth_mode(self):
|
||||
# curl and simple clients handle errors a bit differently; the
|
||||
# important thing is that they don't fall back to basic auth
|
||||
# on an unknown mode.
|
||||
with ExpectLog(gen_log, "uncaught exception", required=False):
|
||||
with self.assertRaises((ValueError, HTTPError)):
|
||||
response = self.fetch("/auth", auth_username="Aladdin",
|
||||
auth_password="open sesame",
|
||||
auth_mode="asdf")
|
||||
response.rethrow()
|
||||
|
||||
def test_follow_redirect(self):
|
||||
response = self.fetch("/countdown/2", follow_redirects=False)
|
||||
self.assertEqual(302, response.code)
|
||||
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
|
||||
|
||||
response = self.fetch("/countdown/2")
|
||||
self.assertEqual(200, response.code)
|
||||
self.assertTrue(response.effective_url.endswith("/countdown/0"))
|
||||
self.assertEqual(b"Zero", response.body)
|
||||
|
||||
def test_credentials_in_url(self):
|
||||
url = self.get_url("/auth").replace("http://", "http://me:secret@")
|
||||
self.http_client.fetch(url, self.stop)
|
||||
response = self.wait()
|
||||
self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"),
|
||||
response.body)
|
||||
|
||||
def test_body_encoding(self):
|
||||
unicode_body = u("\xe9")
|
||||
byte_body = binascii.a2b_hex(b"e9")
|
||||
|
||||
# unicode string in body gets converted to utf8
|
||||
response = self.fetch("/echopost", method="POST", body=unicode_body,
|
||||
headers={"Content-Type": "application/blah"})
|
||||
self.assertEqual(response.headers["Content-Length"], "2")
|
||||
self.assertEqual(response.body, utf8(unicode_body))
|
||||
|
||||
# byte strings pass through directly
|
||||
response = self.fetch("/echopost", method="POST",
|
||||
body=byte_body,
|
||||
headers={"Content-Type": "application/blah"})
|
||||
self.assertEqual(response.headers["Content-Length"], "1")
|
||||
self.assertEqual(response.body, byte_body)
|
||||
|
||||
# Mixing unicode in headers and byte string bodies shouldn't
|
||||
# break anything
|
||||
response = self.fetch("/echopost", method="POST", body=byte_body,
|
||||
headers={"Content-Type": "application/blah"},
|
||||
user_agent=u("foo"))
|
||||
self.assertEqual(response.headers["Content-Length"], "1")
|
||||
self.assertEqual(response.body, byte_body)
|
||||
|
||||
def test_types(self):
|
||||
response = self.fetch("/hello")
|
||||
self.assertEqual(type(response.body), bytes_type)
|
||||
self.assertEqual(type(response.headers["Content-Type"]), str)
|
||||
self.assertEqual(type(response.code), int)
|
||||
self.assertEqual(type(response.effective_url), str)
|
||||
|
||||
def test_header_callback(self):
|
||||
first_line = []
|
||||
headers = {}
|
||||
chunks = []
|
||||
|
||||
def header_callback(header_line):
|
||||
if header_line.startswith('HTTP/'):
|
||||
first_line.append(header_line)
|
||||
elif header_line != '\r\n':
|
||||
k, v = header_line.split(':', 1)
|
||||
headers[k] = v.strip()
|
||||
|
||||
def streaming_callback(chunk):
|
||||
# All header callbacks are run before any streaming callbacks,
|
||||
# so the header data is available to process the data as it
|
||||
# comes in.
|
||||
self.assertEqual(headers['Content-Type'], 'text/html; charset=UTF-8')
|
||||
chunks.append(chunk)
|
||||
|
||||
self.fetch('/chunk', header_callback=header_callback,
|
||||
streaming_callback=streaming_callback)
|
||||
self.assertEqual(len(first_line), 1)
|
||||
self.assertRegexpMatches(first_line[0], 'HTTP/1.[01] 200 OK\r\n')
|
||||
self.assertEqual(chunks, [b'asdf', b'qwer'])
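# Illustrative sketch (not part of this test file): the ordering guarantee
# exercised above -- every header_callback call finishes before the first
# streaming_callback -- means headers can be consulted while chunks stream in.
# The URL passed in and the helper name are placeholders.
from tornado import httpclient, httputil

def fetch_with_progress(url, on_chunk):
    """Fetch url, invoking on_chunk(headers, chunk) as data streams in."""
    headers = httputil.HTTPHeaders()

    def header_callback(line):
        # skip the status line and the blank line that terminates the headers
        if not line.startswith('HTTP/') and line != '\r\n':
            headers.parse_line(line)

    client = httpclient.AsyncHTTPClient()
    return client.fetch(url,
                        header_callback=header_callback,
                        streaming_callback=lambda chunk: on_chunk(headers, chunk))

# typical use: IOLoop.current().run_sync(lambda: fetch_with_progress(url, handle))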
|
||||
|
||||
def test_header_callback_stack_context(self):
|
||||
exc_info = []
|
||||
|
||||
def error_handler(typ, value, tb):
|
||||
exc_info.append((typ, value, tb))
|
||||
return True
|
||||
|
||||
def header_callback(header_line):
|
||||
if header_line.startswith('Content-Type:'):
|
||||
1 / 0
|
||||
|
||||
with ExceptionStackContext(error_handler):
|
||||
self.fetch('/chunk', header_callback=header_callback)
|
||||
self.assertEqual(len(exc_info), 1)
|
||||
self.assertIs(exc_info[0][0], ZeroDivisionError)
|
||||
|
||||
def test_configure_defaults(self):
|
||||
defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False)
|
||||
# Construct a new instance of the configured client class
|
||||
client = self.http_client.__class__(self.io_loop, force_instance=True,
|
||||
defaults=defaults)
|
||||
client.fetch(self.get_url('/user_agent'), callback=self.stop)
|
||||
response = self.wait()
|
||||
self.assertEqual(response.body, b'TestDefaultUserAgent')
|
||||
client.close()
|
||||
|
||||
def test_304_with_content_length(self):
|
||||
# According to the spec 304 responses SHOULD NOT include
|
||||
# Content-Length or other entity headers, but some servers do it
|
||||
# anyway.
|
||||
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
|
||||
response = self.fetch('/304_with_content_length')
|
||||
self.assertEqual(response.code, 304)
|
||||
self.assertEqual(response.headers['Content-Length'], '42')
|
||||
|
||||
def test_final_callback_stack_context(self):
|
||||
# The final callback should be run outside of the httpclient's
|
||||
# stack_context. We want to ensure that there is no stack_context
|
||||
# between the user's callback and the IOLoop, so monkey-patch
|
||||
# IOLoop.handle_callback_exception and disable the test harness's
|
||||
# context with a NullContext.
|
||||
# Note that this does not apply to secondary callbacks (header
|
||||
# and streaming_callback), as errors there must be seen as errors
|
||||
# by the http client so it can clean up the connection.
|
||||
exc_info = []
|
||||
|
||||
def handle_callback_exception(callback):
|
||||
exc_info.append(sys.exc_info())
|
||||
self.stop()
|
||||
self.io_loop.handle_callback_exception = handle_callback_exception
|
||||
with NullContext():
|
||||
self.http_client.fetch(self.get_url('/hello'),
|
||||
lambda response: 1 / 0)
|
||||
self.wait()
|
||||
self.assertEqual(exc_info[0][0], ZeroDivisionError)
|
||||
|
||||
@gen_test
|
||||
def test_future_interface(self):
|
||||
response = yield self.http_client.fetch(self.get_url('/hello'))
|
||||
self.assertEqual(response.body, b'Hello world!')
|
||||
|
||||
@gen_test
|
||||
def test_future_http_error(self):
|
||||
with self.assertRaises(HTTPError) as context:
|
||||
yield self.http_client.fetch(self.get_url('/notfound'))
|
||||
self.assertEqual(context.exception.code, 404)
|
||||
self.assertEqual(context.exception.response.code, 404)
|
||||
|
||||
@gen_test
|
||||
def test_reuse_request_from_response(self):
|
||||
# The response.request attribute should be an HTTPRequest, not
|
||||
# a _RequestProxy.
|
||||
# This test uses self.http_client.fetch because self.fetch calls
|
||||
# self.get_url on the input unconditionally.
|
||||
url = self.get_url('/hello')
|
||||
response = yield self.http_client.fetch(url)
|
||||
self.assertEqual(response.request.url, url)
|
||||
self.assertTrue(isinstance(response.request, HTTPRequest))
|
||||
response2 = yield self.http_client.fetch(response.request)
|
||||
self.assertEqual(response2.body, b'Hello world!')
|
||||
|
||||
def test_all_methods(self):
|
||||
for method in ['GET', 'DELETE', 'OPTIONS']:
|
||||
response = self.fetch('/all_methods', method=method)
|
||||
self.assertEqual(response.body, utf8(method))
|
||||
for method in ['POST', 'PUT', 'PATCH']:
|
||||
response = self.fetch('/all_methods', method=method, body=b'')
|
||||
self.assertEqual(response.body, utf8(method))
|
||||
response = self.fetch('/all_methods', method='HEAD')
|
||||
self.assertEqual(response.body, b'')
|
||||
response = self.fetch('/all_methods', method='OTHER',
|
||||
allow_nonstandard_methods=True)
|
||||
self.assertEqual(response.body, b'OTHER')
|
||||
|
||||
@gen_test
|
||||
def test_body(self):
|
||||
hello_url = self.get_url('/hello')
|
||||
with self.assertRaises(AssertionError) as context:
|
||||
yield self.http_client.fetch(hello_url, body='data')
|
||||
|
||||
self.assertTrue('must be empty' in str(context.exception))
|
||||
|
||||
with self.assertRaises(AssertionError) as context:
|
||||
yield self.http_client.fetch(hello_url, method='POST')
|
||||
|
||||
self.assertTrue('must not be empty' in str(context.exception))
class RequestProxyTest(unittest.TestCase):
    def test_request_set(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/',
                                          user_agent='foo'),
                              dict())
        self.assertEqual(proxy.user_agent, 'foo')

    def test_default_set(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
                              dict(network_interface='foo'))
        self.assertEqual(proxy.network_interface, 'foo')

    def test_both_set(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/',
                                          proxy_host='foo'),
                              dict(proxy_host='bar'))
        self.assertEqual(proxy.proxy_host, 'foo')

    def test_neither_set(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
                              dict())
        self.assertIs(proxy.auth_username, None)

    def test_bad_attribute(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
                              dict())
        with self.assertRaises(AttributeError):
            proxy.foo

    def test_defaults_none(self):
        proxy = _RequestProxy(HTTPRequest('http://example.com/'), None)
        self.assertIs(proxy.auth_username, None)
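# The assertions above pin down the precedence rules: a value set on the
# request wins over a configured default, unknown attribute names raise
# AttributeError, and a missing default simply yields None.  A minimal
# stand-in with the same behaviour (an illustrative sketch, not tornado's
# actual _RequestProxy) could look like this:
from tornado.httpclient import HTTPRequest

class DefaultsProxy(object):
    """Resolve attributes from a request first, then from a defaults dict."""
    def __init__(self, request, defaults):
        self.request = request
        self.defaults = defaults

    def __getattr__(self, name):
        value = getattr(self.request, name)  # AttributeError for unknown names
        if value is not None:
            return value
        if self.defaults is not None:
            return self.defaults.get(name)
        return None

# DefaultsProxy(HTTPRequest('http://example.com/'),
#               dict(network_interface='eth0')).network_interface  # -> 'eth0'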
class HTTPResponseTestCase(unittest.TestCase):
|
||||
def test_str(self):
|
||||
response = HTTPResponse(HTTPRequest('http://example.com'),
|
||||
200, headers={}, buffer=BytesIO())
|
||||
s = str(response)
|
||||
self.assertTrue(s.startswith('HTTPResponse('))
|
||||
self.assertIn('code=200', s)
|
||||
|
||||
|
||||
class SyncHTTPClientTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
|
||||
'AsyncIOMainLoop'):
|
||||
# TwistedIOLoop only supports the global reactor, so we can't have
|
||||
# separate IOLoops for client and server threads.
|
||||
# AsyncIOMainLoop doesn't work with the default policy
|
||||
# (although it could with some tweaks to this test and a
|
||||
# policy that created loops for non-main threads).
|
||||
raise unittest.SkipTest(
|
||||
'Sync HTTPClient not compatible with TwistedIOLoop or '
|
||||
'AsyncIOMainLoop')
|
||||
self.server_ioloop = IOLoop()
|
||||
|
||||
sock, self.port = bind_unused_port()
|
||||
app = Application([('/', HelloWorldHandler)])
|
||||
self.server = HTTPServer(app, io_loop=self.server_ioloop)
|
||||
self.server.add_socket(sock)
|
||||
|
||||
self.server_thread = threading.Thread(target=self.server_ioloop.start)
|
||||
self.server_thread.start()
|
||||
|
||||
self.http_client = HTTPClient()
|
||||
|
||||
def tearDown(self):
|
||||
def stop_server():
|
||||
self.server.stop()
|
||||
self.server_ioloop.stop()
|
||||
self.server_ioloop.add_callback(stop_server)
|
||||
self.server_thread.join()
|
||||
self.http_client.close()
|
||||
self.server_ioloop.close(all_fds=True)
|
||||
|
||||
def get_url(self, path):
|
||||
return 'http://localhost:%d%s' % (self.port, path)
|
||||
|
||||
def test_sync_client(self):
|
||||
response = self.http_client.fetch(self.get_url('/'))
|
||||
self.assertEqual(b'Hello world!', response.body)
|
||||
|
||||
def test_sync_client_error(self):
|
||||
# Synchronous HTTPClient raises errors directly; no need for
|
||||
# response.rethrow()
|
||||
with self.assertRaises(HTTPError) as assertion:
|
||||
self.http_client.fetch(self.get_url('/notfound'))
|
||||
self.assertEqual(assertion.exception.code, 404)
|
||||
|
||||
|
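# Illustrative sketch (not part of this test file): the blocking HTTPClient
# exercised above suits short scripts that are not already running an IOLoop.
# The url argument is a placeholder.
from tornado.httpclient import HTTPClient, HTTPError

def fetch_sync(url):
    client = HTTPClient()
    try:
        return client.fetch(url).body
    except HTTPError as e:
        # errors are raised directly rather than stored on a response object,
        # exactly as test_sync_client_error asserts
        print("request failed with code", e.code)
        return None
    finally:
        client.close()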
||||
class HTTPRequestTestCase(unittest.TestCase):
|
||||
def test_headers(self):
|
||||
request = HTTPRequest('http://example.com', headers={'foo': 'bar'})
|
||||
self.assertEqual(request.headers, {'foo': 'bar'})
|
||||
|
||||
def test_headers_setter(self):
|
||||
request = HTTPRequest('http://example.com')
|
||||
request.headers = {'bar': 'baz'}
|
||||
self.assertEqual(request.headers, {'bar': 'baz'})
|
||||
|
||||
def test_null_headers_setter(self):
|
||||
request = HTTPRequest('http://example.com')
|
||||
request.headers = None
|
||||
self.assertEqual(request.headers, {})
|
||||
|
||||
def test_body(self):
|
||||
request = HTTPRequest('http://example.com', body='foo')
|
||||
self.assertEqual(request.body, utf8('foo'))
|
||||
|
||||
def test_body_setter(self):
|
||||
request = HTTPRequest('http://example.com')
|
||||
request.body = 'foo'
|
||||
self.assertEqual(request.body, utf8('foo'))
|
||||
1035
Shared/lib/python3.4/site-packages/tornado/test/httpserver_test.py
Normal file
File diff suppressed because it is too large
255
Shared/lib/python3.4/site-packages/tornado/test/httputil_test.py
Normal file
@@ -0,0 +1,255 @@
#!/usr/bin/env python
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders, format_timestamp
|
||||
from tornado.escape import utf8
|
||||
from tornado.log import gen_log
|
||||
from tornado.testing import ExpectLog
|
||||
from tornado.test.util import unittest
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
|
||||
class TestUrlConcat(unittest.TestCase):
|
||||
|
||||
def test_url_concat_no_query_params(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path",
|
||||
[('y', 'y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?y=y&z=z")
|
||||
|
||||
def test_url_concat_encode_args(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path",
|
||||
[('y', '/y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?y=%2Fy&z=z")
|
||||
|
||||
def test_url_concat_trailing_q(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path?",
|
||||
[('y', 'y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?y=y&z=z")
|
||||
|
||||
def test_url_concat_q_with_no_trailing_amp(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path?x",
|
||||
[('y', 'y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
|
||||
|
||||
def test_url_concat_trailing_amp(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path?x&",
|
||||
[('y', 'y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?x&y=y&z=z")
|
||||
|
||||
def test_url_concat_mult_params(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path?a=1&b=2",
|
||||
[('y', 'y'), ('z', 'z')],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?a=1&b=2&y=y&z=z")
|
||||
|
||||
def test_url_concat_no_params(self):
|
||||
url = url_concat(
|
||||
"https://localhost/path?r=1&t=2",
|
||||
[],
|
||||
)
|
||||
self.assertEqual(url, "https://localhost/path?r=1&t=2")
|
||||
|
||||
|
||||
class MultipartFormDataTest(unittest.TestCase):
|
||||
def test_file_upload(self):
|
||||
data = b"""\
|
||||
--1234
|
||||
Content-Disposition: form-data; name="files"; filename="ab.txt"
|
||||
|
||||
Foo
|
||||
--1234--""".replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
file = files["files"][0]
|
||||
self.assertEqual(file["filename"], "ab.txt")
|
||||
self.assertEqual(file["body"], b"Foo")
|
||||
|
||||
def test_unquoted_names(self):
|
||||
# quotes are optional unless special characters are present
|
||||
data = b"""\
|
||||
--1234
|
||||
Content-Disposition: form-data; name=files; filename=ab.txt
|
||||
|
||||
Foo
|
||||
--1234--""".replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
file = files["files"][0]
|
||||
self.assertEqual(file["filename"], "ab.txt")
|
||||
self.assertEqual(file["body"], b"Foo")
|
||||
|
||||
def test_special_filenames(self):
|
||||
filenames = ['a;b.txt',
|
||||
'a"b.txt',
|
||||
'a";b.txt',
|
||||
'a;"b.txt',
|
||||
'a";";.txt',
|
||||
'a\\"b.txt',
|
||||
'a\\b.txt',
|
||||
]
|
||||
for filename in filenames:
|
||||
logging.debug("trying filename %r", filename)
|
||||
data = """\
|
||||
--1234
|
||||
Content-Disposition: form-data; name="files"; filename="%s"
|
||||
|
||||
Foo
|
||||
--1234--""" % filename.replace('\\', '\\\\').replace('"', '\\"')
|
||||
data = utf8(data.replace("\n", "\r\n"))
|
||||
args = {}
|
||||
files = {}
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
file = files["files"][0]
|
||||
self.assertEqual(file["filename"], filename)
|
||||
self.assertEqual(file["body"], b"Foo")
|
||||
|
||||
def test_boundary_starts_and_ends_with_quotes(self):
|
||||
data = b'''\
|
||||
--1234
|
||||
Content-Disposition: form-data; name="files"; filename="ab.txt"
|
||||
|
||||
Foo
|
||||
--1234--'''.replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
parse_multipart_form_data(b'"1234"', data, args, files)
|
||||
file = files["files"][0]
|
||||
self.assertEqual(file["filename"], "ab.txt")
|
||||
self.assertEqual(file["body"], b"Foo")
|
||||
|
||||
def test_missing_headers(self):
|
||||
data = b'''\
|
||||
--1234
|
||||
|
||||
Foo
|
||||
--1234--'''.replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
with ExpectLog(gen_log, "multipart/form-data missing headers"):
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
self.assertEqual(files, {})
|
||||
|
||||
def test_invalid_content_disposition(self):
|
||||
data = b'''\
|
||||
--1234
|
||||
Content-Disposition: invalid; name="files"; filename="ab.txt"
|
||||
|
||||
Foo
|
||||
--1234--'''.replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
with ExpectLog(gen_log, "Invalid multipart/form-data"):
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
self.assertEqual(files, {})
|
||||
|
||||
def test_line_does_not_end_with_correct_line_break(self):
|
||||
data = b'''\
|
||||
--1234
|
||||
Content-Disposition: form-data; name="files"; filename="ab.txt"
|
||||
|
||||
Foo--1234--'''.replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
with ExpectLog(gen_log, "Invalid multipart/form-data"):
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
self.assertEqual(files, {})
|
||||
|
||||
def test_content_disposition_header_without_name_parameter(self):
|
||||
data = b"""\
|
||||
--1234
|
||||
Content-Disposition: form-data; filename="ab.txt"
|
||||
|
||||
Foo
|
||||
--1234--""".replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
with ExpectLog(gen_log, "multipart/form-data value missing name"):
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
self.assertEqual(files, {})
|
||||
|
||||
def test_data_after_final_boundary(self):
|
||||
# The spec requires that data after the final boundary be ignored.
|
||||
# http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
|
||||
# In practice, some libraries include an extra CRLF after the boundary.
|
||||
data = b"""\
|
||||
--1234
|
||||
Content-Disposition: form-data; name="files"; filename="ab.txt"
|
||||
|
||||
Foo
|
||||
--1234--
|
||||
""".replace(b"\n", b"\r\n")
|
||||
args = {}
|
||||
files = {}
|
||||
parse_multipart_form_data(b"1234", data, args, files)
|
||||
file = files["files"][0]
|
||||
self.assertEqual(file["filename"], "ab.txt")
|
||||
self.assertEqual(file["body"], b"Foo")
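# Illustrative sketch (not part of this test file): building a body that
# parse_multipart_form_data accepts.  The boundary, field name, filename and
# payload below are arbitrary placeholders; note the mandatory CRLF line
# endings, as in the fixtures above.
from tornado.escape import utf8
from tornado.httputil import parse_multipart_form_data

def build_multipart(boundary, name, filename, payload):
    lines = [b"--" + boundary,
             utf8('Content-Disposition: form-data; name="%s"; filename="%s"'
                  % (name, filename)),
             b"",
             payload,
             b"--" + boundary + b"--",
             b""]
    return b"\r\n".join(lines)

args, files = {}, {}
body = build_multipart(b"xyz", "upload", "hello.txt", b"hi there")
parse_multipart_form_data(b"xyz", body, args, files)
assert files["upload"][0]["body"] == b"hi there"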
|
||||
|
||||
|
||||
class HTTPHeadersTest(unittest.TestCase):
|
||||
def test_multi_line(self):
|
||||
# Lines beginning with whitespace are appended to the previous line
|
||||
# with any leading whitespace replaced by a single space.
|
||||
# Note that while multi-line headers are a part of the HTTP spec,
|
||||
# their use is strongly discouraged.
|
||||
data = """\
|
||||
Foo: bar
|
||||
baz
|
||||
Asdf: qwer
|
||||
\tzxcv
|
||||
Foo: even
|
||||
more
|
||||
lines
|
||||
""".replace("\n", "\r\n")
|
||||
headers = HTTPHeaders.parse(data)
|
||||
self.assertEqual(headers["asdf"], "qwer zxcv")
|
||||
self.assertEqual(headers.get_list("asdf"), ["qwer zxcv"])
|
||||
self.assertEqual(headers["Foo"], "bar baz,even more lines")
|
||||
self.assertEqual(headers.get_list("foo"), ["bar baz", "even more lines"])
|
||||
self.assertEqual(sorted(list(headers.get_all())),
|
||||
[("Asdf", "qwer zxcv"),
|
||||
("Foo", "bar baz"),
|
||||
("Foo", "even more lines")])
|
||||
|
||||
|
||||
class FormatTimestampTest(unittest.TestCase):
|
||||
# Make sure that all the input types are supported.
|
||||
TIMESTAMP = 1359312200.503611
|
||||
EXPECTED = 'Sun, 27 Jan 2013 18:43:20 GMT'
|
||||
|
||||
def check(self, value):
|
||||
self.assertEqual(format_timestamp(value), self.EXPECTED)
|
||||
|
||||
def test_unix_time_float(self):
|
||||
self.check(self.TIMESTAMP)
|
||||
|
||||
def test_unix_time_int(self):
|
||||
self.check(int(self.TIMESTAMP))
|
||||
|
||||
def test_struct_time(self):
|
||||
self.check(time.gmtime(self.TIMESTAMP))
|
||||
|
||||
def test_time_tuple(self):
|
||||
tup = tuple(time.gmtime(self.TIMESTAMP))
|
||||
self.assertEqual(9, len(tup))
|
||||
self.check(tup)
|
||||
|
||||
def test_datetime(self):
|
||||
self.check(datetime.datetime.utcfromtimestamp(self.TIMESTAMP))
@@ -0,0 +1,46 @@
from __future__ import absolute_import, division, print_function, with_statement
from tornado.test.util import unittest


class ImportTest(unittest.TestCase):
    def test_import_everything(self):
        # Some of our modules are not otherwise tested.  Import them
        # all (unless they have external dependencies) here to at
        # least ensure that there are no syntax errors.
        import tornado.auth
        import tornado.autoreload
        import tornado.concurrent
        # import tornado.curl_httpclient  # depends on pycurl
        import tornado.escape
        import tornado.gen
        import tornado.http1connection
        import tornado.httpclient
        import tornado.httpserver
        import tornado.httputil
        import tornado.ioloop
        import tornado.iostream
        import tornado.locale
        import tornado.log
        import tornado.netutil
        import tornado.options
        import tornado.process
        import tornado.simple_httpclient
        import tornado.stack_context
        import tornado.tcpserver
        import tornado.template
        import tornado.testing
        import tornado.util
        import tornado.web
        import tornado.websocket
        import tornado.wsgi

    # for modules with dependencies, if those dependencies can be loaded,
    # load them too.

    def test_import_pycurl(self):
        try:
            import pycurl
        except ImportError:
            pass
        else:
            import tornado.curl_httpclient
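# Illustrative sketch (not part of this test file): the same optional-import
# pattern used by test_import_pycurl, written as a tiny reusable helper.
def optional_import(name):
    """Return the named module if it can be imported, else None."""
    try:
        return __import__(name)
    except ImportError:
        return None

if optional_import("pycurl") is not None:
    import tornado.curl_httpclient  # imported only to check for errors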
461
Shared/lib/python3.4/site-packages/tornado/test/ioloop_test.py
Normal file
@@ -0,0 +1,461 @@
#!/usr/bin/env python
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import contextlib
|
||||
import datetime
|
||||
import functools
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
|
||||
from tornado import gen
|
||||
from tornado.ioloop import IOLoop, TimeoutError
|
||||
from tornado.log import app_log
|
||||
from tornado.stack_context import ExceptionStackContext, StackContext, wrap, NullContext
|
||||
from tornado.testing import AsyncTestCase, bind_unused_port, ExpectLog
|
||||
from tornado.test.util import unittest, skipIfNonUnix, skipOnTravis
|
||||
|
||||
try:
|
||||
from concurrent import futures
|
||||
except ImportError:
|
||||
futures = None
|
||||
|
||||
|
||||
class TestIOLoop(AsyncTestCase):
|
||||
@skipOnTravis
|
||||
def test_add_callback_wakeup(self):
|
||||
# Make sure that add_callback from inside a running IOLoop
|
||||
# wakes up the IOLoop immediately instead of waiting for a timeout.
|
||||
def callback():
|
||||
self.called = True
|
||||
self.stop()
|
||||
|
||||
def schedule_callback():
|
||||
self.called = False
|
||||
self.io_loop.add_callback(callback)
|
||||
# Store away the time so we can check if we woke up immediately
|
||||
self.start_time = time.time()
|
||||
self.io_loop.add_timeout(self.io_loop.time(), schedule_callback)
|
||||
self.wait()
|
||||
self.assertAlmostEqual(time.time(), self.start_time, places=2)
|
||||
self.assertTrue(self.called)
|
||||
|
||||
@skipOnTravis
|
||||
def test_add_callback_wakeup_other_thread(self):
|
||||
def target():
|
||||
# sleep a bit to let the ioloop go into its poll loop
|
||||
time.sleep(0.01)
|
||||
self.stop_time = time.time()
|
||||
self.io_loop.add_callback(self.stop)
|
||||
thread = threading.Thread(target=target)
|
||||
self.io_loop.add_callback(thread.start)
|
||||
self.wait()
|
||||
delta = time.time() - self.stop_time
|
||||
self.assertLess(delta, 0.1)
|
||||
thread.join()
|
||||
|
||||
def test_add_timeout_timedelta(self):
|
||||
self.io_loop.add_timeout(datetime.timedelta(microseconds=1), self.stop)
|
||||
self.wait()
|
||||
|
||||
def test_multiple_add(self):
|
||||
sock, port = bind_unused_port()
|
||||
try:
|
||||
self.io_loop.add_handler(sock.fileno(), lambda fd, events: None,
|
||||
IOLoop.READ)
|
||||
# Attempting to add the same handler twice fails
|
||||
# (with a platform-dependent exception)
|
||||
self.assertRaises(Exception, self.io_loop.add_handler,
|
||||
sock.fileno(), lambda fd, events: None,
|
||||
IOLoop.READ)
|
||||
finally:
|
||||
self.io_loop.remove_handler(sock.fileno())
|
||||
sock.close()
|
||||
|
||||
def test_remove_without_add(self):
|
||||
# remove_handler should not throw an exception if called on an fd
|
||||
# that was never added.
|
||||
sock, port = bind_unused_port()
|
||||
try:
|
||||
self.io_loop.remove_handler(sock.fileno())
|
||||
finally:
|
||||
sock.close()
|
||||
|
||||
def test_add_callback_from_signal(self):
|
||||
# cheat a little bit and just run this normally, since we can't
|
||||
# easily simulate the races that happen with real signal handlers
|
||||
self.io_loop.add_callback_from_signal(self.stop)
|
||||
self.wait()
|
||||
|
||||
def test_add_callback_from_signal_other_thread(self):
|
||||
# Very crude test, just to make sure that we cover this case.
|
||||
# This also happens to be the first test where we run an IOLoop in
|
||||
# a non-main thread.
|
||||
other_ioloop = IOLoop()
|
||||
thread = threading.Thread(target=other_ioloop.start)
|
||||
thread.start()
|
||||
other_ioloop.add_callback_from_signal(other_ioloop.stop)
|
||||
thread.join()
|
||||
other_ioloop.close()
|
||||
|
||||
def test_add_callback_while_closing(self):
|
||||
# Issue #635: add_callback() should raise a clean exception
|
||||
# if called while another thread is closing the IOLoop.
|
||||
closing = threading.Event()
|
||||
|
||||
def target():
|
||||
other_ioloop.add_callback(other_ioloop.stop)
|
||||
other_ioloop.start()
|
||||
closing.set()
|
||||
other_ioloop.close(all_fds=True)
|
||||
other_ioloop = IOLoop()
|
||||
thread = threading.Thread(target=target)
|
||||
thread.start()
|
||||
closing.wait()
|
||||
for i in range(1000):
|
||||
try:
|
||||
other_ioloop.add_callback(lambda: None)
|
||||
except RuntimeError as e:
|
||||
self.assertEqual("IOLoop is closing", str(e))
|
||||
break
|
||||
|
||||
def test_handle_callback_exception(self):
|
||||
# IOLoop.handle_callback_exception can be overridden to catch
|
||||
# exceptions in callbacks.
|
||||
def handle_callback_exception(callback):
|
||||
self.assertIs(sys.exc_info()[0], ZeroDivisionError)
|
||||
self.stop()
|
||||
self.io_loop.handle_callback_exception = handle_callback_exception
|
||||
with NullContext():
|
||||
# remove the test StackContext that would see this uncaught
|
||||
# exception as a test failure.
|
||||
self.io_loop.add_callback(lambda: 1 / 0)
|
||||
self.wait()
|
||||
|
||||
@skipIfNonUnix # just because socketpair is so convenient
|
||||
def test_read_while_writeable(self):
|
||||
# Ensure that write events don't come in while we're waiting for
|
||||
# a read and haven't asked for writeability. (the reverse is
|
||||
# difficult to test for)
|
||||
client, server = socket.socketpair()
|
||||
try:
|
||||
def handler(fd, events):
|
||||
self.assertEqual(events, IOLoop.READ)
|
||||
self.stop()
|
||||
self.io_loop.add_handler(client.fileno(), handler, IOLoop.READ)
|
||||
self.io_loop.add_timeout(self.io_loop.time() + 0.01,
|
||||
functools.partial(server.send, b'asdf'))
|
||||
self.wait()
|
||||
self.io_loop.remove_handler(client.fileno())
|
||||
finally:
|
||||
client.close()
|
||||
server.close()
|
||||
|
||||
def test_remove_timeout_after_fire(self):
|
||||
# It is not an error to call remove_timeout after it has run.
|
||||
handle = self.io_loop.add_timeout(self.io_loop.time(), self.stop)
|
||||
self.wait()
|
||||
self.io_loop.remove_timeout(handle)
|
||||
|
||||
def test_remove_timeout_cleanup(self):
|
||||
# Add and remove enough callbacks to trigger cleanup.
|
||||
# Not a very thorough test, but it ensures that the cleanup code
|
||||
# gets executed and doesn't blow up. This test is only really useful
|
||||
# on PollIOLoop subclasses, but it should run silently on any
|
||||
# implementation.
|
||||
for i in range(2000):
|
||||
timeout = self.io_loop.add_timeout(self.io_loop.time() + 3600,
|
||||
lambda: None)
|
||||
self.io_loop.remove_timeout(timeout)
|
||||
# HACK: wait two IOLoop iterations for the GC to happen.
|
||||
self.io_loop.add_callback(lambda: self.io_loop.add_callback(self.stop))
|
||||
self.wait()
|
||||
|
||||
def test_timeout_with_arguments(self):
|
||||
# This tests that all the timeout methods pass through *args correctly.
|
||||
results = []
|
||||
self.io_loop.add_timeout(self.io_loop.time(), results.append, 1)
|
||||
self.io_loop.add_timeout(datetime.timedelta(seconds=0),
|
||||
results.append, 2)
|
||||
self.io_loop.call_at(self.io_loop.time(), results.append, 3)
|
||||
self.io_loop.call_later(0, results.append, 4)
|
||||
self.io_loop.call_later(0, self.stop)
|
||||
self.wait()
|
||||
self.assertEqual(results, [1, 2, 3, 4])
|
||||
|
||||
def test_close_file_object(self):
|
||||
"""When a file object is used instead of a numeric file descriptor,
|
||||
the object should be closed (by IOLoop.close(all_fds=True)),
|
||||
not just the fd.
|
||||
"""
|
||||
# Use a socket since they are supported by IOLoop on all platforms.
|
||||
# Unfortunately, sockets don't support the .closed attribute for
|
||||
# inspecting their close status, so we must use a wrapper.
|
||||
class SocketWrapper(object):
|
||||
def __init__(self, sockobj):
|
||||
self.sockobj = sockobj
|
||||
self.closed = False
|
||||
|
||||
def fileno(self):
|
||||
return self.sockobj.fileno()
|
||||
|
||||
def close(self):
|
||||
self.closed = True
|
||||
self.sockobj.close()
|
||||
sockobj, port = bind_unused_port()
|
||||
socket_wrapper = SocketWrapper(sockobj)
|
||||
io_loop = IOLoop()
|
||||
io_loop.add_handler(socket_wrapper, lambda fd, events: None,
|
||||
IOLoop.READ)
|
||||
io_loop.close(all_fds=True)
|
||||
self.assertTrue(socket_wrapper.closed)
|
||||
|
||||
def test_handler_callback_file_object(self):
|
||||
"""The handler callback receives the same fd object it passed in."""
|
||||
server_sock, port = bind_unused_port()
|
||||
fds = []
|
||||
def handle_connection(fd, events):
|
||||
fds.append(fd)
|
||||
conn, addr = server_sock.accept()
|
||||
conn.close()
|
||||
self.stop()
|
||||
self.io_loop.add_handler(server_sock, handle_connection, IOLoop.READ)
|
||||
with contextlib.closing(socket.socket()) as client_sock:
|
||||
client_sock.connect(('127.0.0.1', port))
|
||||
self.wait()
|
||||
self.io_loop.remove_handler(server_sock)
|
||||
self.io_loop.add_handler(server_sock.fileno(), handle_connection,
|
||||
IOLoop.READ)
|
||||
with contextlib.closing(socket.socket()) as client_sock:
|
||||
client_sock.connect(('127.0.0.1', port))
|
||||
self.wait()
|
||||
self.assertIs(fds[0], server_sock)
|
||||
self.assertEqual(fds[1], server_sock.fileno())
|
||||
self.io_loop.remove_handler(server_sock.fileno())
|
||||
server_sock.close()
|
||||
|
||||
def test_mixed_fd_fileobj(self):
|
||||
server_sock, port = bind_unused_port()
|
||||
def f(fd, events):
|
||||
pass
|
||||
self.io_loop.add_handler(server_sock, f, IOLoop.READ)
|
||||
with self.assertRaises(Exception):
|
||||
# The exact error is unspecified - some implementations use
|
||||
# IOError, others use ValueError.
|
||||
self.io_loop.add_handler(server_sock.fileno(), f, IOLoop.READ)
|
||||
self.io_loop.remove_handler(server_sock.fileno())
|
||||
server_sock.close()
|
||||
|
||||
def test_reentrant(self):
|
||||
"""Calling start() twice should raise an error, not deadlock."""
|
||||
returned_from_start = [False]
|
||||
got_exception = [False]
|
||||
def callback():
|
||||
try:
|
||||
self.io_loop.start()
|
||||
returned_from_start[0] = True
|
||||
except Exception:
|
||||
got_exception[0] = True
|
||||
self.stop()
|
||||
self.io_loop.add_callback(callback)
|
||||
self.wait()
|
||||
self.assertTrue(got_exception[0])
|
||||
self.assertFalse(returned_from_start[0])
|
||||
|
||||
def test_exception_logging(self):
|
||||
"""Uncaught exceptions get logged by the IOLoop."""
|
||||
# Use a NullContext to keep the exception from being caught by
|
||||
# AsyncTestCase.
|
||||
with NullContext():
|
||||
self.io_loop.add_callback(lambda: 1/0)
|
||||
self.io_loop.add_callback(self.stop)
|
||||
with ExpectLog(app_log, "Exception in callback"):
|
||||
self.wait()
|
||||
|
||||
def test_exception_logging_future(self):
|
||||
"""The IOLoop examines exceptions from Futures and logs them."""
|
||||
with NullContext():
|
||||
@gen.coroutine
|
||||
def callback():
|
||||
self.io_loop.add_callback(self.stop)
|
||||
1/0
|
||||
self.io_loop.add_callback(callback)
|
||||
with ExpectLog(app_log, "Exception in callback"):
|
||||
self.wait()
|
||||
|
||||
def test_spawn_callback(self):
|
||||
# An added callback runs in the test's stack_context, so will be
|
||||
# re-raised in wait().
|
||||
self.io_loop.add_callback(lambda: 1/0)
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
self.wait()
|
||||
# A spawned callback is run directly on the IOLoop, so it will be
|
||||
# logged without stopping the test.
|
||||
self.io_loop.spawn_callback(lambda: 1/0)
|
||||
self.io_loop.add_callback(self.stop)
|
||||
with ExpectLog(app_log, "Exception in callback"):
|
||||
self.wait()
|
||||
|
||||
|
||||
# Deliberately not a subclass of AsyncTestCase so the IOLoop isn't
|
||||
# automatically set as current.
|
||||
class TestIOLoopCurrent(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.io_loop = IOLoop()
|
||||
|
||||
def tearDown(self):
|
||||
self.io_loop.close()
|
||||
|
||||
def test_current(self):
|
||||
def f():
|
||||
self.current_io_loop = IOLoop.current()
|
||||
self.io_loop.stop()
|
||||
self.io_loop.add_callback(f)
|
||||
self.io_loop.start()
|
||||
self.assertIs(self.current_io_loop, self.io_loop)
|
||||
|
||||
|
||||
class TestIOLoopAddCallback(AsyncTestCase):
|
||||
def setUp(self):
|
||||
super(TestIOLoopAddCallback, self).setUp()
|
||||
self.active_contexts = []
|
||||
|
||||
def add_callback(self, callback, *args, **kwargs):
|
||||
self.io_loop.add_callback(callback, *args, **kwargs)
|
||||
|
||||
@contextlib.contextmanager
|
||||
def context(self, name):
|
||||
self.active_contexts.append(name)
|
||||
yield
|
||||
self.assertEqual(self.active_contexts.pop(), name)
|
||||
|
||||
def test_pre_wrap(self):
|
||||
# A pre-wrapped callback is run in the context in which it was
|
||||
# wrapped, not when it was added to the IOLoop.
|
||||
def f1():
|
||||
self.assertIn('c1', self.active_contexts)
|
||||
self.assertNotIn('c2', self.active_contexts)
|
||||
self.stop()
|
||||
|
||||
with StackContext(functools.partial(self.context, 'c1')):
|
||||
wrapped = wrap(f1)
|
||||
|
||||
with StackContext(functools.partial(self.context, 'c2')):
|
||||
self.add_callback(wrapped)
|
||||
|
||||
self.wait()
|
||||
|
||||
def test_pre_wrap_with_args(self):
|
||||
# Same as test_pre_wrap, but the function takes arguments.
|
||||
# Implementation note: The function must not be wrapped in a
|
||||
# functools.partial until after it has been passed through
|
||||
# stack_context.wrap
|
||||
def f1(foo, bar):
|
||||
self.assertIn('c1', self.active_contexts)
|
||||
self.assertNotIn('c2', self.active_contexts)
|
||||
self.stop((foo, bar))
|
||||
|
||||
with StackContext(functools.partial(self.context, 'c1')):
|
||||
wrapped = wrap(f1)
|
||||
|
||||
with StackContext(functools.partial(self.context, 'c2')):
|
||||
self.add_callback(wrapped, 1, bar=2)
|
||||
|
||||
result = self.wait()
|
||||
self.assertEqual(result, (1, 2))
|
||||
|
||||
|
||||
class TestIOLoopAddCallbackFromSignal(TestIOLoopAddCallback):
|
||||
# Repeat the add_callback tests using add_callback_from_signal
|
||||
def add_callback(self, callback, *args, **kwargs):
|
||||
self.io_loop.add_callback_from_signal(callback, *args, **kwargs)
|
||||
|
||||
|
||||
@unittest.skipIf(futures is None, "futures module not present")
|
||||
class TestIOLoopFutures(AsyncTestCase):
|
||||
def test_add_future_threads(self):
|
||||
with futures.ThreadPoolExecutor(1) as pool:
|
||||
self.io_loop.add_future(pool.submit(lambda: None),
|
||||
lambda future: self.stop(future))
|
||||
future = self.wait()
|
||||
self.assertTrue(future.done())
|
||||
self.assertTrue(future.result() is None)
|
||||
|
||||
def test_add_future_stack_context(self):
|
||||
ready = threading.Event()
|
||||
|
||||
def task():
|
||||
# we must wait for the ioloop callback to be scheduled before
|
||||
# the task completes to ensure that add_future adds the callback
|
||||
# asynchronously (which is the scenario in which capturing
|
||||
# the stack_context matters)
|
||||
ready.wait(1)
|
||||
assert ready.isSet(), "timed out"
|
||||
raise Exception("worker")
|
||||
|
||||
def callback(future):
|
||||
self.future = future
|
||||
raise Exception("callback")
|
||||
|
||||
def handle_exception(typ, value, traceback):
|
||||
self.exception = value
|
||||
self.stop()
|
||||
return True
|
||||
|
||||
# stack_context propagates to the ioloop callback, but the worker
|
||||
# task just has its exceptions caught and saved in the Future.
|
||||
with futures.ThreadPoolExecutor(1) as pool:
|
||||
with ExceptionStackContext(handle_exception):
|
||||
self.io_loop.add_future(pool.submit(task), callback)
|
||||
ready.set()
|
||||
self.wait()
|
||||
|
||||
self.assertEqual(self.exception.args[0], "callback")
|
||||
self.assertEqual(self.future.exception().args[0], "worker")
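# Illustrative sketch (not part of this test file): add_future is the usual
# bridge for handing a blocking job to a worker thread and getting the result
# back on the IOLoop thread, as the tests above exercise.  The pool size and
# helper name are arbitrary; a real application would reuse one executor.
from concurrent.futures import ThreadPoolExecutor
from tornado.ioloop import IOLoop

def run_blocking(io_loop, func, on_done):
    pool = ThreadPoolExecutor(1)
    # on_done(future) runs on the IOLoop thread once func has finished
    io_loop.add_future(pool.submit(func), on_done)

# run_blocking(IOLoop.current(), expensive_call,
#              lambda future: print(future.result()))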
|
||||
|
||||
|
||||
class TestIOLoopRunSync(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.io_loop = IOLoop()
|
||||
|
||||
def tearDown(self):
|
||||
self.io_loop.close()
|
||||
|
||||
def test_sync_result(self):
|
||||
self.assertEqual(self.io_loop.run_sync(lambda: 42), 42)
|
||||
|
||||
def test_sync_exception(self):
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
self.io_loop.run_sync(lambda: 1 / 0)
|
||||
|
||||
def test_async_result(self):
|
||||
@gen.coroutine
|
||||
def f():
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
raise gen.Return(42)
|
||||
self.assertEqual(self.io_loop.run_sync(f), 42)
|
||||
|
||||
def test_async_exception(self):
|
||||
@gen.coroutine
|
||||
def f():
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
1 / 0
|
||||
with self.assertRaises(ZeroDivisionError):
|
||||
self.io_loop.run_sync(f)
|
||||
|
||||
def test_current(self):
|
||||
def f():
|
||||
self.assertIs(IOLoop.current(), self.io_loop)
|
||||
self.io_loop.run_sync(f)
|
||||
|
||||
def test_timeout(self):
|
||||
@gen.coroutine
|
||||
def f():
|
||||
yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)
|
||||
self.assertRaises(TimeoutError, self.io_loop.run_sync, f, timeout=0.01)
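# Illustrative sketch (not part of this test file): run_sync as a program
# entry point.  It starts the loop, runs the coroutine to completion
# (optionally bounded by timeout=..., as test_timeout shows), stops the loop
# and returns the result.  The delay and return value are arbitrary.
from tornado import gen
from tornado.ioloop import IOLoop

@gen.coroutine
def wait_and_answer():
    io_loop = IOLoop.current()
    yield gen.Task(io_loop.add_timeout, io_loop.time() + 0.1)
    raise gen.Return(42)

answer = IOLoop.current().run_sync(wait_and_answer)  # -> 42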
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
907
Shared/lib/python3.4/site-packages/tornado/test/iostream_test.py
Normal file
@@ -0,0 +1,907 @@
from __future__ import absolute_import, division, print_function, with_statement
|
||||
from tornado.concurrent import Future
|
||||
from tornado import gen
|
||||
from tornado import netutil
|
||||
from tornado.iostream import IOStream, SSLIOStream, PipeIOStream, StreamClosedError
|
||||
from tornado.httputil import HTTPHeaders
|
||||
from tornado.log import gen_log, app_log
|
||||
from tornado.netutil import ssl_wrap_socket
|
||||
from tornado.stack_context import NullContext
|
||||
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, bind_unused_port, ExpectLog, gen_test
|
||||
from tornado.test.util import unittest, skipIfNonUnix
|
||||
from tornado.web import RequestHandler, Application
|
||||
import certifi
|
||||
import errno
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import ssl
|
||||
import sys
|
||||
|
||||
|
||||
def _server_ssl_options():
|
||||
return dict(
|
||||
certfile=os.path.join(os.path.dirname(__file__), 'test.crt'),
|
||||
keyfile=os.path.join(os.path.dirname(__file__), 'test.key'),
|
||||
)
|
||||
|
||||
|
||||
class HelloHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write("Hello")
|
||||
|
||||
|
||||
class TestIOStreamWebMixin(object):
|
||||
def _make_client_iostream(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_app(self):
|
||||
return Application([('/', HelloHandler)])
|
||||
|
||||
def test_connection_closed(self):
|
||||
# When a server sends a response and then closes the connection,
|
||||
# the client must be allowed to read the data before the IOStream
|
||||
# closes itself. Epoll reports closed connections with a separate
|
||||
# EPOLLRDHUP event delivered at the same time as the read event,
|
||||
# while kqueue reports them as a second read/write event with an EOF
|
||||
# flag.
|
||||
response = self.fetch("/", headers={"Connection": "close"})
|
||||
response.rethrow()
|
||||
|
||||
def test_read_until_close(self):
|
||||
stream = self._make_client_iostream()
|
||||
stream.connect(('localhost', self.get_http_port()), callback=self.stop)
|
||||
self.wait()
|
||||
stream.write(b"GET / HTTP/1.0\r\n\r\n")
|
||||
|
||||
stream.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertTrue(data.startswith(b"HTTP/1.0 200"))
|
||||
self.assertTrue(data.endswith(b"Hello"))
|
||||
|
||||
def test_read_zero_bytes(self):
|
||||
self.stream = self._make_client_iostream()
|
||||
self.stream.connect(("localhost", self.get_http_port()),
|
||||
callback=self.stop)
|
||||
self.wait()
|
||||
self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
|
||||
|
||||
# normal read
|
||||
self.stream.read_bytes(9, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"HTTP/1.0 ")
|
||||
|
||||
# zero bytes
|
||||
self.stream.read_bytes(0, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"")
|
||||
|
||||
# another normal read
|
||||
self.stream.read_bytes(3, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"200")
|
||||
|
||||
self.stream.close()
|
||||
|
||||
def test_write_while_connecting(self):
|
||||
stream = self._make_client_iostream()
|
||||
connected = [False]
|
||||
|
||||
def connected_callback():
|
||||
connected[0] = True
|
||||
self.stop()
|
||||
stream.connect(("localhost", self.get_http_port()),
|
||||
callback=connected_callback)
|
||||
# unlike the previous tests, try to write before the connection
|
||||
# is complete.
|
||||
written = [False]
|
||||
|
||||
def write_callback():
|
||||
written[0] = True
|
||||
self.stop()
|
||||
stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n",
|
||||
callback=write_callback)
|
||||
self.assertTrue(not connected[0])
|
||||
# by the time the write has flushed, the connection callback has
|
||||
# also run
|
||||
try:
|
||||
self.wait(lambda: connected[0] and written[0])
|
||||
finally:
|
||||
logging.debug((connected, written))
|
||||
|
||||
stream.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertTrue(data.endswith(b"Hello"))
|
||||
|
||||
stream.close()
|
||||
|
||||
@gen_test
|
||||
def test_future_interface(self):
|
||||
"""Basic test of IOStream's ability to return Futures."""
|
||||
stream = self._make_client_iostream()
|
||||
connect_result = yield stream.connect(
|
||||
("localhost", self.get_http_port()))
|
||||
self.assertIs(connect_result, stream)
|
||||
yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
|
||||
first_line = yield stream.read_until(b"\r\n")
|
||||
self.assertEqual(first_line, b"HTTP/1.0 200 OK\r\n")
|
||||
# callback=None is equivalent to no callback.
|
||||
header_data = yield stream.read_until(b"\r\n\r\n", callback=None)
|
||||
headers = HTTPHeaders.parse(header_data.decode('latin1'))
|
||||
content_length = int(headers['Content-Length'])
|
||||
body = yield stream.read_bytes(content_length)
|
||||
self.assertEqual(body, b'Hello')
|
||||
stream.close()
|
||||
|
||||
@gen_test
|
||||
def test_future_close_while_reading(self):
|
||||
stream = self._make_client_iostream()
|
||||
yield stream.connect(("localhost", self.get_http_port()))
|
||||
yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
|
||||
with self.assertRaises(StreamClosedError):
|
||||
yield stream.read_bytes(1024 * 1024)
|
||||
stream.close()
|
||||
|
||||
@gen_test
|
||||
def test_future_read_until_close(self):
|
||||
# Ensure that the data comes through before the StreamClosedError.
|
||||
stream = self._make_client_iostream()
|
||||
yield stream.connect(("localhost", self.get_http_port()))
|
||||
yield stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n")
|
||||
yield stream.read_until(b"\r\n\r\n")
|
||||
body = yield stream.read_until_close()
|
||||
self.assertEqual(body, b"Hello")
|
||||
|
||||
# Nothing else to read; the error comes immediately without waiting
|
||||
# for yield.
|
||||
with self.assertRaises(StreamClosedError):
|
||||
stream.read_bytes(1)
|
||||
|
||||
|
||||
class TestIOStreamMixin(object):
|
||||
def _make_server_iostream(self, connection, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
def _make_client_iostream(self, connection, **kwargs):
|
||||
raise NotImplementedError()
|
||||
|
||||
def make_iostream_pair(self, **kwargs):
|
||||
listener, port = bind_unused_port()
|
||||
streams = [None, None]
|
||||
|
||||
def accept_callback(connection, address):
|
||||
streams[0] = self._make_server_iostream(connection, **kwargs)
|
||||
self.stop()
|
||||
|
||||
def connect_callback():
|
||||
streams[1] = client_stream
|
||||
self.stop()
|
||||
netutil.add_accept_handler(listener, accept_callback,
|
||||
io_loop=self.io_loop)
|
||||
client_stream = self._make_client_iostream(socket.socket(), **kwargs)
|
||||
client_stream.connect(('127.0.0.1', port),
|
||||
callback=connect_callback)
|
||||
self.wait(condition=lambda: all(streams))
|
||||
self.io_loop.remove_handler(listener.fileno())
|
||||
listener.close()
|
||||
return streams
|
||||
|
||||
def test_streaming_callback_with_data_in_buffer(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.write(b"abcd\r\nefgh")
|
||||
server.read_until(b"\r\n", self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"abcd\r\n")
|
||||
|
||||
def closed_callback(chunk):
|
||||
self.fail()
|
||||
server.read_until_close(callback=closed_callback,
|
||||
streaming_callback=self.stop)
|
||||
# self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"efgh")
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_write_zero_bytes(self):
|
||||
# Attempting to write zero bytes should run the callback without
|
||||
# going into an infinite loop.
|
||||
server, client = self.make_iostream_pair()
|
||||
server.write(b'', callback=self.stop)
|
||||
self.wait()
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_connection_refused(self):
|
||||
# When a connection is refused, the connect callback should not
|
||||
# be run. (The kqueue IOLoop used to behave differently from the
|
||||
# epoll IOLoop in this respect)
|
||||
server_socket, port = bind_unused_port()
|
||||
server_socket.close()
|
||||
stream = IOStream(socket.socket(), self.io_loop)
|
||||
self.connect_called = False
|
||||
|
||||
def connect_callback():
|
||||
self.connect_called = True
|
||||
stream.set_close_callback(self.stop)
|
||||
# log messages vary by platform and ioloop implementation
|
||||
with ExpectLog(gen_log, ".*", required=False):
|
||||
stream.connect(("localhost", port), connect_callback)
|
||||
self.wait()
|
||||
self.assertFalse(self.connect_called)
|
||||
self.assertTrue(isinstance(stream.error, socket.error), stream.error)
|
||||
if sys.platform != 'cygwin':
|
||||
_ERRNO_CONNREFUSED = (errno.ECONNREFUSED,)
|
||||
if hasattr(errno, "WSAECONNREFUSED"):
|
||||
_ERRNO_CONNREFUSED += (errno.WSAECONNREFUSED,)
|
||||
# cygwin's errnos don't match those used on native windows python
|
||||
self.assertTrue(stream.error.args[0] in _ERRNO_CONNREFUSED)
|
||||
|
||||
def test_gaierror(self):
|
||||
# Test that IOStream sets its exc_info on getaddrinfo error
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
|
||||
stream = IOStream(s, io_loop=self.io_loop)
|
||||
stream.set_close_callback(self.stop)
|
||||
# To reliably generate a gaierror we use a malformed domain name
|
||||
# instead of a name that's simply unlikely to exist (since
|
||||
# opendns and some ISPs return bogus addresses for nonexistent
|
||||
# domains instead of the proper error codes).
|
||||
with ExpectLog(gen_log, "Connect error"):
|
||||
stream.connect(('an invalid domain', 54321))
|
||||
self.assertTrue(isinstance(stream.error, socket.gaierror), stream.error)
|
||||
|
||||
def test_read_callback_error(self):
|
||||
# Test that IOStream sets its exc_info when a read callback throws
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
server.set_close_callback(self.stop)
|
||||
with ExpectLog(
|
||||
app_log, "(Uncaught exception|Exception in callback)"
|
||||
):
|
||||
# Clear ExceptionStackContext so IOStream catches error
|
||||
with NullContext():
|
||||
server.read_bytes(1, callback=lambda data: 1 / 0)
|
||||
client.write(b"1")
|
||||
self.wait()
|
||||
self.assertTrue(isinstance(server.error, ZeroDivisionError))
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_streaming_callback(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
chunks = []
|
||||
final_called = []
|
||||
|
||||
def streaming_callback(data):
|
||||
chunks.append(data)
|
||||
self.stop()
|
||||
|
||||
def final_callback(data):
|
||||
self.assertFalse(data)
|
||||
final_called.append(True)
|
||||
self.stop()
|
||||
server.read_bytes(6, callback=final_callback,
|
||||
streaming_callback=streaming_callback)
|
||||
client.write(b"1234")
|
||||
self.wait(condition=lambda: chunks)
|
||||
client.write(b"5678")
|
||||
self.wait(condition=lambda: final_called)
|
||||
self.assertEqual(chunks, [b"1234", b"56"])
|
||||
|
||||
# the rest of the last chunk is still in the buffer
|
||||
server.read_bytes(2, callback=self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"78")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_streaming_until_close(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
chunks = []
|
||||
closed = [False]
|
||||
|
||||
def streaming_callback(data):
|
||||
chunks.append(data)
|
||||
self.stop()
|
||||
def close_callback(data):
|
||||
assert not data, data
|
||||
closed[0] = True
|
||||
self.stop()
|
||||
client.read_until_close(callback=close_callback,
|
||||
streaming_callback=streaming_callback)
|
||||
server.write(b"1234")
|
||||
self.wait(condition=lambda: len(chunks) == 1)
|
||||
server.write(b"5678", self.stop)
|
||||
self.wait()
|
||||
server.close()
|
||||
self.wait(condition=lambda: closed[0])
|
||||
self.assertEqual(chunks, [b"1234", b"5678"])
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_delayed_close_callback(self):
|
||||
# The scenario: Server closes the connection while there is a pending
|
||||
# read that can be served out of buffered data. The client does not
|
||||
# run the close_callback as soon as it detects the close, but rather
|
||||
# defers it until after the buffered read has finished.
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
client.set_close_callback(self.stop)
|
||||
server.write(b"12")
|
||||
chunks = []
|
||||
|
||||
def callback1(data):
|
||||
chunks.append(data)
|
||||
client.read_bytes(1, callback2)
|
||||
server.close()
|
||||
|
||||
def callback2(data):
|
||||
chunks.append(data)
|
||||
client.read_bytes(1, callback1)
|
||||
self.wait() # stopped by close_callback
|
||||
self.assertEqual(chunks, [b"1", b"2"])
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_future_delayed_close_callback(self):
|
||||
# Same as test_delayed_close_callback, but with the future interface.
|
||||
server, client = self.make_iostream_pair()
|
||||
# We can't call make_iostream_pair inside a gen_test function
|
||||
# because the ioloop is not reentrant.
|
||||
@gen_test
|
||||
def f(self):
|
||||
server.write(b"12")
|
||||
chunks = []
|
||||
chunks.append((yield client.read_bytes(1)))
|
||||
server.close()
|
||||
chunks.append((yield client.read_bytes(1)))
|
||||
self.assertEqual(chunks, [b"1", b"2"])
|
||||
try:
|
||||
f(self)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_close_buffered_data(self):
|
||||
# Similar to the previous test, but with data stored in the OS's
|
||||
# socket buffers instead of the IOStream's read buffer. Out-of-band
|
||||
# close notifications must be delayed until all data has been
|
||||
# drained into the IOStream buffer. (epoll used to use out-of-band
|
||||
# close events with EPOLLRDHUP, but no longer)
|
||||
#
|
||||
# This depends on the read_chunk_size being smaller than the
|
||||
# OS socket buffer, so make it small.
|
||||
server, client = self.make_iostream_pair(read_chunk_size=256)
|
||||
try:
|
||||
server.write(b"A" * 512)
|
||||
client.read_bytes(256, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(b"A" * 256, data)
|
||||
server.close()
|
||||
# Allow the close to propagate to the client side of the
|
||||
# connection. Using add_callback instead of add_timeout
|
||||
# doesn't seem to work, even with multiple iterations
|
||||
self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
|
||||
self.wait()
|
||||
client.read_bytes(256, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(b"A" * 256, data)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_until_close_after_close(self):
|
||||
# Similar to test_delayed_close_callback, but read_until_close takes
|
||||
# a separate code path so test it separately.
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
server.write(b"1234")
|
||||
server.close()
|
||||
# Read one byte to make sure the client has received the data.
|
||||
# It won't run the close callback as long as there is more buffered
|
||||
# data that could satisfy a later read.
|
||||
client.read_bytes(1, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"1")
|
||||
client.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"234")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_streaming_read_until_close_after_close(self):
|
||||
# Same as the preceding test but with a streaming_callback.
|
||||
# All data should go through the streaming callback,
|
||||
# and the final read callback just gets an empty string.
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
server.write(b"1234")
|
||||
server.close()
|
||||
client.read_bytes(1, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"1")
|
||||
streaming_data = []
|
||||
client.read_until_close(self.stop,
|
||||
streaming_callback=streaming_data.append)
|
||||
data = self.wait()
|
||||
self.assertEqual(b'', data)
|
||||
self.assertEqual(b''.join(streaming_data), b"234")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_large_read_until(self):
|
||||
# Performance test: read_until used to have a quadratic component
|
||||
# so a read_until of 4MB would take 8 seconds; now it takes 0.25
|
||||
# seconds.
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
# This test fails on pypy with ssl. I think it's because
|
||||
# pypy's gc moves objects, breaking the
|
||||
# "frozen write buffer" assumption.
|
||||
if (isinstance(server, SSLIOStream) and
|
||||
platform.python_implementation() == 'PyPy'):
|
||||
raise unittest.SkipTest(
|
||||
"pypy gc causes problems with openssl")
|
||||
NUM_KB = 4096
|
||||
for i in range(NUM_KB):
|
||||
client.write(b"A" * 1024)
|
||||
client.write(b"\r\n")
|
||||
server.read_until(b"\r\n", self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(len(data), NUM_KB * 1024 + 2)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_close_callback_with_pending_read(self):
|
||||
# Regression test for a bug that was introduced in 2.3
|
||||
# where the IOStream._close_callback would never be called
|
||||
# if there were pending reads.
|
||||
OK = b"OK\r\n"
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(self.stop)
|
||||
try:
|
||||
server.write(OK)
|
||||
client.read_until(b"\r\n", self.stop)
|
||||
res = self.wait()
|
||||
self.assertEqual(res, OK)
|
||||
|
||||
server.close()
|
||||
client.read_until(b"\r\n", lambda x: x)
|
||||
# If _close_callback (self.stop) is not called,
|
||||
# an AssertionError: Async operation timed out after 5 seconds
|
||||
# will be raised.
|
||||
res = self.wait()
|
||||
self.assertTrue(res is None)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
@skipIfNonUnix
|
||||
def test_inline_read_error(self):
|
||||
# An error on an inline read is raised without logging (on the
|
||||
# assumption that it will eventually be noticed or logged further
|
||||
# up the stack).
|
||||
#
|
||||
# This test is posix-only because windows os.close() doesn't work
|
||||
# on socket FDs, but we can't close the socket object normally
|
||||
# because we won't get the error we want if the socket knows
|
||||
# it's closed.
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
os.close(server.socket.fileno())
|
||||
with self.assertRaises(socket.error):
|
||||
server.read_bytes(1, lambda data: None)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_async_read_error_logging(self):
|
||||
# Socket errors on asynchronous reads should be logged (but only
|
||||
# once).
|
||||
server, client = self.make_iostream_pair()
|
||||
server.set_close_callback(self.stop)
|
||||
try:
|
||||
# Start a read that will be fulfilled asynchronously.
|
||||
server.read_bytes(1, lambda data: None)
|
||||
client.write(b'a')
|
||||
# Stub out read_from_fd to make it fail.
|
||||
|
||||
def fake_read_from_fd():
|
||||
os.close(server.socket.fileno())
|
||||
server.__class__.read_from_fd(server)
|
||||
server.read_from_fd = fake_read_from_fd
|
||||
# This log message is from _handle_read (not read_from_fd).
|
||||
with ExpectLog(gen_log, "error on read"):
|
||||
self.wait()
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_future_close_callback(self):
|
||||
# Regression test for interaction between the Future read interfaces
|
||||
# and IOStream._maybe_add_error_listener.
|
||||
server, client = self.make_iostream_pair()
|
||||
closed = [False]
|
||||
def close_callback():
|
||||
closed[0] = True
|
||||
self.stop()
|
||||
server.set_close_callback(close_callback)
|
||||
try:
|
||||
client.write(b'a')
|
||||
future = server.read_bytes(1)
|
||||
self.io_loop.add_future(future, self.stop)
|
||||
self.assertEqual(self.wait().result(), b'a')
|
||||
self.assertFalse(closed[0])
|
||||
client.close()
|
||||
self.wait()
|
||||
self.assertTrue(closed[0])
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_bytes_partial(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
try:
|
||||
# Ask for more than is available with partial=True
|
||||
client.read_bytes(50, self.stop, partial=True)
|
||||
server.write(b"hello")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"hello")
|
||||
|
||||
# Ask for less than what is available; num_bytes is still
|
||||
# respected.
|
||||
client.read_bytes(3, self.stop, partial=True)
|
||||
server.write(b"world")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"wor")
|
||||
|
||||
# Partial reads won't return an empty string, but read_bytes(0)
|
||||
# will.
|
||||
client.read_bytes(0, self.stop, partial=True)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b'')
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
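# A minimal, non-test sketch of the partial-read semantics exercised in
# test_read_bytes_partial above, using the Future/coroutine interface.
# "stream" is assumed to be an already-connected tornado.iostream.IOStream.
from tornado import gen

@gen.coroutine
def read_some(stream):
    # With partial=True the read resolves as soon as any data is available,
    # up to the requested 50 bytes, instead of waiting for the full count.
    chunk = yield stream.read_bytes(50, partial=True)
    raise gen.Return(chunk)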
def test_read_until_max_bytes(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Extra room under the limit
|
||||
client.read_until(b"def", self.stop, max_bytes=50)
|
||||
server.write(b"abcdef")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"abcdef")
|
||||
|
||||
# Just enough space
|
||||
client.read_until(b"def", self.stop, max_bytes=6)
|
||||
server.write(b"abcdef")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"abcdef")
|
||||
|
||||
# Not enough space; by the time we discover this, all we can do is
|
||||
# log a warning and close the connection.
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until(b"def", self.stop, max_bytes=5)
|
||||
server.write(b"123456")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
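# Hedged sketch of how the max_bytes guard above is typically used outside
# the tests: cap how much may be buffered while searching for a delimiter.
# "stream" is an assumed IOStream; if no b"\r\n" arrives within 1024 bytes,
# tornado logs "Unsatisfiable read" and closes the connection instead of
# buffering without bound.
from tornado import gen

@gen.coroutine
def read_line(stream):
    line = yield stream.read_until(b"\r\n", max_bytes=1024)
    raise gen.Return(line)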
def test_read_until_max_bytes_inline(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Similar to the error case in the previous test, but the
|
||||
# server writes first so client reads are satisfied
|
||||
# inline. For consistency with the out-of-line case, we
|
||||
# do not raise the error synchronously.
|
||||
server.write(b"123456")
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until(b"def", self.stop, max_bytes=5)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_until_max_bytes_ignores_extra(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Even though data that matches arrives in the same packet that
|
||||
# puts us over the limit, we fail the request because it was not
|
||||
# found within the limit.
|
||||
server.write(b"abcdef")
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until(b"def", self.stop, max_bytes=5)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_until_regex_max_bytes(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Extra room under the limit
|
||||
client.read_until_regex(b"def", self.stop, max_bytes=50)
|
||||
server.write(b"abcdef")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"abcdef")
|
||||
|
||||
# Just enough space
|
||||
client.read_until_regex(b"def", self.stop, max_bytes=6)
|
||||
server.write(b"abcdef")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"abcdef")
|
||||
|
||||
# Not enough space; by the time we discover this, all we can do is
|
||||
# log a warning and close the connection.
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until_regex(b"def", self.stop, max_bytes=5)
|
||||
server.write(b"123456")
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_until_regex_max_bytes_inline(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Similar to the error case in the previous test, but the
|
||||
# server writes first so client reads are satisfied
|
||||
# inline. For consistency with the out-of-line case, we
|
||||
# do not raise the error synchronously.
|
||||
server.write(b"123456")
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until_regex(b"def", self.stop, max_bytes=5)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_read_until_regex_max_bytes_ignores_extra(self):
|
||||
server, client = self.make_iostream_pair()
|
||||
client.set_close_callback(lambda: self.stop("closed"))
|
||||
try:
|
||||
# Even though data that matches arrives in the same packet that
|
||||
# puts us over the limit, we fail the request because it was not
|
||||
# found within the limit.
|
||||
server.write(b"abcdef")
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
client.read_until_regex(b"def", self.stop, max_bytes=5)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, "closed")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_small_reads_from_large_buffer(self):
|
||||
# 10KB buffer size, 100KB available to read.
|
||||
# Read 1KB at a time and make sure that the buffer is not eagerly
|
||||
# filled.
|
||||
server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
|
||||
try:
|
||||
server.write(b"a" * 1024 * 100)
|
||||
for i in range(100):
|
||||
client.read_bytes(1024, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"a" * 1024)
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
def test_small_read_untils_from_large_buffer(self):
|
||||
# 10KB buffer size, 100KB available to read.
|
||||
# Read 1KB at a time and make sure that the buffer is not eagerly
|
||||
# filled.
|
||||
server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
|
||||
try:
|
||||
server.write((b"a" * 1023 + b"\n") * 100)
|
||||
for i in range(100):
|
||||
client.read_until(b"\n", self.stop, max_bytes=4096)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"a" * 1023 + b"\n")
|
||||
finally:
|
||||
server.close()
|
||||
client.close()
|
||||
|
||||
|
||||
class TestIOStreamWebHTTP(TestIOStreamWebMixin, AsyncHTTPTestCase):
|
||||
def _make_client_iostream(self):
|
||||
return IOStream(socket.socket(), io_loop=self.io_loop)
|
||||
|
||||
|
||||
class TestIOStreamWebHTTPS(TestIOStreamWebMixin, AsyncHTTPSTestCase):
|
||||
def _make_client_iostream(self):
|
||||
return SSLIOStream(socket.socket(), io_loop=self.io_loop)
|
||||
|
||||
|
||||
class TestIOStream(TestIOStreamMixin, AsyncTestCase):
|
||||
def _make_server_iostream(self, connection, **kwargs):
|
||||
return IOStream(connection, **kwargs)
|
||||
|
||||
def _make_client_iostream(self, connection, **kwargs):
|
||||
return IOStream(connection, **kwargs)
|
||||
|
||||
|
||||
class TestIOStreamSSL(TestIOStreamMixin, AsyncTestCase):
|
||||
def _make_server_iostream(self, connection, **kwargs):
|
||||
connection = ssl.wrap_socket(connection,
|
||||
server_side=True,
|
||||
do_handshake_on_connect=False,
|
||||
**_server_ssl_options())
|
||||
return SSLIOStream(connection, io_loop=self.io_loop, **kwargs)
|
||||
|
||||
def _make_client_iostream(self, connection, **kwargs):
|
||||
return SSLIOStream(connection, io_loop=self.io_loop, **kwargs)
|
||||
|
||||
|
||||
# This will run some tests that are basically redundant but it's the
|
||||
# simplest way to make sure that passing an SSLContext
|
||||
# instead of an ssl_options dict to the SSLIOStream constructor works.
|
||||
@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present')
|
||||
class TestIOStreamSSLContext(TestIOStreamMixin, AsyncTestCase):
|
||||
def _make_server_iostream(self, connection, **kwargs):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
context.load_cert_chain(
|
||||
os.path.join(os.path.dirname(__file__), 'test.crt'),
|
||||
os.path.join(os.path.dirname(__file__), 'test.key'))
|
||||
connection = ssl_wrap_socket(connection, context,
|
||||
server_side=True,
|
||||
do_handshake_on_connect=False)
|
||||
return SSLIOStream(connection, io_loop=self.io_loop, **kwargs)
|
||||
|
||||
def _make_client_iostream(self, connection, **kwargs):
|
||||
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
|
||||
return SSLIOStream(connection, io_loop=self.io_loop,
|
||||
ssl_options=context, **kwargs)
|
||||
|
||||
|
||||
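# Illustrative sketch (not part of the test suite) of the two equivalent
# configurations the class above verifies: SSLIOStream accepts either a
# plain ssl_options dict or an ssl.SSLContext.  The sockets here are
# unconnected placeholders; a real caller would follow up with connect().
import socket
import ssl

from tornado.iostream import SSLIOStream

# 1. Keyword arguments for ssl.wrap_socket, passed as a dict:
stream_a = SSLIOStream(socket.socket(),
                       ssl_options=dict(cert_reqs=ssl.CERT_NONE))

# 2. A pre-built SSLContext (requires ssl.SSLContext support):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
stream_b = SSLIOStream(socket.socket(), ssl_options=context)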
class TestIOStreamStartTLS(AsyncTestCase):
|
||||
def setUp(self):
|
||||
try:
|
||||
super(TestIOStreamStartTLS, self).setUp()
|
||||
self.listener, self.port = bind_unused_port()
|
||||
self.server_stream = None
|
||||
self.server_accepted = Future()
|
||||
netutil.add_accept_handler(self.listener, self.accept)
|
||||
self.client_stream = IOStream(socket.socket())
|
||||
self.io_loop.add_future(self.client_stream.connect(
|
||||
('127.0.0.1', self.port)), self.stop)
|
||||
self.wait()
|
||||
self.io_loop.add_future(self.server_accepted, self.stop)
|
||||
self.wait()
|
||||
except Exception as e:
|
||||
print(e)
|
||||
raise
|
||||
|
||||
def tearDown(self):
|
||||
if self.server_stream is not None:
|
||||
self.server_stream.close()
|
||||
if self.client_stream is not None:
|
||||
self.client_stream.close()
|
||||
self.listener.close()
|
||||
super(TestIOStreamStartTLS, self).tearDown()
|
||||
|
||||
def accept(self, connection, address):
|
||||
if self.server_stream is not None:
|
||||
self.fail("should only get one connection")
|
||||
self.server_stream = IOStream(connection)
|
||||
self.server_accepted.set_result(None)
|
||||
|
||||
@gen.coroutine
|
||||
def client_send_line(self, line):
|
||||
self.client_stream.write(line)
|
||||
recv_line = yield self.server_stream.read_until(b"\r\n")
|
||||
self.assertEqual(line, recv_line)
|
||||
|
||||
@gen.coroutine
|
||||
def server_send_line(self, line):
|
||||
self.server_stream.write(line)
|
||||
recv_line = yield self.client_stream.read_until(b"\r\n")
|
||||
self.assertEqual(line, recv_line)
|
||||
|
||||
def client_start_tls(self, ssl_options=None):
|
||||
client_stream = self.client_stream
|
||||
self.client_stream = None
|
||||
return client_stream.start_tls(False, ssl_options)
|
||||
|
||||
def server_start_tls(self, ssl_options=None):
|
||||
server_stream = self.server_stream
|
||||
self.server_stream = None
|
||||
return server_stream.start_tls(True, ssl_options)
|
||||
|
||||
@gen_test
|
||||
def test_start_tls_smtp(self):
|
||||
# This flow is simplified from RFC 3207 section 5.
|
||||
# We don't really need all of this, but it helps to make sure
|
||||
# that after realistic back-and-forth traffic the buffers end up
|
||||
# in a sane state.
|
||||
yield self.server_send_line(b"220 mail.example.com ready\r\n")
|
||||
yield self.client_send_line(b"EHLO mail.example.com\r\n")
|
||||
yield self.server_send_line(b"250-mail.example.com welcome\r\n")
|
||||
yield self.server_send_line(b"250 STARTTLS\r\n")
|
||||
yield self.client_send_line(b"STARTTLS\r\n")
|
||||
yield self.server_send_line(b"220 Go ahead\r\n")
|
||||
client_future = self.client_start_tls()
|
||||
server_future = self.server_start_tls(_server_ssl_options())
|
||||
self.client_stream = yield client_future
|
||||
self.server_stream = yield server_future
|
||||
self.assertTrue(isinstance(self.client_stream, SSLIOStream))
|
||||
self.assertTrue(isinstance(self.server_stream, SSLIOStream))
|
||||
yield self.client_send_line(b"EHLO mail.example.com\r\n")
|
||||
yield self.server_send_line(b"250 mail.example.com welcome\r\n")
|
||||
|
||||
@gen_test
|
||||
def test_handshake_fail(self):
|
||||
self.server_start_tls(_server_ssl_options())
|
||||
client_future = self.client_start_tls(
|
||||
dict(cert_reqs=ssl.CERT_REQUIRED, ca_certs=certifi.where()))
|
||||
with ExpectLog(gen_log, "SSL Error"):
|
||||
with self.assertRaises(ssl.SSLError):
|
||||
yield client_future
|
||||
|
||||
|
||||
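# Client-side sketch of the STARTTLS upgrade performed in the tests above.
# "stream" is an assumed plaintext IOStream whose buffers are empty (a
# requirement of start_tls); the ssl_options shown disable certificate
# verification purely for illustration.
import ssl

from tornado import gen

@gen.coroutine
def upgrade_to_tls(stream):
    # start_tls consumes the plain stream and resolves to a new SSLIOStream
    # once the handshake completes.
    tls_stream = yield stream.start_tls(
        False, ssl_options=dict(cert_reqs=ssl.CERT_NONE))
    raise gen.Return(tls_stream)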
@skipIfNonUnix
|
||||
class TestPipeIOStream(AsyncTestCase):
|
||||
def test_pipe_iostream(self):
|
||||
r, w = os.pipe()
|
||||
|
||||
rs = PipeIOStream(r, io_loop=self.io_loop)
|
||||
ws = PipeIOStream(w, io_loop=self.io_loop)
|
||||
|
||||
ws.write(b"hel")
|
||||
ws.write(b"lo world")
|
||||
|
||||
rs.read_until(b' ', callback=self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"hello ")
|
||||
|
||||
rs.read_bytes(3, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"wor")
|
||||
|
||||
ws.close()
|
||||
|
||||
rs.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"ld")
|
||||
|
||||
rs.close()
|
||||
|
||||
def test_pipe_iostream_big_write(self):
|
||||
r, w = os.pipe()
|
||||
|
||||
rs = PipeIOStream(r, io_loop=self.io_loop)
|
||||
ws = PipeIOStream(w, io_loop=self.io_loop)
|
||||
|
||||
NUM_BYTES = 1048576
|
||||
|
||||
# Write 1MB of data, which should fill the buffer
|
||||
ws.write(b"1" * NUM_BYTES)
|
||||
|
||||
rs.read_bytes(NUM_BYTES, self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"1" * NUM_BYTES)
|
||||
|
||||
ws.close()
|
||||
rs.close()
|
||||
|
|
@@ -0,0 +1,59 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import tornado.locale
|
||||
from tornado.escape import utf8
|
||||
from tornado.test.util import unittest
|
||||
from tornado.util import u, unicode_type
|
||||
|
||||
|
||||
class TranslationLoaderTest(unittest.TestCase):
|
||||
# TODO: less hacky way to get isolated tests
|
||||
SAVE_VARS = ['_translations', '_supported_locales', '_use_gettext']
|
||||
|
||||
def clear_locale_cache(self):
|
||||
if hasattr(tornado.locale.Locale, '_cache'):
|
||||
del tornado.locale.Locale._cache
|
||||
|
||||
def setUp(self):
|
||||
self.saved = {}
|
||||
for var in TranslationLoaderTest.SAVE_VARS:
|
||||
self.saved[var] = getattr(tornado.locale, var)
|
||||
self.clear_locale_cache()
|
||||
|
||||
def tearDown(self):
|
||||
for k, v in self.saved.items():
|
||||
setattr(tornado.locale, k, v)
|
||||
self.clear_locale_cache()
|
||||
|
||||
def test_csv(self):
|
||||
tornado.locale.load_translations(
|
||||
os.path.join(os.path.dirname(__file__), 'csv_translations'))
|
||||
locale = tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, tornado.locale.CSVLocale))
|
||||
self.assertEqual(locale.translate("school"), u("\u00e9cole"))
|
||||
|
||||
def test_gettext(self):
|
||||
tornado.locale.load_gettext_translations(
|
||||
os.path.join(os.path.dirname(__file__), 'gettext_translations'),
|
||||
"tornado_test")
|
||||
locale = tornado.locale.get("fr_FR")
|
||||
self.assertTrue(isinstance(locale, tornado.locale.GettextLocale))
|
||||
self.assertEqual(locale.translate("school"), u("\u00e9cole"))
|
||||
|
||||
|
||||
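# Rough usage sketch for the two loaders exercised above; the directory
# path and domain name are assumptions, not shipped test data.
import tornado.locale

# CSV loader: expects one <locale>.csv file per language in the directory.
tornado.locale.load_translations("/path/to/translations")
# gettext loader: expects <dir>/<lang>/LC_MESSAGES/<domain>.mo files.
# tornado.locale.load_gettext_translations("/path/to/translations", "mydomain")

user_locale = tornado.locale.get("fr_FR")
user_locale.translate("school")  # -> "école" when a translation exists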
class LocaleDataTest(unittest.TestCase):
|
||||
def test_non_ascii_name(self):
|
||||
name = tornado.locale.LOCALE_NAMES['es_LA']['name']
|
||||
self.assertTrue(isinstance(name, unicode_type))
|
||||
self.assertEqual(name, u('Espa\u00f1ol'))
|
||||
self.assertEqual(utf8(name), b'Espa\xc3\xb1ol')
|
||||
|
||||
|
||||
class EnglishTest(unittest.TestCase):
|
||||
def test_format_date(self):
|
||||
locale = tornado.locale.get('en_US')
|
||||
date = datetime.datetime(2013, 4, 28, 18, 35)
|
||||
self.assertEqual(locale.format_date(date, full_format=True),
|
||||
'April 28, 2013 at 6:35 pm')
|
||||
207
Shared/lib/python3.4/site-packages/tornado/test/log_test.py
Normal file
207
Shared/lib/python3.4/site-packages/tornado/test/log_test.py
Normal file
|
|
@@ -0,0 +1,207 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2012 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import contextlib
|
||||
import glob
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
import warnings
|
||||
|
||||
from tornado.escape import utf8
|
||||
from tornado.log import LogFormatter, define_logging_options, enable_pretty_logging
|
||||
from tornado.options import OptionParser
|
||||
from tornado.test.util import unittest
|
||||
from tornado.util import u, bytes_type, basestring_type
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ignore_bytes_warning():
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter('ignore', category=BytesWarning)
|
||||
yield
|
||||
|
||||
|
||||
class LogFormatterTest(unittest.TestCase):
|
||||
# Matches the output of a single logging call (which may be multiple lines
|
||||
# if a traceback was included, so we use the DOTALL option)
|
||||
LINE_RE = re.compile(b"(?s)\x01\\[E [0-9]{6} [0-9]{2}:[0-9]{2}:[0-9]{2} log_test:[0-9]+\\]\x02 (.*)")
|
||||
|
||||
def setUp(self):
|
||||
self.formatter = LogFormatter(color=False)
|
||||
# Fake color support. We can't guarantee anything about the $TERM
|
||||
# variable when the tests are run, so just patch in some values
|
||||
# for testing. (testing with color off fails to expose some potential
|
||||
# encoding issues from the control characters)
|
||||
self.formatter._colors = {
|
||||
logging.ERROR: u("\u0001"),
|
||||
}
|
||||
self.formatter._normal = u("\u0002")
|
||||
# construct a Logger directly to bypass getLogger's caching
|
||||
self.logger = logging.Logger('LogFormatterTest')
|
||||
self.logger.propagate = False
|
||||
self.tempdir = tempfile.mkdtemp()
|
||||
self.filename = os.path.join(self.tempdir, 'log.out')
|
||||
self.handler = self.make_handler(self.filename)
|
||||
self.handler.setFormatter(self.formatter)
|
||||
self.logger.addHandler(self.handler)
|
||||
|
||||
def tearDown(self):
|
||||
self.handler.close()
|
||||
os.unlink(self.filename)
|
||||
os.rmdir(self.tempdir)
|
||||
|
||||
def make_handler(self, filename):
|
||||
# Base case: default setup without explicit encoding.
|
||||
# In python 2, supports arbitrary byte strings and unicode objects
|
||||
# that contain only ascii. In python 3, supports ascii-only unicode
|
||||
# strings (but byte strings will be repr'd automatically).
|
||||
return logging.FileHandler(filename)
|
||||
|
||||
def get_output(self):
|
||||
with open(self.filename, "rb") as f:
|
||||
line = f.read().strip()
|
||||
m = LogFormatterTest.LINE_RE.match(line)
|
||||
if m:
|
||||
return m.group(1)
|
||||
else:
|
||||
raise Exception("output didn't match regex: %r" % line)
|
||||
|
||||
def test_basic_logging(self):
|
||||
self.logger.error("foo")
|
||||
self.assertEqual(self.get_output(), b"foo")
|
||||
|
||||
def test_bytes_logging(self):
|
||||
with ignore_bytes_warning():
|
||||
# This will be "\xe9" on python 2 or "b'\xe9'" on python 3
|
||||
self.logger.error(b"\xe9")
|
||||
self.assertEqual(self.get_output(), utf8(repr(b"\xe9")))
|
||||
|
||||
def test_utf8_logging(self):
|
||||
self.logger.error(u("\u00e9").encode("utf8"))
|
||||
if issubclass(bytes_type, basestring_type):
|
||||
# on python 2, utf8 byte strings (and by extension ascii byte
|
||||
# strings) are passed through as-is.
|
||||
self.assertEqual(self.get_output(), utf8(u("\u00e9")))
|
||||
else:
|
||||
# on python 3, byte strings always get repr'd even if
|
||||
# they're ascii-only, so this degenerates into another
|
||||
# copy of test_bytes_logging.
|
||||
self.assertEqual(self.get_output(), utf8(repr(utf8(u("\u00e9")))))
|
||||
|
||||
def test_bytes_exception_logging(self):
|
||||
try:
|
||||
raise Exception(b'\xe9')
|
||||
except Exception:
|
||||
self.logger.exception('caught exception')
|
||||
# This will be "Exception: \xe9" on python 2 or
|
||||
# "Exception: b'\xe9'" on python 3.
|
||||
output = self.get_output()
|
||||
self.assertRegexpMatches(output, br'Exception.*\\xe9')
|
||||
# The traceback contains newlines, which should not have been escaped.
|
||||
self.assertNotIn(br'\n', output)
|
||||
|
||||
|
||||
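# Non-test sketch of the wiring these formatter tests imply: attach
# tornado's LogFormatter to an ordinary logging handler.  The handler
# choice and logger name are arbitrary.
import logging

from tornado.log import LogFormatter

handler = logging.StreamHandler()
handler.setFormatter(LogFormatter(color=False))
app_logger = logging.getLogger("myapp")
app_logger.addHandler(handler)
app_logger.error("rendered like [E 130428 18:35:00 module:42] ...")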
class UnicodeLogFormatterTest(LogFormatterTest):
|
||||
def make_handler(self, filename):
|
||||
# Adding an explicit encoding configuration allows non-ascii unicode
|
||||
# strings in both python 2 and 3, without changing the behavior
|
||||
# for byte strings.
|
||||
return logging.FileHandler(filename, encoding="utf8")
|
||||
|
||||
def test_unicode_logging(self):
|
||||
self.logger.error(u("\u00e9"))
|
||||
self.assertEqual(self.get_output(), utf8(u("\u00e9")))
|
||||
|
||||
|
||||
class EnablePrettyLoggingTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
super(EnablePrettyLoggingTest, self).setUp()
|
||||
self.options = OptionParser()
|
||||
define_logging_options(self.options)
|
||||
self.logger = logging.Logger('tornado.test.log_test.EnablePrettyLoggingTest')
|
||||
self.logger.propagate = False
|
||||
|
||||
def test_log_file(self):
|
||||
tmpdir = tempfile.mkdtemp()
|
||||
try:
|
||||
self.options.log_file_prefix = tmpdir + '/test_log'
|
||||
enable_pretty_logging(options=self.options, logger=self.logger)
|
||||
self.assertEqual(1, len(self.logger.handlers))
|
||||
self.logger.error('hello')
|
||||
self.logger.handlers[0].flush()
|
||||
filenames = glob.glob(tmpdir + '/test_log*')
|
||||
self.assertEqual(1, len(filenames))
|
||||
with open(filenames[0]) as f:
|
||||
self.assertRegexpMatches(f.read(), r'^\[E [^]]*\] hello$')
|
||||
finally:
|
||||
for handler in self.logger.handlers:
|
||||
handler.flush()
|
||||
handler.close()
|
||||
for filename in glob.glob(tmpdir + '/test_log*'):
|
||||
os.unlink(filename)
|
||||
os.rmdir(tmpdir)
|
||||
|
||||
|
||||
class LoggingOptionTest(unittest.TestCase):
|
||||
"""Test the ability to enable and disable Tornado's logging hooks."""
|
||||
def logs_present(self, statement, args=None):
|
||||
# Each test may manipulate and/or parse the options and then log
|
||||
# a line at the 'info' level. This level is ignored in the
|
||||
# logging module by default, but Tornado turns it on by default
|
||||
# so it is the easiest way to tell whether tornado's logging hooks
|
||||
# ran.
|
||||
IMPORT = 'from tornado.options import options, parse_command_line'
|
||||
LOG_INFO = 'import logging; logging.info("hello")'
|
||||
program = ';'.join([IMPORT, statement, LOG_INFO])
|
||||
proc = subprocess.Popen(
|
||||
[sys.executable, '-c', program] + (args or []),
|
||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
||||
stdout, stderr = proc.communicate()
|
||||
self.assertEqual(proc.returncode, 0, 'process failed: %r' % stdout)
|
||||
return b'hello' in stdout
|
||||
|
||||
def test_default(self):
|
||||
self.assertFalse(self.logs_present('pass'))
|
||||
|
||||
def test_tornado_default(self):
|
||||
self.assertTrue(self.logs_present('parse_command_line()'))
|
||||
|
||||
def test_disable_command_line(self):
|
||||
self.assertFalse(self.logs_present('parse_command_line()',
|
||||
['--logging=none']))
|
||||
|
||||
def test_disable_command_line_case_insensitive(self):
|
||||
self.assertFalse(self.logs_present('parse_command_line()',
|
||||
['--logging=None']))
|
||||
|
||||
def test_disable_code_string(self):
|
||||
self.assertFalse(self.logs_present(
|
||||
'options.logging = "none"; parse_command_line()'))
|
||||
|
||||
def test_disable_code_none(self):
|
||||
self.assertFalse(self.logs_present(
|
||||
'options.logging = None; parse_command_line()'))
|
||||
|
||||
def test_disable_override(self):
|
||||
# command line trumps code defaults
|
||||
self.assertTrue(self.logs_present(
|
||||
'options.logging = None; parse_command_line()',
|
||||
['--logging=info']))
|
||||
168
Shared/lib/python3.4/site-packages/tornado/test/netutil_test.py
Normal file
168
Shared/lib/python3.4/site-packages/tornado/test/netutil_test.py
Normal file
|
|
@@ -0,0 +1,168 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import os
|
||||
import signal
|
||||
import socket
|
||||
from subprocess import Popen
|
||||
import sys
|
||||
import time
|
||||
|
||||
from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip, bind_sockets
|
||||
from tornado.stack_context import ExceptionStackContext
|
||||
from tornado.testing import AsyncTestCase, gen_test
|
||||
from tornado.test.util import unittest, skipIfNoNetwork
|
||||
|
||||
try:
|
||||
from concurrent import futures
|
||||
except ImportError:
|
||||
futures = None
|
||||
|
||||
try:
|
||||
import pycares
|
||||
except ImportError:
|
||||
pycares = None
|
||||
else:
|
||||
from tornado.platform.caresresolver import CaresResolver
|
||||
|
||||
try:
|
||||
import twisted
|
||||
import twisted.names
|
||||
except ImportError:
|
||||
twisted = None
|
||||
else:
|
||||
from tornado.platform.twisted import TwistedResolver
|
||||
|
||||
|
||||
class _ResolverTestMixin(object):
|
||||
def skipOnCares(self):
|
||||
# Some DNS-hijacking ISPs (e.g. Time Warner) return non-empty results
|
||||
# with an NXDOMAIN status code. Most resolvers treat this as an error;
|
||||
# C-ares returns the results, making the "bad_host" tests unreliable.
|
||||
# C-ares will try to resolve even malformed names, such as the
|
||||
# name with spaces used in this test.
|
||||
if self.resolver.__class__.__name__ == 'CaresResolver':
|
||||
self.skipTest("CaresResolver doesn't recognize fake NXDOMAIN")
|
||||
|
||||
def test_localhost(self):
|
||||
self.resolver.resolve('localhost', 80, callback=self.stop)
|
||||
result = self.wait()
|
||||
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)), result)
|
||||
|
||||
@gen_test
|
||||
def test_future_interface(self):
|
||||
addrinfo = yield self.resolver.resolve('localhost', 80,
|
||||
socket.AF_UNSPEC)
|
||||
self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
|
||||
addrinfo)
|
||||
|
||||
def test_bad_host(self):
|
||||
self.skipOnCares()
|
||||
def handler(exc_typ, exc_val, exc_tb):
|
||||
self.stop(exc_val)
|
||||
return True # Halt propagation.
|
||||
|
||||
with ExceptionStackContext(handler):
|
||||
self.resolver.resolve('an invalid domain', 80, callback=self.stop)
|
||||
|
||||
result = self.wait()
|
||||
self.assertIsInstance(result, Exception)
|
||||
|
||||
@gen_test
|
||||
def test_future_interface_bad_host(self):
|
||||
self.skipOnCares()
|
||||
with self.assertRaises(Exception):
|
||||
yield self.resolver.resolve('an invalid domain', 80,
|
||||
socket.AF_UNSPEC)
|
||||
|
||||
|
||||
@skipIfNoNetwork
|
||||
class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
|
||||
def setUp(self):
|
||||
super(BlockingResolverTest, self).setUp()
|
||||
self.resolver = BlockingResolver(io_loop=self.io_loop)
|
||||
|
||||
|
||||
@skipIfNoNetwork
|
||||
@unittest.skipIf(futures is None, "futures module not present")
|
||||
class ThreadedResolverTest(AsyncTestCase, _ResolverTestMixin):
|
||||
def setUp(self):
|
||||
super(ThreadedResolverTest, self).setUp()
|
||||
self.resolver = ThreadedResolver(io_loop=self.io_loop)
|
||||
|
||||
def tearDown(self):
|
||||
self.resolver.close()
|
||||
super(ThreadedResolverTest, self).tearDown()
|
||||
|
||||
|
||||
@skipIfNoNetwork
|
||||
@unittest.skipIf(futures is None, "futures module not present")
|
||||
@unittest.skipIf(sys.platform == 'win32', "preexec_fn not available on win32")
|
||||
class ThreadedResolverImportTest(unittest.TestCase):
|
||||
def test_import(self):
|
||||
TIMEOUT = 5
|
||||
|
||||
# Test for a deadlock when importing a module that runs the
|
||||
# ThreadedResolver at import-time. See resolve_test.py for
|
||||
# full explanation.
|
||||
command = [
|
||||
sys.executable,
|
||||
'-c',
|
||||
'import tornado.test.resolve_test_helper']
|
||||
|
||||
start = time.time()
|
||||
popen = Popen(command, preexec_fn=lambda: signal.alarm(TIMEOUT))
|
||||
while time.time() - start < TIMEOUT:
|
||||
return_code = popen.poll()
|
||||
if return_code is not None:
|
||||
self.assertEqual(0, return_code)
|
||||
return # Success.
|
||||
time.sleep(0.05)
|
||||
|
||||
self.fail("import timed out")
|
||||
|
||||
|
||||
@skipIfNoNetwork
|
||||
@unittest.skipIf(pycares is None, "pycares module not present")
|
||||
class CaresResolverTest(AsyncTestCase, _ResolverTestMixin):
|
||||
def setUp(self):
|
||||
super(CaresResolverTest, self).setUp()
|
||||
self.resolver = CaresResolver(io_loop=self.io_loop)
|
||||
|
||||
|
||||
@skipIfNoNetwork
|
||||
@unittest.skipIf(twisted is None, "twisted module not present")
|
||||
@unittest.skipIf(getattr(twisted, '__version__', '0.0') < "12.1", "old version of twisted")
|
||||
class TwistedResolverTest(AsyncTestCase, _ResolverTestMixin):
|
||||
def setUp(self):
|
||||
super(TwistedResolverTest, self).setUp()
|
||||
self.resolver = TwistedResolver(io_loop=self.io_loop)
|
||||
|
||||
|
||||
class IsValidIPTest(unittest.TestCase):
|
||||
def test_is_valid_ip(self):
|
||||
self.assertTrue(is_valid_ip('127.0.0.1'))
|
||||
self.assertTrue(is_valid_ip('4.4.4.4'))
|
||||
self.assertTrue(is_valid_ip('::1'))
|
||||
self.assertTrue(is_valid_ip('2620:0:1cfe:face:b00c::3'))
|
||||
self.assertTrue(not is_valid_ip('www.google.com'))
|
||||
self.assertTrue(not is_valid_ip('localhost'))
|
||||
self.assertTrue(not is_valid_ip('4.4.4.4<'))
|
||||
self.assertTrue(not is_valid_ip(' 127.0.0.1'))
|
||||
self.assertTrue(not is_valid_ip(''))
|
||||
self.assertTrue(not is_valid_ip(' '))
|
||||
self.assertTrue(not is_valid_ip('\n'))
|
||||
self.assertTrue(not is_valid_ip('\x00'))
|
||||
|
||||
|
||||
class TestPortAllocation(unittest.TestCase):
|
||||
def test_same_port_allocation(self):
|
||||
if 'TRAVIS' in os.environ:
|
||||
self.skipTest("dual-stack servers often have port conflicts on travis")
|
||||
sockets = bind_sockets(None, 'localhost')
|
||||
try:
|
||||
port = sockets[0].getsockname()[1]
|
||||
self.assertTrue(all(s.getsockname()[1] == port
|
||||
for s in sockets[1:]))
|
||||
finally:
|
||||
for sock in sockets:
|
||||
sock.close()
|
||||
|
|
@@ -0,0 +1,2 @@
|
|||
port=443
|
||||
port=443
|
||||
220
Shared/lib/python3.4/site-packages/tornado/test/options_test.py
Normal file
220
Shared/lib/python3.4/site-packages/tornado/test/options_test.py
Normal file
|
|
@@ -0,0 +1,220 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import sys
|
||||
|
||||
from tornado.options import OptionParser, Error
|
||||
from tornado.util import basestring_type
|
||||
from tornado.test.util import unittest
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO # python 2
|
||||
except ImportError:
|
||||
from io import StringIO # python 3
|
||||
|
||||
try:
|
||||
from unittest import mock # python 3.3
|
||||
except ImportError:
|
||||
try:
|
||||
import mock # third-party mock package
|
||||
except ImportError:
|
||||
mock = None
|
||||
|
||||
|
||||
class OptionsTest(unittest.TestCase):
|
||||
def test_parse_command_line(self):
|
||||
options = OptionParser()
|
||||
options.define("port", default=80)
|
||||
options.parse_command_line(["main.py", "--port=443"])
|
||||
self.assertEqual(options.port, 443)
|
||||
|
||||
def test_parse_config_file(self):
|
||||
options = OptionParser()
|
||||
options.define("port", default=80)
|
||||
options.parse_config_file(os.path.join(os.path.dirname(__file__),
|
||||
"options_test.cfg"))
|
||||
self.assertEquals(options.port, 443)
|
||||
|
||||
def test_parse_callbacks(self):
|
||||
options = OptionParser()
|
||||
self.called = False
|
||||
|
||||
def callback():
|
||||
self.called = True
|
||||
options.add_parse_callback(callback)
|
||||
|
||||
# non-final parse doesn't run callbacks
|
||||
options.parse_command_line(["main.py"], final=False)
|
||||
self.assertFalse(self.called)
|
||||
|
||||
# final parse does
|
||||
options.parse_command_line(["main.py"])
|
||||
self.assertTrue(self.called)
|
||||
|
||||
# callbacks can be run more than once on the same options
|
||||
# object if there are multiple final parses
|
||||
self.called = False
|
||||
options.parse_command_line(["main.py"])
|
||||
self.assertTrue(self.called)
|
||||
|
||||
def test_help(self):
|
||||
options = OptionParser()
|
||||
try:
|
||||
orig_stderr = sys.stderr
|
||||
sys.stderr = StringIO()
|
||||
with self.assertRaises(SystemExit):
|
||||
options.parse_command_line(["main.py", "--help"])
|
||||
usage = sys.stderr.getvalue()
|
||||
finally:
|
||||
sys.stderr = orig_stderr
|
||||
self.assertIn("Usage:", usage)
|
||||
|
||||
def test_subcommand(self):
|
||||
base_options = OptionParser()
|
||||
base_options.define("verbose", default=False)
|
||||
sub_options = OptionParser()
|
||||
sub_options.define("foo", type=str)
|
||||
rest = base_options.parse_command_line(
|
||||
["main.py", "--verbose", "subcommand", "--foo=bar"])
|
||||
self.assertEqual(rest, ["subcommand", "--foo=bar"])
|
||||
self.assertTrue(base_options.verbose)
|
||||
rest2 = sub_options.parse_command_line(rest)
|
||||
self.assertEqual(rest2, [])
|
||||
self.assertEqual(sub_options.foo, "bar")
|
||||
|
||||
# the two option sets are distinct
|
||||
try:
|
||||
orig_stderr = sys.stderr
|
||||
sys.stderr = StringIO()
|
||||
with self.assertRaises(Error):
|
||||
sub_options.parse_command_line(["subcommand", "--verbose"])
|
||||
finally:
|
||||
sys.stderr = orig_stderr
|
||||
|
||||
def test_setattr(self):
|
||||
options = OptionParser()
|
||||
options.define('foo', default=1, type=int)
|
||||
options.foo = 2
|
||||
self.assertEqual(options.foo, 2)
|
||||
|
||||
def test_setattr_type_check(self):
|
||||
# setattr requires that options be the right type and doesn't
|
||||
# parse from string formats.
|
||||
options = OptionParser()
|
||||
options.define('foo', default=1, type=int)
|
||||
with self.assertRaises(Error):
|
||||
options.foo = '2'
|
||||
|
||||
def test_setattr_with_callback(self):
|
||||
values = []
|
||||
options = OptionParser()
|
||||
options.define('foo', default=1, type=int, callback=values.append)
|
||||
options.foo = 2
|
||||
self.assertEqual(values, [2])
|
||||
|
||||
def _sample_options(self):
|
||||
options = OptionParser()
|
||||
options.define('a', default=1)
|
||||
options.define('b', default=2)
|
||||
return options
|
||||
|
||||
def test_iter(self):
|
||||
options = self._sample_options()
|
||||
# OptionParsers always define 'help'.
|
||||
self.assertEqual(set(['a', 'b', 'help']), set(iter(options)))
|
||||
|
||||
def test_getitem(self):
|
||||
options = self._sample_options()
|
||||
self.assertEqual(1, options['a'])
|
||||
|
||||
def test_items(self):
|
||||
options = self._sample_options()
|
||||
# OptionParsers always define 'help'.
|
||||
expected = [('a', 1), ('b', 2), ('help', options.help)]
|
||||
actual = sorted(options.items())
|
||||
self.assertEqual(expected, actual)
|
||||
|
||||
def test_as_dict(self):
|
||||
options = self._sample_options()
|
||||
expected = {'a': 1, 'b': 2, 'help': options.help}
|
||||
self.assertEqual(expected, options.as_dict())
|
||||
|
||||
def test_group_dict(self):
|
||||
options = OptionParser()
|
||||
options.define('a', default=1)
|
||||
options.define('b', group='b_group', default=2)
|
||||
|
||||
frame = sys._getframe(0)
|
||||
this_file = frame.f_code.co_filename
|
||||
self.assertEqual(set(['b_group', '', this_file]), options.groups())
|
||||
|
||||
b_group_dict = options.group_dict('b_group')
|
||||
self.assertEqual({'b': 2}, b_group_dict)
|
||||
|
||||
self.assertEqual({}, options.group_dict('nonexistent'))
|
||||
|
||||
@unittest.skipIf(mock is None, 'mock package not present')
|
||||
def test_mock_patch(self):
|
||||
# ensure that our setattr hooks don't interfere with mock.patch
|
||||
options = OptionParser()
|
||||
options.define('foo', default=1)
|
||||
options.parse_command_line(['main.py', '--foo=2'])
|
||||
self.assertEqual(options.foo, 2)
|
||||
|
||||
with mock.patch.object(options.mockable(), 'foo', 3):
|
||||
self.assertEqual(options.foo, 3)
|
||||
self.assertEqual(options.foo, 2)
|
||||
|
||||
# Try nested patches mixed with explicit sets
|
||||
with mock.patch.object(options.mockable(), 'foo', 4):
|
||||
self.assertEqual(options.foo, 4)
|
||||
options.foo = 5
|
||||
self.assertEqual(options.foo, 5)
|
||||
with mock.patch.object(options.mockable(), 'foo', 6):
|
||||
self.assertEqual(options.foo, 6)
|
||||
self.assertEqual(options.foo, 5)
|
||||
self.assertEqual(options.foo, 2)
|
||||
|
||||
def test_types(self):
|
||||
options = OptionParser()
|
||||
options.define('str', type=str)
|
||||
options.define('basestring', type=basestring_type)
|
||||
options.define('int', type=int)
|
||||
options.define('float', type=float)
|
||||
options.define('datetime', type=datetime.datetime)
|
||||
options.define('timedelta', type=datetime.timedelta)
|
||||
options.parse_command_line(['main.py',
|
||||
'--str=asdf',
|
||||
'--basestring=qwer',
|
||||
'--int=42',
|
||||
'--float=1.5',
|
||||
'--datetime=2013-04-28 05:16',
|
||||
'--timedelta=45s'])
|
||||
self.assertEqual(options.str, 'asdf')
|
||||
self.assertEqual(options.basestring, 'qwer')
|
||||
self.assertEqual(options.int, 42)
|
||||
self.assertEqual(options.float, 1.5)
|
||||
self.assertEqual(options.datetime,
|
||||
datetime.datetime(2013, 4, 28, 5, 16))
|
||||
self.assertEqual(options.timedelta, datetime.timedelta(seconds=45))
|
||||
|
||||
def test_multiple_string(self):
|
||||
options = OptionParser()
|
||||
options.define('foo', type=str, multiple=True)
|
||||
options.parse_command_line(['main.py', '--foo=a,b,c'])
|
||||
self.assertEqual(options.foo, ['a', 'b', 'c'])
|
||||
|
||||
def test_multiple_int(self):
|
||||
options = OptionParser()
|
||||
options.define('foo', type=int, multiple=True)
|
||||
options.parse_command_line(['main.py', '--foo=1,3,5:7'])
|
||||
self.assertEqual(options.foo, [1, 3, 5, 6, 7])
|
||||
|
||||
def test_error_redefine(self):
|
||||
options = OptionParser()
|
||||
options.define('foo')
|
||||
with self.assertRaises(Error) as cm:
|
||||
options.define('foo')
|
||||
self.assertRegexpMatches(str(cm.exception),
|
||||
'Option.*foo.*already defined')
|
||||
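# Small sketch of the multiple=True behaviour checked in test_multiple_int
# above; the option name and values are arbitrary.  Comma-separated values
# are split, and for integer options a low:high pair expands to the
# inclusive range.
from tornado.options import OptionParser

opts = OptionParser()
opts.define('ports', type=int, multiple=True)
opts.parse_command_line(['prog.py', '--ports=8000,9000:9002'])
# opts.ports == [8000, 9000, 9001, 9002]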
214
Shared/lib/python3.4/site-packages/tornado/test/process_test.py
Normal file
214
Shared/lib/python3.4/site-packages/tornado/test/process_test.py
Normal file
|
|
@@ -0,0 +1,214 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
from tornado.httpclient import HTTPClient, HTTPError
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.log import gen_log
|
||||
from tornado.process import fork_processes, task_id, Subprocess
|
||||
from tornado.simple_httpclient import SimpleAsyncHTTPClient
|
||||
from tornado.testing import bind_unused_port, ExpectLog, AsyncTestCase
|
||||
from tornado.test.util import unittest, skipIfNonUnix
|
||||
from tornado.web import RequestHandler, Application
|
||||
|
||||
|
||||
def skip_if_twisted():
|
||||
if IOLoop.configured_class().__name__.endswith(('TwistedIOLoop',
|
||||
'AsyncIOMainLoop')):
|
||||
raise unittest.SkipTest("Process tests not compatible with "
|
||||
"TwistedIOLoop or AsyncIOMainLoop")
|
||||
|
||||
# Not using AsyncHTTPTestCase because we need control over the IOLoop.
|
||||
|
||||
|
||||
@skipIfNonUnix
|
||||
class ProcessTest(unittest.TestCase):
|
||||
def get_app(self):
|
||||
class ProcessHandler(RequestHandler):
|
||||
def get(self):
|
||||
if self.get_argument("exit", None):
|
||||
# must use os._exit instead of sys.exit so unittest's
|
||||
# exception handler doesn't catch it
|
||||
os._exit(int(self.get_argument("exit")))
|
||||
if self.get_argument("signal", None):
|
||||
os.kill(os.getpid(),
|
||||
int(self.get_argument("signal")))
|
||||
self.write(str(os.getpid()))
|
||||
return Application([("/", ProcessHandler)])
|
||||
|
||||
def tearDown(self):
|
||||
if task_id() is not None:
|
||||
# We're in a child process, and probably got to this point
|
||||
# via an uncaught exception. If we return now, both
|
||||
# processes will continue with the rest of the test suite.
|
||||
# Exit now so the parent process will restart the child
|
||||
# (since we don't have a clean way to signal failure to
|
||||
# the parent that won't restart)
|
||||
logging.error("aborting child process from tearDown")
|
||||
logging.shutdown()
|
||||
os._exit(1)
|
||||
# In the surviving process, clear the alarm we set earlier
|
||||
signal.alarm(0)
|
||||
super(ProcessTest, self).tearDown()
|
||||
|
||||
def test_multi_process(self):
|
||||
# This test can't work on twisted because we use the global reactor
|
||||
# and have no way to get it back into a sane state after the fork.
|
||||
skip_if_twisted()
|
||||
with ExpectLog(gen_log, "(Starting .* processes|child .* exited|uncaught exception)"):
|
||||
self.assertFalse(IOLoop.initialized())
|
||||
sock, port = bind_unused_port()
|
||||
|
||||
def get_url(path):
|
||||
return "http://127.0.0.1:%d%s" % (port, path)
|
||||
# ensure that none of these processes live too long
|
||||
signal.alarm(5) # master process
|
||||
try:
|
||||
id = fork_processes(3, max_restarts=3)
|
||||
self.assertTrue(id is not None)
|
||||
signal.alarm(5) # child processes
|
||||
except SystemExit as e:
|
||||
# if we exit cleanly from fork_processes, all the child processes
|
||||
# finished with status 0
|
||||
self.assertEqual(e.code, 0)
|
||||
self.assertTrue(task_id() is None)
|
||||
sock.close()
|
||||
return
|
||||
try:
|
||||
if id in (0, 1):
|
||||
self.assertEqual(id, task_id())
|
||||
server = HTTPServer(self.get_app())
|
||||
server.add_sockets([sock])
|
||||
IOLoop.instance().start()
|
||||
elif id == 2:
|
||||
self.assertEqual(id, task_id())
|
||||
sock.close()
|
||||
# Always use SimpleAsyncHTTPClient here; the curl
|
||||
# version appears to get confused sometimes if the
|
||||
# connection gets closed before it's had a chance to
|
||||
# switch from writing mode to reading mode.
|
||||
client = HTTPClient(SimpleAsyncHTTPClient)
|
||||
|
||||
def fetch(url, fail_ok=False):
|
||||
try:
|
||||
return client.fetch(get_url(url))
|
||||
except HTTPError as e:
|
||||
if not (fail_ok and e.code == 599):
|
||||
raise
|
||||
|
||||
# Make two processes exit abnormally
|
||||
fetch("/?exit=2", fail_ok=True)
|
||||
fetch("/?exit=3", fail_ok=True)
|
||||
|
||||
# They've been restarted, so a new fetch will work
|
||||
int(fetch("/").body)
|
||||
|
||||
# Now the same with signals
|
||||
# Disabled because on the mac a process dying with a signal
|
||||
# can trigger an "Application exited abnormally; send error
|
||||
# report to Apple?" prompt.
|
||||
# fetch("/?signal=%d" % signal.SIGTERM, fail_ok=True)
|
||||
# fetch("/?signal=%d" % signal.SIGABRT, fail_ok=True)
|
||||
# int(fetch("/").body)
|
||||
|
||||
# Now kill them normally so they won't be restarted
|
||||
fetch("/?exit=0", fail_ok=True)
|
||||
# One process left; watch its pid change
|
||||
pid = int(fetch("/").body)
|
||||
fetch("/?exit=4", fail_ok=True)
|
||||
pid2 = int(fetch("/").body)
|
||||
self.assertNotEqual(pid, pid2)
|
||||
|
||||
# Kill the last one so we shut down cleanly
|
||||
fetch("/?exit=0", fail_ok=True)
|
||||
|
||||
os._exit(0)
|
||||
except Exception:
|
||||
logging.error("exception in child process %d", id, exc_info=True)
|
||||
raise
|
||||
|
||||
|
||||
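# Simplified, non-test sketch of the multi-process pattern ProcessTest
# exercises; the port and the empty Application are placeholders.
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.netutil import bind_sockets
from tornado.process import fork_processes
from tornado.web import Application

def sketch_forking_server():
    sockets = bind_sockets(8888)        # bind before forking
    fork_processes(0)                   # 0 = one child per CPU
    server = HTTPServer(Application([]))
    server.add_sockets(sockets)         # each child serves the shared sockets
    IOLoop.instance().start()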
@skipIfNonUnix
|
||||
class SubprocessTest(AsyncTestCase):
|
||||
def test_subprocess(self):
|
||||
if IOLoop.configured_class().__name__.endswith('LayeredTwistedIOLoop'):
|
||||
# This test fails non-deterministically with LayeredTwistedIOLoop.
|
||||
# (the read_until('\n') returns '\n' instead of 'hello\n')
|
||||
# This probably indicates a problem with either TornadoReactor
|
||||
# or TwistedIOLoop, but I haven't been able to track it down
|
||||
# and for now this is just causing spurious travis-ci failures.
|
||||
raise unittest.SkipTest("Subprocess tests not compatible with "
|
||||
"LayeredTwistedIOLoop")
|
||||
subproc = Subprocess([sys.executable, '-u', '-i'],
|
||||
stdin=Subprocess.STREAM,
|
||||
stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
|
||||
io_loop=self.io_loop)
|
||||
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
|
||||
subproc.stdout.read_until(b'>>> ', self.stop)
|
||||
self.wait()
|
||||
subproc.stdin.write(b"print('hello')\n")
|
||||
subproc.stdout.read_until(b'\n', self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"hello\n")
|
||||
|
||||
subproc.stdout.read_until(b">>> ", self.stop)
|
||||
self.wait()
|
||||
subproc.stdin.write(b"raise SystemExit\n")
|
||||
subproc.stdout.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"")
|
||||
|
||||
def test_close_stdin(self):
|
||||
# Close the parent's stdin handle and see that the child recognizes it.
|
||||
subproc = Subprocess([sys.executable, '-u', '-i'],
|
||||
stdin=Subprocess.STREAM,
|
||||
stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
|
||||
io_loop=self.io_loop)
|
||||
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
|
||||
subproc.stdout.read_until(b'>>> ', self.stop)
|
||||
self.wait()
|
||||
subproc.stdin.close()
|
||||
subproc.stdout.read_until_close(self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b"\n")
|
||||
|
||||
def test_stderr(self):
|
||||
subproc = Subprocess([sys.executable, '-u', '-c',
|
||||
r"import sys; sys.stderr.write('hello\n')"],
|
||||
stderr=Subprocess.STREAM,
|
||||
io_loop=self.io_loop)
|
||||
self.addCleanup(lambda: os.kill(subproc.pid, signal.SIGTERM))
|
||||
subproc.stderr.read_until(b'\n', self.stop)
|
||||
data = self.wait()
|
||||
self.assertEqual(data, b'hello\n')
|
||||
|
||||
def test_sigchild(self):
|
||||
# Twisted's SIGCHLD handler and Subprocess's handler conflict with each other.
|
||||
skip_if_twisted()
|
||||
Subprocess.initialize(io_loop=self.io_loop)
|
||||
self.addCleanup(Subprocess.uninitialize)
|
||||
subproc = Subprocess([sys.executable, '-c', 'pass'],
|
||||
io_loop=self.io_loop)
|
||||
subproc.set_exit_callback(self.stop)
|
||||
ret = self.wait()
|
||||
self.assertEqual(ret, 0)
|
||||
self.assertEqual(subproc.returncode, ret)
|
||||
|
||||
def test_sigchild_signal(self):
|
||||
skip_if_twisted()
|
||||
Subprocess.initialize(io_loop=self.io_loop)
|
||||
self.addCleanup(Subprocess.uninitialize)
|
||||
subproc = Subprocess([sys.executable, '-c',
|
||||
'import time; time.sleep(30)'],
|
||||
io_loop=self.io_loop)
|
||||
subproc.set_exit_callback(self.stop)
|
||||
os.kill(subproc.pid, signal.SIGTERM)
|
||||
ret = self.wait()
|
||||
self.assertEqual(subproc.returncode, ret)
|
||||
self.assertEqual(ret, -signal.SIGTERM)
|
||||
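# Hedged sketch of driving a child process the way SubprocessTest does,
# outside the test harness; the command is a placeholder.
import sys

from tornado.ioloop import IOLoop
from tornado.process import Subprocess

def sketch_run_child():
    Subprocess.initialize()  # install the SIGCHLD handler
    proc = Subprocess([sys.executable, '-c', "print('hi')"],
                      stdout=Subprocess.STREAM)
    proc.stdout.read_until_close(lambda data: sys.stdout.write(repr(data)))
    # Stop the loop once the child exits so the sketch terminates.
    proc.set_exit_callback(lambda returncode: IOLoop.instance().stop())
    IOLoop.instance().start()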
|
|
@@ -0,0 +1,12 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.netutil import ThreadedResolver
|
||||
from tornado.util import u
|
||||
|
||||
# When this module is imported, it runs getaddrinfo on a thread. Since
|
||||
# the hostname is unicode, getaddrinfo attempts to import encodings.idna
|
||||
# but blocks on the import lock. Verify that ThreadedResolver avoids
|
||||
# this deadlock.
|
||||
|
||||
resolver = ThreadedResolver()
|
||||
IOLoop.current().run_sync(lambda: resolver.resolve(u('localhost'), 80))
|
||||
137
Shared/lib/python3.4/site-packages/tornado/test/runtests.py
Normal file
137
Shared/lib/python3.4/site-packages/tornado/test/runtests.py
Normal file
|
|
@@ -0,0 +1,137 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import gc
|
||||
import locale # system locale module, not tornado.locale
|
||||
import logging
|
||||
import operator
|
||||
import textwrap
|
||||
import sys
|
||||
from tornado.httpclient import AsyncHTTPClient
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.netutil import Resolver
|
||||
from tornado.options import define, options, add_parse_callback
|
||||
from tornado.test.util import unittest
|
||||
|
||||
try:
|
||||
reduce # py2
|
||||
except NameError:
|
||||
from functools import reduce # py3
|
||||
|
||||
TEST_MODULES = [
|
||||
'tornado.httputil.doctests',
|
||||
'tornado.iostream.doctests',
|
||||
'tornado.util.doctests',
|
||||
'tornado.test.auth_test',
|
||||
'tornado.test.concurrent_test',
|
||||
'tornado.test.curl_httpclient_test',
|
||||
'tornado.test.escape_test',
|
||||
'tornado.test.gen_test',
|
||||
'tornado.test.httpclient_test',
|
||||
'tornado.test.httpserver_test',
|
||||
'tornado.test.httputil_test',
|
||||
'tornado.test.import_test',
|
||||
'tornado.test.ioloop_test',
|
||||
'tornado.test.iostream_test',
|
||||
'tornado.test.locale_test',
|
||||
'tornado.test.netutil_test',
|
||||
'tornado.test.log_test',
|
||||
'tornado.test.options_test',
|
||||
'tornado.test.process_test',
|
||||
'tornado.test.simple_httpclient_test',
|
||||
'tornado.test.stack_context_test',
|
||||
'tornado.test.tcpclient_test',
|
||||
'tornado.test.template_test',
|
||||
'tornado.test.testing_test',
|
||||
'tornado.test.twisted_test',
|
||||
'tornado.test.util_test',
|
||||
'tornado.test.web_test',
|
||||
'tornado.test.websocket_test',
|
||||
'tornado.test.wsgi_test',
|
||||
]
|
||||
|
||||
|
||||
def all():
|
||||
return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)
|
||||
|
||||
|
||||
class TornadoTextTestRunner(unittest.TextTestRunner):
|
||||
def run(self, test):
|
||||
result = super(TornadoTextTestRunner, self).run(test)
|
||||
if result.skipped:
|
||||
skip_reasons = set(reason for (test, reason) in result.skipped)
|
||||
self.stream.write(textwrap.fill(
|
||||
"Some tests were skipped because: %s" %
|
||||
", ".join(sorted(skip_reasons))))
|
||||
self.stream.write("\n")
|
||||
return result
|
||||
|
||||
|
||||
def main():
|
||||
# The -W command-line option does not work in a virtualenv with
|
||||
# python 3 (as of virtualenv 1.7), so configure warnings
|
||||
# programmatically instead.
|
||||
import warnings
|
||||
# Be strict about most warnings. This also turns on warnings that are
|
||||
# ignored by default, including DeprecationWarnings and
|
||||
# python 3.2's ResourceWarnings.
|
||||
warnings.filterwarnings("error")
|
||||
# setuptools sometimes gives ImportWarnings about things that are on
|
||||
# sys.path even if they're not being used.
|
||||
warnings.filterwarnings("ignore", category=ImportWarning)
|
||||
# Tornado generally shouldn't use anything deprecated, but some of
|
||||
# our dependencies do (last match wins).
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
||||
warnings.filterwarnings("error", category=DeprecationWarning,
|
||||
module=r"tornado\..*")
|
||||
warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
|
||||
warnings.filterwarnings("error", category=PendingDeprecationWarning,
|
||||
module=r"tornado\..*")
|
||||
# The unittest module is aggressive about deprecating redundant methods,
|
||||
# leaving some without non-deprecated spellings that work on both
|
||||
# 2.7 and 3.2
|
||||
warnings.filterwarnings("ignore", category=DeprecationWarning,
|
||||
message="Please use assert.* instead")
|
||||
|
||||
logging.getLogger("tornado.access").setLevel(logging.CRITICAL)
|
||||
|
||||
define('httpclient', type=str, default=None,
|
||||
callback=lambda s: AsyncHTTPClient.configure(
|
||||
s, defaults=dict(allow_ipv6=False)))
|
||||
define('ioloop', type=str, default=None)
|
||||
define('ioloop_time_monotonic', default=False)
|
||||
define('resolver', type=str, default=None,
|
||||
callback=Resolver.configure)
|
||||
define('debug_gc', type=str, multiple=True,
|
||||
help="A comma-separated list of gc module debug constants, "
|
||||
"e.g. DEBUG_STATS or DEBUG_COLLECTABLE,DEBUG_OBJECTS",
|
||||
callback=lambda values: gc.set_debug(
|
||||
reduce(operator.or_, (getattr(gc, v) for v in values))))
|
||||
define('locale', type=str, default=None,
|
||||
callback=lambda x: locale.setlocale(locale.LC_ALL, x))
|
||||
|
||||
def configure_ioloop():
|
||||
kwargs = {}
|
||||
if options.ioloop_time_monotonic:
|
||||
from tornado.platform.auto import monotonic_time
|
||||
if monotonic_time is None:
|
||||
raise RuntimeError("monotonic clock not found")
|
||||
kwargs['time_func'] = monotonic_time
|
||||
if options.ioloop or kwargs:
|
||||
IOLoop.configure(options.ioloop, **kwargs)
|
||||
add_parse_callback(configure_ioloop)
|
||||
|
||||
import tornado.testing
|
||||
kwargs = {}
|
||||
if sys.version_info >= (3, 2):
|
||||
# HACK: unittest.main will make its own changes to the warning
|
||||
# configuration, which may conflict with the settings above
|
||||
# or command-line flags like -bb. Passing warnings=False
|
||||
# suppresses this behavior, although this looks like an implementation
|
||||
# detail. http://bugs.python.org/issue15626
|
||||
kwargs['warnings'] = False
|
||||
kwargs['testRunner'] = TornadoTextTestRunner
|
||||
tornado.testing.main(**kwargs)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@@ -0,0 +1,552 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import collections
|
||||
from contextlib import closing
|
||||
import errno
|
||||
import gzip
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
|
||||
from tornado import gen
|
||||
from tornado.httpclient import AsyncHTTPClient
|
||||
from tornado.httputil import HTTPHeaders
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.log import gen_log, app_log
|
||||
from tornado.netutil import Resolver, bind_sockets
|
||||
from tornado.simple_httpclient import SimpleAsyncHTTPClient, _default_ca_certs
|
||||
from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler
|
||||
from tornado.test import httpclient_test
|
||||
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, bind_unused_port, ExpectLog
|
||||
from tornado.test.util import skipOnTravis, skipIfNoIPv6
|
||||
from tornado.web import RequestHandler, Application, asynchronous, url, stream_request_body
|
||||
|
||||
|
||||
class SimpleHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
|
||||
def get_http_client(self):
|
||||
client = SimpleAsyncHTTPClient(io_loop=self.io_loop,
|
||||
force_instance=True)
|
||||
self.assertTrue(isinstance(client, SimpleAsyncHTTPClient))
|
||||
return client
|
||||
|
||||
|
||||
class TriggerHandler(RequestHandler):
|
||||
def initialize(self, queue, wake_callback):
|
||||
self.queue = queue
|
||||
self.wake_callback = wake_callback
|
||||
|
||||
@asynchronous
|
||||
def get(self):
|
||||
logging.debug("queuing trigger")
|
||||
self.queue.append(self.finish)
|
||||
if self.get_argument("wake", "true") == "true":
|
||||
self.wake_callback()
|
||||
|
||||
|
||||
class HangHandler(RequestHandler):
|
||||
@asynchronous
|
||||
def get(self):
|
||||
pass
|
||||
|
||||
|
||||
class ContentLengthHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.set_header("Content-Length", self.get_argument("value"))
|
||||
self.write("ok")
|
||||
|
||||
|
||||
class HeadHandler(RequestHandler):
|
||||
def head(self):
|
||||
self.set_header("Content-Length", "7")
|
||||
|
||||
|
||||
class OptionsHandler(RequestHandler):
|
||||
def options(self):
|
||||
self.set_header("Access-Control-Allow-Origin", "*")
|
||||
self.write("ok")
|
||||
|
||||
|
||||
class NoContentHandler(RequestHandler):
|
||||
def get(self):
|
||||
if self.get_argument("error", None):
|
||||
self.set_header("Content-Length", "5")
|
||||
self.write("hello")
|
||||
self.set_status(204)
|
||||
|
||||
|
||||
class SeeOtherPostHandler(RequestHandler):
|
||||
def post(self):
|
||||
redirect_code = int(self.request.body)
|
||||
assert redirect_code in (302, 303), "unexpected body %r" % self.request.body
|
||||
self.set_header("Location", "/see_other_get")
|
||||
self.set_status(redirect_code)
|
||||
|
||||
|
||||
class SeeOtherGetHandler(RequestHandler):
|
||||
def get(self):
|
||||
if self.request.body:
|
||||
raise Exception("unexpected body %r" % self.request.body)
|
||||
self.write("ok")
|
||||
|
||||
|
||||
class HostEchoHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write(self.request.headers["Host"])
|
||||
|
||||
|
||||
class NoContentLengthHandler(RequestHandler):
|
||||
@gen.coroutine
|
||||
def get(self):
|
||||
# Emulate the old HTTP/1.0 behavior of returning a body with no
|
||||
# content-length. Tornado handles content-length at the framework
|
||||
# level so we have to go around it.
|
||||
stream = self.request.connection.stream
|
||||
yield stream.write(b"HTTP/1.0 200 OK\r\n\r\n"
|
||||
b"hello")
|
||||
stream.close()
|
||||
|
||||
|
||||
class EchoPostHandler(RequestHandler):
|
||||
def post(self):
|
||||
self.write(self.request.body)
|
||||
|
||||
|
||||
@stream_request_body
|
||||
class RespondInPrepareHandler(RequestHandler):
|
||||
def prepare(self):
|
||||
self.set_status(403)
|
||||
self.finish("forbidden")
|
||||
|
||||
|
||||
class SimpleHTTPClientTestMixin(object):
|
||||
def get_app(self):
|
||||
# callable objects to finish pending /trigger requests
|
||||
self.triggers = collections.deque()
|
||||
return Application([
|
||||
url("/trigger", TriggerHandler, dict(queue=self.triggers,
|
||||
wake_callback=self.stop)),
|
||||
url("/chunk", ChunkHandler),
|
||||
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
|
||||
url("/hang", HangHandler),
|
||||
url("/hello", HelloWorldHandler),
|
||||
url("/content_length", ContentLengthHandler),
|
||||
url("/head", HeadHandler),
|
||||
url("/options", OptionsHandler),
|
||||
url("/no_content", NoContentHandler),
|
||||
url("/see_other_post", SeeOtherPostHandler),
|
||||
url("/see_other_get", SeeOtherGetHandler),
|
||||
url("/host_echo", HostEchoHandler),
|
||||
url("/no_content_length", NoContentLengthHandler),
|
||||
url("/echo_post", EchoPostHandler),
|
||||
url("/respond_in_prepare", RespondInPrepareHandler),
|
||||
], gzip=True)
|
||||
|
||||
def test_singleton(self):
|
||||
# Class "constructor" reuses objects on the same IOLoop
|
||||
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is
|
||||
SimpleAsyncHTTPClient(self.io_loop))
|
||||
# unless force_instance is used
|
||||
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
|
||||
SimpleAsyncHTTPClient(self.io_loop,
|
||||
force_instance=True))
|
||||
# different IOLoops use different objects
|
||||
with closing(IOLoop()) as io_loop2:
|
||||
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
|
||||
SimpleAsyncHTTPClient(io_loop2))
|
||||
|
||||
def test_connection_limit(self):
|
||||
with closing(self.create_client(max_clients=2)) as client:
|
||||
self.assertEqual(client.max_clients, 2)
|
||||
seen = []
|
||||
# Send 4 requests. Two can be sent immediately, while the others
|
||||
# will be queued
|
||||
for i in range(4):
|
||||
client.fetch(self.get_url("/trigger"),
|
||||
lambda response, i=i: (seen.append(i), self.stop()))
|
||||
self.wait(condition=lambda: len(self.triggers) == 2)
|
||||
self.assertEqual(len(client.queue), 2)
|
||||
|
||||
# Finish the first two requests and let the next two through
|
||||
self.triggers.popleft()()
|
||||
self.triggers.popleft()()
|
||||
self.wait(condition=lambda: (len(self.triggers) == 2 and
|
||||
len(seen) == 2))
|
||||
self.assertEqual(set(seen), set([0, 1]))
|
||||
self.assertEqual(len(client.queue), 0)
|
||||
|
||||
# Finish all the pending requests
|
||||
self.triggers.popleft()()
|
||||
self.triggers.popleft()()
|
||||
self.wait(condition=lambda: len(seen) == 4)
|
||||
self.assertEqual(set(seen), set([0, 1, 2, 3]))
|
||||
self.assertEqual(len(self.triggers), 0)
|
||||
|
||||
def test_redirect_connection_limit(self):
|
||||
# following redirects should not consume additional connections
|
||||
with closing(self.create_client(max_clients=1)) as client:
|
||||
client.fetch(self.get_url('/countdown/3'), self.stop,
|
||||
max_redirects=3)
|
||||
response = self.wait()
|
||||
response.rethrow()
|
||||
|
||||
def test_default_certificates_exist(self):
|
||||
open(_default_ca_certs()).close()
|
||||
|
||||
def test_gzip(self):
|
||||
# All the tests in this file should be using gzip, but this test
|
||||
# ensures that it is in fact getting compressed.
|
||||
# Setting Accept-Encoding manually bypasses the client's
|
||||
# decompression so we can see the raw data.
|
||||
response = self.fetch("/chunk", use_gzip=False,
|
||||
headers={"Accept-Encoding": "gzip"})
|
||||
self.assertEqual(response.headers["Content-Encoding"], "gzip")
|
||||
self.assertNotEqual(response.body, b"asdfqwer")
|
||||
# Our test data gets bigger when gzipped. Oops. :)
|
||||
self.assertEqual(len(response.body), 34)
|
||||
f = gzip.GzipFile(mode="r", fileobj=response.buffer)
|
||||
self.assertEqual(f.read(), b"asdfqwer")
|
||||
|
||||
def test_max_redirects(self):
|
||||
response = self.fetch("/countdown/5", max_redirects=3)
|
||||
self.assertEqual(302, response.code)
|
||||
# We requested 5, followed three redirects for 4, 3, 2, then the last
|
||||
# unfollowed redirect is to 1.
|
||||
self.assertTrue(response.request.url.endswith("/countdown/5"))
|
||||
self.assertTrue(response.effective_url.endswith("/countdown/2"))
|
||||
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
|
||||
|
||||
def test_header_reuse(self):
|
||||
# Apps may reuse a headers object if they are only passing in constant
|
||||
# headers like user-agent. The header object should not be modified.
|
||||
headers = HTTPHeaders({'User-Agent': 'Foo'})
|
||||
self.fetch("/hello", headers=headers)
|
||||
self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])
|
||||
|
||||
def test_see_other_redirect(self):
|
||||
for code in (302, 303):
|
||||
response = self.fetch("/see_other_post", method="POST", body="%d" % code)
|
||||
self.assertEqual(200, response.code)
|
||||
self.assertTrue(response.request.url.endswith("/see_other_post"))
|
||||
self.assertTrue(response.effective_url.endswith("/see_other_get"))
|
||||
# request is the original request; it is still a POST
|
||||
self.assertEqual("POST", response.request.method)
|
||||
|
||||
@skipOnTravis
|
||||
def test_request_timeout(self):
|
||||
response = self.fetch('/trigger?wake=false', request_timeout=0.1)
|
||||
self.assertEqual(response.code, 599)
|
||||
self.assertTrue(0.099 < response.request_time < 0.15, response.request_time)
|
||||
self.assertEqual(str(response.error), "HTTP 599: Timeout")
|
||||
# trigger the hanging request to let it clean up after itself
|
||||
self.triggers.popleft()()
|
||||
|
||||
@skipIfNoIPv6
|
||||
def test_ipv6(self):
|
||||
try:
|
||||
[sock] = bind_sockets(None, '::1', family=socket.AF_INET6)
|
||||
port = sock.getsockname()[1]
|
||||
self.http_server.add_socket(sock)
|
||||
except socket.gaierror as e:
|
||||
if e.args[0] == socket.EAI_ADDRFAMILY:
|
||||
# python supports ipv6, but it's not configured on the network
|
||||
# interface, so skip this test.
|
||||
return
|
||||
raise
|
||||
url = '%s://[::1]:%d/hello' % (self.get_protocol(), port)
|
||||
|
||||
# ipv6 is currently enabled by default but can be disabled
|
||||
self.http_client.fetch(url, self.stop, allow_ipv6=False)
|
||||
response = self.wait()
|
||||
self.assertEqual(response.code, 599)
|
||||
|
||||
self.http_client.fetch(url, self.stop)
|
||||
response = self.wait()
|
||||
self.assertEqual(response.body, b"Hello world!")
|
||||
|
||||
def xtest_multiple_content_length_accepted(self):
|
||||
response = self.fetch("/content_length?value=2,2")
|
||||
self.assertEqual(response.body, b"ok")
|
||||
response = self.fetch("/content_length?value=2,%202,2")
|
||||
self.assertEqual(response.body, b"ok")
|
||||
|
||||
response = self.fetch("/content_length?value=2,4")
|
||||
self.assertEqual(response.code, 599)
|
||||
response = self.fetch("/content_length?value=2,%202,3")
|
||||
self.assertEqual(response.code, 599)
|
||||
|
||||
def test_head_request(self):
|
||||
response = self.fetch("/head", method="HEAD")
|
||||
self.assertEqual(response.code, 200)
|
||||
self.assertEqual(response.headers["content-length"], "7")
|
||||
self.assertFalse(response.body)
|
||||
|
||||
def test_options_request(self):
|
||||
response = self.fetch("/options", method="OPTIONS")
|
||||
self.assertEqual(response.code, 200)
|
||||
self.assertEqual(response.headers["content-length"], "2")
|
||||
self.assertEqual(response.headers["access-control-allow-origin"], "*")
|
||||
self.assertEqual(response.body, b"ok")
|
||||
|
||||
def test_no_content(self):
|
||||
response = self.fetch("/no_content")
|
||||
self.assertEqual(response.code, 204)
|
||||
# 204 status doesn't need a content-length, but tornado will
|
||||
# add a zero content-length anyway.
|
||||
self.assertEqual(response.headers["Content-length"], "0")
|
||||
|
||||
# 204 status with non-zero content length is malformed
|
||||
with ExpectLog(app_log, "Uncaught exception"):
|
||||
response = self.fetch("/no_content?error=1")
|
||||
self.assertEqual(response.code, 599)
|
||||
|
||||
def test_host_header(self):
|
||||
host_re = re.compile(b"^localhost:[0-9]+$")
|
||||
response = self.fetch("/host_echo")
|
||||
self.assertTrue(host_re.match(response.body))
|
||||
|
||||
url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
|
||||
self.http_client.fetch(url, self.stop)
|
||||
response = self.wait()
|
||||
self.assertTrue(host_re.match(response.body), response.body)
|
||||
|
||||
def test_connection_refused(self):
|
||||
server_socket, port = bind_unused_port()
|
||||
server_socket.close()
|
||||
with ExpectLog(gen_log, ".*", required=False):
|
||||
self.http_client.fetch("http://localhost:%d/" % port, self.stop)
|
||||
response = self.wait()
|
||||
self.assertEqual(599, response.code)
|
||||
|
||||
if sys.platform != 'cygwin':
|
||||
# cygwin returns EPERM instead of ECONNREFUSED here
|
||||
contains_errno = str(errno.ECONNREFUSED) in str(response.error)
|
||||
if not contains_errno and hasattr(errno, "WSAECONNREFUSED"):
|
||||
contains_errno = str(errno.WSAECONNREFUSED) in str(response.error)
|
||||
self.assertTrue(contains_errno, response.error)
|
||||
# This is usually "Connection refused".
|
||||
# On windows, strerror is broken and returns "Unknown error".
|
||||
expected_message = os.strerror(errno.ECONNREFUSED)
|
||||
self.assertTrue(expected_message in str(response.error),
|
||||
response.error)
|
||||
|
||||
def test_queue_timeout(self):
|
||||
with closing(self.create_client(max_clients=1)) as client:
|
||||
client.fetch(self.get_url('/trigger'), self.stop,
|
||||
request_timeout=10)
|
||||
# Wait for the trigger request to block, not complete.
|
||||
self.wait()
|
||||
client.fetch(self.get_url('/hello'), self.stop,
|
||||
connect_timeout=0.1)
|
||||
response = self.wait()
|
||||
|
||||
self.assertEqual(response.code, 599)
|
||||
self.assertTrue(response.request_time < 1, response.request_time)
|
||||
self.assertEqual(str(response.error), "HTTP 599: Timeout")
|
||||
self.triggers.popleft()()
|
||||
self.wait()
|
||||
|
||||
def test_no_content_length(self):
|
||||
response = self.fetch("/no_content_length")
|
||||
self.assertEqual(b"hello", response.body)
|
||||
|
||||
def sync_body_producer(self, write):
|
||||
write(b'1234')
|
||||
write(b'5678')
|
||||
|
||||
@gen.coroutine
|
||||
def async_body_producer(self, write):
|
||||
yield write(b'1234')
|
||||
yield gen.Task(IOLoop.current().add_callback)
|
||||
yield write(b'5678')
|
||||
|
||||
def test_sync_body_producer_chunked(self):
|
||||
response = self.fetch("/echo_post", method="POST",
|
||||
body_producer=self.sync_body_producer)
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b"12345678")
|
||||
|
||||
def test_sync_body_producer_content_length(self):
|
||||
response = self.fetch("/echo_post", method="POST",
|
||||
body_producer=self.sync_body_producer,
|
||||
headers={'Content-Length': '8'})
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b"12345678")
|
||||
|
||||
def test_async_body_producer_chunked(self):
|
||||
response = self.fetch("/echo_post", method="POST",
|
||||
body_producer=self.async_body_producer)
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b"12345678")
|
||||
|
||||
def test_async_body_producer_content_length(self):
|
||||
response = self.fetch("/echo_post", method="POST",
|
||||
body_producer=self.async_body_producer,
|
||||
headers={'Content-Length': '8'})
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b"12345678")
|
||||
|
||||
def test_100_continue(self):
|
||||
response = self.fetch("/echo_post", method="POST",
|
||||
body=b"1234",
|
||||
expect_100_continue=True)
|
||||
self.assertEqual(response.body, b"1234")
|
||||
|
||||
def test_100_continue_early_response(self):
|
||||
def body_producer(write):
|
||||
raise Exception("should not be called")
|
||||
response = self.fetch("/respond_in_prepare", method="POST",
|
||||
body_producer=body_producer,
|
||||
expect_100_continue=True)
|
||||
self.assertEqual(response.code, 403)
|
||||
|
||||
|
||||
class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
|
||||
def setUp(self):
|
||||
super(SimpleHTTPClientTestCase, self).setUp()
|
||||
self.http_client = self.create_client()
|
||||
|
||||
def create_client(self, **kwargs):
|
||||
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
|
||||
**kwargs)
|
||||
|
||||
|
||||
class SimpleHTTPSClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPSTestCase):
|
||||
def setUp(self):
|
||||
super(SimpleHTTPSClientTestCase, self).setUp()
|
||||
self.http_client = self.create_client()
|
||||
|
||||
def create_client(self, **kwargs):
|
||||
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
|
||||
defaults=dict(validate_cert=False),
|
||||
**kwargs)
|
||||
|
||||
|
||||
class CreateAsyncHTTPClientTestCase(AsyncTestCase):
|
||||
def setUp(self):
|
||||
super(CreateAsyncHTTPClientTestCase, self).setUp()
|
||||
self.saved = AsyncHTTPClient._save_configuration()
|
||||
|
||||
def tearDown(self):
|
||||
AsyncHTTPClient._restore_configuration(self.saved)
|
||||
super(CreateAsyncHTTPClientTestCase, self).tearDown()
|
||||
|
||||
def test_max_clients(self):
|
||||
AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
|
||||
with closing(AsyncHTTPClient(
|
||||
self.io_loop, force_instance=True)) as client:
|
||||
self.assertEqual(client.max_clients, 10)
|
||||
with closing(AsyncHTTPClient(
|
||||
self.io_loop, max_clients=11, force_instance=True)) as client:
|
||||
self.assertEqual(client.max_clients, 11)
|
||||
|
||||
# Now configure max_clients statically and try overriding it
|
||||
# with each way max_clients can be passed
|
||||
AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
|
||||
with closing(AsyncHTTPClient(
|
||||
self.io_loop, force_instance=True)) as client:
|
||||
self.assertEqual(client.max_clients, 12)
|
||||
with closing(AsyncHTTPClient(
|
||||
self.io_loop, max_clients=13, force_instance=True)) as client:
|
||||
self.assertEqual(client.max_clients, 13)
|
||||
with closing(AsyncHTTPClient(
|
||||
self.io_loop, max_clients=14, force_instance=True)) as client:
|
||||
self.assertEqual(client.max_clients, 14)
|
||||
|
||||
|
||||
class HTTP100ContinueTestCase(AsyncHTTPTestCase):
|
||||
def respond_100(self, request):
|
||||
self.request = request
|
||||
self.request.connection.stream.write(
|
||||
b"HTTP/1.1 100 CONTINUE\r\n\r\n",
|
||||
self.respond_200)
|
||||
|
||||
def respond_200(self):
|
||||
self.request.connection.stream.write(
|
||||
b"HTTP/1.1 200 OK\r\nContent-Length: 1\r\n\r\nA",
|
||||
self.request.connection.stream.close)
|
||||
|
||||
def get_app(self):
|
||||
# Not a full Application, but works as an HTTPServer callback
|
||||
return self.respond_100
|
||||
|
||||
def test_100_continue(self):
|
||||
res = self.fetch('/')
|
||||
self.assertEqual(res.body, b'A')
|
||||
|
||||
|
||||
class HostnameMappingTestCase(AsyncHTTPTestCase):
|
||||
def setUp(self):
|
||||
super(HostnameMappingTestCase, self).setUp()
|
||||
self.http_client = SimpleAsyncHTTPClient(
|
||||
self.io_loop,
|
||||
hostname_mapping={
|
||||
'www.example.com': '127.0.0.1',
|
||||
('foo.example.com', 8000): ('127.0.0.1', self.get_http_port()),
|
||||
})
|
||||
|
||||
def get_app(self):
|
||||
return Application([url("/hello", HelloWorldHandler), ])
|
||||
|
||||
def test_hostname_mapping(self):
|
||||
self.http_client.fetch(
|
||||
'http://www.example.com:%d/hello' % self.get_http_port(), self.stop)
|
||||
response = self.wait()
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b'Hello world!')
|
||||
|
||||
def test_port_mapping(self):
|
||||
self.http_client.fetch('http://foo.example.com:8000/hello', self.stop)
|
||||
response = self.wait()
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b'Hello world!')
|
||||
|
||||
|
||||
class ResolveTimeoutTestCase(AsyncHTTPTestCase):
|
||||
def setUp(self):
|
||||
# Dummy Resolver subclass that never invokes its callback.
|
||||
class BadResolver(Resolver):
|
||||
def resolve(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
super(ResolveTimeoutTestCase, self).setUp()
|
||||
self.http_client = SimpleAsyncHTTPClient(
|
||||
self.io_loop,
|
||||
resolver=BadResolver())
|
||||
|
||||
def get_app(self):
|
||||
return Application([url("/hello", HelloWorldHandler), ])
|
||||
|
||||
def test_resolve_timeout(self):
|
||||
response = self.fetch('/hello', connect_timeout=0.1)
|
||||
self.assertEqual(response.code, 599)
|
||||
|
||||
|
||||
class MaxHeaderSizeTest(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
class SmallHeaders(RequestHandler):
|
||||
def get(self):
|
||||
self.set_header("X-Filler", "a" * 100)
|
||||
self.write("ok")
|
||||
|
||||
class LargeHeaders(RequestHandler):
|
||||
def get(self):
|
||||
self.set_header("X-Filler", "a" * 1000)
|
||||
self.write("ok")
|
||||
|
||||
return Application([('/small', SmallHeaders),
|
||||
('/large', LargeHeaders)])
|
||||
|
||||
def get_http_client(self):
|
||||
return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_header_size=1024)
|
||||
|
||||
def test_small_headers(self):
|
||||
response = self.fetch('/small')
|
||||
response.rethrow()
|
||||
self.assertEqual(response.body, b'ok')
|
||||
|
||||
def test_large_headers(self):
|
||||
with ExpectLog(gen_log, "Unsatisfiable read"):
|
||||
response = self.fetch('/large')
|
||||
self.assertEqual(response.code, 599)
|
||||
|
|
@@ -0,0 +1,288 @@
|
|||
#!/usr/bin/env python
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
from tornado import gen
|
||||
from tornado.log import app_log
|
||||
from tornado.stack_context import (StackContext, wrap, NullContext, StackContextInconsistentError,
|
||||
ExceptionStackContext, run_with_stack_context, _state)
|
||||
from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
|
||||
from tornado.test.util import unittest
|
||||
from tornado.web import asynchronous, Application, RequestHandler
|
||||
import contextlib
|
||||
import functools
|
||||
import logging
|
||||
|
||||
|
||||
class TestRequestHandler(RequestHandler):
|
||||
def __init__(self, app, request, io_loop):
|
||||
super(TestRequestHandler, self).__init__(app, request)
|
||||
self.io_loop = io_loop
|
||||
|
||||
@asynchronous
|
||||
def get(self):
|
||||
logging.debug('in get()')
|
||||
# call self.part2 without a self.async_callback wrapper. Its
|
||||
# exception should still get thrown
|
||||
self.io_loop.add_callback(self.part2)
|
||||
|
||||
def part2(self):
|
||||
logging.debug('in part2()')
|
||||
# Go through a third layer to make sure that contexts once restored
|
||||
# are again passed on to future callbacks
|
||||
self.io_loop.add_callback(self.part3)
|
||||
|
||||
def part3(self):
|
||||
logging.debug('in part3()')
|
||||
raise Exception('test exception')
|
||||
|
||||
def write_error(self, status_code, **kwargs):
|
||||
if 'exc_info' in kwargs and str(kwargs['exc_info'][1]) == 'test exception':
|
||||
self.write('got expected exception')
|
||||
else:
|
||||
self.write('unexpected failure')
|
||||
|
||||
|
||||
class HTTPStackContextTest(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
return Application([('/', TestRequestHandler,
|
||||
dict(io_loop=self.io_loop))])
|
||||
|
||||
def test_stack_context(self):
|
||||
with ExpectLog(app_log, "Uncaught exception GET /"):
|
||||
self.http_client.fetch(self.get_url('/'), self.handle_response)
|
||||
self.wait()
|
||||
self.assertEqual(self.response.code, 500)
|
||||
self.assertTrue(b'got expected exception' in self.response.body)
|
||||
|
||||
def handle_response(self, response):
|
||||
self.response = response
|
||||
self.stop()
|
||||
|
||||
|
||||
class StackContextTest(AsyncTestCase):
|
||||
def setUp(self):
|
||||
super(StackContextTest, self).setUp()
|
||||
self.active_contexts = []
|
||||
|
||||
@contextlib.contextmanager
|
||||
def context(self, name):
|
||||
self.active_contexts.append(name)
|
||||
yield
|
||||
self.assertEqual(self.active_contexts.pop(), name)
|
||||
|
||||
# Simulates the effect of an asynchronous library that uses its own
|
||||
# StackContext internally and then returns control to the application.
|
||||
def test_exit_library_context(self):
|
||||
def library_function(callback):
|
||||
# capture the caller's context before introducing our own
|
||||
callback = wrap(callback)
|
||||
with StackContext(functools.partial(self.context, 'library')):
|
||||
self.io_loop.add_callback(
|
||||
functools.partial(library_inner_callback, callback))
|
||||
|
||||
def library_inner_callback(callback):
|
||||
self.assertEqual(self.active_contexts[-2:],
|
||||
['application', 'library'])
|
||||
callback()
|
||||
|
||||
def final_callback():
|
||||
# implementation detail: the full context stack at this point
|
||||
# is ['application', 'library', 'application']. The 'library'
|
||||
# context was not removed, but is no longer innermost so
|
||||
# the application context takes precedence.
|
||||
self.assertEqual(self.active_contexts[-1], 'application')
|
||||
self.stop()
|
||||
with StackContext(functools.partial(self.context, 'application')):
|
||||
library_function(final_callback)
|
||||
self.wait()
|
||||
|
||||
def test_deactivate(self):
|
||||
deactivate_callbacks = []
|
||||
|
||||
def f1():
|
||||
with StackContext(functools.partial(self.context, 'c1')) as c1:
|
||||
deactivate_callbacks.append(c1)
|
||||
self.io_loop.add_callback(f2)
|
||||
|
||||
def f2():
|
||||
with StackContext(functools.partial(self.context, 'c2')) as c2:
|
||||
deactivate_callbacks.append(c2)
|
||||
self.io_loop.add_callback(f3)
|
||||
|
||||
def f3():
|
||||
with StackContext(functools.partial(self.context, 'c3')) as c3:
|
||||
deactivate_callbacks.append(c3)
|
||||
self.io_loop.add_callback(f4)
|
||||
|
||||
def f4():
|
||||
self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
|
||||
deactivate_callbacks[1]()
|
||||
# deactivating a context doesn't remove it immediately,
|
||||
# but it will be missing from the next iteration
|
||||
self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
|
||||
self.io_loop.add_callback(f5)
|
||||
|
||||
def f5():
|
||||
self.assertEqual(self.active_contexts, ['c1', 'c3'])
|
||||
self.stop()
|
||||
self.io_loop.add_callback(f1)
|
||||
self.wait()
|
||||
|
||||
def test_deactivate_order(self):
|
||||
# Stack context deactivation has separate logic for deactivation at
|
||||
# the head and tail of the stack, so make sure it works in any order.
|
||||
def check_contexts():
|
||||
# Make sure that the full-context array and the exception-context
|
||||
# linked lists are consistent with each other.
|
||||
full_contexts, chain = _state.contexts
|
||||
exception_contexts = []
|
||||
while chain is not None:
|
||||
exception_contexts.append(chain)
|
||||
chain = chain.old_contexts[1]
|
||||
self.assertEqual(list(reversed(full_contexts)), exception_contexts)
|
||||
return list(self.active_contexts)
|
||||
|
||||
def make_wrapped_function():
|
||||
"""Wraps a function in three stack contexts, and returns
|
||||
the function along with the deactivation functions.
|
||||
"""
|
||||
# Remove the test's stack context to make sure we can cover
|
||||
# the case where the last context is deactivated.
|
||||
with NullContext():
|
||||
partial = functools.partial
|
||||
with StackContext(partial(self.context, 'c0')) as c0:
|
||||
with StackContext(partial(self.context, 'c1')) as c1:
|
||||
with StackContext(partial(self.context, 'c2')) as c2:
|
||||
return (wrap(check_contexts), [c0, c1, c2])
|
||||
|
||||
# First make sure the test mechanism works without any deactivations
|
||||
func, deactivate_callbacks = make_wrapped_function()
|
||||
self.assertEqual(func(), ['c0', 'c1', 'c2'])
|
||||
|
||||
# Deactivate the tail
|
||||
func, deactivate_callbacks = make_wrapped_function()
|
||||
deactivate_callbacks[0]()
|
||||
self.assertEqual(func(), ['c1', 'c2'])
|
||||
|
||||
# Deactivate the middle
|
||||
func, deactivate_callbacks = make_wrapped_function()
|
||||
deactivate_callbacks[1]()
|
||||
self.assertEqual(func(), ['c0', 'c2'])
|
||||
|
||||
# Deactivate the head
|
||||
func, deactivate_callbacks = make_wrapped_function()
|
||||
deactivate_callbacks[2]()
|
||||
self.assertEqual(func(), ['c0', 'c1'])
|
||||
|
||||
def test_isolation_nonempty(self):
|
||||
# f2 and f3 are a chain of operations started in context c1.
|
||||
# f2 is incidentally run under context c2, but that context should
|
||||
# not be passed along to f3.
|
||||
def f1():
|
||||
with StackContext(functools.partial(self.context, 'c1')):
|
||||
wrapped = wrap(f2)
|
||||
with StackContext(functools.partial(self.context, 'c2')):
|
||||
wrapped()
|
||||
|
||||
def f2():
|
||||
self.assertIn('c1', self.active_contexts)
|
||||
self.io_loop.add_callback(f3)
|
||||
|
||||
def f3():
|
||||
self.assertIn('c1', self.active_contexts)
|
||||
self.assertNotIn('c2', self.active_contexts)
|
||||
self.stop()
|
||||
|
||||
self.io_loop.add_callback(f1)
|
||||
self.wait()
|
||||
|
||||
def test_isolation_empty(self):
|
||||
# Similar to test_isolation_nonempty, but here the f2/f3 chain
|
||||
# is started without any context. Behavior should be equivalent
|
||||
# to the nonempty case (although historically it was not)
|
||||
def f1():
|
||||
with NullContext():
|
||||
wrapped = wrap(f2)
|
||||
with StackContext(functools.partial(self.context, 'c2')):
|
||||
wrapped()
|
||||
|
||||
def f2():
|
||||
self.io_loop.add_callback(f3)
|
||||
|
||||
def f3():
|
||||
self.assertNotIn('c2', self.active_contexts)
|
||||
self.stop()
|
||||
|
||||
self.io_loop.add_callback(f1)
|
||||
self.wait()
|
||||
|
||||
def test_yield_in_with(self):
|
||||
@gen.engine
|
||||
def f():
|
||||
self.callback = yield gen.Callback('a')
|
||||
with StackContext(functools.partial(self.context, 'c1')):
|
||||
# This yield is a problem: the generator will be suspended
|
||||
# and the StackContext's __exit__ is not called yet, so
|
||||
# the context will be left on _state.contexts for anything
|
||||
# that runs before the yield resolves.
|
||||
yield gen.Wait('a')
|
||||
|
||||
with self.assertRaises(StackContextInconsistentError):
|
||||
f()
|
||||
self.wait()
|
||||
# Cleanup: to avoid GC warnings (which for some reason only seem
|
||||
# to show up on py33-asyncio), invoke the callback (which will do
|
||||
# nothing since the gen.Runner is already finished) and delete it.
|
||||
self.callback()
|
||||
del self.callback
|
||||
|
||||
@gen_test
|
||||
def test_yield_outside_with(self):
|
||||
# This pattern avoids the problem in the previous test.
|
||||
cb = yield gen.Callback('k1')
|
||||
with StackContext(functools.partial(self.context, 'c1')):
|
||||
self.io_loop.add_callback(cb)
|
||||
yield gen.Wait('k1')
|
||||
|
||||
def test_yield_in_with_exception_stack_context(self):
|
||||
# As above, but with ExceptionStackContext instead of StackContext.
|
||||
@gen.engine
|
||||
def f():
|
||||
with ExceptionStackContext(lambda t, v, tb: False):
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
|
||||
with self.assertRaises(StackContextInconsistentError):
|
||||
f()
|
||||
self.wait()
|
||||
|
||||
@gen_test
|
||||
def test_yield_outside_with_exception_stack_context(self):
|
||||
cb = yield gen.Callback('k1')
|
||||
with ExceptionStackContext(lambda t, v, tb: False):
|
||||
self.io_loop.add_callback(cb)
|
||||
yield gen.Wait('k1')
|
||||
|
||||
@gen_test
|
||||
def test_run_with_stack_context(self):
|
||||
@gen.coroutine
|
||||
def f1():
|
||||
self.assertEqual(self.active_contexts, ['c1'])
|
||||
yield run_with_stack_context(
|
||||
StackContext(functools.partial(self.context, 'c2')),
|
||||
f2)
|
||||
self.assertEqual(self.active_contexts, ['c1'])
|
||||
|
||||
@gen.coroutine
|
||||
def f2():
|
||||
self.assertEqual(self.active_contexts, ['c1', 'c2'])
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
self.assertEqual(self.active_contexts, ['c1', 'c2'])
|
||||
|
||||
self.assertEqual(self.active_contexts, [])
|
||||
yield run_with_stack_context(
|
||||
StackContext(functools.partial(self.context, 'c1')),
|
||||
f1)
|
||||
self.assertEqual(self.active_contexts, [])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
@@ -0,0 +1 @@
|
|||
this is the index
|
||||
|
|
@@ -0,0 +1,2 @@
|
|||
User-agent: *
|
||||
Disallow: /
|
||||
|
|
@@ -0,0 +1,278 @@
|
|||
#!/usr/bin/env python
|
||||
#
|
||||
# Copyright 2014 Facebook
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
from contextlib import closing
|
||||
import os
|
||||
import socket
|
||||
|
||||
from tornado.concurrent import Future
|
||||
from tornado.netutil import bind_sockets, Resolver
|
||||
from tornado.tcpclient import TCPClient, _Connector
|
||||
from tornado.tcpserver import TCPServer
|
||||
from tornado.testing import AsyncTestCase, bind_unused_port, gen_test
|
||||
from tornado.test.util import skipIfNoIPv6, unittest
|
||||
|
||||
# Fake address families for testing. Used in place of AF_INET
|
||||
# and AF_INET6 because some installations do not have AF_INET6.
|
||||
AF1, AF2 = 1, 2
|
||||
|
||||
|
||||
class TestTCPServer(TCPServer):
|
||||
def __init__(self, family):
|
||||
super(TestTCPServer, self).__init__()
|
||||
self.streams = []
|
||||
sockets = bind_sockets(None, 'localhost', family)
|
||||
self.add_sockets(sockets)
|
||||
self.port = sockets[0].getsockname()[1]
|
||||
|
||||
def handle_stream(self, stream, address):
|
||||
self.streams.append(stream)
|
||||
|
||||
def stop(self):
|
||||
super(TestTCPServer, self).stop()
|
||||
for stream in self.streams:
|
||||
stream.close()
|
||||
|
||||
|
||||
class TCPClientTest(AsyncTestCase):
|
||||
def setUp(self):
|
||||
super(TCPClientTest, self).setUp()
|
||||
self.server = None
|
||||
self.client = TCPClient()
|
||||
|
||||
def start_server(self, family):
|
||||
if family == socket.AF_UNSPEC and 'TRAVIS' in os.environ:
|
||||
self.skipTest("dual-stack servers often have port conflicts on travis")
|
||||
self.server = TestTCPServer(family)
|
||||
return self.server.port
|
||||
|
||||
def stop_server(self):
|
||||
if self.server is not None:
|
||||
self.server.stop()
|
||||
self.server = None
|
||||
|
||||
def tearDown(self):
|
||||
self.client.close()
|
||||
self.stop_server()
|
||||
super(TCPClientTest, self).tearDown()
|
||||
|
||||
def skipIfLocalhostV4(self):
|
||||
Resolver().resolve('localhost', 0, callback=self.stop)
|
||||
addrinfo = self.wait()
|
||||
families = set(addr[0] for addr in addrinfo)
|
||||
if socket.AF_INET6 not in families:
|
||||
self.skipTest("localhost does not resolve to ipv6")
|
||||
|
||||
@gen_test
|
||||
def do_test_connect(self, family, host):
|
||||
port = self.start_server(family)
|
||||
stream = yield self.client.connect(host, port)
|
||||
with closing(stream):
|
||||
stream.write(b"hello")
|
||||
data = yield self.server.streams[0].read_bytes(5)
|
||||
self.assertEqual(data, b"hello")
|
||||
|
||||
def test_connect_ipv4_ipv4(self):
|
||||
self.do_test_connect(socket.AF_INET, '127.0.0.1')
|
||||
|
||||
def test_connect_ipv4_dual(self):
|
||||
self.do_test_connect(socket.AF_INET, 'localhost')
|
||||
|
||||
@skipIfNoIPv6
|
||||
def test_connect_ipv6_ipv6(self):
|
||||
self.skipIfLocalhostV4()
|
||||
self.do_test_connect(socket.AF_INET6, '::1')
|
||||
|
||||
@skipIfNoIPv6
|
||||
def test_connect_ipv6_dual(self):
|
||||
self.skipIfLocalhostV4()
|
||||
if Resolver.configured_class().__name__.endswith('TwistedResolver'):
|
||||
self.skipTest('TwistedResolver does not support multiple addresses')
|
||||
self.do_test_connect(socket.AF_INET6, 'localhost')
|
||||
|
||||
def test_connect_unspec_ipv4(self):
|
||||
self.do_test_connect(socket.AF_UNSPEC, '127.0.0.1')
|
||||
|
||||
@skipIfNoIPv6
|
||||
def test_connect_unspec_ipv6(self):
|
||||
self.skipIfLocalhostV4()
|
||||
self.do_test_connect(socket.AF_UNSPEC, '::1')
|
||||
|
||||
def test_connect_unspec_dual(self):
|
||||
self.do_test_connect(socket.AF_UNSPEC, 'localhost')
|
||||
|
||||
@gen_test
|
||||
def test_refused_ipv4(self):
|
||||
sock, port = bind_unused_port()
|
||||
sock.close()
|
||||
with self.assertRaises(IOError):
|
||||
yield self.client.connect('127.0.0.1', port)
|
||||
|
||||
|
||||
class TestConnectorSplit(unittest.TestCase):
|
||||
def test_one_family(self):
|
||||
# These addresses aren't in the right format, but split doesn't care.
|
||||
primary, secondary = _Connector.split(
|
||||
[(AF1, 'a'),
|
||||
(AF1, 'b')])
|
||||
self.assertEqual(primary, [(AF1, 'a'),
|
||||
(AF1, 'b')])
|
||||
self.assertEqual(secondary, [])
|
||||
|
||||
def test_mixed(self):
|
||||
primary, secondary = _Connector.split(
|
||||
[(AF1, 'a'),
|
||||
(AF2, 'b'),
|
||||
(AF1, 'c'),
|
||||
(AF2, 'd')])
|
||||
self.assertEqual(primary, [(AF1, 'a'), (AF1, 'c')])
|
||||
self.assertEqual(secondary, [(AF2, 'b'), (AF2, 'd')])
|
||||
|
||||
|
||||
class ConnectorTest(AsyncTestCase):
|
||||
class FakeStream(object):
|
||||
def __init__(self):
|
||||
self.closed = False
|
||||
|
||||
def close(self):
|
||||
self.closed = True
|
||||
|
||||
def setUp(self):
|
||||
super(ConnectorTest, self).setUp()
|
||||
self.connect_futures = {}
|
||||
self.streams = {}
|
||||
self.addrinfo = [(AF1, 'a'), (AF1, 'b'),
|
||||
(AF2, 'c'), (AF2, 'd')]
|
||||
|
||||
def tearDown(self):
|
||||
# Unless explicitly checked (and popped) in the test, we shouldn't
|
||||
# be closing any streams
|
||||
for stream in self.streams.values():
|
||||
self.assertFalse(stream.closed)
|
||||
super(ConnectorTest, self).tearDown()
|
||||
|
||||
def create_stream(self, af, addr):
|
||||
future = Future()
|
||||
self.connect_futures[(af, addr)] = future
|
||||
return future
|
||||
|
||||
def assert_pending(self, *keys):
|
||||
self.assertEqual(sorted(self.connect_futures.keys()), sorted(keys))
|
||||
|
||||
def resolve_connect(self, af, addr, success):
|
||||
future = self.connect_futures.pop((af, addr))
|
||||
if success:
|
||||
self.streams[addr] = ConnectorTest.FakeStream()
|
||||
future.set_result(self.streams[addr])
|
||||
else:
|
||||
future.set_exception(IOError())
|
||||
|
||||
def start_connect(self, addrinfo):
|
||||
conn = _Connector(addrinfo, self.io_loop, self.create_stream)
|
||||
# Give it a huge timeout; we'll trigger timeouts manually.
|
||||
future = conn.start(3600)
|
||||
return conn, future
|
||||
|
||||
def test_immediate_success(self):
|
||||
conn, future = self.start_connect(self.addrinfo)
|
||||
self.assertEqual(list(self.connect_futures.keys()),
|
||||
[(AF1, 'a')])
|
||||
self.resolve_connect(AF1, 'a', True)
|
||||
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
|
||||
|
||||
def test_immediate_failure(self):
|
||||
# Fail with just one address.
|
||||
conn, future = self.start_connect([(AF1, 'a')])
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assertRaises(IOError, future.result)
|
||||
|
||||
def test_one_family_second_try(self):
|
||||
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending((AF1, 'b'))
|
||||
self.resolve_connect(AF1, 'b', True)
|
||||
self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
|
||||
|
||||
def test_one_family_second_try_failure(self):
|
||||
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending((AF1, 'b'))
|
||||
self.resolve_connect(AF1, 'b', False)
|
||||
self.assertRaises(IOError, future.result)
|
||||
|
||||
def test_one_family_second_try_timeout(self):
|
||||
conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
|
||||
self.assert_pending((AF1, 'a'))
|
||||
# trigger the timeout while the first lookup is pending;
|
||||
# nothing happens.
|
||||
conn.on_timeout()
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending((AF1, 'b'))
|
||||
self.resolve_connect(AF1, 'b', True)
|
||||
self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
|
||||
|
||||
def test_two_families_immediate_failure(self):
|
||||
conn, future = self.start_connect(self.addrinfo)
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending((AF1, 'b'), (AF2, 'c'))
|
||||
self.resolve_connect(AF1, 'b', False)
|
||||
self.resolve_connect(AF2, 'c', True)
|
||||
self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
|
||||
|
||||
def test_two_families_timeout(self):
|
||||
conn, future = self.start_connect(self.addrinfo)
|
||||
self.assert_pending((AF1, 'a'))
|
||||
conn.on_timeout()
|
||||
self.assert_pending((AF1, 'a'), (AF2, 'c'))
|
||||
self.resolve_connect(AF2, 'c', True)
|
||||
self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
|
||||
# resolving 'a' after the connection has completed doesn't start 'b'
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending()
|
||||
|
||||
def test_success_after_timeout(self):
|
||||
conn, future = self.start_connect(self.addrinfo)
|
||||
self.assert_pending((AF1, 'a'))
|
||||
conn.on_timeout()
|
||||
self.assert_pending((AF1, 'a'), (AF2, 'c'))
|
||||
self.resolve_connect(AF1, 'a', True)
|
||||
self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
|
||||
# resolving 'c' after completion closes the connection.
|
||||
self.resolve_connect(AF2, 'c', True)
|
||||
self.assertTrue(self.streams.pop('c').closed)
|
||||
|
||||
def test_all_fail(self):
|
||||
conn, future = self.start_connect(self.addrinfo)
|
||||
self.assert_pending((AF1, 'a'))
|
||||
conn.on_timeout()
|
||||
self.assert_pending((AF1, 'a'), (AF2, 'c'))
|
||||
self.resolve_connect(AF2, 'c', False)
|
||||
self.assert_pending((AF1, 'a'), (AF2, 'd'))
|
||||
self.resolve_connect(AF2, 'd', False)
|
||||
# one queue is now empty
|
||||
self.assert_pending((AF1, 'a'))
|
||||
self.resolve_connect(AF1, 'a', False)
|
||||
self.assert_pending((AF1, 'b'))
|
||||
self.assertFalse(future.done())
|
||||
self.resolve_connect(AF1, 'b', False)
|
||||
self.assertRaises(IOError, future.result)
|
||||
412
Shared/lib/python3.4/site-packages/tornado/test/template_test.py
Normal file
|
|
@@ -0,0 +1,412 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from tornado.escape import utf8, native_str, to_unicode
|
||||
from tornado.template import Template, DictLoader, ParseError, Loader
|
||||
from tornado.test.util import unittest
|
||||
from tornado.util import u, bytes_type, ObjectDict, unicode_type
|
||||
|
||||
|
||||
class TemplateTest(unittest.TestCase):
|
||||
def test_simple(self):
|
||||
template = Template("Hello {{ name }}!")
|
||||
self.assertEqual(template.generate(name="Ben"),
|
||||
b"Hello Ben!")
|
||||
|
||||
def test_bytes(self):
|
||||
template = Template("Hello {{ name }}!")
|
||||
self.assertEqual(template.generate(name=utf8("Ben")),
|
||||
b"Hello Ben!")
|
||||
|
||||
def test_expressions(self):
|
||||
template = Template("2 + 2 = {{ 2 + 2 }}")
|
||||
self.assertEqual(template.generate(), b"2 + 2 = 4")
|
||||
|
||||
def test_comment(self):
|
||||
template = Template("Hello{# TODO i18n #} {{ name }}!")
|
||||
self.assertEqual(template.generate(name=utf8("Ben")),
|
||||
b"Hello Ben!")
|
||||
|
||||
def test_include(self):
|
||||
loader = DictLoader({
|
||||
"index.html": '{% include "header.html" %}\nbody text',
|
||||
"header.html": "header text",
|
||||
})
|
||||
self.assertEqual(loader.load("index.html").generate(),
|
||||
b"header text\nbody text")
|
||||
|
||||
def test_extends(self):
|
||||
loader = DictLoader({
|
||||
"base.html": """\
|
||||
<title>{% block title %}default title{% end %}</title>
|
||||
<body>{% block body %}default body{% end %}</body>
|
||||
""",
|
||||
"page.html": """\
|
||||
{% extends "base.html" %}
|
||||
{% block title %}page title{% end %}
|
||||
{% block body %}page body{% end %}
|
||||
""",
|
||||
})
|
||||
self.assertEqual(loader.load("page.html").generate(),
|
||||
b"<title>page title</title>\n<body>page body</body>\n")
|
||||
|
||||
def test_relative_load(self):
|
||||
loader = DictLoader({
|
||||
"a/1.html": "{% include '2.html' %}",
|
||||
"a/2.html": "{% include '../b/3.html' %}",
|
||||
"b/3.html": "ok",
|
||||
})
|
||||
self.assertEqual(loader.load("a/1.html").generate(),
|
||||
b"ok")
|
||||
|
||||
def test_escaping(self):
|
||||
self.assertRaises(ParseError, lambda: Template("{{"))
|
||||
self.assertRaises(ParseError, lambda: Template("{%"))
|
||||
self.assertEqual(Template("{{!").generate(), b"{{")
|
||||
self.assertEqual(Template("{%!").generate(), b"{%")
|
||||
self.assertEqual(Template("{{ 'expr' }} {{!jquery expr}}").generate(),
|
||||
b"expr {{jquery expr}}")
|
||||
|
||||
def test_unicode_template(self):
|
||||
template = Template(utf8(u("\u00e9")))
|
||||
self.assertEqual(template.generate(), utf8(u("\u00e9")))
|
||||
|
||||
def test_unicode_literal_expression(self):
|
||||
# Unicode literals should be usable in templates. Note that this
|
||||
# test simulates unicode characters appearing directly in the
|
||||
# template file (with utf8 encoding), i.e. \u escapes would not
|
||||
# be used in the template file itself.
|
||||
if str is unicode_type:
|
||||
# python 3 needs a different version of this test since
|
||||
# 2to3 doesn't run on template internals
|
||||
template = Template(utf8(u('{{ "\u00e9" }}')))
|
||||
else:
|
||||
template = Template(utf8(u('{{ u"\u00e9" }}')))
|
||||
self.assertEqual(template.generate(), utf8(u("\u00e9")))
|
||||
|
||||
def test_custom_namespace(self):
|
||||
loader = DictLoader({"test.html": "{{ inc(5) }}"}, namespace={"inc": lambda x: x + 1})
|
||||
self.assertEqual(loader.load("test.html").generate(), b"6")
|
||||
|
||||
def test_apply(self):
|
||||
def upper(s):
|
||||
return s.upper()
|
||||
template = Template(utf8("{% apply upper %}foo{% end %}"))
|
||||
self.assertEqual(template.generate(upper=upper), b"FOO")
|
||||
|
||||
def test_unicode_apply(self):
|
||||
def upper(s):
|
||||
return to_unicode(s).upper()
|
||||
template = Template(utf8(u("{% apply upper %}foo \u00e9{% end %}")))
|
||||
self.assertEqual(template.generate(upper=upper), utf8(u("FOO \u00c9")))
|
||||
|
||||
def test_bytes_apply(self):
|
||||
def upper(s):
|
||||
return utf8(to_unicode(s).upper())
|
||||
template = Template(utf8(u("{% apply upper %}foo \u00e9{% end %}")))
|
||||
self.assertEqual(template.generate(upper=upper), utf8(u("FOO \u00c9")))
|
||||
|
||||
def test_if(self):
|
||||
template = Template(utf8("{% if x > 4 %}yes{% else %}no{% end %}"))
|
||||
self.assertEqual(template.generate(x=5), b"yes")
|
||||
self.assertEqual(template.generate(x=3), b"no")
|
||||
|
||||
def test_if_empty_body(self):
|
||||
template = Template(utf8("{% if True %}{% else %}{% end %}"))
|
||||
self.assertEqual(template.generate(), b"")
|
||||
|
||||
def test_try(self):
|
||||
template = Template(utf8("""{% try %}
|
||||
try{% set y = 1/x %}
|
||||
{% except %}-except
|
||||
{% else %}-else
|
||||
{% finally %}-finally
|
||||
{% end %}"""))
|
||||
self.assertEqual(template.generate(x=1), b"\ntry\n-else\n-finally\n")
|
||||
self.assertEqual(template.generate(x=0), b"\ntry-except\n-finally\n")
|
||||
|
||||
def test_comment_directive(self):
|
||||
template = Template(utf8("{% comment blah blah %}foo"))
|
||||
self.assertEqual(template.generate(), b"foo")
|
||||
|
||||
def test_break_continue(self):
|
||||
template = Template(utf8("""\
|
||||
{% for i in range(10) %}
|
||||
{% if i == 2 %}
|
||||
{% continue %}
|
||||
{% end %}
|
||||
{{ i }}
|
||||
{% if i == 6 %}
|
||||
{% break %}
|
||||
{% end %}
|
||||
{% end %}"""))
|
||||
result = template.generate()
|
||||
# remove extraneous whitespace
|
||||
result = b''.join(result.split())
|
||||
self.assertEqual(result, b"013456")
|
||||
|
||||
def test_break_outside_loop(self):
|
||||
try:
|
||||
Template(utf8("{% break %}"))
|
||||
raise Exception("Did not get expected exception")
|
||||
except ParseError:
|
||||
pass
|
||||
|
||||
def test_break_in_apply(self):
|
||||
# This test verifies current behavior, although of course it would
|
||||
# be nice if apply didn't cause seemingly unrelated breakage
|
||||
try:
|
||||
Template(utf8("{% for i in [] %}{% apply foo %}{% break %}{% end %}{% end %}"))
|
||||
raise Exception("Did not get expected exception")
|
||||
except ParseError:
|
||||
pass
|
||||
|
||||
@unittest.skipIf(sys.version_info >= division.getMandatoryRelease(),
|
||||
'no testable future imports')
|
||||
def test_no_inherit_future(self):
|
||||
# This file has from __future__ import division...
|
||||
self.assertEqual(1 / 2, 0.5)
|
||||
# ...but the template doesn't
|
||||
template = Template('{{ 1 / 2 }}')
|
||||
self.assertEqual(template.generate(), '0')
|
||||
|
||||
|
||||
class StackTraceTest(unittest.TestCase):
|
||||
def test_error_line_number_expression(self):
|
||||
loader = DictLoader({"test.html": """one
|
||||
two{{1/0}}
|
||||
three
|
||||
"""})
|
||||
try:
|
||||
loader.load("test.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
self.assertTrue("# test.html:2" in traceback.format_exc())
|
||||
|
||||
def test_error_line_number_directive(self):
|
||||
loader = DictLoader({"test.html": """one
|
||||
two{%if 1/0%}
|
||||
three{%end%}
|
||||
"""})
|
||||
try:
|
||||
loader.load("test.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
self.assertTrue("# test.html:2" in traceback.format_exc())
|
||||
|
||||
def test_error_line_number_module(self):
|
||||
loader = DictLoader({
|
||||
"base.html": "{% module Template('sub.html') %}",
|
||||
"sub.html": "{{1/0}}",
|
||||
}, namespace={"_tt_modules": ObjectDict({"Template": lambda path, **kwargs: loader.load(path).generate(**kwargs)})})
|
||||
try:
|
||||
loader.load("base.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
exc_stack = traceback.format_exc()
|
||||
self.assertTrue('# base.html:1' in exc_stack)
|
||||
self.assertTrue('# sub.html:1' in exc_stack)
|
||||
|
||||
def test_error_line_number_include(self):
|
||||
loader = DictLoader({
|
||||
"base.html": "{% include 'sub.html' %}",
|
||||
"sub.html": "{{1/0}}",
|
||||
})
|
||||
try:
|
||||
loader.load("base.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
self.assertTrue("# sub.html:1 (via base.html:1)" in
|
||||
traceback.format_exc())
|
||||
|
||||
def test_error_line_number_extends_base_error(self):
|
||||
loader = DictLoader({
|
||||
"base.html": "{{1/0}}",
|
||||
"sub.html": "{% extends 'base.html' %}",
|
||||
})
|
||||
try:
|
||||
loader.load("sub.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
exc_stack = traceback.format_exc()
|
||||
self.assertTrue("# base.html:1" in exc_stack)
|
||||
|
||||
def test_error_line_number_extends_sub_error(self):
|
||||
loader = DictLoader({
|
||||
"base.html": "{% block 'block' %}{% end %}",
|
||||
"sub.html": """
|
||||
{% extends 'base.html' %}
|
||||
{% block 'block' %}
|
||||
{{1/0}}
|
||||
{% end %}
|
||||
"""})
|
||||
try:
|
||||
loader.load("sub.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
self.assertTrue("# sub.html:4 (via base.html:1)" in
|
||||
traceback.format_exc())
|
||||
|
||||
def test_multi_includes(self):
|
||||
loader = DictLoader({
|
||||
"a.html": "{% include 'b.html' %}",
|
||||
"b.html": "{% include 'c.html' %}",
|
||||
"c.html": "{{1/0}}",
|
||||
})
|
||||
try:
|
||||
loader.load("a.html").generate()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
self.assertTrue("# c.html:1 (via b.html:1, a.html:1)" in
|
||||
traceback.format_exc())
|
||||
|
||||
|
||||
class AutoEscapeTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.templates = {
|
||||
"escaped.html": "{% autoescape xhtml_escape %}{{ name }}",
|
||||
"unescaped.html": "{% autoescape None %}{{ name }}",
|
||||
"default.html": "{{ name }}",
|
||||
|
||||
"include.html": """\
|
||||
escaped: {% include 'escaped.html' %}
|
||||
unescaped: {% include 'unescaped.html' %}
|
||||
default: {% include 'default.html' %}
|
||||
""",
|
||||
|
||||
"escaped_block.html": """\
|
||||
{% autoescape xhtml_escape %}\
|
||||
{% block name %}base: {{ name }}{% end %}""",
|
||||
"unescaped_block.html": """\
|
||||
{% autoescape None %}\
|
||||
{% block name %}base: {{ name }}{% end %}""",
|
||||
|
||||
# Extend a base template with different autoescape policy,
|
||||
# with and without overriding the base's blocks
|
||||
"escaped_extends_unescaped.html": """\
|
||||
{% autoescape xhtml_escape %}\
|
||||
{% extends "unescaped_block.html" %}""",
|
||||
"escaped_overrides_unescaped.html": """\
|
||||
{% autoescape xhtml_escape %}\
|
||||
{% extends "unescaped_block.html" %}\
|
||||
{% block name %}extended: {{ name }}{% end %}""",
|
||||
"unescaped_extends_escaped.html": """\
|
||||
{% autoescape None %}\
|
||||
{% extends "escaped_block.html" %}""",
|
||||
"unescaped_overrides_escaped.html": """\
|
||||
{% autoescape None %}\
|
||||
{% extends "escaped_block.html" %}\
|
||||
{% block name %}extended: {{ name }}{% end %}""",
|
||||
|
||||
"raw_expression.html": """\
|
||||
{% autoescape xhtml_escape %}\
|
||||
expr: {{ name }}
|
||||
raw: {% raw name %}""",
|
||||
}
|
||||
|
||||
def test_default_off(self):
|
||||
loader = DictLoader(self.templates, autoescape=None)
|
||||
name = "Bobby <table>s"
|
||||
self.assertEqual(loader.load("escaped.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
self.assertEqual(loader.load("unescaped.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
self.assertEqual(loader.load("default.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
|
||||
self.assertEqual(loader.load("include.html").generate(name=name),
|
||||
b"escaped: Bobby <table>s\n"
|
||||
b"unescaped: Bobby <table>s\n"
|
||||
b"default: Bobby <table>s\n")
|
||||
|
||||
def test_default_on(self):
|
||||
loader = DictLoader(self.templates, autoescape="xhtml_escape")
|
||||
name = "Bobby <table>s"
|
||||
self.assertEqual(loader.load("escaped.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
self.assertEqual(loader.load("unescaped.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
self.assertEqual(loader.load("default.html").generate(name=name),
|
||||
b"Bobby <table>s")
|
||||
|
||||
self.assertEqual(loader.load("include.html").generate(name=name),
|
||||
b"escaped: Bobby <table>s\n"
|
||||
b"unescaped: Bobby <table>s\n"
|
||||
b"default: Bobby <table>s\n")
|
||||
|
||||
def test_unextended_block(self):
|
||||
loader = DictLoader(self.templates)
|
||||
name = "<script>"
|
||||
self.assertEqual(loader.load("escaped_block.html").generate(name=name),
|
||||
b"base: <script>")
|
||||
self.assertEqual(loader.load("unescaped_block.html").generate(name=name),
|
||||
b"base: <script>")
|
||||
|
||||
def test_extended_block(self):
|
||||
loader = DictLoader(self.templates)
|
||||
|
||||
def render(name):
|
||||
return loader.load(name).generate(name="<script>")
|
||||
self.assertEqual(render("escaped_extends_unescaped.html"),
|
||||
b"base: <script>")
|
||||
self.assertEqual(render("escaped_overrides_unescaped.html"),
|
||||
b"extended: <script>")
|
||||
|
||||
self.assertEqual(render("unescaped_extends_escaped.html"),
|
||||
b"base: <script>")
|
||||
self.assertEqual(render("unescaped_overrides_escaped.html"),
|
||||
b"extended: <script>")
|
||||
|
||||
def test_raw_expression(self):
|
||||
loader = DictLoader(self.templates)
|
||||
|
||||
def render(name):
|
||||
return loader.load(name).generate(name='<>&"')
|
||||
self.assertEqual(render("raw_expression.html"),
|
||||
b"expr: <>&"\n"
|
||||
b"raw: <>&\"")
|
||||
|
||||
def test_custom_escape(self):
|
||||
loader = DictLoader({"foo.py":
|
||||
"{% autoescape py_escape %}s = {{ name }}\n"})
|
||||
|
||||
def py_escape(s):
|
||||
self.assertEqual(type(s), bytes_type)
|
||||
return repr(native_str(s))
|
||||
|
||||
def render(template, name):
|
||||
return loader.load(template).generate(py_escape=py_escape,
|
||||
name=name)
|
||||
self.assertEqual(render("foo.py", "<html>"),
|
||||
b"s = '<html>'\n")
|
||||
self.assertEqual(render("foo.py", "';sys.exit()"),
|
||||
b"""s = "';sys.exit()"\n""")
|
||||
self.assertEqual(render("foo.py", ["not a string"]),
|
||||
b"""s = "['not a string']"\n""")
|
||||
|
||||
def test_minimize_whitespace(self):
|
||||
# Whitespace including newlines is allowed within template tags
|
||||
# and directives, and this is one way to avoid long lines while
|
||||
# keeping extra whitespace out of the rendered output.
|
||||
loader = DictLoader({'foo.txt': """\
|
||||
{% for i in items
|
||||
%}{% if i > 0 %}, {% end %}{#
|
||||
#}{{i
|
||||
}}{% end
|
||||
%}""",
|
||||
})
|
||||
self.assertEqual(loader.load("foo.txt").generate(items=range(5)),
|
||||
b"0, 1, 2, 3, 4")
|
||||
|
||||
|
||||
class TemplateLoaderTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.loader = Loader(os.path.join(os.path.dirname(__file__), "templates"))
|
||||
|
||||
def test_utf8_in_file(self):
|
||||
tmpl = self.loader.load("utf8.html")
|
||||
result = tmpl.generate()
|
||||
self.assertEqual(to_unicode(result).strip(), u("H\u00e9llo"))
|
||||
|
|
@@ -0,0 +1 @@
|
|||
Héllo
|
||||
15
Shared/lib/python3.4/site-packages/tornado/test/test.crt
Normal file
|
|
@@ -0,0 +1,15 @@
|
|||
-----BEGIN CERTIFICATE-----
|
||||
MIICSDCCAbGgAwIBAgIJAN1oTowzMbkzMA0GCSqGSIb3DQEBBQUAMD0xCzAJBgNV
|
||||
BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRkwFwYDVQQKDBBUb3JuYWRvIFdl
|
||||
YiBUZXN0MB4XDTEwMDgyNTE4MjQ0NFoXDTIwMDgyMjE4MjQ0NFowPTELMAkGA1UE
|
||||
BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExGTAXBgNVBAoMEFRvcm5hZG8gV2Vi
|
||||
IFRlc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALirW3mX4jbdFse2aZwW
|
||||
zszCJ1IsRDrzALpbvMYLLbIZqo+Z8v5aERKTRQpXFqGaZyY+tdwYy7X7YXcLtKqv
|
||||
jnw/MSeIaqkw5pROKz5aR0nkPLvcTmhJVLVPCLc8dFnIlu8aC9TrDhr90P+PzU39
|
||||
UG7zLweA9zXKBuW3Tjo5dMP3AgMBAAGjUDBOMB0GA1UdDgQWBBRhJjMBYrzddCFr
|
||||
/0vvPyHMeqgo0TAfBgNVHSMEGDAWgBRhJjMBYrzddCFr/0vvPyHMeqgo0TAMBgNV
|
||||
HRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAGP6GaxSfb21bikcqaK3ZKCC1sRJ
|
||||
tiCuvJZbBUFUCAzl05dYUfJZim/oWK+GqyUkUB8ciYivUNnn9OtS7DnlTgT2ws2e
|
||||
lNgn5cuFXoAGcHXzVlHG3yoywYBf3y0Dn20uzrlLXUWJAzoSLOt2LTaXvwlgm7hF
|
||||
W1q8SQ6UBshRw2X0
|
||||
-----END CERTIFICATE-----
|
||||
16
Shared/lib/python3.4/site-packages/tornado/test/test.key
Normal file
16
Shared/lib/python3.4/site-packages/tornado/test/test.key
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
-----BEGIN PRIVATE KEY-----
|
||||
MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBALirW3mX4jbdFse2
|
||||
aZwWzszCJ1IsRDrzALpbvMYLLbIZqo+Z8v5aERKTRQpXFqGaZyY+tdwYy7X7YXcL
|
||||
tKqvjnw/MSeIaqkw5pROKz5aR0nkPLvcTmhJVLVPCLc8dFnIlu8aC9TrDhr90P+P
|
||||
zU39UG7zLweA9zXKBuW3Tjo5dMP3AgMBAAECgYEAiygNaWYrf95AcUQi9w00zpUr
|
||||
nj9fNvCwxr2kVbRMvd2balS/CC4EmXPCXdVcZ3B7dBVjYzSIJV0Fh/iZLtnVysD9
|
||||
fcNMZ+Cz71b/T0ItsNYOsJk0qUVyP52uqsqkNppIPJsD19C+ZeMLZj6iEiylZyl8
|
||||
2U16c/kVIjER63mUEGkCQQDayQOTGPJrKHqPAkUqzeJkfvHH2yCf+cySU+w6ezyr
|
||||
j9yxcq8aZoLusCebDVT+kz7RqnD5JePFvB38cMuepYBLAkEA2BTFdZx30f4moPNv
|
||||
JlXlPNJMUTUzsXG7n4vNc+18O5ous0NGQII8jZWrIcTrP8wiP9fF3JwUsKrJhcBn
|
||||
xRs3hQJBAIDUgz1YIE+HW3vgi1gkOh6RPdBAsVpiXtr/fggFz3j60qrO7FswaAMj
|
||||
SX8c/6KUlBYkNjgP3qruFf4zcUNvEzcCQQCaioCPFVE9ByBpjLG6IUTKsz2R9xL5
|
||||
nfYqrbpLZ1aq6iLsYvkjugHE4X57sHLwNfdo4dHJbnf9wqhO2MVe25BhAkBdKYpY
|
||||
7OKc/2mmMbJDhVBgoixz/muN/5VjdfbvVY48naZkJF1p1tmogqPC5F1jPCS4rM+S
|
||||
FfPJIHRNEn2oktw5
|
||||
-----END PRIVATE KEY-----
|
||||
220
Shared/lib/python3.4/site-packages/tornado/test/testing_test.py
Normal file
220
Shared/lib/python3.4/site-packages/tornado/test/testing_test.py
Normal file
|
|
@ -0,0 +1,220 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
from tornado import gen, ioloop
|
||||
from tornado.testing import AsyncTestCase, gen_test
|
||||
from tornado.test.util import unittest
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import traceback
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def set_environ(name, value):
|
||||
old_value = os.environ.get(name)
|
||||
os.environ[name] = value
|
||||
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
if old_value is None:
|
||||
del os.environ[name]
|
||||
else:
|
||||
os.environ[name] = old_value
|
||||
|
||||
|
||||
class AsyncTestCaseTest(AsyncTestCase):
|
||||
def test_exception_in_callback(self):
|
||||
self.io_loop.add_callback(lambda: 1 / 0)
|
||||
try:
|
||||
self.wait()
|
||||
self.fail("did not get expected exception")
|
||||
except ZeroDivisionError:
|
||||
pass
|
||||
|
||||
def test_wait_timeout(self):
|
||||
time = self.io_loop.time
|
||||
|
||||
# Accept default 5-second timeout, no error
|
||||
self.io_loop.add_timeout(time() + 0.01, self.stop)
|
||||
self.wait()
|
||||
|
||||
# Timeout passed to wait()
|
||||
self.io_loop.add_timeout(time() + 1, self.stop)
|
||||
with self.assertRaises(self.failureException):
|
||||
self.wait(timeout=0.01)
|
||||
|
||||
# Timeout set with environment variable
|
||||
self.io_loop.add_timeout(time() + 1, self.stop)
|
||||
with set_environ('ASYNC_TEST_TIMEOUT', '0.01'):
|
||||
with self.assertRaises(self.failureException):
|
||||
self.wait()
|
||||
|
||||
def test_subsequent_wait_calls(self):
|
||||
"""
|
||||
This test makes sure that a second call to wait()
|
||||
clears the first timeout.
|
||||
"""
|
||||
self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
|
||||
self.wait(timeout=0.02)
|
||||
self.io_loop.add_timeout(self.io_loop.time() + 0.03, self.stop)
|
||||
self.wait(timeout=0.15)
|
||||
|
||||
|
||||
class AsyncTestCaseWrapperTest(unittest.TestCase):
|
||||
def test_undecorated_generator(self):
|
||||
class Test(AsyncTestCase):
|
||||
def test_gen(self):
|
||||
yield
|
||||
test = Test('test_gen')
|
||||
result = unittest.TestResult()
|
||||
test.run(result)
|
||||
self.assertEqual(len(result.errors), 1)
|
||||
self.assertIn("should be decorated", result.errors[0][1])
|
||||
|
||||
def test_undecorated_generator_with_skip(self):
|
||||
class Test(AsyncTestCase):
|
||||
@unittest.skip("don't run this")
|
||||
def test_gen(self):
|
||||
yield
|
||||
test = Test('test_gen')
|
||||
result = unittest.TestResult()
|
||||
test.run(result)
|
||||
self.assertEqual(len(result.errors), 0)
|
||||
self.assertEqual(len(result.skipped), 1)
|
||||
|
||||
def test_other_return(self):
|
||||
class Test(AsyncTestCase):
|
||||
def test_other_return(self):
|
||||
return 42
|
||||
test = Test('test_other_return')
|
||||
result = unittest.TestResult()
|
||||
test.run(result)
|
||||
self.assertEqual(len(result.errors), 1)
|
||||
self.assertIn("Return value from test method ignored", result.errors[0][1])
|
||||
|
||||
|
||||
class SetUpTearDownTest(unittest.TestCase):
|
||||
def test_set_up_tear_down(self):
|
||||
"""
|
||||
This test makes sure that AsyncTestCase calls super methods for
|
||||
setUp and tearDown.
|
||||
|
||||
InheritBoth is a subclass of both AsyncTestCase and
|
||||
SetUpTearDown, with the ordering so that the super of
|
||||
AsyncTestCase will be SetUpTearDown.
|
||||
"""
|
||||
events = []
|
||||
result = unittest.TestResult()
|
||||
|
||||
class SetUpTearDown(unittest.TestCase):
|
||||
def setUp(self):
|
||||
events.append('setUp')
|
||||
|
||||
def tearDown(self):
|
||||
events.append('tearDown')
|
||||
|
||||
class InheritBoth(AsyncTestCase, SetUpTearDown):
|
||||
def test(self):
|
||||
events.append('test')
|
||||
|
||||
InheritBoth('test').run(result)
|
||||
expected = ['setUp', 'test', 'tearDown']
|
||||
self.assertEqual(expected, events)
|
||||
|
||||
|
||||
class GenTest(AsyncTestCase):
|
||||
def setUp(self):
|
||||
super(GenTest, self).setUp()
|
||||
self.finished = False
|
||||
|
||||
def tearDown(self):
|
||||
self.assertTrue(self.finished)
|
||||
super(GenTest, self).tearDown()
|
||||
|
||||
@gen_test
|
||||
def test_sync(self):
|
||||
self.finished = True
|
||||
|
||||
@gen_test
|
||||
def test_async(self):
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
self.finished = True
|
||||
|
||||
def test_timeout(self):
|
||||
# Set a short timeout and exceed it.
|
||||
@gen_test(timeout=0.1)
|
||||
def test(self):
|
||||
yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)
|
||||
|
||||
# This can't use assertRaises because we need to inspect the
|
||||
# exc_info triple (and not just the exception object)
|
||||
try:
|
||||
test(self)
|
||||
self.fail("did not get expected exception")
|
||||
except ioloop.TimeoutError:
|
||||
# The stack trace should blame the add_timeout line, not just
|
||||
# unrelated IOLoop/testing internals.
|
||||
self.assertIn(
|
||||
"gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)",
|
||||
traceback.format_exc())
|
||||
|
||||
self.finished = True
|
||||
|
||||
def test_no_timeout(self):
|
||||
# A test that does not exceed its timeout should succeed.
|
||||
@gen_test(timeout=1)
|
||||
def test(self):
|
||||
time = self.io_loop.time
|
||||
yield gen.Task(self.io_loop.add_timeout, time() + 0.1)
|
||||
|
||||
test(self)
|
||||
self.finished = True
|
||||
|
||||
def test_timeout_environment_variable(self):
|
||||
@gen_test(timeout=0.5)
|
||||
def test_long_timeout(self):
|
||||
time = self.io_loop.time
|
||||
yield gen.Task(self.io_loop.add_timeout, time() + 0.25)
|
||||
|
||||
# Uses provided timeout of 0.5 seconds, doesn't time out.
|
||||
with set_environ('ASYNC_TEST_TIMEOUT', '0.1'):
|
||||
test_long_timeout(self)
|
||||
|
||||
self.finished = True
|
||||
|
||||
def test_no_timeout_environment_variable(self):
|
||||
@gen_test(timeout=0.01)
|
||||
def test_short_timeout(self):
|
||||
time = self.io_loop.time
|
||||
yield gen.Task(self.io_loop.add_timeout, time() + 1)
|
||||
|
||||
# Uses environment-variable timeout of 0.1, times out.
|
||||
with set_environ('ASYNC_TEST_TIMEOUT', '0.1'):
|
||||
with self.assertRaises(ioloop.TimeoutError):
|
||||
test_short_timeout(self)
|
||||
|
||||
self.finished = True
|
||||
|
||||
def test_with_method_args(self):
|
||||
@gen_test
|
||||
def test_with_args(self, *args):
|
||||
self.assertEqual(args, ('test',))
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
|
||||
test_with_args(self, 'test')
|
||||
self.finished = True
|
||||
|
||||
def test_with_method_kwargs(self):
|
||||
@gen_test
|
||||
def test_with_kwargs(self, **kwargs):
|
||||
self.assertDictEqual(kwargs, {'test': 'test'})
|
||||
yield gen.Task(self.io_loop.add_callback)
|
||||
|
||||
test_with_kwargs(self, test='test')
|
||||
self.finished = True
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
618
Shared/lib/python3.4/site-packages/tornado/test/twisted_test.py
Normal file
618
Shared/lib/python3.4/site-packages/tornado/test/twisted_test.py
Normal file
|
|
@ -0,0 +1,618 @@
|
|||
# Author: Ovidiu Predescu
|
||||
# Date: July 2011
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
"""
|
||||
Unittest for the twisted-style reactor.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import signal
|
||||
import tempfile
|
||||
import threading
|
||||
|
||||
try:
|
||||
import fcntl
|
||||
from twisted.internet.defer import Deferred
|
||||
from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor
|
||||
from twisted.internet.protocol import Protocol
|
||||
from twisted.python import log
|
||||
from tornado.platform.twisted import TornadoReactor, TwistedIOLoop
|
||||
from zope.interface import implementer
|
||||
have_twisted = True
|
||||
except ImportError:
|
||||
have_twisted = False
|
||||
|
||||
# The core of Twisted 12.3.0 is available on python 3, but twisted.web is not
|
||||
# so test for it separately.
|
||||
try:
|
||||
from twisted.web.client import Agent
|
||||
from twisted.web.resource import Resource
|
||||
from twisted.web.server import Site
|
||||
have_twisted_web = True
|
||||
except ImportError:
|
||||
have_twisted_web = False
|
||||
|
||||
try:
|
||||
import thread # py2
|
||||
except ImportError:
|
||||
import _thread as thread # py3
|
||||
|
||||
from tornado.httpclient import AsyncHTTPClient
|
||||
from tornado.httpserver import HTTPServer
|
||||
from tornado.ioloop import IOLoop
|
||||
from tornado.platform.auto import set_close_exec
|
||||
from tornado.platform.select import SelectIOLoop
|
||||
from tornado.testing import bind_unused_port
|
||||
from tornado.test.util import unittest
|
||||
from tornado.util import import_object
|
||||
from tornado.web import RequestHandler, Application
|
||||
|
||||
skipIfNoTwisted = unittest.skipUnless(have_twisted,
|
||||
"twisted module not present")
|
||||
|
||||
|
||||
def save_signal_handlers():
|
||||
saved = {}
|
||||
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
|
||||
saved[sig] = signal.getsignal(sig)
|
||||
if "twisted" in repr(saved):
|
||||
if not issubclass(IOLoop.configured_class(), TwistedIOLoop):
|
||||
# when the global ioloop is twisted, we expect the signal
|
||||
# handlers to be installed. Otherwise, it means we're not
|
||||
# cleaning up after twisted properly.
|
||||
raise Exception("twisted signal handlers already installed")
|
||||
return saved
|
||||
|
||||
|
||||
def restore_signal_handlers(saved):
|
||||
for sig, handler in saved.items():
|
||||
signal.signal(sig, handler)
|
||||
|
||||
|
||||
class ReactorTestCase(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self._saved_signals = save_signal_handlers()
|
||||
self._io_loop = IOLoop()
|
||||
self._reactor = TornadoReactor(self._io_loop)
|
||||
|
||||
def tearDown(self):
|
||||
self._io_loop.close(all_fds=True)
|
||||
restore_signal_handlers(self._saved_signals)
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorWhenRunningTest(ReactorTestCase):
|
||||
def test_whenRunning(self):
|
||||
self._whenRunningCalled = False
|
||||
self._anotherWhenRunningCalled = False
|
||||
self._reactor.callWhenRunning(self.whenRunningCallback)
|
||||
self._reactor.run()
|
||||
self.assertTrue(self._whenRunningCalled)
|
||||
self.assertTrue(self._anotherWhenRunningCalled)
|
||||
|
||||
def whenRunningCallback(self):
|
||||
self._whenRunningCalled = True
|
||||
self._reactor.callWhenRunning(self.anotherWhenRunningCallback)
|
||||
self._reactor.stop()
|
||||
|
||||
def anotherWhenRunningCallback(self):
|
||||
self._anotherWhenRunningCalled = True
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorCallLaterTest(ReactorTestCase):
|
||||
def test_callLater(self):
|
||||
self._laterCalled = False
|
||||
self._now = self._reactor.seconds()
|
||||
self._timeout = 0.001
|
||||
dc = self._reactor.callLater(self._timeout, self.callLaterCallback)
|
||||
self.assertEqual(self._reactor.getDelayedCalls(), [dc])
|
||||
self._reactor.run()
|
||||
self.assertTrue(self._laterCalled)
|
||||
self.assertTrue(self._called - self._now > self._timeout)
|
||||
self.assertEqual(self._reactor.getDelayedCalls(), [])
|
||||
|
||||
def callLaterCallback(self):
|
||||
self._laterCalled = True
|
||||
self._called = self._reactor.seconds()
|
||||
self._reactor.stop()
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorTwoCallLaterTest(ReactorTestCase):
|
||||
def test_callLater(self):
|
||||
self._later1Called = False
|
||||
self._later2Called = False
|
||||
self._now = self._reactor.seconds()
|
||||
self._timeout1 = 0.0005
|
||||
dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1)
|
||||
self._timeout2 = 0.001
|
||||
dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2)
|
||||
self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or
|
||||
self._reactor.getDelayedCalls() == [dc2, dc1])
|
||||
self._reactor.run()
|
||||
self.assertTrue(self._later1Called)
|
||||
self.assertTrue(self._later2Called)
|
||||
self.assertTrue(self._called1 - self._now > self._timeout1)
|
||||
self.assertTrue(self._called2 - self._now > self._timeout2)
|
||||
self.assertEqual(self._reactor.getDelayedCalls(), [])
|
||||
|
||||
def callLaterCallback1(self):
|
||||
self._later1Called = True
|
||||
self._called1 = self._reactor.seconds()
|
||||
|
||||
def callLaterCallback2(self):
|
||||
self._later2Called = True
|
||||
self._called2 = self._reactor.seconds()
|
||||
self._reactor.stop()
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorCallFromThreadTest(ReactorTestCase):
|
||||
def setUp(self):
|
||||
super(ReactorCallFromThreadTest, self).setUp()
|
||||
self._mainThread = thread.get_ident()
|
||||
|
||||
def tearDown(self):
|
||||
self._thread.join()
|
||||
super(ReactorCallFromThreadTest, self).tearDown()
|
||||
|
||||
def _newThreadRun(self):
|
||||
self.assertNotEqual(self._mainThread, thread.get_ident())
|
||||
if hasattr(self._thread, 'ident'): # new in python 2.6
|
||||
self.assertEqual(self._thread.ident, thread.get_ident())
|
||||
self._reactor.callFromThread(self._fnCalledFromThread)
|
||||
|
||||
def _fnCalledFromThread(self):
|
||||
self.assertEqual(self._mainThread, thread.get_ident())
|
||||
self._reactor.stop()
|
||||
|
||||
def _whenRunningCallback(self):
|
||||
self._thread = threading.Thread(target=self._newThreadRun)
|
||||
self._thread.start()
|
||||
|
||||
def testCallFromThread(self):
|
||||
self._reactor.callWhenRunning(self._whenRunningCallback)
|
||||
self._reactor.run()
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorCallInThread(ReactorTestCase):
|
||||
def setUp(self):
|
||||
super(ReactorCallInThread, self).setUp()
|
||||
self._mainThread = thread.get_ident()
|
||||
|
||||
def _fnCalledInThread(self, *args, **kwargs):
|
||||
self.assertNotEqual(thread.get_ident(), self._mainThread)
|
||||
self._reactor.callFromThread(lambda: self._reactor.stop())
|
||||
|
||||
def _whenRunningCallback(self):
|
||||
self._reactor.callInThread(self._fnCalledInThread)
|
||||
|
||||
def testCallInThread(self):
|
||||
self._reactor.callWhenRunning(self._whenRunningCallback)
|
||||
self._reactor.run()
|
||||
|
||||
|
||||
class Reader(object):
|
||||
def __init__(self, fd, callback):
|
||||
self._fd = fd
|
||||
self._callback = callback
|
||||
|
||||
def logPrefix(self):
|
||||
return "Reader"
|
||||
|
||||
def close(self):
|
||||
self._fd.close()
|
||||
|
||||
def fileno(self):
|
||||
return self._fd.fileno()
|
||||
|
||||
def readConnectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def doRead(self):
|
||||
self._callback(self._fd)
|
||||
if have_twisted:
|
||||
Reader = implementer(IReadDescriptor)(Reader)
|
||||
|
||||
|
||||
class Writer(object):
|
||||
def __init__(self, fd, callback):
|
||||
self._fd = fd
|
||||
self._callback = callback
|
||||
|
||||
def logPrefix(self):
|
||||
return "Writer"
|
||||
|
||||
def close(self):
|
||||
self._fd.close()
|
||||
|
||||
def fileno(self):
|
||||
return self._fd.fileno()
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.close()
|
||||
|
||||
def doWrite(self):
|
||||
self._callback(self._fd)
|
||||
if have_twisted:
|
||||
Writer = implementer(IWriteDescriptor)(Writer)
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
class ReactorReaderWriterTest(ReactorTestCase):
|
||||
def _set_nonblocking(self, fd):
|
||||
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
|
||||
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||
|
||||
def setUp(self):
|
||||
super(ReactorReaderWriterTest, self).setUp()
|
||||
r, w = os.pipe()
|
||||
self._set_nonblocking(r)
|
||||
self._set_nonblocking(w)
|
||||
set_close_exec(r)
|
||||
set_close_exec(w)
|
||||
self._p1 = os.fdopen(r, "rb", 0)
|
||||
self._p2 = os.fdopen(w, "wb", 0)
|
||||
|
||||
def tearDown(self):
|
||||
super(ReactorReaderWriterTest, self).tearDown()
|
||||
self._p1.close()
|
||||
self._p2.close()
|
||||
|
||||
def _testReadWrite(self):
|
||||
"""
|
||||
In this test the writer writes an 'x' to its fd. The reader
|
||||
reads it, checks the value and ends the test.
|
||||
"""
|
||||
self.shouldWrite = True
|
||||
|
||||
def checkReadInput(fd):
|
||||
self.assertEquals(fd.read(1), b'x')
|
||||
self._reactor.stop()
|
||||
|
||||
def writeOnce(fd):
|
||||
if self.shouldWrite:
|
||||
self.shouldWrite = False
|
||||
fd.write(b'x')
|
||||
self._reader = Reader(self._p1, checkReadInput)
|
||||
self._writer = Writer(self._p2, writeOnce)
|
||||
|
||||
self._reactor.addWriter(self._writer)
|
||||
|
||||
# Test that adding the reader twice adds it only once to
|
||||
# IOLoop.
|
||||
self._reactor.addReader(self._reader)
|
||||
self._reactor.addReader(self._reader)
|
||||
|
||||
def testReadWrite(self):
|
||||
self._reactor.callWhenRunning(self._testReadWrite)
|
||||
self._reactor.run()
|
||||
|
||||
def _testNoWriter(self):
|
||||
"""
|
||||
In this test we have no writer. Make sure the reader doesn't
|
||||
read anything.
|
||||
"""
|
||||
def checkReadInput(fd):
|
||||
self.fail("Must not be called.")
|
||||
|
||||
def stopTest():
|
||||
# Close the writer here since the IOLoop doesn't know
|
||||
# about it.
|
||||
self._writer.close()
|
||||
self._reactor.stop()
|
||||
self._reader = Reader(self._p1, checkReadInput)
|
||||
|
||||
# We create a writer, but it should never be invoked.
|
||||
self._writer = Writer(self._p2, lambda fd: fd.write('x'))
|
||||
|
||||
# Test that adding and removing the writer leaves us with no writer.
|
||||
self._reactor.addWriter(self._writer)
|
||||
self._reactor.removeWriter(self._writer)
|
||||
|
||||
# Test that adding and removing the reader doesn't cause
|
||||
# unintended effects.
|
||||
self._reactor.addReader(self._reader)
|
||||
|
||||
# Wake up after a moment and stop the test
|
||||
self._reactor.callLater(0.001, stopTest)
|
||||
|
||||
def testNoWriter(self):
|
||||
self._reactor.callWhenRunning(self._testNoWriter)
|
||||
self._reactor.run()
|
||||
|
||||
# Test various combinations of twisted and tornado http servers,
|
||||
# http clients, and event loop interfaces.
|
||||
|
||||
|
||||
@skipIfNoTwisted
|
||||
@unittest.skipIf(not have_twisted_web, 'twisted web not present')
|
||||
class CompatibilityTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.saved_signals = save_signal_handlers()
|
||||
self.io_loop = IOLoop()
|
||||
self.io_loop.make_current()
|
||||
self.reactor = TornadoReactor(self.io_loop)
|
||||
|
||||
def tearDown(self):
|
||||
self.reactor.disconnectAll()
|
||||
self.io_loop.clear_current()
|
||||
self.io_loop.close(all_fds=True)
|
||||
restore_signal_handlers(self.saved_signals)
|
||||
|
||||
def start_twisted_server(self):
|
||||
class HelloResource(Resource):
|
||||
isLeaf = True
|
||||
|
||||
def render_GET(self, request):
|
||||
return "Hello from twisted!"
|
||||
site = Site(HelloResource())
|
||||
port = self.reactor.listenTCP(0, site, interface='127.0.0.1')
|
||||
self.twisted_port = port.getHost().port
|
||||
|
||||
def start_tornado_server(self):
|
||||
class HelloHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write("Hello from tornado!")
|
||||
app = Application([('/', HelloHandler)],
|
||||
log_function=lambda x: None)
|
||||
server = HTTPServer(app, io_loop=self.io_loop)
|
||||
sock, self.tornado_port = bind_unused_port()
|
||||
server.add_sockets([sock])
|
||||
|
||||
def run_ioloop(self):
|
||||
self.stop_loop = self.io_loop.stop
|
||||
self.io_loop.start()
|
||||
self.reactor.fireSystemEvent('shutdown')
|
||||
|
||||
def run_reactor(self):
|
||||
self.stop_loop = self.reactor.stop
|
||||
self.stop = self.reactor.stop
|
||||
self.reactor.run()
|
||||
|
||||
def tornado_fetch(self, url, runner):
|
||||
responses = []
|
||||
client = AsyncHTTPClient(self.io_loop)
|
||||
|
||||
def callback(response):
|
||||
responses.append(response)
|
||||
self.stop_loop()
|
||||
client.fetch(url, callback=callback)
|
||||
runner()
|
||||
self.assertEqual(len(responses), 1)
|
||||
responses[0].rethrow()
|
||||
return responses[0]
|
||||
|
||||
def twisted_fetch(self, url, runner):
|
||||
# http://twistedmatrix.com/documents/current/web/howto/client.html
|
||||
chunks = []
|
||||
client = Agent(self.reactor)
|
||||
d = client.request('GET', url)
|
||||
|
||||
class Accumulator(Protocol):
|
||||
def __init__(self, finished):
|
||||
self.finished = finished
|
||||
|
||||
def dataReceived(self, data):
|
||||
chunks.append(data)
|
||||
|
||||
def connectionLost(self, reason):
|
||||
self.finished.callback(None)
|
||||
|
||||
def callback(response):
|
||||
finished = Deferred()
|
||||
response.deliverBody(Accumulator(finished))
|
||||
return finished
|
||||
d.addCallback(callback)
|
||||
|
||||
def shutdown(ignored):
|
||||
self.stop_loop()
|
||||
d.addBoth(shutdown)
|
||||
runner()
|
||||
self.assertTrue(chunks)
|
||||
return ''.join(chunks)
|
||||
|
||||
def testTwistedServerTornadoClientIOLoop(self):
|
||||
self.start_twisted_server()
|
||||
response = self.tornado_fetch(
|
||||
'http://localhost:%d' % self.twisted_port, self.run_ioloop)
|
||||
self.assertEqual(response.body, 'Hello from twisted!')
|
||||
|
||||
def testTwistedServerTornadoClientReactor(self):
|
||||
self.start_twisted_server()
|
||||
response = self.tornado_fetch(
|
||||
'http://localhost:%d' % self.twisted_port, self.run_reactor)
|
||||
self.assertEqual(response.body, 'Hello from twisted!')
|
||||
|
||||
def testTornadoServerTwistedClientIOLoop(self):
|
||||
self.start_tornado_server()
|
||||
response = self.twisted_fetch(
|
||||
'http://localhost:%d' % self.tornado_port, self.run_ioloop)
|
||||
self.assertEqual(response, 'Hello from tornado!')
|
||||
|
||||
def testTornadoServerTwistedClientReactor(self):
|
||||
self.start_tornado_server()
|
||||
response = self.twisted_fetch(
|
||||
'http://localhost:%d' % self.tornado_port, self.run_reactor)
|
||||
self.assertEqual(response, 'Hello from tornado!')
|
||||
|
||||
|
||||
if have_twisted:
|
||||
# Import and run as much of twisted's test suite as possible.
|
||||
# This is unfortunately rather dependent on implementation details,
|
||||
# but there doesn't appear to be a clean all-in-one conformance test
|
||||
# suite for reactors.
|
||||
#
|
||||
# This is a list of all test suites using the ReactorBuilder
|
||||
# available in Twisted 11.0.0 and 11.1.0 (and a blacklist of
|
||||
# specific test methods to be disabled).
|
||||
twisted_tests = {
|
||||
'twisted.internet.test.test_core.ObjectModelIntegrationTest': [],
|
||||
'twisted.internet.test.test_core.SystemEventTestsBuilder': [
|
||||
'test_iterate', # deliberately not supported
|
||||
# Fails on TwistedIOLoop and AsyncIOLoop.
|
||||
'test_runAfterCrash',
|
||||
],
|
||||
'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [
|
||||
"test_lostFileDescriptor", # incompatible with epoll and kqueue
|
||||
],
|
||||
'twisted.internet.test.test_process.ProcessTestsBuilder': [
|
||||
# Only work as root. Twisted's "skip" functionality works
|
||||
# with py27+, but not unittest2 on py26.
|
||||
'test_changeGID',
|
||||
'test_changeUID',
|
||||
],
|
||||
# Process tests appear to work on OSX 10.7, but not 10.6
|
||||
#'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
|
||||
# 'test_systemCallUninterruptedByChildExit',
|
||||
# ],
|
||||
'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
|
||||
'test_badContext', # ssl-related; see also SSLClientTestsMixin
|
||||
],
|
||||
'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
|
||||
# These use link-local addresses and cause firewall prompts on mac
|
||||
'test_buildProtocolIPv6AddressScopeID',
|
||||
'test_portGetHostOnIPv6ScopeID',
|
||||
'test_serverGetHostOnIPv6ScopeID',
|
||||
'test_serverGetPeerOnIPv6ScopeID',
|
||||
],
|
||||
'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
|
||||
'twisted.internet.test.test_tcp.WriteSequenceTests': [],
|
||||
'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
|
||||
'twisted.internet.test.test_threads.ThreadTestsBuilder': [],
|
||||
'twisted.internet.test.test_time.TimeTestsBuilder': [],
|
||||
# Extra third-party dependencies (pyOpenSSL)
|
||||
#'twisted.internet.test.test_tls.SSLClientTestsMixin': [],
|
||||
'twisted.internet.test.test_udp.UDPServerTestsBuilder': [],
|
||||
'twisted.internet.test.test_unix.UNIXTestsBuilder': [
|
||||
# Platform-specific. These tests would be skipped automatically
|
||||
# if we were running twisted's own test runner.
|
||||
'test_connectToLinuxAbstractNamespace',
|
||||
'test_listenOnLinuxAbstractNamespace',
|
||||
# These tests use twisted's sendmsg.c extension and sometimes
|
||||
# fail with what looks like uninitialized memory errors
|
||||
# (more common on pypy than cpython, but I've seen it on both)
|
||||
'test_sendFileDescriptor',
|
||||
'test_sendFileDescriptorTriggersPauseProducing',
|
||||
'test_descriptorDeliveredBeforeBytes',
|
||||
'test_avoidLeakingFileDescriptors',
|
||||
],
|
||||
'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [
|
||||
'test_listenOnLinuxAbstractNamespace',
|
||||
],
|
||||
'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [],
|
||||
}
|
||||
for test_name, blacklist in twisted_tests.items():
|
||||
try:
|
||||
test_class = import_object(test_name)
|
||||
except (ImportError, AttributeError):
|
||||
continue
|
||||
for test_func in blacklist:
|
||||
if hasattr(test_class, test_func):
|
||||
# The test_func may be defined in a mixin, so clobber
|
||||
# it instead of delattr()
|
||||
setattr(test_class, test_func, lambda self: None)
|
||||
|
||||
def make_test_subclass(test_class):
|
||||
class TornadoTest(test_class):
|
||||
_reactors = ["tornado.platform.twisted._TestReactor"]
|
||||
|
||||
def setUp(self):
|
||||
# Twisted's tests expect to be run from a temporary
|
||||
# directory; they create files in their working directory
|
||||
# and don't always clean up after themselves.
|
||||
self.__curdir = os.getcwd()
|
||||
self.__tempdir = tempfile.mkdtemp()
|
||||
os.chdir(self.__tempdir)
|
||||
super(TornadoTest, self).setUp()
|
||||
|
||||
def tearDown(self):
|
||||
super(TornadoTest, self).tearDown()
|
||||
os.chdir(self.__curdir)
|
||||
shutil.rmtree(self.__tempdir)
|
||||
|
||||
def buildReactor(self):
|
||||
self.__saved_signals = save_signal_handlers()
|
||||
return test_class.buildReactor(self)
|
||||
|
||||
def unbuildReactor(self, reactor):
|
||||
test_class.unbuildReactor(self, reactor)
|
||||
# Clean up file descriptors (especially epoll/kqueue
|
||||
# objects) eagerly instead of leaving them for the
|
||||
# GC. Unfortunately we can't do this in reactor.stop
|
||||
# since twisted expects to be able to unregister
|
||||
# connections in a post-shutdown hook.
|
||||
reactor._io_loop.close(all_fds=True)
|
||||
restore_signal_handlers(self.__saved_signals)
|
||||
|
||||
TornadoTest.__name__ = test_class.__name__
|
||||
return TornadoTest
|
||||
test_subclass = make_test_subclass(test_class)
|
||||
globals().update(test_subclass.makeTestCaseClasses())
|
||||
|
||||
# Since we're not using twisted's test runner, it's tricky to get
|
||||
# logging set up well. Most of the time it's easiest to just
|
||||
# leave it turned off, but while working on these tests you may want
|
||||
# to uncomment one of the other lines instead.
|
||||
log.defaultObserver.stop()
|
||||
# import sys; log.startLogging(sys.stderr, setStdout=0)
|
||||
# log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
|
||||
# import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
|
||||
|
||||
if have_twisted:
|
||||
class LayeredTwistedIOLoop(TwistedIOLoop):
|
||||
"""Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
|
||||
|
||||
This is of course silly, but is useful for testing purposes to make
|
||||
sure we're implementing both sides of the various interfaces
|
||||
correctly. In some tests another TornadoReactor is layered on top
|
||||
of the whole stack.
|
||||
"""
|
||||
def initialize(self):
|
||||
# When configured to use LayeredTwistedIOLoop we can't easily
|
||||
# get the next-best IOLoop implementation, so use the lowest common
|
||||
# denominator.
|
||||
self.real_io_loop = SelectIOLoop()
|
||||
reactor = TornadoReactor(io_loop=self.real_io_loop)
|
||||
super(LayeredTwistedIOLoop, self).initialize(reactor=reactor)
|
||||
self.add_callback(self.make_current)
|
||||
|
||||
def close(self, all_fds=False):
|
||||
super(LayeredTwistedIOLoop, self).close(all_fds=all_fds)
|
||||
# HACK: This is the same thing that test_class.unbuildReactor does.
|
||||
for reader in self.reactor._internalReaders:
|
||||
self.reactor.removeReader(reader)
|
||||
reader.connectionLost(None)
|
||||
self.real_io_loop.close(all_fds=all_fds)
|
||||
|
||||
def stop(self):
|
||||
# One of twisted's tests fails if I don't delay crash()
|
||||
# until the reactor has started, but if I move this to
|
||||
# TwistedIOLoop then the tests fail when I'm *not* running
|
||||
# tornado-on-twisted-on-tornado. I'm clearly missing something
|
||||
# about the startup/crash semantics, but since stop and crash
|
||||
# are really only used in tests it doesn't really matter.
|
||||
self.reactor.callWhenRunning(self.reactor.crash)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
30
Shared/lib/python3.4/site-packages/tornado/test/util.py
Normal file
30
Shared/lib/python3.4/site-packages/tornado/test/util.py
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import os
|
||||
import socket
|
||||
import sys
|
||||
|
||||
# Encapsulate the choice of unittest or unittest2 here.
|
||||
# To be used as 'from tornado.test.util import unittest'.
|
||||
if sys.version_info < (2, 7):
|
||||
# In py26, we must always use unittest2.
|
||||
import unittest2 as unittest
|
||||
else:
|
||||
# Otherwise, use whichever version of unittest was imported in
|
||||
# tornado.testing.
|
||||
from tornado.testing import unittest
|
||||
|
||||
skipIfNonUnix = unittest.skipIf(os.name != 'posix' or sys.platform == 'cygwin',
|
||||
"non-unix platform")
|
||||
|
||||
# travis-ci.org runs our tests in an overworked virtual machine, which makes
|
||||
# timing-related tests unreliable.
|
||||
skipOnTravis = unittest.skipIf('TRAVIS' in os.environ,
|
||||
'timing tests unreliable on travis')
|
||||
|
||||
# Set the environment variable NO_NETWORK=1 to disable any tests that
|
||||
# depend on an external network.
|
||||
skipIfNoNetwork = unittest.skipIf('NO_NETWORK' in os.environ,
|
||||
'network access disabled')
|
||||
|
||||
skipIfNoIPv6 = unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
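For illustration only (not part of this commit), the markers defined above are applied as ordinary unittest decorators; the class and test names here are hypothetical:

from tornado.test.util import unittest, skipIfNoNetwork, skipOnTravis


class ExampleConditionalTest(unittest.TestCase):
    @skipOnTravis
    def test_timing_sensitive(self):
        # Skipped on travis-ci.org, where timing is unreliable.
        self.assertTrue(True)

    @skipIfNoNetwork
    def test_needs_external_network(self):
        # Skipped when NO_NETWORK=1 is set in the environment.
        self.assertTrue(True)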
|
||||
172
Shared/lib/python3.4/site-packages/tornado/test/util_test.py
Normal file
172
Shared/lib/python3.4/site-packages/tornado/test/util_test.py
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
# coding: utf-8
|
||||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
import sys
|
||||
|
||||
from tornado.escape import utf8
|
||||
from tornado.util import raise_exc_info, Configurable, u, exec_in, ArgReplacer
|
||||
from tornado.test.util import unittest
|
||||
|
||||
try:
|
||||
from cStringIO import StringIO # py2
|
||||
except ImportError:
|
||||
from io import StringIO # py3
|
||||
|
||||
|
||||
class RaiseExcInfoTest(unittest.TestCase):
|
||||
def test_two_arg_exception(self):
|
||||
# This test would fail on python 3 if raise_exc_info were simply
|
||||
# a three-argument raise statement, because TwoArgException
|
||||
# doesn't have a "copy constructor"
|
||||
class TwoArgException(Exception):
|
||||
def __init__(self, a, b):
|
||||
super(TwoArgException, self).__init__()
|
||||
self.a, self.b = a, b
|
||||
|
||||
try:
|
||||
raise TwoArgException(1, 2)
|
||||
except TwoArgException:
|
||||
exc_info = sys.exc_info()
|
||||
try:
|
||||
raise_exc_info(exc_info)
|
||||
self.fail("didn't get expected exception")
|
||||
except TwoArgException as e:
|
||||
self.assertIs(e, exc_info[1])
|
||||
|
||||
|
||||
class TestConfigurable(Configurable):
|
||||
@classmethod
|
||||
def configurable_base(cls):
|
||||
return TestConfigurable
|
||||
|
||||
@classmethod
|
||||
def configurable_default(cls):
|
||||
return TestConfig1
|
||||
|
||||
|
||||
class TestConfig1(TestConfigurable):
|
||||
def initialize(self, a=None):
|
||||
self.a = a
|
||||
|
||||
|
||||
class TestConfig2(TestConfigurable):
|
||||
def initialize(self, b=None):
|
||||
self.b = b
|
||||
|
||||
|
||||
class ConfigurableTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.saved = TestConfigurable._save_configuration()
|
||||
|
||||
def tearDown(self):
|
||||
TestConfigurable._restore_configuration(self.saved)
|
||||
|
||||
def checkSubclasses(self):
|
||||
# no matter how the class is configured, it should always be
|
||||
# possible to instantiate the subclasses directly
|
||||
self.assertIsInstance(TestConfig1(), TestConfig1)
|
||||
self.assertIsInstance(TestConfig2(), TestConfig2)
|
||||
|
||||
obj = TestConfig1(a=1)
|
||||
self.assertEqual(obj.a, 1)
|
||||
obj = TestConfig2(b=2)
|
||||
self.assertEqual(obj.b, 2)
|
||||
|
||||
def test_default(self):
|
||||
obj = TestConfigurable()
|
||||
self.assertIsInstance(obj, TestConfig1)
|
||||
self.assertIs(obj.a, None)
|
||||
|
||||
obj = TestConfigurable(a=1)
|
||||
self.assertIsInstance(obj, TestConfig1)
|
||||
self.assertEqual(obj.a, 1)
|
||||
|
||||
self.checkSubclasses()
|
||||
|
||||
def test_config_class(self):
|
||||
TestConfigurable.configure(TestConfig2)
|
||||
obj = TestConfigurable()
|
||||
self.assertIsInstance(obj, TestConfig2)
|
||||
self.assertIs(obj.b, None)
|
||||
|
||||
obj = TestConfigurable(b=2)
|
||||
self.assertIsInstance(obj, TestConfig2)
|
||||
self.assertEqual(obj.b, 2)
|
||||
|
||||
self.checkSubclasses()
|
||||
|
||||
def test_config_args(self):
|
||||
TestConfigurable.configure(None, a=3)
|
||||
obj = TestConfigurable()
|
||||
self.assertIsInstance(obj, TestConfig1)
|
||||
self.assertEqual(obj.a, 3)
|
||||
|
||||
obj = TestConfigurable(a=4)
|
||||
self.assertIsInstance(obj, TestConfig1)
|
||||
self.assertEqual(obj.a, 4)
|
||||
|
||||
self.checkSubclasses()
|
||||
# args bound in configure don't apply when using the subclass directly
|
||||
obj = TestConfig1()
|
||||
self.assertIs(obj.a, None)
|
||||
|
||||
def test_config_class_args(self):
|
||||
TestConfigurable.configure(TestConfig2, b=5)
|
||||
obj = TestConfigurable()
|
||||
self.assertIsInstance(obj, TestConfig2)
|
||||
self.assertEqual(obj.b, 5)
|
||||
|
||||
obj = TestConfigurable(b=6)
|
||||
self.assertIsInstance(obj, TestConfig2)
|
||||
self.assertEqual(obj.b, 6)
|
||||
|
||||
self.checkSubclasses()
|
||||
# args bound in configure don't apply when using the subclass directly
|
||||
obj = TestConfig2()
|
||||
self.assertIs(obj.b, None)
|
||||
|
||||
|
||||
class UnicodeLiteralTest(unittest.TestCase):
|
||||
def test_unicode_escapes(self):
|
||||
self.assertEqual(utf8(u('\u00e9')), b'\xc3\xa9')
|
||||
|
||||
|
||||
class ExecInTest(unittest.TestCase):
|
||||
# This test is python 2 only because there are no new future imports
|
||||
# defined in python 3 yet.
|
||||
@unittest.skipIf(sys.version_info >= print_function.getMandatoryRelease(),
|
||||
'no testable future imports')
|
||||
def test_no_inherit_future(self):
|
||||
# This file has from __future__ import print_function...
|
||||
f = StringIO()
|
||||
print('hello', file=f)
|
||||
# ...but the template doesn't
|
||||
exec_in('print >> f, "world"', dict(f=f))
|
||||
self.assertEqual(f.getvalue(), 'hello\nworld\n')
|
||||
|
||||
|
||||
class ArgReplacerTest(unittest.TestCase):
|
||||
def setUp(self):
|
||||
def function(x, y, callback=None, z=None):
|
||||
pass
|
||||
self.replacer = ArgReplacer(function, 'callback')
|
||||
|
||||
def test_omitted(self):
|
||||
args = (1, 2)
|
||||
kwargs = dict()
|
||||
self.assertIs(self.replacer.get_old_value(args, kwargs), None)
|
||||
self.assertEqual(self.replacer.replace('new', args, kwargs),
|
||||
(None, (1, 2), dict(callback='new')))
|
||||
|
||||
def test_position(self):
|
||||
args = (1, 2, 'old', 3)
|
||||
kwargs = dict()
|
||||
self.assertEqual(self.replacer.get_old_value(args, kwargs), 'old')
|
||||
self.assertEqual(self.replacer.replace('new', args, kwargs),
|
||||
('old', [1, 2, 'new', 3], dict()))
|
||||
|
||||
def test_keyword(self):
|
||||
args = (1,)
|
||||
kwargs = dict(y=2, callback='old', z=3)
|
||||
self.assertEqual(self.replacer.get_old_value(args, kwargs), 'old')
|
||||
self.assertEqual(self.replacer.replace('new', args, kwargs),
|
||||
('old', (1,), dict(y=2, callback='new', z=3)))
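A minimal sketch (not part of this commit) of the wrapping pattern ArgReplacer supports, as exercised by the tests above; fetch and traced_fetch are hypothetical names:

from tornado.util import ArgReplacer


def fetch(url, callback=None):
    callback("response for " + url)

replacer = ArgReplacer(fetch, 'callback')


def traced_fetch(*args, **kwargs):
    def traced(result):
        print("traced:", result)
        old_callback(result)
    # Swap in the tracing callback and keep a reference to the original one.
    old_callback, args, kwargs = replacer.replace(traced, args, kwargs)
    return fetch(*args, **kwargs)

traced_fetch("http://example.com", callback=print)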
|
||||
2315
Shared/lib/python3.4/site-packages/tornado/test/web_test.py
Normal file
2315
Shared/lib/python3.4/site-packages/tornado/test/web_test.py
Normal file
File diff suppressed because it is too large
313
Shared/lib/python3.4/site-packages/tornado/test/websocket_test.py
Normal file
313
Shared/lib/python3.4/site-packages/tornado/test/websocket_test.py
Normal file
|
|
@ -0,0 +1,313 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
|
||||
import traceback
|
||||
|
||||
from tornado.concurrent import Future
|
||||
from tornado.httpclient import HTTPError, HTTPRequest
|
||||
from tornado.log import gen_log, app_log
|
||||
from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
|
||||
from tornado.test.util import unittest
|
||||
from tornado.web import Application, RequestHandler
|
||||
from tornado.util import u
|
||||
|
||||
try:
|
||||
import tornado.websocket
|
||||
from tornado.util import _websocket_mask_python
|
||||
except ImportError:
|
||||
# The unittest module presents misleading errors on ImportError
|
||||
# (it acts as if websocket_test could not be found, hiding the underlying
|
||||
# error). If we get an ImportError here (which could happen due to
|
||||
# TORNADO_EXTENSION=1), print some extra information before failing.
|
||||
traceback.print_exc()
|
||||
raise
|
||||
|
||||
from tornado.websocket import WebSocketHandler, websocket_connect, WebSocketError
|
||||
|
||||
try:
|
||||
from tornado import speedups
|
||||
except ImportError:
|
||||
speedups = None
|
||||
|
||||
|
||||
class TestWebSocketHandler(WebSocketHandler):
|
||||
"""Base class for testing handlers that exposes the on_close event.
|
||||
|
||||
This allows for deterministic cleanup of the associated socket.
|
||||
"""
|
||||
def initialize(self, close_future):
|
||||
self.close_future = close_future
|
||||
|
||||
def on_close(self):
|
||||
self.close_future.set_result((self.close_code, self.close_reason))
|
||||
|
||||
|
||||
class EchoHandler(TestWebSocketHandler):
|
||||
def on_message(self, message):
|
||||
self.write_message(message, isinstance(message, bytes))
|
||||
|
||||
|
||||
class ErrorInOnMessageHandler(TestWebSocketHandler):
|
||||
def on_message(self, message):
|
||||
1/0
|
||||
|
||||
|
||||
class HeaderHandler(TestWebSocketHandler):
|
||||
def open(self):
|
||||
try:
|
||||
# In a websocket context, many RequestHandler methods
|
||||
# raise RuntimeErrors.
|
||||
self.set_status(503)
|
||||
raise Exception("did not get expected exception")
|
||||
except RuntimeError:
|
||||
pass
|
||||
self.write_message(self.request.headers.get('X-Test', ''))
|
||||
|
||||
|
||||
class NonWebSocketHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write('ok')
|
||||
|
||||
|
||||
class CloseReasonHandler(TestWebSocketHandler):
|
||||
def open(self):
|
||||
self.close(1001, "goodbye")
|
||||
|
||||
|
||||
class WebSocketTest(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
self.close_future = Future()
|
||||
return Application([
|
||||
('/echo', EchoHandler, dict(close_future=self.close_future)),
|
||||
('/non_ws', NonWebSocketHandler),
|
||||
('/header', HeaderHandler, dict(close_future=self.close_future)),
|
||||
('/close_reason', CloseReasonHandler,
|
||||
dict(close_future=self.close_future)),
|
||||
('/error_in_on_message', ErrorInOnMessageHandler,
|
||||
dict(close_future=self.close_future)),
|
||||
])
|
||||
|
||||
def test_http_request(self):
|
||||
# WS server, HTTP client.
|
||||
response = self.fetch('/echo')
|
||||
self.assertEqual(response.code, 400)
|
||||
|
||||
@gen_test
|
||||
def test_websocket_gen(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port(),
|
||||
io_loop=self.io_loop)
|
||||
ws.write_message('hello')
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, 'hello')
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
def test_websocket_callbacks(self):
|
||||
websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port(),
|
||||
io_loop=self.io_loop, callback=self.stop)
|
||||
ws = self.wait().result()
|
||||
ws.write_message('hello')
|
||||
ws.read_message(self.stop)
|
||||
response = self.wait().result()
|
||||
self.assertEqual(response, 'hello')
|
||||
self.close_future.add_done_callback(lambda f: self.stop())
|
||||
ws.close()
|
||||
self.wait()
|
||||
|
||||
@gen_test
|
||||
def test_binary_message(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port())
|
||||
ws.write_message(b'hello \xe9', binary=True)
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, b'hello \xe9')
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_unicode_message(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port())
|
||||
ws.write_message(u('hello \u00e9'))
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, u('hello \u00e9'))
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_error_in_on_message(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/error_in_on_message' % self.get_http_port())
|
||||
ws.write_message('hello')
|
||||
with ExpectLog(app_log, "Uncaught exception"):
|
||||
response = yield ws.read_message()
|
||||
self.assertIs(response, None)
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_websocket_http_fail(self):
|
||||
with self.assertRaises(HTTPError) as cm:
|
||||
yield websocket_connect(
|
||||
'ws://localhost:%d/notfound' % self.get_http_port(),
|
||||
io_loop=self.io_loop)
|
||||
self.assertEqual(cm.exception.code, 404)
|
||||
|
||||
@gen_test
|
||||
def test_websocket_http_success(self):
|
||||
with self.assertRaises(WebSocketError):
|
||||
yield websocket_connect(
|
||||
'ws://localhost:%d/non_ws' % self.get_http_port(),
|
||||
io_loop=self.io_loop)
|
||||
|
||||
@gen_test
|
||||
def test_websocket_network_fail(self):
|
||||
sock, port = bind_unused_port()
|
||||
sock.close()
|
||||
with self.assertRaises(IOError):
|
||||
with ExpectLog(gen_log, ".*"):
|
||||
yield websocket_connect(
|
||||
'ws://localhost:%d/' % port,
|
||||
io_loop=self.io_loop,
|
||||
connect_timeout=3600)
|
||||
|
||||
@gen_test
|
||||
def test_websocket_close_buffered_data(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port())
|
||||
ws.write_message('hello')
|
||||
ws.write_message('world')
|
||||
ws.stream.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_websocket_headers(self):
|
||||
# Ensure that arbitrary headers can be passed through websocket_connect.
|
||||
ws = yield websocket_connect(
|
||||
HTTPRequest('ws://localhost:%d/header' % self.get_http_port(),
|
||||
headers={'X-Test': 'hello'}))
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, 'hello')
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_server_close_reason(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/close_reason' % self.get_http_port())
|
||||
msg = yield ws.read_message()
|
||||
# A message of None means the other side closed the connection.
|
||||
self.assertIs(msg, None)
|
||||
self.assertEqual(ws.close_code, 1001)
|
||||
self.assertEqual(ws.close_reason, "goodbye")
|
||||
|
||||
@gen_test
|
||||
def test_client_close_reason(self):
|
||||
ws = yield websocket_connect(
|
||||
'ws://localhost:%d/echo' % self.get_http_port())
|
||||
ws.close(1001, 'goodbye')
|
||||
code, reason = yield self.close_future
|
||||
self.assertEqual(code, 1001)
|
||||
self.assertEqual(reason, 'goodbye')
|
||||
|
||||
@gen_test
|
||||
def test_check_origin_valid_no_path(self):
|
||||
port = self.get_http_port()
|
||||
|
||||
url = 'ws://localhost:%d/echo' % port
|
||||
headers = {'Origin': 'http://localhost:%d' % port}
|
||||
|
||||
ws = yield websocket_connect(HTTPRequest(url, headers=headers),
|
||||
io_loop=self.io_loop)
|
||||
ws.write_message('hello')
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, 'hello')
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_check_origin_valid_with_path(self):
|
||||
port = self.get_http_port()
|
||||
|
||||
url = 'ws://localhost:%d/echo' % port
|
||||
headers = {'Origin': 'http://localhost:%d/something' % port}
|
||||
|
||||
ws = yield websocket_connect(HTTPRequest(url, headers=headers),
|
||||
io_loop=self.io_loop)
|
||||
ws.write_message('hello')
|
||||
response = yield ws.read_message()
|
||||
self.assertEqual(response, 'hello')
|
||||
ws.close()
|
||||
yield self.close_future
|
||||
|
||||
@gen_test
|
||||
def test_check_origin_invalid_partial_url(self):
|
||||
port = self.get_http_port()
|
||||
|
||||
url = 'ws://localhost:%d/echo' % port
|
||||
headers = {'Origin': 'localhost:%d' % port}
|
||||
|
||||
with self.assertRaises(HTTPError) as cm:
|
||||
yield websocket_connect(HTTPRequest(url, headers=headers),
|
||||
io_loop=self.io_loop)
|
||||
self.assertEqual(cm.exception.code, 403)
|
||||
|
||||
@gen_test
|
||||
def test_check_origin_invalid(self):
|
||||
port = self.get_http_port()
|
||||
|
||||
url = 'ws://localhost:%d/echo' % port
|
||||
# Host is localhost, which should not be accessible from some other
|
||||
# domain
|
||||
headers = {'Origin': 'http://somewhereelse.com'}
|
||||
|
||||
with self.assertRaises(HTTPError) as cm:
|
||||
yield websocket_connect(HTTPRequest(url, headers=headers),
|
||||
io_loop=self.io_loop)
|
||||
|
||||
self.assertEqual(cm.exception.code, 403)
|
||||
|
||||
@gen_test
|
||||
def test_check_origin_invalid_subdomains(self):
|
||||
port = self.get_http_port()
|
||||
|
||||
url = 'ws://localhost:%d/echo' % port
|
||||
# Subdomains should be disallowed by default. If we could pass a
|
||||
# resolver to websocket_connect we could test sibling domains as well.
|
||||
headers = {'Origin': 'http://subtenant.localhost'}
|
||||
|
||||
with self.assertRaises(HTTPError) as cm:
|
||||
yield websocket_connect(HTTPRequest(url, headers=headers),
|
||||
io_loop=self.io_loop)
|
||||
|
||||
self.assertEqual(cm.exception.code, 403)
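The tests above exercise the default same-host Origin policy. A minimal sketch (not part of this commit; Python 3 imports, example.com is a placeholder) of relaxing it by overriding check_origin:

from urllib.parse import urlparse

from tornado.websocket import WebSocketHandler


class SubdomainFriendlyEchoHandler(WebSocketHandler):
    def check_origin(self, origin):
        # Accept example.com and its subdomains; anything else gets the
        # same 403 response the tests above assert for the default policy.
        host = urlparse(origin).hostname or ""
        return host == "example.com" or host.endswith(".example.com")

    def on_message(self, message):
        self.write_message(message)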
|
||||
|
||||
|
||||
class MaskFunctionMixin(object):
|
||||
# Subclasses should define self.mask(mask, data)
|
||||
def test_mask(self):
|
||||
self.assertEqual(self.mask(b'abcd', b''), b'')
|
||||
self.assertEqual(self.mask(b'abcd', b'b'), b'\x03')
|
||||
self.assertEqual(self.mask(b'abcd', b'54321'), b'TVPVP')
|
||||
self.assertEqual(self.mask(b'ZXCV', b'98765432'), b'c`t`olpd')
|
||||
# Include test cases with \x00 bytes (to ensure that the C
|
||||
# extension isn't depending on null-terminated strings) and
|
||||
# bytes with the high bit set (to smoke out signedness issues).
|
||||
self.assertEqual(self.mask(b'\x00\x01\x02\x03',
|
||||
b'\xff\xfb\xfd\xfc\xfe\xfa'),
|
||||
b'\xff\xfa\xff\xff\xfe\xfb')
|
||||
self.assertEqual(self.mask(b'\xff\xfb\xfd\xfc',
|
||||
b'\x00\x01\x02\x03\x04\x05'),
|
||||
b'\xff\xfa\xff\xff\xfb\xfe')
|
||||
|
||||
|
||||
class PythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
|
||||
def mask(self, mask, data):
|
||||
return _websocket_mask_python(mask, data)
|
||||
|
||||
|
||||
@unittest.skipIf(speedups is None, "tornado.speedups module not present")
|
||||
class CythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
|
||||
def mask(self, mask, data):
|
||||
return speedups.websocket_mask(mask, data)
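For reference (not part of this commit): the masking rule these tests verify is the RFC 6455 one, where each payload byte is XORed with the repeating 4-byte mask. A pure-Python sketch under Python 3 bytes semantics:

def websocket_mask_sketch(mask, data):
    # mask is 4 bytes; data is the payload to mask or unmask.
    return bytes(b ^ mask[i % 4] for i, b in enumerate(data))

assert websocket_mask_sketch(b'abcd', b'54321') == b'TVPVP'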
|
||||
100
Shared/lib/python3.4/site-packages/tornado/test/wsgi_test.py
Normal file
100
Shared/lib/python3.4/site-packages/tornado/test/wsgi_test.py
Normal file
|
|
@ -0,0 +1,100 @@
|
|||
from __future__ import absolute_import, division, print_function, with_statement
|
||||
from wsgiref.validate import validator
|
||||
|
||||
from tornado.escape import json_decode
|
||||
from tornado.test.httpserver_test import TypeCheckHandler
|
||||
from tornado.testing import AsyncHTTPTestCase
|
||||
from tornado.util import u
|
||||
from tornado.web import RequestHandler, Application
|
||||
from tornado.wsgi import WSGIApplication, WSGIContainer, WSGIAdapter
|
||||
|
||||
|
||||
class WSGIContainerTest(AsyncHTTPTestCase):
|
||||
def wsgi_app(self, environ, start_response):
|
||||
status = "200 OK"
|
||||
response_headers = [("Content-Type", "text/plain")]
|
||||
start_response(status, response_headers)
|
||||
return [b"Hello world!"]
|
||||
|
||||
def get_app(self):
|
||||
return WSGIContainer(validator(self.wsgi_app))
|
||||
|
||||
def test_simple(self):
|
||||
response = self.fetch("/")
|
||||
self.assertEqual(response.body, b"Hello world!")
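For context (not part of this commit): outside the test harness, a WSGI callable like wsgi_app above is served by wrapping it in WSGIContainer and handing it to HTTPServer; the port here is an arbitrary example:

from tornado import httpserver, ioloop, wsgi


def wsgi_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"Hello world!"]


if __name__ == "__main__":
    server = httpserver.HTTPServer(wsgi.WSGIContainer(wsgi_app))
    server.listen(8888)
    ioloop.IOLoop.instance().start()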
|
||||
|
||||
|
||||
class WSGIApplicationTest(AsyncHTTPTestCase):
|
||||
def get_app(self):
|
||||
class HelloHandler(RequestHandler):
|
||||
def get(self):
|
||||
self.write("Hello world!")
|
||||
|
||||
class PathQuotingHandler(RequestHandler):
|
||||
def get(self, path):
|
||||
self.write(path)
|
||||
|
||||
# It would be better to run the wsgiref server implementation in
|
||||
# another thread instead of using our own WSGIContainer, but this
|
||||
# fits better in our async testing framework and the wsgiref
|
||||
# validator should keep us honest
|
||||
return WSGIContainer(validator(WSGIApplication([
|
||||
("/", HelloHandler),
|
||||
("/path/(.*)", PathQuotingHandler),
|
||||
("/typecheck", TypeCheckHandler),
|
||||
])))
|
||||
|
||||
def test_simple(self):
|
||||
response = self.fetch("/")
|
||||
self.assertEqual(response.body, b"Hello world!")
|
||||
|
||||
def test_path_quoting(self):
|
||||
response = self.fetch("/path/foo%20bar%C3%A9")
|
||||
self.assertEqual(response.body, u("foo bar\u00e9").encode("utf-8"))
|
||||
|
||||
def test_types(self):
|
||||
headers = {"Cookie": "foo=bar"}
|
||||
response = self.fetch("/typecheck?foo=bar", headers=headers)
|
||||
data = json_decode(response.body)
|
||||
self.assertEqual(data, {})
|
||||
|
||||
response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
|
||||
data = json_decode(response.body)
|
||||
self.assertEqual(data, {})
|
||||
|
||||
# This is kind of hacky, but run some of the HTTPServer tests through
|
||||
# WSGIContainer and WSGIApplication to make sure everything survives
|
||||
# repeated disassembly and reassembly.
|
||||
from tornado.test import httpserver_test
|
||||
from tornado.test import web_test
|
||||
|
||||
|
||||
class WSGIConnectionTest(httpserver_test.HTTPConnectionTest):
|
||||
def get_app(self):
|
||||
return WSGIContainer(validator(WSGIApplication(self.get_handlers())))
|
||||
|
||||
|
||||
def wrap_web_tests_application():
|
||||
result = {}
|
||||
for cls in web_test.wsgi_safe_tests:
|
||||
class WSGIApplicationWrappedTest(cls):
|
||||
def get_app(self):
|
||||
self.app = WSGIApplication(self.get_handlers(),
|
||||
**self.get_app_kwargs())
|
||||
return WSGIContainer(validator(self.app))
|
||||
result["WSGIApplication_" + cls.__name__] = WSGIApplicationWrappedTest
|
||||
return result
|
||||
globals().update(wrap_web_tests_application())
|
||||
|
||||
|
||||
def wrap_web_tests_adapter():
|
||||
result = {}
|
||||
for cls in web_test.wsgi_safe_tests:
|
||||
class WSGIAdapterWrappedTest(cls):
|
||||
def get_app(self):
|
||||
self.app = Application(self.get_handlers(),
|
||||
**self.get_app_kwargs())
|
||||
return WSGIContainer(validator(WSGIAdapter(self.app)))
|
||||
result["WSGIAdapter_" + cls.__name__] = WSGIAdapterWrappedTest
|
||||
return result
|
||||
globals().update(wrap_web_tests_adapter())
|
||||