- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then (cd ../docs && mkdir sphinx-out && sphinx-build -E -n -W -b html . sphinx-out); fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then (cd ../docs && mkdir sphinx-doctest-out && sphinx-build -E -n -b doctest . sphinx-doctest-out); fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then (cd .. && flake8); fi
- - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then (cd .. && black --check --diff tornado); fi
+ - if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then (cd .. && black --check --diff tornado demos); fi
- if [[ $TRAVIS_PYTHON_VERSION == '3.6' ]]; then (cd .. && mypy tornado); fi
after_success:
await cur.execute("SELECT COUNT(*) FROM entries LIMIT 1")
await cur.fetchone()
except psycopg2.ProgrammingError:
- with open('schema.sql') as f:
+ with open("schema.sql") as f:
schema = f.read()
with (await db.cursor()) as cur:
await cur.execute(schema)
"""
with (await self.application.db.cursor()) as cur:
await cur.execute(stmt, args)
- return [self.row_to_obj(row, cur)
- for row in await cur.fetchall()]
+ return [self.row_to_obj(row, cur) for row in await cur.fetchall()]
async def queryone(self, stmt, *args):
"""Query for exactly one result.
# self.current_user in prepare instead.
user_id = self.get_secure_cookie("blogdemo_user")
if user_id:
- self.current_user = await self.queryone("SELECT * FROM authors WHERE id = %s",
- int(user_id))
+ self.current_user = await self.queryone(
+ "SELECT * FROM authors WHERE id = %s", int(user_id)
+ )
async def any_author_exists(self):
return bool(await self.query("SELECT * FROM authors LIMIT 1"))
class HomeHandler(BaseHandler):
async def get(self):
- entries = await self.query("SELECT * FROM entries ORDER BY published DESC LIMIT 5")
+ entries = await self.query(
+ "SELECT * FROM entries ORDER BY published DESC LIMIT 5"
+ )
if not entries:
self.redirect("/compose")
return
class FeedHandler(BaseHandler):
async def get(self):
- entries = await self.query("SELECT * FROM entries ORDER BY published DESC LIMIT 10")
+ entries = await self.query(
+ "SELECT * FROM entries ORDER BY published DESC LIMIT 10"
+ )
self.set_header("Content-Type", "application/atom+xml")
self.render("feed.xml", entries=entries)
html = markdown.markdown(text)
if id:
try:
- entry = await self.queryone("SELECT * FROM entries WHERE id = %s", int(id))
+ entry = await self.queryone(
+ "SELECT * FROM entries WHERE id = %s", int(id)
+ )
except NoResultError:
raise tornado.web.HTTPError(404)
slug = entry.slug
await self.execute(
"UPDATE entries SET title = %s, markdown = %s, html = %s "
- "WHERE id = %s", title, text, html, int(id))
+ "WHERE id = %s",
+ title,
+ text,
+ html,
+ int(id),
+ )
else:
slug = unicodedata.normalize("NFKD", title)
slug = re.sub(r"[^\w]+", " ", slug)
await self.execute(
"INSERT INTO entries (author_id,title,slug,markdown,html,published,updated)"
"VALUES (%s,%s,%s,%s,%s,CURRENT_TIMESTAMP,CURRENT_TIMESTAMP)",
- self.current_user.id, title, slug, text, html)
+ self.current_user.id,
+ title,
+ slug,
+ text,
+ html,
+ )
self.redirect("/entry/" + slug)
if await self.any_author_exists():
raise tornado.web.HTTPError(400, "author already created")
hashed_password = await tornado.ioloop.IOLoop.current().run_in_executor(
- None, bcrypt.hashpw, tornado.escape.utf8(self.get_argument("password")),
- bcrypt.gensalt())
+ None,
+ bcrypt.hashpw,
+ tornado.escape.utf8(self.get_argument("password")),
+ bcrypt.gensalt(),
+ )
author = await self.queryone(
"INSERT INTO authors (email, name, hashed_password) "
"VALUES (%s, %s, %s) RETURNING id",
- self.get_argument("email"), self.get_argument("name"),
- tornado.escape.to_unicode(hashed_password))
+ self.get_argument("email"),
+ self.get_argument("name"),
+ tornado.escape.to_unicode(hashed_password),
+ )
self.set_secure_cookie("blogdemo_user", str(author.id))
self.redirect(self.get_argument("next", "/"))
async def post(self):
try:
- author = await self.queryone("SELECT * FROM authors WHERE email = %s",
- self.get_argument("email"))
+ author = await self.queryone(
+ "SELECT * FROM authors WHERE email = %s", self.get_argument("email")
+ )
except NoResultError:
self.render("login.html", error="email not found")
return
hashed_password = await tornado.ioloop.IOLoop.current().run_in_executor(
- None, bcrypt.hashpw, tornado.escape.utf8(self.get_argument("password")),
- tornado.escape.utf8(author.hashed_password))
+ None,
+ bcrypt.hashpw,
+ tornado.escape.utf8(self.get_argument("password")),
+ tornado.escape.utf8(author.hashed_password),
+ )
hashed_password = tornado.escape.to_unicode(hashed_password)
if hashed_password == author.hashed_password:
self.set_secure_cookie("blogdemo_user", str(author.id))
# Create the global connection pool.
async with aiopg.create_pool(
- host=options.db_host,
- port=options.db_port,
- user=options.db_user,
- password=options.db_password,
- dbname=options.db_database) as db:
+ host=options.db_host,
+ port=options.db_port,
+ user=options.db_user,
+ password=options.db_password,
+ dbname=options.db_database,
+ ) as db:
await maybe_create_tables(db)
app = Application(db)
app.listen(options.port)
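The NoResultError caught by the handlers above comes from the queryone helper whose docstring is cut off near the top of this file; a minimal sketch of its usual shape on BaseHandler, assuming the query method shown earlier (illustrative, not part of this diff):

class NoResultError(Exception):
    pass

async def queryone(self, stmt, *args):
    """Query for exactly one result.

    Raises NoResultError if there are no results, or ValueError if
    there are more than one.
    """
    # Illustrative sketch: delegate to query() and enforce exactly one row.
    results = await self.query(stmt, *args)
    if len(results) == 0:
        raise NoResultError()
    elif len(results) > 1:
        raise ValueError("Expected 1 result, got %d" % len(results))
    return results[0]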
def add_message(self, message):
self.cache.append(message)
if len(self.cache) > self.cache_size:
- self.cache = self.cache[-self.cache_size:]
+ self.cache = self.cache[-self.cache_size :]
self.cond.notify_all()
class MessageNewHandler(tornado.web.RequestHandler):
"""Post a new message to the chat room."""
+
def post(self):
- message = {
- "id": str(uuid.uuid4()),
- "body": self.get_argument("body"),
- }
+ message = {"id": str(uuid.uuid4()), "body": self.get_argument("body")}
# render_string() returns a byte string, which is not supported
# in json, so we must convert it to a character string.
message["html"] = tornado.escape.to_unicode(
- self.render_string("message.html", message=message))
+ self.render_string("message.html", message=message)
+ )
if self.get_argument("next", None):
self.redirect(self.get_argument("next"))
else:
Waits until new messages are available before returning anything.
"""
+
async def post(self):
cursor = self.get_argument("cursor", None)
messages = global_message_buffer.get_messages_since(cursor)
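The get_messages_since() call above pairs with the Condition that add_message() notifies; a minimal sketch of the waiting side, assuming the demo's global_message_buffer exposes the cond and cache used earlier (illustrative, not part of this diff):

async def wait_for_messages(cursor):
    # Check the cache first; if nothing is newer than the cursor, block
    # until add_message() calls cond.notify_all(), then check again.
    while True:
        messages = global_message_buffer.get_messages_since(cursor)
        if messages:
            return messages
        await global_message_buffer.cond.wait()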
class MainHandler(BaseHandler, tornado.auth.FacebookGraphMixin):
@tornado.web.authenticated
async def get(self):
- stream = await self.facebook_request("/me/home", self._on_stream,
- access_token=self.current_user["access_token"])
+ stream = await self.facebook_request(
+ "/me/home", self._on_stream, access_token=self.current_user["access_token"]
+ )
if stream is None:
# Session may have expired
self.redirect("/auth/login")
class AuthLoginHandler(BaseHandler, tornado.auth.FacebookGraphMixin):
async def get(self):
- my_url = (self.request.protocol + "://" + self.request.host +
- "/auth/login?next=" +
- tornado.escape.url_escape(self.get_argument("next", "/")))
+ my_url = (
+ self.request.protocol
+ + "://"
+ + self.request.host
+ + "/auth/login?next="
+ + tornado.escape.url_escape(self.get_argument("next", "/"))
+ )
if self.get_argument("code", False):
user = await self.get_authenticated_user(
redirect_uri=my_url,
client_id=self.settings["facebook_api_key"],
client_secret=self.settings["facebook_secret"],
- code=self.get_argument("code"))
+ code=self.get_argument("code"),
+ )
self.set_secure_cookie("fbdemo_user", tornado.escape.json_encode(user))
self.redirect(self.get_argument("next", "/"))
return
- self.authorize_redirect(redirect_uri=my_url,
- client_id=self.settings["facebook_api_key"],
- extra_params={"scope": "user_posts"})
+ self.authorize_redirect(
+ redirect_uri=my_url,
+ client_id=self.settings["facebook_api_key"],
+ extra_params={"scope": "user_posts"},
+ )
class AuthLogoutHandler(BaseHandler, tornado.auth.FacebookGraphMixin):
def post(self):
for field_name, files in self.request.files.items():
for info in files:
- filename, content_type = info['filename'], info['content_type']
- body = info['body']
- logging.info('POST "%s" "%s" %d bytes',
- filename, content_type, len(body))
+ filename, content_type = info["filename"], info["content_type"]
+ body = info["body"]
+ logging.info(
+ 'POST "%s" "%s" %d bytes', filename, content_type, len(body)
+ )
- self.write('OK')
+ self.write("OK")
@tornado.web.stream_request_body
def put(self, filename):
filename = unquote(filename)
- mtype = self.request.headers.get('Content-Type')
+ mtype = self.request.headers.get("Content-Type")
logging.info('PUT "%s" "%s" %d bytes', filename, mtype, self.bytes_read)
- self.write('OK')
+ self.write("OK")
def make_app():
- return tornado.web.Application([
- (r"/post", POSTHandler),
- (r"/(.*)", PUTHandler),
- ])
+ return tornado.web.Application([(r"/post", POSTHandler), (r"/(.*)", PUTHandler)])
if __name__ == "__main__":
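The @tornado.web.stream_request_body decorator above implies a data_received() hook that this hunk elides; a hedged sketch of that counterpart, which is how self.bytes_read gets its value before put() runs:

@tornado.web.stream_request_body
class PUTHandler(tornado.web.RequestHandler):
    def initialize(self):
        self.bytes_read = 0

    def data_received(self, chunk):
        # Called once per body chunk as it streams in, before put().
        self.bytes_read += len(chunk)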
for filename in filenames:
filename_bytes = filename.encode()
- mtype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+ mtype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
buf = (
- (b'--%s\r\n' % boundary_bytes) +
- (b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n' %
- (filename_bytes, filename_bytes)) +
- (b'Content-Type: %s\r\n' % mtype.encode()) +
- b'\r\n'
+ (b"--%s\r\n" % boundary_bytes)
+ + (
+ b'Content-Disposition: form-data; name="%s"; filename="%s"\r\n'
+ % (filename_bytes, filename_bytes)
+ )
+ + (b"Content-Type: %s\r\n" % mtype.encode())
+ + b"\r\n"
)
yield write(buf)
- with open(filename, 'rb') as f:
+ with open(filename, "rb") as f:
while True:
# 16k at a time.
chunk = f.read(16 * 1024)
if not chunk:
    break
yield write(chunk)
- yield write(b'\r\n')
+ yield write(b"\r\n")
- yield write(b'--%s--\r\n' % (boundary_bytes,))
+ yield write(b"--%s--\r\n" % (boundary_bytes,))
# Using HTTP PUT, upload one raw file. This is preferred for large files since
def post(filenames):
client = httpclient.AsyncHTTPClient()
boundary = uuid4().hex
- headers = {'Content-Type': 'multipart/form-data; boundary=%s' % boundary}
+ headers = {"Content-Type": "multipart/form-data; boundary=%s" % boundary}
producer = partial(multipart_producer, boundary, filenames)
- response = yield client.fetch('http://localhost:8888/post',
- method='POST',
- headers=headers,
- body_producer=producer)
+ response = yield client.fetch(
+ "http://localhost:8888/post",
+ method="POST",
+ headers=headers,
+ body_producer=producer,
+ )
print(response)
@gen.coroutine
def raw_producer(filename, write):
- with open(filename, 'rb') as f:
+ with open(filename, "rb") as f:
while True:
# 16K at a time.
chunk = f.read(16 * 1024)
def put(filenames):
client = httpclient.AsyncHTTPClient()
for filename in filenames:
- mtype = mimetypes.guess_type(filename)[0] or 'application/octet-stream'
- headers = {'Content-Type': mtype}
+ mtype = mimetypes.guess_type(filename)[0] or "application/octet-stream"
+ headers = {"Content-Type": mtype}
producer = partial(raw_producer, filename)
url_path = quote(os.path.basename(filename))
- response = yield client.fetch('http://localhost:8888/%s' % url_path,
- method='PUT',
- headers=headers,
- body_producer=producer)
+ response = yield client.fetch(
+ "http://localhost:8888/%s" % url_path,
+ method="PUT",
+ headers=headers,
+ body_producer=producer,
+ )
print(response)
def main():
tornado.options.parse_command_line()
- application = tornado.web.Application([
- (r"/", MainHandler),
- ])
+ application = tornado.web.Application([(r"/", MainHandler)])
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(options.port)
tornado.ioloop.IOLoop.current().start()
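The raw_producer loop above is truncated by this hunk; for reference, a sketch of the full body_producer contract (fetch() calls the producer with a write function, and the producer awaits each write until EOF):

@gen.coroutine
def raw_producer(filename, write):
    with open(filename, "rb") as f:
        while True:
            # 16K at a time.
            chunk = f.read(16 * 1024)
            if not chunk:
                # Complete.
                break
            yield write(chunk)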
to prevent hitting file system limits for number of files in each
directory. 1 means one level of directories, 2 means 2, etc.
"""
+
def __init__(self, root_directory, bucket_depth=0):
- web.Application.__init__(self, [
- (r"/", RootHandler),
- (r"/([^/]+)/(.+)", ObjectHandler),
- (r"/([^/]+)/", BucketHandler),
- ])
+ web.Application.__init__(
+ self,
+ [
+ (r"/", RootHandler),
+ (r"/([^/]+)/(.+)", ObjectHandler),
+ (r"/([^/]+)/", BucketHandler),
+ ],
+ )
self.directory = os.path.abspath(root_directory)
if not os.path.exists(self.directory):
os.makedirs(self.directory)
self.set_header("Content-Type", "application/xml; charset=UTF-8")
name = list(value.keys())[0]
parts = []
- parts.append('<' + name +
- ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
+ parts.append("<" + name + ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
self._render_parts(value[name], parts)
- parts.append('</' + name + '>')
- self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
- ''.join(parts))
+ parts.append("</" + name + ">")
+ self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' + "".join(parts))
def _render_parts(self, value, parts=[]):
if isinstance(value, (unicode_type, bytes)):
if not isinstance(subvalue, list):
subvalue = [subvalue]
for subsubvalue in subvalue:
- parts.append('<' + name + '>')
+ parts.append("<" + name + ">")
self._render_parts(subsubvalue, parts)
- parts.append('</' + name + '>')
+ parts.append("</" + name + ">")
else:
raise Exception("Unknown S3 value type %r", value)
def _object_path(self, bucket, object_name):
if self.application.bucket_depth < 1:
- return os.path.abspath(os.path.join(
- self.application.directory, bucket, object_name))
+ return os.path.abspath(
+ os.path.join(self.application.directory, bucket, object_name)
+ )
hash = hashlib.md5(object_name).hexdigest()
- path = os.path.abspath(os.path.join(
- self.application.directory, bucket))
+ path = os.path.abspath(os.path.join(self.application.directory, bucket))
for i in range(self.application.bucket_depth):
- path = os.path.join(path, hash[:2 * (i + 1)])
+ path = os.path.join(path, hash[: 2 * (i + 1)])
return os.path.join(path, object_name)
for name in names:
path = os.path.join(self.application.directory, name)
info = os.stat(path)
- buckets.append({
- "Name": name,
- "CreationDate": datetime.datetime.utcfromtimestamp(
- info.st_ctime),
- })
- self.render_xml({"ListAllMyBucketsResult": {
- "Buckets": {"Bucket": buckets},
- }})
+ buckets.append(
+ {
+ "Name": name,
+ "CreationDate": datetime.datetime.utcfromtimestamp(info.st_ctime),
+ }
+ )
+ self.render_xml({"ListAllMyBucketsResult": {"Buckets": {"Bucket": buckets}}})
class BucketHandler(BaseRequestHandler):
prefix = self.get_argument("prefix", u"")
marker = self.get_argument("marker", u"")
max_keys = int(self.get_argument("max-keys", 50000))
- path = os.path.abspath(os.path.join(self.application.directory,
- bucket_name))
+ path = os.path.abspath(os.path.join(self.application.directory, bucket_name))
terse = int(self.get_argument("terse", 0))
- if not path.startswith(self.application.directory) or \
- not os.path.isdir(path):
+ if not path.startswith(self.application.directory) or not os.path.isdir(path):
raise web.HTTPError(404)
object_names = []
for root, dirs, files in os.walk(path):
c = {"Key": object_name}
if not terse:
info = os.stat(object_path)
- c.update({
- "LastModified": datetime.datetime.utcfromtimestamp(
- info.st_mtime),
- "Size": info.st_size,
- })
+ c.update(
+ {
+ "LastModified": datetime.datetime.utcfromtimestamp(
+ info.st_mtime
+ ),
+ "Size": info.st_size,
+ }
+ )
contents.append(c)
marker = object_name
- self.render_xml({"ListBucketResult": {
- "Name": bucket_name,
- "Prefix": prefix,
- "Marker": marker,
- "MaxKeys": max_keys,
- "IsTruncated": truncated,
- "Contents": contents,
- }})
+ self.render_xml(
+ {
+ "ListBucketResult": {
+ "Name": bucket_name,
+ "Prefix": prefix,
+ "Marker": marker,
+ "MaxKeys": max_keys,
+ "IsTruncated": truncated,
+ "Contents": contents,
+ }
+ }
+ )
def put(self, bucket_name):
- path = os.path.abspath(os.path.join(
- self.application.directory, bucket_name))
- if not path.startswith(self.application.directory) or \
- os.path.exists(path):
+ path = os.path.abspath(os.path.join(self.application.directory, bucket_name))
+ if not path.startswith(self.application.directory) or os.path.exists(path):
raise web.HTTPError(403)
os.makedirs(path)
self.finish()
def delete(self, bucket_name):
- path = os.path.abspath(os.path.join(
- self.application.directory, bucket_name))
- if not path.startswith(self.application.directory) or \
- not os.path.isdir(path):
+ path = os.path.abspath(os.path.join(self.application.directory, bucket_name))
+ if not path.startswith(self.application.directory) or not os.path.isdir(path):
raise web.HTTPError(404)
if len(os.listdir(path)) > 0:
raise web.HTTPError(403)
def get(self, bucket, object_name):
object_name = urllib.unquote(object_name)
path = self._object_path(bucket, object_name)
- if not path.startswith(self.application.directory) or \
- not os.path.isfile(path):
+ if not path.startswith(self.application.directory) or not os.path.isfile(path):
raise web.HTTPError(404)
info = os.stat(path)
self.set_header("Content-Type", "application/unknown")
- self.set_header("Last-Modified", datetime.datetime.utcfromtimestamp(
- info.st_mtime))
+ self.set_header(
+ "Last-Modified", datetime.datetime.utcfromtimestamp(info.st_mtime)
+ )
object_file = open(path, "rb")
try:
self.finish(object_file.read())
def put(self, bucket, object_name):
object_name = urllib.unquote(object_name)
- bucket_dir = os.path.abspath(os.path.join(
- self.application.directory, bucket))
- if not bucket_dir.startswith(self.application.directory) or \
- not os.path.isdir(bucket_dir):
+ bucket_dir = os.path.abspath(os.path.join(self.application.directory, bucket))
+ if not bucket_dir.startswith(self.application.directory) or not os.path.isdir(
+ bucket_dir
+ ):
raise web.HTTPError(404)
path = self._object_path(bucket, object_name)
if not path.startswith(bucket_dir) or os.path.isdir(path):
def delete(self, bucket, object_name):
object_name = urllib.unquote(object_name)
path = self._object_path(bucket, object_name)
- if not path.startswith(self.application.directory) or \
- not os.path.isfile(path):
+ if not path.startswith(self.application.directory) or not os.path.isfile(path):
raise web.HTTPError(404)
os.unlink(path)
self.set_status(204)
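A worked illustration of the bucket_depth sharding in _object_path above (hypothetical bucket and object names; with bucket_depth=2 the path gains one md5-prefix directory per level):

import hashlib
import os

digest = hashlib.md5(b"photo.jpg").hexdigest()
# Level i uses the first 2 * (i + 1) hex chars of the digest, so depth 2
# stores the object at <root>/<bucket>/<digest[:2]>/<digest[:4]>/photo.jpg.
path = os.path.join("root", "mybucket", digest[:2], digest[:4], "photo.jpg")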
from tornado.options import define, options, parse_command_line, parse_config_file
from tornado.web import Application, RequestHandler, authenticated
-define('port', default=8888, help="port to listen on")
-define('config_file', default='secrets.cfg',
- help='filename for additional configuration')
-
-define('debug', default=False, group='application',
- help="run in debug mode (with automatic reloading)")
+define("port", default=8888, help="port to listen on")
+define(
+ "config_file", default="secrets.cfg", help="filename for additional configuration"
+)
+
+define(
+ "debug",
+ default=False,
+ group="application",
+ help="run in debug mode (with automatic reloading)",
+)
# The following settings should probably be defined in secrets.cfg
-define('twitter_consumer_key', type=str, group='application')
-define('twitter_consumer_secret', type=str, group='application')
-define('cookie_secret', type=str, group='application',
- default='__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE__',
- help="signing key for secure cookies")
+define("twitter_consumer_key", type=str, group="application")
+define("twitter_consumer_secret", type=str, group="application")
+define(
+ "cookie_secret",
+ type=str,
+ group="application",
+ default="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE__",
+ help="signing key for secure cookies",
+)
class BaseHandler(RequestHandler):
- COOKIE_NAME = 'twitterdemo_user'
+ COOKIE_NAME = "twitterdemo_user"
def get_current_user(self):
user_json = self.get_secure_cookie(self.COOKIE_NAME)
@gen.coroutine
def get(self):
timeline = yield self.twitter_request(
- '/statuses/home_timeline',
- access_token=self.current_user['access_token'])
- self.render('home.html', timeline=timeline)
+ "/statuses/home_timeline", access_token=self.current_user["access_token"]
+ )
+ self.render("home.html", timeline=timeline)
class LoginHandler(BaseHandler, TwitterMixin):
@gen.coroutine
def get(self):
- if self.get_argument('oauth_token', None):
+ if self.get_argument("oauth_token", None):
user = yield self.get_authenticated_user()
del user["description"]
self.set_secure_cookie(self.COOKIE_NAME, json_encode(user))
- self.redirect(self.get_argument('next', '/'))
+ self.redirect(self.get_argument("next", "/"))
else:
yield self.authorize_redirect(callback_uri=self.request.full_url())
parse_config_file(options.config_file)
app = Application(
- [
- ('/', MainHandler),
- ('/login', LoginHandler),
- ('/logout', LogoutHandler),
- ],
- login_url='/login',
- **options.group_dict('application'))
+ [("/", MainHandler), ("/login", LoginHandler), ("/logout", LogoutHandler)],
+ login_url="/login",
+ **options.group_dict("application")
+ )
app.listen(options.port)
- logging.info('Listening on http://localhost:%d' % options.port)
+ logging.info("Listening on http://localhost:%d" % options.port)
IOLoop.current().start()
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
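The secrets.cfg loaded by parse_config_file() above is executed as Python, with each define()d option assigned as a plain variable; a hedged example with placeholder values:

# secrets.cfg (placeholder values, not real credentials)
twitter_consumer_key = "REPLACE_ME"
twitter_consumer_secret = "REPLACE_ME"
cookie_secret = "REPLACE_ME"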
class Application(tornado.web.Application):
def __init__(self):
- handlers = [
- (r"/", MainHandler),
- (r"/chatsocket", ChatSocketHandler),
- ]
+ handlers = [(r"/", MainHandler), (r"/chatsocket", ChatSocketHandler)]
settings = dict(
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
def update_cache(cls, chat):
cls.cache.append(chat)
if len(cls.cache) > cls.cache_size:
- cls.cache = cls.cache[-cls.cache_size:]
+ cls.cache = cls.cache[-cls.cache_size :]
@classmethod
def send_updates(cls, chat):
def on_message(self, message):
logging.info("got message %r", message)
parsed = tornado.escape.json_decode(message)
- chat = {
- "id": str(uuid.uuid4()),
- "body": parsed["body"],
- }
+ chat = {"id": str(uuid.uuid4()), "body": parsed["body"]}
chat["html"] = tornado.escape.to_basestring(
- self.render_string("message.html", message=chat))
+ self.render_string("message.html", message=chat)
+ )
ChatSocketHandler.update_cache(chat)
ChatSocketHandler.send_updates(chat)
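send_updates(), invoked above but elided from this hunk, fans the rendered chat out to every open websocket; a minimal sketch assuming the demo's class-level waiters set:

@classmethod
def send_updates(cls, chat):
    logging.info("sending message to %d waiters", len(cls.waiters))
    for waiter in cls.waiters:
        try:
            waiter.write_message(chat)
        except Exception:
            # Illustrative: log and keep going rather than kill the broadcast.
            logging.error("Error sending message", exc_info=True)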
from tornado import gen, httpclient, ioloop, queues
-base_url = 'http://www.tornadoweb.org/en/stable/'
+base_url = "http://www.tornadoweb.org/en/stable/"
concurrency = 10
'http://www.tornadoweb.org/en/stable/gen.html'.
"""
response = await httpclient.AsyncHTTPClient().fetch(url)
- print('fetched %s' % url)
+ print("fetched %s" % url)
- html = response.body.decode(errors='ignore')
- return [urljoin(url, remove_fragment(new_url))
- for new_url in get_links(html)]
+ html = response.body.decode(errors="ignore")
+ return [urljoin(url, remove_fragment(new_url)) for new_url in get_links(html)]
def remove_fragment(url):
self.urls = []
def handle_starttag(self, tag, attrs):
- href = dict(attrs).get('href')
- if href and tag == 'a':
+ href = dict(attrs).get("href")
+ if href and tag == "a":
self.urls.append(href)
url_seeker = URLSeeker()
if current_url in fetching:
return
- print('fetching %s' % current_url)
+ print("fetching %s" % current_url)
fetching.add(current_url)
urls = await get_links_from_url(current_url)
fetched.add(current_url)
try:
await fetch_url(url)
except Exception as e:
- print('Exception: %s %s' % (e, url))
+ print("Exception: %s %s" % (e, url))
finally:
q.task_done()
workers = gen.multi([worker() for _ in range(concurrency)])
await q.join(timeout=timedelta(seconds=300))
assert fetching == fetched
- print('Done in %d seconds, fetched %s URLs.' % (
- time.time() - start, len(fetched)))
+ print("Done in %d seconds, fetched %s URLs." % (time.time() - start, len(fetched)))
# Signal all the workers to exit.
for _ in range(concurrency):
    await q.put(None)
await workers
-if __name__ == '__main__':
+if __name__ == "__main__":
io_loop = ioloop.IOLoop.current()
io_loop.run_sync(main)
[testenv:py3-lint]
commands =
flake8 {posargs:}
- black --check --diff {posargs:tornado}
+ black --check --diff {posargs:tornado demos}
changedir = {toxinidir}
[testenv:py3-mypy]