
Bug#991233: unblock: apertium-apy/0.11.7-2



On Sun, Jul 18, 2021 at 2:35 PM Graham Inggs <ginggs@debian.org> wrote:
> It looks like you've attached a binary debdiff.   We are looking for a
> source debdiff between the version in testing and unstable.
> i.e.
>
> debdiff apertium-apy_0.11.6-1.dsc apertium-apy_0.11.7-2.dsc

Sorry! I've attached the source debdiff now.
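
For reference, a source debdiff like the attached one can be generated as below
(assuming both .dsc files and their accompanying tarballs are in the current
directory; the output file name is just illustrative):

  debdiff apertium-apy_0.11.6-1.dsc apertium-apy_0.11.7-2.dsc > apertium-apy_0.11.7-2.debdiff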

-- 
Kartik Mistry | કાર્તિક મિસ્ત્રી
kartikm.wordpress.com
diff -Nru apertium-apy-0.11.6/apertium_apy/apy.py apertium-apy-0.11.7/apertium_apy/apy.py
--- apertium-apy-0.11.6/apertium_apy/apy.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/apy.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,13 +1,13 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 # coding=utf-8
 # -*- indent-tabs-mode: nil -*-
 
 __author__ = 'Kevin Brubeck Unhammer, Sushain K. Cherivirala'
-__copyright__ = 'Copyright 2013--2018, Kevin Brubeck Unhammer, Sushain K. Cherivirala'
+__copyright__ = 'Copyright 2013--2020, Kevin Brubeck Unhammer, Sushain K. Cherivirala'
 __credits__ = ['Kevin Brubeck Unhammer', 'Sushain K. Cherivirala', 'Jonathan North Washington', 'Xavi Ivars', 'Shardul Chiplunkar']
 __license__ = 'GPLv3'
 __status__ = 'Beta'
-__version__ = '0.11.6'
+__version__ = '0.11.7'
 
 import argparse
 import configparser
@@ -29,6 +29,8 @@
 from tornado.locks import Semaphore
 from tornado.log import enable_pretty_logging
 
+from typing import Sequence, Iterable, Type, List, Tuple, Any  # noqa: F401
+
 from apertium_apy import BYPASS_TOKEN, missing_freqs_db  # noqa: F401
 from apertium_apy import missingdb
 from apertium_apy import systemd
@@ -86,7 +88,7 @@
 
 
 def setup_handler(
-    port, pairs_path, nonpairs_path, lang_names, missing_freqs_path, timeout,
+    pairs_path, nonpairs_path, lang_names, missing_freqs_path, timeout,
     max_pipes_per_pair, min_pipes_per_pair, max_users_per_pipe, max_idle_secs,
     restart_pipe_after, max_doc_pipes, verbosity=0, scale_mt_logs=False,
     memory=1000, apy_keys=None,
@@ -249,9 +251,9 @@
 
 def setup_application(args):
     if args.stat_period_max_age:
-        BaseHandler.STAT_PERIOD_MAX_AGE = timedelta(0, args.stat_period_max_age, 0)
+        BaseHandler.stat_period_max_age = timedelta(0, args.stat_period_max_age, 0)
 
-    setup_handler(args.port, args.pairs_path, args.nonpairs_path, args.lang_names, args.missing_freqs, args.timeout,
+    setup_handler(args.pairs_path, args.nonpairs_path, args.lang_names, args.missing_freqs, args.timeout,
                   args.max_pipes_per_pair, args.min_pipes_per_pair, args.max_users_per_pipe, args.max_idle_secs,
                   args.restart_pipe_after, args.max_doc_pipes, args.verbosity, args.scalemt_logs,
                   args.unknown_memory_limit, args.api_keys)
@@ -274,7 +276,7 @@
         (r'/identifyLang', IdentifyLangHandler),
         (r'/getLocale', GetLocaleHandler),
         (r'/pipedebug', PipeDebugHandler),
-    ]
+    ]  # type: List[Tuple[str, Type[tornado.web.RequestHandler]]]
 
     if importlib_util.find_spec('streamparser'):
         handlers.append((r'/speller', SpellerHandler))
@@ -295,7 +297,8 @@
 
         handlers.append((r'/suggest', SuggestionHandler))
 
-    return tornado.web.Application(handlers)
+    # TODO: fix mypy. Application expects List but List is invariant and we use subclasses
+    return tornado.web.Application(handlers)  # type:ignore
 
 
 def setup_logging(args):
@@ -350,10 +353,10 @@
             'certfile': args.ssl_cert,
             'keyfile': args.ssl_key,
         })
-        logging.info('Serving at https://localhost:%s', args.port)
+        logging.info('Serving on all interfaces/families, e.g. https://localhost:%s', args.port)
     else:
         http_server = tornado.httpserver.HTTPServer(application)
-        logging.info('Serving at http://localhost:%s', args.port)
+        logging.info('Serving on all interfaces/families, e.g. http://localhost:%s', args.port)
 
     signal.signal(signal.SIGTERM, sig_handler)
     signal.signal(signal.SIGINT, sig_handler)
diff -Nru apertium-apy-0.11.6/apertium_apy/gateway.py apertium-apy-0.11.7/apertium_apy/gateway.py
--- apertium-apy-0.11.6/apertium_apy/gateway.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/gateway.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,7 +1,6 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 import argparse
-import functools
 import itertools
 import json
 import logging
@@ -43,8 +42,7 @@
     def initialize(self, balancer):
         self.balancer = balancer
 
-    @tornado.gen.coroutine
-    def get(self):
+    async def get(self):
         path = self.request.path
         mode, lang_pair, per_word_modes = [None] * 3
         path_to_mode = {
@@ -86,11 +84,12 @@
         logging.info('Redirecting %s?%s to %s%s?%s', path, query, server_port, path, query)
 
         http = tornado.httpclient.AsyncHTTPClient()
-        http.fetch(
+        response = await http.fetch(
             server_port + path + '?' + query,
-            functools.partial(self._on_download, (server, port), lang_pair),
+            raise_error=False,
             validate_cert=verify_ssl_cert, headers=headers)
         self.balancer.inform('start', (server, port), url=path)
+        self._on_download((server, port), lang_pair, response)
 
     def _on_download(self, server, lang_pair, response):
         response_body = response.body
@@ -127,9 +126,8 @@
         if self.request.path != '/listPairs' and self.request.path != '/list':
             self.send_error(400)
         else:
-            query = self.get_arguments('q')
-            if query:
-                query = query[0]
+            q = self.get_arguments('q')
+            query = q[0] if q else None
             if self.request.path == '/listPairs' or query == 'pairs':
                 logging.info('Responding to request for pairs')
                 response_data = []
@@ -363,9 +361,9 @@
                 if '/list' not in self.serverlist[server][1]:
                     self.serverlist[server][1]['list'] = 0
                 self.serverlist[server][1]['list'] += test_sum / (self.num_responses * len(results.items()))
-        self.calcAggregateScores()
+        # self.calcAggregateScores()  TODO: Does not exist
         self.serverlist = OrderedDict(filter(lambda x: x[1][0] != float('inf'), self.serverlist.items()))
-        self.sortServerList()
+        # self.sortServerList()  TODO: Does not exist
 
 
 def test_server_pool(server_list):
@@ -546,5 +544,5 @@
     http_server.start(args.num_processes)
     main_loop = tornado.ioloop.IOLoop.instance()
     if isinstance(balancer, Fastest):
-        tornado.ioloop.PeriodicCallback(lambda: balancer.init_server_list(), args.test_interval, io_loop=main_loop).start()
+        tornado.ioloop.PeriodicCallback(lambda: balancer.init_server_list(), args.test_interval).start()
     main_loop.start()
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/base.py apertium-apy-0.11.7/apertium_apy/handlers/base.py
--- apertium-apy-0.11.6/apertium_apy/handlers/base.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/base.py	2021-04-01 15:20:33.000000000 +0530
@@ -2,7 +2,7 @@
 import logging
 import os
 import sys
-from datetime import datetime
+from datetime import datetime, timedelta
 
 import tornado
 import tornado.gen
@@ -10,8 +10,8 @@
 from tornado.escape import utf8
 from tornado.locks import Semaphore
 
-if False:
-    from typing import Dict, Optional, List, Tuple  # noqa: F401
+from typing import Union, Dict, Optional, List, Any, Tuple  # noqa: F401
+from apertium_apy.utils.translation import FlushingPipeline, SimplePipeline
 
 
 def dump_json(data):
@@ -20,6 +20,13 @@
     return json.dumps(data, ensure_ascii=False).replace('</', '<\\/')
 
 
+class Stats:
+    startdate = datetime.now()
+    usecount = {}               # type: Dict[Tuple[str, str], int]
+    vmsize = 0
+    timing = []                 # type: List[Tuple[datetime, datetime, int]]
+
+
 class BaseHandler(tornado.web.RequestHandler):
     pairs = {}  # type: Dict[str, str]
     analyzers = {}  # type: Dict[str, Tuple[str, str]]
@@ -27,13 +34,15 @@
     taggers = {}  # type: Dict[str, Tuple[str, str]]
     spellers = {}  # type: Dict[str, Tuple[str, str]]
     # (l1, l2): [translation.Pipeline], only contains flushing pairs!
-    pipelines = {}  # type: Dict[str, List]
+    pipelines = {}  # type: Dict[Tuple[str, str], List[Union[FlushingPipeline, SimplePipeline]]]
     pipelines_holding = []  # type: List
     callback = None
-    timeout = None
+    timeout = 10
+    lang_names = None           # type: Optional[str]
     scale_mt_logs = False
     verbosity = 0
     api_keys_conf = None
+    stat_period_max_age = timedelta.max
 
     # dict representing a graph of translation pairs; keys are source languages
     # e.g. pairs_graph['eng'] = ['fra', 'spa']
@@ -43,12 +52,7 @@
     # e.g. paths['eng']['fra'] = ['eng', 'spa', 'fra']
     paths = {}  # type: Dict[str, Dict[str, List[str]]]
 
-    stats = {
-        'startdate': datetime.now(),
-        'useCount': {},
-        'vmsize': 0,
-        'timing': [],
-    }
+    stats = Stats()
 
     # (l1, l2): translation.ParsedModes
     pipeline_cmds = {}  # type: Dict
@@ -60,7 +64,7 @@
     doc_pipe_sem = Semaphore(3)
     # Empty the url_cache[pair] when it's this full:
     max_inmemory_url_cache = 1000  # type: int
-    url_cache = {}  # type: Dict[Tuple[str, str], Dict[str, str]]
+    url_cache = {}       # type: Dict[Tuple[str, str], Dict[str, Tuple[str, str]]]
     url_cache_path = None  # type: Optional[str]
     # Keep half a gig free when storing url_cache to disk:
     min_free_space_disk_url_cache = 512 * 1024 * 1024  # type: int
@@ -99,7 +103,7 @@
 
         cls.paths[start] = {}
         for u in prevs:
-            prev = prevs[u]
+            prev = prevs[u]     # type: Optional[str]
             path = [u]
             while prev:
                 path.append(prev)
@@ -122,9 +126,9 @@
                     _, num, unit = line.split()
                     break
             vmsize = int(num) * scale[unit]
-            if vmsize > self.stats['vmsize']:
-                logging.warning('VmSize of %s from %d to %d', os.getpid(), self.stats['vmsize'], vmsize)
-                self.stats['vmsize'] = vmsize
+            if vmsize > self.stats.vmsize:
+                logging.warning('VmSize of %s from %d to %d', os.getpid(), self.stats.vmsize, vmsize)
+                self.stats.vmsize = vmsize
         except Exception as e:
             # Don't fail just because we couldn't log:
             logging.info('Exception in log_vmsize: %s', e)
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/list_modes.py apertium-apy-0.11.7/apertium_apy/handlers/list_modes.py
--- apertium-apy-0.11.6/apertium_apy/handlers/list_modes.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/list_modes.py	2021-04-01 15:20:33.000000000 +0530
@@ -13,17 +13,10 @@
             src = self.get_argument('src', default=None)
             response_data = []
             if src:
-                pairs_list = self.paths[src]
-
-                def langs(foo):
-                    return (src, foo)
+                pairs = [(src, trg) for trg in self.paths[src]]
             else:
-                pairs_list = self.pairs
-
-                def langs(foo):
-                    return (foo.split('-')[0], foo.split('-')[1])
-            for pair in pairs_list:
-                l1, l2 = langs(pair)
+                pairs = [(p[0], p[1]) for par in self.pairs for p in [par.split('-')]]
+            for (l1, l2) in pairs:
                 response_data.append({'sourceLanguage': l1, 'targetLanguage': l2})
                 if self.get_arguments('include_deprecated_codes'):
                     response_data.append({'sourceLanguage': to_alpha2_code(l1), 'targetLanguage': to_alpha2_code(l2)})
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/per_word.py apertium-apy-0.11.7/apertium_apy/handlers/per_word.py
--- apertium-apy-0.11.6/apertium_apy/handlers/per_word.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/per_word.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,19 +1,14 @@
 import re
-from multiprocessing import Pool
-from subprocess import Popen, PIPE
-
-from tornado import gen
+from asyncio.subprocess import create_subprocess_exec, PIPE
 
 from apertium_apy.handlers.base import BaseHandler
-from apertium_apy.utils import apertium, run_async_thread, remove_dot_from_deformat, to_alpha3_code
+from apertium_apy.utils import apertium, remove_dot_from_deformat, to_alpha3_code
 
 
-def bilingual_translate(to_translate, mode_dir, mode):
-    p1 = Popen(['echo', to_translate], stdout=PIPE)
-    p2 = Popen(['lt-proc', '-b', mode], stdin=p1.stdout, stdout=PIPE, cwd=mode_dir)
-    p1.stdout.close()
-    output = p2.communicate()[0].decode('utf-8')
-    return output
+async def bilingual_translate(to_translate, mode_dir, mode):
+    proc = await create_subprocess_exec('lt-proc', '-b', mode, stdin=PIPE, stdout=PIPE, stderr=PIPE, cwd=mode_dir)
+    (output, _stderr) = await proc.communicate(input=to_translate.encode('utf-8'))
+    return output.decode('utf-8')
 
 
 def strip_tags(analysis):
@@ -23,7 +18,7 @@
         return analysis
 
 
-def process_per_word(analyzers, taggers, lang, modes, query):
+async def process_per_word(analyzers, taggers, lang, modes, query):
     outputs = {}
     morph_lexical_units = None
     tagger_lexical_units = None
@@ -32,7 +27,7 @@
     if 'morph' in modes or 'biltrans' in modes:
         if lang in analyzers:
             mode_info = analyzers[lang]
-            analysis = apertium(query, mode_info[0], mode_info[1])
+            analysis = await apertium(query, mode_info[0], mode_info[1])
             morph_lexical_units = remove_dot_from_deformat(query, re.findall(lexical_unit_re, analysis))
             outputs['morph'] = [lu.split('/')[1:] for lu in morph_lexical_units]
             outputs['morph_inputs'] = [strip_tags(lu.split('/')[0]) for lu in morph_lexical_units]
@@ -42,7 +37,7 @@
     if 'tagger' in modes or 'disambig' in modes or 'translate' in modes:
         if lang in taggers:
             mode_info = taggers[lang]
-            analysis = apertium(query, mode_info[0], mode_info[1])
+            analysis = await apertium(query, mode_info[0], mode_info[1])
             tagger_lexical_units = remove_dot_from_deformat(query, re.findall(lexical_unit_re, analysis))
             outputs['tagger'] = [lu.split('/')[1:] if '/' in lu else lu for lu in tagger_lexical_units]
             outputs['tagger_inputs'] = [strip_tags(lu.split('/')[0]) for lu in tagger_lexical_units]
@@ -55,7 +50,9 @@
             for lu in morph_lexical_units:
                 split_unit = lu.split('/')
                 forms = split_unit[1:] if len(split_unit) > 1 else split_unit
-                raw_translations = bilingual_translate(''.join(['^%s$' % form for form in forms]), mode_info[0], lang + '.autobil.bin')
+                raw_translations = await bilingual_translate(''.join(['^%s$' % form for form in forms]),
+                                                             mode_info[0],
+                                                             lang + '.autobil.bin')
                 translations = re.findall(lexical_unit_re, raw_translations)
                 outputs['biltrans'].append(list(map(lambda x: '/'.join(x.split('/')[1:]), translations)))
                 outputs['translate_inputs'] = outputs['morph_inputs']
@@ -68,7 +65,9 @@
             for lu in tagger_lexical_units:
                 split_unit = lu.split('/')
                 forms = split_unit[1:] if len(split_unit) > 1 else split_unit
-                raw_translations = bilingual_translate(''.join(['^%s$' % form for form in forms]), mode_info[0], lang + '.autobil.bin')
+                raw_translations = await bilingual_translate(''.join(['^%s$' % form for form in forms]),
+                                                             mode_info[0],
+                                                             lang + '.autobil.bin')
                 translations = re.findall(lexical_unit_re, raw_translations)
                 outputs['translate'].append(list(map(lambda x: '/'.join(x.split('/')[1:]), translations)))
                 outputs['translate_inputs'] = outputs['tagger_inputs']
@@ -79,8 +78,8 @@
 
 
 class PerWordHandler(BaseHandler):
-    @gen.coroutine
-    def get(self):
+
+    async def get(self):
         lang = to_alpha3_code(self.get_argument('lang'))
         modes = set(self.get_argument('modes').split(' '))
         query = self.get_argument('q')
@@ -131,17 +130,5 @@
             else:
                 self.send_response(to_return)
 
-        pool = Pool(processes=1)
-        result = pool.apply_async(process_per_word, (self.analyzers, self.taggers, lang, modes, query))
-        pool.close()
-
-        @run_async_thread
-        def worker(callback):
-            try:
-                callback(result.get(timeout=self.timeout))
-            except TimeoutError:
-                pool.terminate()
-                callback(None)
-
-        output = yield gen.Task(worker)
+        output = await process_per_word(self.analyzers, self.taggers, lang, modes, query)
         handle_output(output)
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/stats.py apertium-apy-0.11.7/apertium_apy/handlers/stats.py
--- apertium-apy-0.11.6/apertium_apy/handlers/stats.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/stats.py	2021-04-01 15:20:33.000000000 +0530
@@ -8,13 +8,13 @@
 class StatsHandler(BaseHandler):
     @tornado.gen.coroutine
     def get(self):
-        num_requests = self.get_argument('requests', 1000)
+        num_requests_arg = self.get_argument('requests', default='1000')
         try:
-            num_requests = int(num_requests)
+            num_requests = int(num_requests_arg)
         except ValueError:
             num_requests = 1000
 
-        period_stats = self.stats['timing'][-num_requests:]
+        period_stats = self.stats.timing[-num_requests:]
         times = sum([x[1] - x[0] for x in period_stats],
                     timedelta())
         chars = sum(x[2] for x in period_stats)
@@ -25,11 +25,11 @@
         nrequests = len(period_stats)
         max_age = (datetime.now() - period_stats[0][0]).total_seconds() if period_stats else 0
 
-        uptime = int((datetime.now() - self.stats['startdate']).total_seconds())
+        uptime = int((datetime.now() - self.stats.startdate).total_seconds())
         use_count = {'%s-%s' % pair: use_count
-                     for pair, use_count in self.stats['useCount'].items()}
-        running_pipes = {'%s-%s' % pair: len(pipes)
-                         for pair, pipes in self.pipelines.items()
+                     for pair, use_count in self.stats.usecount.items()}
+        running_pipes = {'%s-%s' % (l1, l2): len(pipes)
+                         for (l1, l2), pipes in self.pipelines.items()
                          if pipes != []}
         holding_pipes = len(self.pipelines_holding)
 
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/suggestion.py apertium-apy-0.11.7/apertium_apy/handlers/suggestion.py
--- apertium-apy-0.11.6/apertium_apy/handlers/suggestion.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/suggestion.py	2021-04-01 15:20:33.000000000 +0530
@@ -36,6 +36,7 @@
     wiki_edit_token = None
     SUGGEST_URL = None
     recaptcha_secret = None
+    auth_token = None
 
     @gen.coroutine
     def get(self):
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/translate_doc.py apertium-apy-0.11.7/apertium_apy/handlers/translate_doc.py
--- apertium-apy-0.11.6/apertium_apy/handlers/translate_doc.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/translate_doc.py	2021-04-01 15:20:33.000000000 +0530
@@ -38,8 +38,7 @@
 }
 
 
-@gen.coroutine
-def translate_doc(file_to_translate, fmt, mode_file, unknown_marks=False):
+async def translate_doc(file_to_translate, fmt, mode_file, unknown_marks=False):
     modes_dir = os.path.dirname(os.path.dirname(mode_file))
     mode = os.path.splitext(os.path.basename(mode_file))[0]
     if unknown_marks:
@@ -49,7 +48,7 @@
     proc = tornado.process.Subprocess(cmd,
                                       stdin=file_to_translate,
                                       stdout=tornado.process.Subprocess.STREAM)
-    translated = yield gen.Task(proc.stdout.read_until_close)
+    translated = await proc.stdout.read_until_close()
     proc.stdout.close()
     # TODO: raises but not caught:
     # check_ret_code(' '.join(cmd), proc)
@@ -67,7 +66,10 @@
                     cls.mime_type_command = command
                     break
 
-        mime_type = MIMETYPE_COMMANDS[cls.mime_type_command](f).split(';')[0]
+        if not cls.mime_type_command:
+            return None
+        mime_command = MIMETYPE_COMMANDS[cls.mime_type_command]
+        mime_type = mime_command(f).split(';')[0]
         if mime_type == 'application/zip':
             with zipfile.ZipFile(f) as zf:
                 file_names = zf.namelist()
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/translate.py apertium-apy-0.11.7/apertium_apy/handlers/translate.py
--- apertium-apy-0.11.6/apertium_apy/handlers/translate.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/translate.py	2021-04-01 15:20:33.000000000 +0530
@@ -6,12 +6,14 @@
 
 from tornado import gen
 import tornado.iostream
+import asyncio
 
 from apertium_apy import missing_freqs_db  # noqa: F401
 from apertium_apy.handlers.base import BaseHandler
 from apertium_apy.keys import ApiKeys
 from apertium_apy.utils import to_alpha3_code, scale_mt_log
-from apertium_apy.utils.translation import parse_mode_file, make_pipeline
+from apertium_apy.utils.translation import parse_mode_file, make_pipeline, FlushingPipeline, SimplePipeline
+from typing import Union
 
 
 class TranslationInfo:
@@ -34,7 +36,7 @@
         return self.get_argument('markUnknown', default='yes').lower() in ['yes', 'true', '1']
 
     def note_pair_usage(self, pair):
-        self.stats['useCount'][pair] = 1 + self.stats['useCount'].get(pair, 0)
+        self.stats.usecount[pair] = 1 + self.stats.usecount.get(pair, 0)
 
     def maybe_strip_marks(self, mark_unknown, pair, translated):
         self.note_unknown_tokens('%s-%s' % pair, translated)
@@ -111,7 +113,7 @@
             logging.info('Starting up a new pipeline for %s-%s …', l1, l2)
             if pair not in self.pipelines:
                 self.pipelines[pair] = []
-            p = make_pipeline(self.get_pipe_cmds(l1, l2))
+            p = make_pipeline(self.get_pipe_cmds(l1, l2), self.timeout)
             heapq.heappush(self.pipelines[pair], p)
         return self.pipelines[pair][0]
 
@@ -126,10 +128,11 @@
             scale_mt_log(self.get_status(), after - before, t_info, key, length)
 
         if self.get_status() == 200:
-            oldest = self.stats['timing'][0][0] if self.stats['timing'] else datetime.now()
-            if datetime.now() - oldest > self.STAT_PERIOD_MAX_AGE:
-                self.stats['timing'].pop(0)
-            self.stats['timing'].append(
+            timings = self.stats.timing
+            oldest = timings[0][0] if timings else datetime.now()
+            if datetime.now() - oldest > self.stat_period_max_age:
+                self.stats.timing.pop(0)
+            self.stats.timing.append(
                 (before, after, length))
 
     def get_pair_or_error(self, langpair, text_length):
@@ -178,6 +181,10 @@
                 'responseDetails': None,
                 'responseStatus': 200,
             })
+        except asyncio.TimeoutError as e:
+            logging.warning('Translation error in pair %s-%s: %s', pair[0], pair[1], e)
+            pipeline.stuck = True
+            self.send_error(503, explanation='internal error')
         except tornado.iostream.StreamClosedError as e:
             logging.warning('Translation error in pair %s-%s: %s', pair[0], pair[1], e)
             pipeline.stuck = True
@@ -189,7 +196,7 @@
         pair = self.get_pair_or_error(self.get_argument('langpair'),
                                       len(self.get_argument('q')))
         if pair is not None:
-            pipeline = self.get_pipeline(pair)
+            pipeline = self.get_pipeline(pair)  # type: Union[FlushingPipeline, SimplePipeline]
             deformat, reformat = self.get_format()
             yield self.translate_and_respond(pair,
                                              pipeline,
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/translate_raw.py apertium-apy-0.11.7/apertium_apy/handlers/translate_raw.py
--- apertium-apy-0.11.6/apertium_apy/handlers/translate_raw.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/translate_raw.py	2021-04-01 15:20:33.000000000 +0530
@@ -23,10 +23,11 @@
                                       len(self.get_argument('q', strip=False)))
         if pair is not None:
             pipeline = self.get_pipeline(pair)
+            deformat = self.get_argument('deformat', default='True') != 'False'
             yield self.translate_and_respond(pair,
                                              pipeline,
                                              self.get_argument('q', strip=False),
                                              self.get_argument('markUnknown', default='yes'),
                                              nosplit=False,
-                                             deformat=self.get_argument('deformat', default=True),
+                                             deformat=deformat,
                                              reformat=False)
diff -Nru apertium-apy-0.11.6/apertium_apy/handlers/translate_webpage.py apertium-apy-0.11.7/apertium_apy/handlers/translate_webpage.py
--- apertium-apy-0.11.6/apertium_apy/handlers/translate_webpage.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/handlers/translate_webpage.py	2021-04-01 15:20:33.000000000 +0530
@@ -8,11 +8,14 @@
 
 from tornado import gen
 from tornado import httpclient
+from typing import Optional  # noqa: F401
+
 
 try:
     import chardet
-except ImportError:
-    chardet = None
+except ImportError:             # type should be Optional[Module] but there's no module in mypy? TODO
+    chardet = None              # type: ignore
+
 
 from apertium_apy.utils import translation
 from apertium_apy.handlers.translate import TranslateHandler
@@ -70,7 +73,7 @@
         if self.url_cache_path is None:
             logging.info('No --url-cache-path, not storing cached url to disk')
             return
-        dirname, basename = self.cache_path(pair, url)
+        dirname, basename = self.cache_path(self.url_cache_path, pair, url)
         os.makedirs(dirname, exist_ok=True)
         statvfs = os.statvfs(dirname)
         if (statvfs.f_frsize * statvfs.f_bavail) < self.min_free_space_disk_url_cache:
@@ -88,11 +91,11 @@
         with open(origpath, 'w') as f:
             f.write(origtext)
 
-    def cache_path(self, pair, url):
+    def cache_path(self, url_cache_path, pair, url):
         """Give the directory for where to cache the translation of this url,
         and the file name to use for this pair."""
         hsh = sha1(url.encode('utf-8')).hexdigest()
-        dirname = os.path.join(self.url_cache_path,
+        dirname = os.path.join(url_cache_path,
                                # split it to avoid too many files in one dir:
                                hsh[:1], hsh[1:2], hsh[2:])
         return (dirname, '{}-{}'.format(*pair))
@@ -105,7 +108,7 @@
         if url in self.url_cache[pair]:
             logging.info('Got cache from memory')
             return self.url_cache[pair][url]
-        dirname, basename = self.cache_path(pair, url)
+        dirname, basename = self.cache_path(self.url_cache_path, pair, url)
         path = os.path.join(dirname, basename)
         if os.path.exists(path):
             logging.info('Got cache on disk, we want to retranslate in background …')
@@ -119,20 +122,12 @@
         pair.
         """
         mem_cached = self.url_cache.get(pair, {}).get(url)
-        if mem_cached is None and cached is not None:
-            dirname, _ = self.cache_path(pair, url)
+        if mem_cached is None and cached is not None and self.url_cache_path is not None:
+            dirname, _ = self.cache_path(self.url_cache_path, pair, url)
             origpath = os.path.join(dirname, pair[0])
             if os.path.exists(origpath):
                 return open(origpath, 'r').read()
 
-    def handle_fetch(self, response):
-        if response.error is None:
-            return
-        elif response.code == 304:  # means we can use cache, so don't fail on this
-            return
-        else:
-            self.send_error(503, explanation='{} on fetching url: {}'.format(response.code, response.error))
-
     @gen.coroutine
     def get(self):
         pair = self.get_pair_or_error(self.get_argument('langpair'),
@@ -159,13 +154,14 @@
             self.send_error(404, explanation='{} on fetching url: {}'.format('Error 404', e))
             return
         try:
-            response = yield httpclient.AsyncHTTPClient().fetch(request, self.handle_fetch)
+            response = yield httpclient.AsyncHTTPClient().fetch(request, raise_error=True)
         except httpclient.HTTPError as e:
             if e.code == 304:
                 got304 = True
                 logging.info('304, can use cache')
             else:
                 logging.error(e)
+                self.send_error(503, explanation='{} on fetching url: {}'.format(response.code, response.error))
                 return
         if got304 and cached is not None:
             translation_catpipeline = translation.CatPipeline  # type: ignore
diff -Nru apertium-apy-0.11.6/apertium_apy/mode_search.py apertium-apy-0.11.7/apertium_apy/mode_search.py
--- apertium-apy-0.11.6/apertium_apy/mode_search.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/mode_search.py	2021-04-01 15:20:33.000000000 +0530
@@ -59,7 +59,7 @@
                 if m:
                     if mtype != 'pair':
                         modename = m.group(1)  # e.g. en-es-anmorph
-                        langlist = [to_alpha3_code(l) for l in m.group(2).split('-')]
+                        langlist = [to_alpha3_code(x) for x in m.group(2).split('-')]
                         lang_pair = '-'.join(langlist)  # e.g. en-es
                         dir_of_modes = os.path.dirname(dirpath)
                         mode = (dir_of_modes,
diff -Nru apertium-apy-0.11.6/apertium_apy/systemd.py apertium-apy-0.11.7/apertium_apy/systemd.py
--- apertium-apy-0.11.6/apertium_apy/systemd.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/systemd.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 # coding=utf-8
 # -*- indent-tabs-mode: nil -*-
 # -*- encoding: utf-8 -*-
diff -Nru apertium-apy-0.11.6/apertium_apy/utils/__init__.py apertium-apy-0.11.7/apertium_apy/utils/__init__.py
--- apertium-apy-0.11.6/apertium_apy/utils/__init__.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/utils/__init__.py	2021-04-01 15:20:33.000000000 +0530
@@ -55,13 +55,12 @@
         return analyses
 
 
-@gen.coroutine
-def apertium(apertium_input, mode_dir, mode, formatting='txt'):
+async def apertium(apertium_input, mode_dir, mode, formatting='txt'):
     logging.debug('util.apertium({!r}, {!r}, {!r}, {!r})'.format(apertium_input, mode_dir, mode, formatting))
     proc = Subprocess(['apertium', '-d', mode_dir, '-f', formatting, mode], stdin=Subprocess.STREAM, stdout=Subprocess.STREAM)
-    yield gen.Task(proc.stdin.write, apertium_input.encode('utf-8'))
+    await proc.stdin.write(apertium_input.encode('utf-8'))
     proc.stdin.close()
-    output = yield gen.Task(proc.stdout.read_until_close)
+    output = await proc.stdout.read_until_close()
     proc.stdout.close()
     return output.decode('utf-8')
 
diff -Nru apertium-apy-0.11.6/apertium_apy/utils/translation.py apertium-apy-0.11.7/apertium_apy/utils/translation.py
--- apertium-apy-0.11.6/apertium_apy/utils/translation.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/apertium_apy/utils/translation.py	2021-04-01 15:20:33.000000000 +0530
@@ -6,6 +6,8 @@
 from select import PIPE_BUF
 from subprocess import Popen, PIPE
 from time import time
+import asyncio
+from secrets import token_urlsafe
 
 import tornado.iostream
 import tornado.locks as locks
@@ -26,7 +28,7 @@
         # pipeline for translation. If this is 0, we can safely shut
         # down the pipeline.
         self.users = 0
-        self.last_usage = 0
+        self.last_usage = 0.0
         self.use_count = 0
         self.stuck = False
 
@@ -50,7 +52,8 @@
 
 
 class FlushingPipeline(Pipeline):
-    def __init__(self, commands, *args, **kwargs):
+    def __init__(self, timeout, commands, *args, **kwargs):
+        self.timeout = timeout
         self.inpipe, self.outpipe = start_pipeline(commands)
         super().__init__(*args, **kwargs)
 
@@ -66,11 +69,11 @@
     def translate(self, to_translate, nosplit=False, deformat=True, reformat=True):
         with self.use():
             if nosplit:
-                res = yield translate_nul_flush(to_translate, self, deformat, reformat)
+                res = yield translate_nul_flush(to_translate, self, deformat, reformat, self.timeout)
                 return res
             else:
                 all_split = split_for_translation(to_translate, n_users=self.users)
-                parts = yield [translate_nul_flush(part, self, deformat, reformat)
+                parts = yield [translate_nul_flush(part, self, deformat, reformat, self.timeout)
                                for part in all_split]
                 return ''.join(parts)
 
@@ -91,9 +94,9 @@
 ParsedModes = namedtuple('ParsedModes', 'do_flush commands')
 
 
-def make_pipeline(modes_parsed):
+def make_pipeline(modes_parsed, timeout):
     if modes_parsed.do_flush:
-        return FlushingPipeline(modes_parsed.commands)
+        return FlushingPipeline(timeout, modes_parsed.commands)
     else:
         return SimplePipeline(modes_parsed.commands)
 
@@ -257,44 +260,46 @@
     return result
 
 
-@gen.coroutine
-def translate_nul_flush(to_translate, pipeline, unsafe_deformat, unsafe_reformat):
-    with (yield pipeline.lock.acquire()):
+async def translate_nul_flush(to_translate, pipeline, unsafe_deformat, unsafe_reformat, timeout):
+    with (await pipeline.lock.acquire()):
         proc_in, proc_out = pipeline.inpipe, pipeline.outpipe
         deformat, reformat = validate_formatters(unsafe_deformat, unsafe_reformat)
 
         if deformat:
             proc_deformat = Popen(deformat, stdin=PIPE, stdout=PIPE)
+            assert proc_deformat.stdin is not None  # stupid mypy
             proc_deformat.stdin.write(bytes(to_translate, 'utf-8'))
             deformatted = proc_deformat.communicate()[0]
             check_ret_code('Deformatter', proc_deformat)
         else:
             deformatted = bytes(to_translate, 'utf-8')
 
+        nonce = '[/NONCE:' + token_urlsafe(8) + ']'
         proc_in.stdin.write(deformatted)
-        proc_in.stdin.write(bytes('\0', 'utf-8'))
+        proc_in.stdin.write(bytes('\0' + nonce + '\0', 'utf-8'))
         # TODO: PipeIOStream has no flush, but seems to work anyway?
         # proc_in.stdin.flush()
 
-        # TODO: If the output has no \0, this hangs, locking the
-        # pipeline. If there's no way to put a timeout right here, we
-        # might need a timeout using Pipeline.use(), like servlet.py's
-        # cleanable but called *before* trying to translate anew
-        output = yield gen.Task(proc_out.stdout.read_until, bytes('\0', 'utf-8'))
+        # If the output has no \0, this hangs, locking the pipeline, so we use a timeout
+        noncereader = proc_out.stdout.read_until(bytes(nonce + '\0', 'utf-8'))
+        output = await asyncio.wait_for(noncereader, timeout=timeout)
+        output = output.replace(bytes(nonce, 'utf-8'), b'')
 
         if reformat:
             proc_reformat = Popen(reformat, stdin=PIPE, stdout=PIPE)
+            assert proc_reformat.stdin is not None  # stupid mypy
             proc_reformat.stdin.write(output)
             result = proc_reformat.communicate()[0]
             check_ret_code('Reformatter', proc_reformat)
         else:
-            result = re.sub(rb'\0$', b'', output)
+            result = output.replace(b'\0', b'')
         return result.decode('utf-8')
 
 
 @gen.coroutine
 def translate_pipeline(to_translate, commands):
     proc_deformat = Popen('apertium-deshtml', stdin=PIPE, stdout=PIPE)
+    assert proc_deformat.stdin is not None  # stupid mypy
     proc_deformat.stdin.write(bytes(to_translate, 'utf-8'))
     deformatted = proc_deformat.communicate()[0]
     check_ret_code('Deformatter', proc_deformat)
@@ -310,6 +315,7 @@
 
     for cmd in commands:
         proc = Popen(cmd, stdin=PIPE, stdout=PIPE)
+        assert proc.stdin is not None  # stupid mypy
         proc.stdin.write(towrite)
         towrite = proc.communicate()[0]
         check_ret_code(' '.join(cmd), proc)
@@ -318,8 +324,9 @@
         all_cmds.append(cmd)
 
     proc_reformat = Popen('apertium-rehtml-noent', stdin=PIPE, stdout=PIPE)
+    assert proc_reformat.stdin is not None  # stupid mypy
     proc_reformat.stdin.write(towrite)
-    towrite = proc_reformat.communicate()[0].decode('utf-8')
+    towrite = proc_reformat.communicate()[0]
     check_ret_code('Reformatter', proc_reformat)
 
     output.append(towrite)
@@ -328,13 +335,12 @@
     return output, all_cmds
 
 
-@gen.coroutine
-def translate_simple(to_translate, commands):
+async def translate_simple(to_translate, commands):
     proc_in, proc_out = start_pipeline(commands)
     assert proc_in == proc_out
-    yield gen.Task(proc_in.stdin.write, bytes(to_translate, 'utf-8'))
+    await proc_in.stdin.write(bytes(to_translate, 'utf-8'))
     proc_in.stdin.close()
-    translated = yield gen.Task(proc_out.stdout.read_until_close)
+    translated = await proc_out.stdout.read_until_close()
     proc_in.stdout.close()
     return translated.decode('utf-8')
 
@@ -349,26 +355,17 @@
     return start_pipeline([cmd])
 
 
-@gen.coroutine
-def translate_modefile_bytes(to_translate_bytes, fmt, mode_file, unknown_marks=False):
+async def translate_modefile_bytes(to_translate_bytes, fmt, mode_file, unknown_marks=False):
     proc_in, proc_out = start_pipeline_from_modefile(mode_file, fmt, unknown_marks)
     assert proc_in == proc_out
-    yield gen.Task(proc_in.stdin.write, to_translate_bytes)
+    await proc_in.stdin.write(to_translate_bytes)
     proc_in.stdin.close()
-    translated_bytes = yield gen.Task(proc_out.stdout.read_until_close)
+    translated_bytes = await proc_out.stdout.read_until_close()
     proc_in.stdout.close()
     return translated_bytes
 
 
 @gen.coroutine
 def translate_html_mark_headings(to_translate, mode_file, unknown_marks=False):
-    proc_deformat = Popen(['apertium-deshtml', '-o'], stdin=PIPE, stdout=PIPE)
-    deformatted = proc_deformat.communicate(bytes(to_translate, 'utf-8'))[0]
-    check_ret_code('Deformatter', proc_deformat)
-
-    translated = yield translate_modefile_bytes(deformatted, 'none', mode_file, unknown_marks)
-
-    proc_reformat = Popen(['apertium-rehtml-noent'], stdin=PIPE, stdout=PIPE)
-    reformatted = proc_reformat.communicate(translated)[0]
-    check_ret_code('Reformatter', proc_reformat)
-    return reformatted.decode('utf-8')
+    translated = yield translate_modefile_bytes(bytes(to_translate, 'utf-8'), 'html', mode_file, unknown_marks)
+    return translated.decode('utf-8')
diff -Nru apertium-apy-0.11.6/debian/apertium-apy.service apertium-apy-0.11.7/debian/apertium-apy.service
--- apertium-apy-0.11.6/debian/apertium-apy.service	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/apertium-apy.service	2021-07-01 13:24:15.000000000 +0530
@@ -1,6 +1,6 @@
 [Unit]
 Description=Translation server and API for Apertium
-Documentation=http://wiki.apertium.org/wiki/Apertium-apy
+Documentation=https://wiki.apertium.org/wiki/Apertium-apy
 After=network.target
 
 [Service]
diff -Nru apertium-apy-0.11.6/debian/changelog apertium-apy-0.11.7/debian/changelog
--- apertium-apy-0.11.6/debian/changelog	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/changelog	2021-07-01 13:24:15.000000000 +0530
@@ -1,3 +1,24 @@
+apertium-apy (0.11.7-2) unstable; urgency=high
+
+  * Upload to unstable. 0.11.6 is broken due to dependency on python3-tornado.
+
+ -- Kartik Mistry <kartik@debian.org>  Thu, 01 Jul 2021 13:24:15 +0530
+
+apertium-apy (0.11.7-1) experimental; urgency=low
+
+  [ Tino Didriksen ]
+  * Update to latest upstream release:
+    + Fixes the processing pipes yielding out-of-order results.
+  * debian/control:
+    + Updated Standards-Version to 4.5.1
+    + Bumped Build-Depends and Depends for Python3 and Python3-tornado
+    + Updated debian/upstream/metadata.
+
+  [ Kartik Mistry ]
+  * Updated debian/watch.
+
+ -- Kartik Mistry <kartik@debian.org>  Fri, 09 Apr 2021 20:16:31 +0530
+
 apertium-apy (0.11.6-1) unstable; urgency=medium
 
   [ Debian Janitor ]
diff -Nru apertium-apy-0.11.6/debian/control apertium-apy-0.11.7/debian/control
--- apertium-apy-0.11.6/debian/control	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/control	2021-07-01 13:24:15.000000000 +0530
@@ -6,7 +6,7 @@
            Tino Didriksen <tino@didriksen.cc>
 Build-Depends: debhelper-compat (= 13),
                dh-python,
-               python3 (>= 3.4),
+               python3 (>= 3.5),
                python3-chardet,
                python3-coverage,
                python3-pkg-resources,
@@ -15,7 +15,7 @@
                python3-streamparser,
                python3-tornado,
                sqlite3
-Standards-Version: 4.5.0
+Standards-Version: 4.5.1
 Homepage: https://apertium.org/
 Vcs-Git: https://salsa.debian.org/science-team/apertium-apy.git
 Vcs-Browser: https://salsa.debian.org/science-team/apertium-apy
@@ -26,9 +26,9 @@
 Pre-Depends: ${misc:Pre-Depends}
 Depends: adduser,
          logrotate,
-         python3 (>= 3.4),
+         python3 (>= 3.5),
          python3-pkg-resources,
-         python3-tornado (>= 4.2),
+         python3-tornado,
          ${misc:Depends},
          ${python3:Depends}
 Recommends: python3-chardet, python3-requests, python3-streamparser
diff -Nru apertium-apy-0.11.6/debian/copyright apertium-apy-0.11.7/debian/copyright
--- apertium-apy-0.11.6/debian/copyright	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/copyright	2021-07-01 13:24:15.000000000 +0530
@@ -19,7 +19,7 @@
  GNU General Public License for more details.
  .
  You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
  .
  On Debian systems, the complete text of the GNU General
  Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
diff -Nru apertium-apy-0.11.6/debian/rules apertium-apy-0.11.7/debian/rules
--- apertium-apy-0.11.6/debian/rules	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/rules	2021-07-01 13:24:15.000000000 +0530
@@ -30,3 +30,6 @@
 
 override_dh_installman:
 	dh_installman apertium-apy.1
+
+override_dh_missing:
+	dh_missing --fail-missing
diff -Nru apertium-apy-0.11.6/debian/upstream/metadata apertium-apy-0.11.7/debian/upstream/metadata
--- apertium-apy-0.11.6/debian/upstream/metadata	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/upstream/metadata	2021-07-01 13:24:15.000000000 +0530
@@ -1,4 +1,8 @@
+---
+Archive: GitHub
 Bug-Database: https://github.com/apertium/apertium-apy/issues
 Bug-Submit: https://github.com/apertium/apertium-apy/issues/new
+Changelog: https://github.com/apertium/apertium-apy/tags
+Documentation: https://wiki.apertium.org/
 Repository: https://github.com/apertium/apertium-apy.git
 Repository-Browse: https://github.com/apertium/apertium-apy
diff -Nru apertium-apy-0.11.6/debian/watch apertium-apy-0.11.7/debian/watch
--- apertium-apy-0.11.6/debian/watch	2020-07-01 13:45:06.000000000 +0530
+++ apertium-apy-0.11.7/debian/watch	2021-07-01 13:24:15.000000000 +0530
@@ -1,3 +1,4 @@
-version=3
-opts=filenamemangle=s/.+\/v?(\d\S+)\.tar\.gz/apertium-apy-$1\.tar\.gz/ \
-  https://github.com/apertium/apertium-apy/tags .*/v?(\d\S+)\.tar\.gz
+version=4
+opts="filenamemangle=s%(?:.*?)?v?(\d[\d.]*)\.tar\.gz%apertium-apy-$1.tar.bz2%" \
+    https://github.com/apertium/apertium-apy/tags \
+    (?:.*?/)?v?(\d[\d.]*)\.tar\.gz debian uupdate
diff -Nru apertium-apy-0.11.6/.dir-locals.el apertium-apy-0.11.7/.dir-locals.el
--- apertium-apy-0.11.6/.dir-locals.el	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/.dir-locals.el	2021-04-01 15:20:33.000000000 +0530
@@ -1 +1,6 @@
-((nil . ((flycheck-flake8-maximum-line-length . 180))))
+;;; Directory Local Variables
+;;; For more information see (info "(emacs) Directory Variables")
+
+((nil . ((flycheck-flake8-maximum-line-length . 180)))
+ (python-mode . ((flycheck-python-mypy-args . ("run" "mypy" "--check-untyped-defs" "--python-version" "3.8"))
+                 (flycheck-python-mypy-executable . "pipenv"))))
diff -Nru apertium-apy-0.11.6/.gitattributes apertium-apy-0.11.7/.gitattributes
--- apertium-apy-0.11.6/.gitattributes	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/.gitattributes	1970-01-01 05:30:00.000000000 +0530
@@ -1 +0,0 @@
-language_names/*.sql linguist-detectable=false
diff -Nru apertium-apy-0.11.6/.gitignore apertium-apy-0.11.7/.gitignore
--- apertium-apy-0.11.6/.gitignore	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/.gitignore	1970-01-01 05:30:00.000000000 +0530
@@ -1,112 +0,0 @@
-/tests/apertium-apy.err
-/tests/apertium-apy.log
-/langNames.db
-/cache/
-
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-
-# PyInstaller
-#  Usually these files are written by a python script from a template
-#  before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-.hypothesis/
-.pytest_cache/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# pyenv
-.python-version
-
-# celery beat schedule file
-celerybeat-schedule
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-
-# pycharm conf files
-.idea/
diff -Nru apertium-apy-0.11.6/language_names/build_db.py apertium-apy-0.11.7/language_names/build_db.py
--- apertium-apy-0.11.6/language_names/build_db.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/language_names/build_db.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 import argparse
 import csv
diff -Nru apertium-apy-0.11.6/language_names/manual-additions.tsv apertium-apy-0.11.7/language_names/manual-additions.tsv
--- apertium-apy-0.11.6/language_names/manual-additions.tsv	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/language_names/manual-additions.tsv	2021-04-01 15:20:33.000000000 +0530
@@ -1,12 +1,140 @@
 lg	inLg	name
+an	frp	arpitán
 byv	byv	Mə̀dʉ̂mbɑ̀
+es	frp	francoprovenzal
 es	szl	silesiano
 eu	szl	silesiera
+fr	frp	francoprovençal
+frp	af	afrikâns
+frp	an	aragonês
+frp	ar	arabo
+frp	arp	arpitan
+frp	ast	asturien
+frp	av	avaro
+frp	aym	aymara
+frp	az	azèri
+frp	ba	bachkir
+frp	be	bièlorusso
+frp	bg	bulgaro
+frp	bn	bengali
+frp	br	breton
+frp	bs	bosnien
+frp	bua	bouriato
+frp	ca	catalan
+frp	ckb	kurdo sorani
+frp	co	corso
+frp	cs	tchèco
+frp	csb	cachoubo
+frp	cv	tchouvache
+frp	cy	galouès
+frp	da	danouès
+frp	de	alemand
+frp	dsb	bâs sorabo
+frp	el	grèco
+frp	en	angllês
+frp	eo	èsperanto
+frp	es	èspagnol
+frp	et	èstonien
+frp	eu	bâsco
+frp	fa	pèrso
+frp	fi	finouès
+frp	fo	fèroyen
+frp	fr	francês
+frp	frp	arpitan
+frp	fy	frison orientâl
+frp	ga	irlandês
+frp	gd	gaèlico ècossês
+frp	gl	galicien
+frp	gn	goarani
+frp	gv	manouès
+frp	he	hèbrô
+frp	hi	hindi
+frp	hr	croato
+frp	hsb	hôt sorabo
+frp	ht	hayicien
+frp	hu	hongrouès
+frp	hy	armènien
+frp	ia	interlingua
+frp	id	indonèsien
+frp	is	islandês
+frp	it	italien
+frp	kaa	karakalpak
+frp	kk	kazakh
+frp	kmr	kurdo de la bise
+frp	ko	corèen
+frp	ku	kurdo
+frp	kum	koumyk
+frp	kv	komi
+frp	ky	kirghize
+frp	la	latin
+frp	lb	luxemborjouès
+frp	lo	laocien
+frp	lt	lituanien
+frp	lv	lèton
+frp	mk	macèdonien
+frp	ml	malayalam
+frp	mr	marati
+frp	ms	malês
+frp	mt	maltês
+frp	myv	mordvo erziâ
+frp	nb	norvègien bokmål
+frp	ne	nèpalês
+frp	nl	nêrlandês
+frp	nn	norvègien nynorsk
+frp	no	norvègien
+frp	oc	occitan
+frp	os	ossèto
+frp	pl	polonês
+frp	pt	português
+frp	rm	roumancho
+frp	ro	roumen
+frp	ru	russo
+frp	rup	aroumen
+frp	qve	quèchoua
+frp	sa	sanscrit
+frp	sah	yakouto
+frp	sc	sârdo
+frp	sco	ècossês
+frp	se	samo de la bise
+frp	sh	sèrbocroato
+frp	sk	slovaco
+frp	sl	slovèno
+frp	sma	samo miéjornâl
+frp	smj	samo de Lule
+frp	sq	albanês
+frp	sr	sèrbo
+frp	sv	suèdouès
+frp	sw	souahili
+frp	ta	tamoul
+frp	te	tèlougou
+frp	tet	tètoum
+frp	tg	tadjik
+frp	th	taylandês
+frp	tk	turkmèno
+frp	tl	tagalog
+frp	tr	turco
+frp	tt	tataro
+frp	tyv	touven
+frp	udm	oudmourto
+frp	uk	ukrênien
+frp	ur	ourdou
+frp	uz	ouzbèk
+frp	vi	viètnamien
+frp	xh	xhosa
+frp	zh	chinouès
+frp	zu	zoulou
+frp	szl	silèsien
+gl	frp	francoprovenzal
+it	frp	francoprovenzale
 kmr	kmr	Kurmancî
 lvs	lvs	latviešu valoda
 nn	szl	schlesisk
 oc	szl	silesian
+oc	frp	arpitan
+pt	frp	francoprovençal
 ro	szl	sileziană
+ru	frp	франкопровансальский
+srd	frp	francuproventzale
 szl	af	afrikaans
 szl	an	aragōńskŏ
 szl	ar	arabskŏ
@@ -155,3 +283,4 @@
 vro	vro	võro
 liv	liv	livõ
 bua	bua	буряад
+srd	srd	sardu
diff -Nru apertium-apy-0.11.6/language_names/scraper-cldr.py apertium-apy-0.11.7/language_names/scraper-cldr.py
--- apertium-apy-0.11.6/language_names/scraper-cldr.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/language_names/scraper-cldr.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 import argparse
 import csv
diff -Nru apertium-apy-0.11.6/language_names/scraper-sil.py apertium-apy-0.11.7/language_names/scraper-sil.py
--- apertium-apy-0.11.6/language_names/scraper-sil.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/language_names/scraper-sil.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 import argparse
 import csv
diff -Nru apertium-apy-0.11.6/language_names/variants.tsv apertium-apy-0.11.7/language_names/variants.tsv
--- apertium-apy-0.11.6/language_names/variants.tsv	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/language_names/variants.tsv	2021-04-01 15:20:33.000000000 +0530
@@ -2,9 +2,24 @@
 an	cat_valencia	valenciana
 an	oci_aran	aranés
 an	por_BR	portugués brasileño
+an	por_PTpre1990	port. europeu (ort. trad.)
 ca	cat_valencia	valencià
+ca	eng_GB	anglès britànic
+ca	eng_US	anglès dels EEUU
+ca	fra_eco	quebequès
+ca	hbs_BS	bosnià
+ca	hbs_HR	croat
+ca	hbs_SR	serbi
+ca	hbs_SRcyr	serbi (ciríl·lic)
+ca	mlt_translit	maltès transliterat
+ca	nno_e	norueg oriental
 ca	oci_aran	aranès
+ca	oci_gascon	gascó
+ca	pan_Arab	panjabi (alifat)
+ca	pan_Guru	panjabi (gurmukhi)
 ca	por_BR	portuguès brasiler
+ca	por_PTpre1990	port. europeu (ort. trad.)
+ca	tgk_translit	tadjik transliterat
 de	cat_valencia	valencianische
 de	oci_aran	aranesische
 de	por_BR	brasilianisches portugiesisch
@@ -17,44 +32,94 @@
 en	hbs_SR	Serbian
 en	hbs_SRcyr	Serbian (Cyrillic)
 en	mlt_translit	Maltese Transliterated
-en	nno_e	East Norwegian
+en	nno_e	East Norwegian, vi→vi
+en	nno_e_me	East Norwegian, vi→me
+en	nno_a_vi	Norwegian Nynorsk, vi→vi
 en	oci_aran	Aranese
 en	oci_gascon	Gascon
 en	pan_Arab	Punjabi (Arabic)
 en	pan_Guru	Punjabi (Gurmukhi)
 en	por_BR	Brazilian Portuguese
+en	por_PTpre1990	European Port. (trad. spelling)
 en	tgk_translit	Tajik Transliterated
 es	cat_valencia	valenciano
 es	oci_aran	aranés
 es	por_BR	portugués brasileño
+es	por_PTpre1990	port. europeo (ort. trad.)
 eu	cat_valencia	valentziera
 eu	oci_aran	aranera
 eu	por_BR	brasilgo portugesa
+ita	cat_valencia	valenzano
+ita	oci_aran	aranese
+ita	por_BR	portoghese brasiliano
+ita	por_PTpre1990	port. europeo (ort. trad.)
 fi	cat_valencia	valencian
 fi	oci_aran	araneesi
 fi	por_BR	brasilianportugali
 fr	cat_valencia	valencien
+fr	eng_GB	anglais britannique
+fr	eng_US	anglais américain
+fr	fra_eco	français québécois
+fr	hbs_BS	bosnien
+fr	hbs_HR	croate
+fr	hbs_SR	serbe
+fr	hbs_SRcyr	serbe (cyrillique)
+fr	nno_e	norvégien oriental
 fr	oci_aran	aranais
+fr	oci_gascon	gascon
 fr	por_BR	portugais brésilien
+fr	por_PTpre1990	port. européen (orth. trad.)
+frp	cat_valencia	valencien
+frp	eng_GB	angllês britanico
+frp	eng_US	angllês amèriquen
+frp	fra_eco	francês quèbècouès
+frp	hbs_BS	bosnien
+frp	hbs_HR	croato
+frp	hbs_SR	sèrbo
+frp	hbs_SRcyr	sèrbo (cirilico)
+frp	mlt_translit	maltês translitèrâ
+frp	nno_e	norvègien orientâl
+frp	oci_aran	aranês
+frp	oci_gascon	gascon
+frp	pan_Arab	pendjabi (arabo)
+frp	pan_Guru	pendjabi (gurmukhi)
+frp	por_BR	português brèsilien
+frp	por_PTpre1990	port. européen (ort. trad.)
+frp	tgk_translit	tadjik translitèrâ
 nb	cat_valencia	valensiansk
-nb	nno_e	nynorsk e-infinitiv
+nb	nno_e	nynorsk e-infinitiv, vi→vi
+nb	nno_e_me	nynorsk e-infinitiv, vi→me
+nb	nno_a_vi	nynorsk e-infinitiv, vi→vi
 nb	oci_aran	araneisk
 nb	por_BR	brasiliansk portugisisk
 nn	cat_valencia	valensiansk
-nn	nno_e	nynorsk e-infinitiv
+nn	nno_e	nynorsk e-infinitiv, vi→vi
+nn	nno_e_me	nynorsk e-infinitiv, vi→me
+nn	nno_a_vi	nynorsk e-infinitiv, vi→vi
 nn	oci_aran	araneisk
 nn	por_BR	brasiliansk portugisisk
 no	cat_valencia	valensiansk
-no	nno_e	nynorsk e-infinitiv
+no	nno_e	nynorsk e-infinitiv, vi→vi
+no	nno_e_me	nynorsk e-infinitiv, vi→me
+no	nno_a_vi	nynorsk e-infinitiv, vi→vi
 no	oci_aran	araneisk
 no	por_BR	brasiliansk portugisisk
-oci_aran	oci_aran	Aranés
-pt	cat_valencia	valenciana
+oci	cat_valencia	Valencian
+oci	oci_aran	Aranés
+oci	oci_gascon	Gascon
+oci	por_BR	Portugués brasilian
+oci	por_PTpre1990	Port. europèu (ort. trad.)
+pt	cat_valencia	valenciano
 pt	oci_aran	aranês
 pt	por_BR	português brasileiro
+pt	por_PTpre1990	port. europeu (ort. trad.)
 ru	cat_valencia	валенсийский
 ru	oci_aran	aра́нский
 ru	por_BR	брази́льский португа́льский
+srd	cat_valencia	valentzianu
+srd	oci_aran	aranesu
+srd	por_BR	portughesu brasilianu
+srd	por_PTpre1990	port. europeu (ort. trad.)
 sv	cat_valencia	valencianska
 sv	oci_aran	araneiska
 sv	por_BR	brasiliansk portugisiska
@@ -74,3 +139,5 @@
 kaz_Cyrl	kaz_Cyrl	қазақша (кирил жазуымен)
 eng_US	eng_US	English (US)
 wad-ipa	wad-ipa	Wamesa (IPA)
+cat_valencia	cat_valencia	valencià
+cat_iec2017	cat_iec2017	català (IEC 2017)
diff -Nru apertium-apy-0.11.6/Pipfile apertium-apy-0.11.7/Pipfile
--- apertium-apy-0.11.6/Pipfile	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/Pipfile	2021-04-01 15:20:33.000000000 +0530
@@ -7,7 +7,7 @@
 apertium-streamparser = "*"
 chardet = "*"
 requests = "*"
-tornado = "==4.2.1"
+tornado = "==6.0.3"
 commentjson = "*"
 
 [dev-packages]
diff -Nru apertium-apy-0.11.6/Pipfile.lock apertium-apy-0.11.7/Pipfile.lock
--- apertium-apy-0.11.6/Pipfile.lock	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/Pipfile.lock	2021-04-01 15:20:33.000000000 +0530
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "1c2c9ef4041302ae8860255d96a20a3f9f5094e31ef063598ee9b846d634a019"
+            "sha256": "c4a3cda3a36b2ff449891558273a1ce2fd884725ab23c84a14300202d7e3f956"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -24,10 +24,10 @@
         },
         "certifi": {
             "hashes": [
-                "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
-                "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
+                "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
+                "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
             ],
-            "version": "==2019.11.28"
+            "version": "==2020.12.5"
         },
         "chardet": {
             "hashes": [
@@ -39,56 +39,109 @@
         },
         "commentjson": {
             "hashes": [
-                "sha256:2916e54695241bb9e4342203c3f643efaff6d53807d72102161dbd5449f54dbd"
+                "sha256:42f9f231d97d93aff3286a4dc0de39bfd91ae823d1d9eba9fa901fe0c7113dd4"
             ],
             "index": "pypi",
-            "version": "==0.7.2"
+            "version": "==0.9.0"
         },
         "idna": {
             "hashes": [
-                "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
-                "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+                "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
+                "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
             ],
-            "version": "==2.8"
+            "version": "==2.10"
+        },
+        "lark-parser": {
+            "hashes": [
+                "sha256:26215ebb157e6fb2ee74319aa4445b9f3b7e456e26be215ce19fdaaa901c20a4"
+            ],
+            "version": "==0.7.8"
         },
         "requests": {
             "hashes": [
-                "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
-                "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+                "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b",
+                "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"
             ],
             "index": "pypi",
-            "version": "==2.22.0"
+            "version": "==2.24.0"
         },
         "tornado": {
             "hashes": [
-                "sha256:a16fcdc4f76b184cb82f4f9eaeeacef6113b524b26a2cb331222e4a7fa6f2969"
+                "sha256:349884248c36801afa19e342a77cc4458caca694b0eda633f5878e458a44cb2c",
+                "sha256:398e0d35e086ba38a0427c3b37f4337327231942e731edaa6e9fd1865bbd6f60",
+                "sha256:4e73ef678b1a859f0cb29e1d895526a20ea64b5ffd510a2307b5998c7df24281",
+                "sha256:559bce3d31484b665259f50cd94c5c28b961b09315ccd838f284687245f416e5",
+                "sha256:abbe53a39734ef4aba061fca54e30c6b4639d3e1f59653f0da37a0003de148c7",
+                "sha256:c845db36ba616912074c5b1ee897f8e0124df269468f25e4fe21fe72f6edd7a9",
+                "sha256:c9399267c926a4e7c418baa5cbe91c7d1cf362d505a1ef898fde44a07c9dd8a5"
             ],
             "index": "pypi",
-            "version": "==4.2.1"
+            "version": "==6.0.3"
         },
         "urllib3": {
             "hashes": [
-                "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
-                "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
+                "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
+                "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
             ],
-            "version": "==1.25.8"
+            "version": "==1.25.11"
         }
     },
     "develop": {
         "bleach": {
             "hashes": [
-                "sha256:cc8da25076a1fe56c3ac63671e2194458e0c4d9c7becfd52ca251650d517903c",
-                "sha256:e78e426105ac07026ba098f04de8abe9b6e3e98b5befbf89b51a5ef0a4292b03"
+                "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125",
+                "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"
             ],
-            "index": "pypi",
-            "version": "==3.1.4"
+            "version": "==3.3.0"
         },
         "certifi": {
             "hashes": [
-                "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
-                "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
+                "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c",
+                "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"
+            ],
+            "version": "==2020.12.5"
+        },
+        "cffi": {
+            "hashes": [
+                "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813",
+                "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06",
+                "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea",
+                "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee",
+                "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396",
+                "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73",
+                "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315",
+                "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1",
+                "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49",
+                "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892",
+                "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482",
+                "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058",
+                "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5",
+                "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53",
+                "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045",
+                "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3",
+                "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5",
+                "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e",
+                "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c",
+                "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369",
+                "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827",
+                "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053",
+                "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa",
+                "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4",
+                "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322",
+                "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132",
+                "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62",
+                "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa",
+                "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0",
+                "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396",
+                "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e",
+                "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991",
+                "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6",
+                "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1",
+                "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406",
+                "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d",
+                "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"
             ],
-            "version": "==2019.11.28"
+            "version": "==1.14.5"
         },
         "chardet": {
             "hashes": [
@@ -98,50 +151,80 @@
             "index": "pypi",
             "version": "==3.0.4"
         },
+        "colorama": {
+            "hashes": [
+                "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b",
+                "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"
+            ],
+            "version": "==0.4.4"
+        },
         "coverage": {
             "hashes": [
-                "sha256:3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9",
-                "sha256:39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74",
-                "sha256:3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390",
-                "sha256:465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8",
-                "sha256:48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe",
-                "sha256:5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf",
-                "sha256:5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e",
-                "sha256:68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741",
-                "sha256:6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09",
-                "sha256:7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd",
-                "sha256:7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034",
-                "sha256:839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420",
-                "sha256:8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c",
-                "sha256:932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab",
-                "sha256:988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba",
-                "sha256:998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e",
-                "sha256:9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609",
-                "sha256:9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2",
-                "sha256:a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49",
-                "sha256:a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b",
-                "sha256:aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d",
-                "sha256:bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce",
-                "sha256:bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9",
-                "sha256:c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4",
-                "sha256:c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773",
-                "sha256:c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723",
-                "sha256:df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c",
-                "sha256:f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f",
-                "sha256:f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1",
-                "sha256:f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260",
-                "sha256:fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a"
+                "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516",
+                "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259",
+                "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9",
+                "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097",
+                "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0",
+                "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f",
+                "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7",
+                "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c",
+                "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5",
+                "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7",
+                "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729",
+                "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978",
+                "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9",
+                "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f",
+                "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9",
+                "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822",
+                "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418",
+                "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82",
+                "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f",
+                "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d",
+                "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221",
+                "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4",
+                "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21",
+                "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709",
+                "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54",
+                "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d",
+                "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270",
+                "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24",
+                "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751",
+                "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a",
+                "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237",
+                "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7",
+                "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636",
+                "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"
             ],
             "index": "pypi",
-            "version": "==4.5.3"
+            "version": "==5.3"
         },
         "coveralls": {
             "hashes": [
-                "sha256:baa26648430d5c2225ab12d7e2067f75597a4b967034bba7e3d5ab7501d207a1",
-                "sha256:ff9b7823b15070f26f654837bb02a201d006baaf2083e0514ffd3b34a3ffed81"
+                "sha256:4430b862baabb3cf090d36d84d331966615e4288d8a8c5957e0fd456d0dd8bd6",
+                "sha256:b3b60c17b03a0dee61952a91aed6f131e0b2ac8bd5da909389c53137811409e1"
             ],
             "index": "pypi",
-            "version": "==1.7.0"
+            "version": "==2.1.2"
+        },
+        "cryptography": {
+            "hashes": [
+                "sha256:0d7b69674b738068fa6ffade5c962ecd14969690585aaca0a1b1fc9058938a72",
+                "sha256:1bd0ccb0a1ed775cd7e2144fe46df9dc03eefd722bbcf587b3e0616ea4a81eff",
+                "sha256:3c284fc1e504e88e51c428db9c9274f2da9f73fdf5d7e13a36b8ecb039af6e6c",
+                "sha256:49570438e60f19243e7e0d504527dd5fe9b4b967b5a1ff21cc12b57602dd85d3",
+                "sha256:541dd758ad49b45920dda3b5b48c968f8b2533d8981bcdb43002798d8f7a89ed",
+                "sha256:5a60d3780149e13b7a6ff7ad6526b38846354d11a15e21068e57073e29e19bed",
+                "sha256:7951a966613c4211b6612b0352f5bf29989955ee592c4a885d8c7d0f830d0433",
+                "sha256:922f9602d67c15ade470c11d616f2b2364950602e370c76f0c94c94ae672742e",
+                "sha256:a0f0b96c572fc9f25c3f4ddbf4688b9b38c69836713fb255f4a2715d93cbaf44",
+                "sha256:a777c096a49d80f9d2979695b835b0f9c9edab73b59e4ceb51f19724dda887ed",
+                "sha256:a9a4ac9648d39ce71c2f63fe7dc6db144b9fa567ddfc48b9fde1b54483d26042",
+                "sha256:aa4969f24d536ae2268c902b2c3d62ab464b5a66bcb247630d208a79a8098e9b",
+                "sha256:c7390f9b2119b2b43160abb34f63277a638504ef8df99f11cb52c1fda66a2e6f",
+                "sha256:e18e6ab84dfb0ab997faf8cca25a86ff15dfea4027b986322026cc99e0a892da"
+            ],
+            "index": "pypi",
+            "version": "==3.3.2"
         },
         "docopt": {
             "hashes": [
@@ -156,20 +239,13 @@
             ],
             "version": "==0.16"
         },
-        "entrypoints": {
-            "hashes": [
-                "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19",
-                "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"
-            ],
-            "version": "==0.3"
-        },
         "flake8": {
             "hashes": [
-                "sha256:859996073f341f2670741b51ec1e67a01da142831aa1fdc6242dbf88dffbe661",
-                "sha256:a796a115208f5c03b18f332f7c11729812c8c3ded6c46319c59b53efd3819da8"
+                "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839",
+                "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"
             ],
             "index": "pypi",
-            "version": "==3.7.7"
+            "version": "==3.8.4"
         },
         "flake8-commas": {
             "hashes": [
@@ -188,17 +264,32 @@
         },
         "flake8-quotes": {
             "hashes": [
-                "sha256:10c9af6b472d4302a8e721c5260856c3f985c5c082b04841aefd2f808ac02038"
+                "sha256:3f1116e985ef437c130431ac92f9b3155f8f652fda7405ac22ffdfd7a9d1055e"
             ],
             "index": "pypi",
-            "version": "==2.0.1"
+            "version": "==3.2.0"
         },
         "idna": {
             "hashes": [
-                "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
-                "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
+                "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
+                "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
+            ],
+            "version": "==2.10"
+        },
+        "jeepney": {
+            "hashes": [
+                "sha256:7d59b6622675ca9e993a6bd38de845051d315f8b0c72cca3aef733a20b648657",
+                "sha256:aec56c0eb1691a841795111e184e13cad504f7703b9a64f63020816afa79a8ae"
             ],
-            "version": "==2.8"
+            "markers": "sys_platform == 'linux'",
+            "version": "==0.6.0"
+        },
+        "keyring": {
+            "hashes": [
+                "sha256:9acb3e1452edbb7544822b12fd25459078769e560fa51f418b6d00afaa6178df",
+                "sha256:9f44660a5d4931bdc14c08a1d01ef30b18a7a8147380710d8c9f9531e1f6c3c0"
+            ],
+            "version": "==22.0.1"
         },
         "mccabe": {
             "hashes": [
@@ -209,20 +300,23 @@
         },
         "mypy": {
             "hashes": [
-                "sha256:2afe51527b1f6cdc4a5f34fc90473109b22bf7f21086ba3e9451857cf11489e6",
-                "sha256:56a16df3e0abb145d8accd5dbb70eba6c4bd26e2f89042b491faa78c9635d1e2",
-                "sha256:5764f10d27b2e93c84f70af5778941b8f4aa1379b2430f85c827e0f5464e8714",
-                "sha256:5bbc86374f04a3aa817622f98e40375ccb28c4836f36b66706cf3c6ccce86eda",
-                "sha256:6a9343089f6377e71e20ca734cd8e7ac25d36478a9df580efabfe9059819bf82",
-                "sha256:6c9851bc4a23dc1d854d3f5dfd5f20a016f8da86bcdbb42687879bb5f86434b0",
-                "sha256:b8e85956af3fcf043d6f87c91cbe8705073fc67029ba6e22d3468bfee42c4823",
-                "sha256:b9a0af8fae490306bc112229000aa0c2ccc837b49d29a5c42e088c132a2334dd",
-                "sha256:bbf643528e2a55df2c1587008d6e3bda5c0445f1240dfa85129af22ae16d7a9a",
-                "sha256:c46ab3438bd21511db0f2c612d89d8344154c0c9494afc7fbc932de514cf8d15",
-                "sha256:f7a83d6bd805855ef83ec605eb01ab4fa42bcef254b13631e451cbb44914a9b0"
+                "sha256:0a0d102247c16ce93c97066443d11e2d36e6cc2a32d8ccc1f705268970479324",
+                "sha256:0d34d6b122597d48a36d6c59e35341f410d4abfa771d96d04ae2c468dd201abc",
+                "sha256:2170492030f6faa537647d29945786d297e4862765f0b4ac5930ff62e300d802",
+                "sha256:2842d4fbd1b12ab422346376aad03ff5d0805b706102e475e962370f874a5122",
+                "sha256:2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975",
+                "sha256:72060bf64f290fb629bd4a67c707a66fd88ca26e413a91384b18db3876e57ed7",
+                "sha256:af4e9ff1834e565f1baa74ccf7ae2564ae38c8df2a85b057af1dbbc958eb6666",
+                "sha256:bd03b3cf666bff8d710d633d1c56ab7facbdc204d567715cb3b9f85c6e94f669",
+                "sha256:c614194e01c85bb2e551c421397e49afb2872c88b5830e3554f0519f9fb1c178",
+                "sha256:cf4e7bf7f1214826cf7333627cb2547c0db7e3078723227820d0a2490f117a01",
+                "sha256:da56dedcd7cd502ccd3c5dddc656cb36113dd793ad466e894574125945653cea",
+                "sha256:e86bdace26c5fe9cf8cb735e7cedfe7850ad92b327ac5d797c656717d2ca66de",
+                "sha256:e97e9c13d67fbe524be17e4d8025d51a7dca38f90de2e462243ab8ed8a9178d1",
+                "sha256:eea260feb1830a627fb526d22fbb426b750d9f5a47b624e8d5e7e004359b219c"
             ],
             "index": "pypi",
-            "version": "==0.701"
+            "version": "==0.790"
         },
         "mypy-extensions": {
             "hashes": [
@@ -231,56 +325,77 @@
             ],
             "version": "==0.4.3"
         },
+        "packaging": {
+            "hashes": [
+                "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5",
+                "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"
+            ],
+            "version": "==20.9"
+        },
         "pep8-naming": {
             "hashes": [
-                "sha256:01cb1dab2f3ce9045133d08449f1b6b93531dceacb9ef04f67087c11c723cea9",
-                "sha256:0ec891e59eea766efd3059c3d81f1da304d858220678bdc351aab73c533f2fbb"
+                "sha256:a1dd47dd243adfe8a83616e27cf03164960b507530f155db94e10b36a6cd6724",
+                "sha256:f43bfe3eea7e0d73e8b5d07d6407ab47f2476ccaeff6937c84275cd30b016738"
             ],
             "index": "pypi",
-            "version": "==0.8.2"
+            "version": "==0.11.1"
         },
         "pkginfo": {
             "hashes": [
-                "sha256:7424f2c8511c186cd5424bbf31045b77435b37a8d604990b79d4e70d741148bb",
-                "sha256:a6d9e40ca61ad3ebd0b72fbadd4fba16e4c0e4df0428c041e01e06eb6ee71f32"
+                "sha256:029a70cb45c6171c329dfc890cde0879f8c52d6f3922794796e06f577bb03db4",
+                "sha256:9fdbea6495622e022cc72c2e5e1b735218e4ffb2a2a69cde2694a6c1f16afb75"
             ],
-            "version": "==1.5.0.1"
+            "version": "==1.7.0"
         },
         "pycodestyle": {
             "hashes": [
-                "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56",
-                "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c"
+                "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367",
+                "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e"
+            ],
+            "version": "==2.6.0"
+        },
+        "pycparser": {
+            "hashes": [
+                "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0",
+                "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"
             ],
-            "version": "==2.5.0"
+            "version": "==2.20"
         },
         "pyflakes": {
             "hashes": [
-                "sha256:17dbeb2e3f4d772725c777fabc446d5634d1038f234e77343108ce445ea69ce0",
-                "sha256:d976835886f8c5b31d47970ed689944a0262b5f3afa00a5a7b4dc81e5449f8a2"
+                "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92",
+                "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"
             ],
-            "version": "==2.1.1"
+            "version": "==2.2.0"
         },
         "pygments": {
             "hashes": [
-                "sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
-                "sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
+                "sha256:37a13ba168a02ac54cc5891a42b1caec333e59b66addb7fa633ea8a6d73445c0",
+                "sha256:b21b072d0ccdf29297a82a2363359d99623597b8a265b8081760e4d0f7153c88"
             ],
-            "version": "==2.6.1"
+            "version": "==2.8.0"
+        },
+        "pyparsing": {
+            "hashes": [
+                "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
+                "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
+            ],
+            "version": "==2.4.7"
         },
         "readme-renderer": {
             "hashes": [
-                "sha256:1b6d8dd1673a0b293766b4106af766b6eff3654605f9c4f239e65de6076bc222",
-                "sha256:e67d64242f0174a63c3b727801a2fff4c1f38ebe5d71d95ff7ece081945a6cd4"
+                "sha256:63b4075c6698fcfa78e584930f07f39e05d46f3ec97f65006e430b595ca6348c",
+                "sha256:92fd5ac2bf8677f310f3303aa4bce5b9d5f9f2094ab98c29f13791d7b805a3db"
             ],
-            "version": "==25.0"
+            "version": "==29.0"
         },
         "requests": {
             "hashes": [
-                "sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
-                "sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
+                "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b",
+                "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"
             ],
             "index": "pypi",
-            "version": "==2.22.0"
+            "version": "==2.24.0"
         },
         "requests-toolbelt": {
             "hashes": [
@@ -289,58 +404,92 @@
             ],
             "version": "==0.9.1"
         },
+        "rfc3986": {
+            "hashes": [
+                "sha256:112398da31a3344dc25dbf477d8df6cb34f9278a94fee2625d89e4514be8bb9d",
+                "sha256:af9147e9aceda37c91a05f4deb128d4b4b49d6b199775fd2d2927768abdc8f50"
+            ],
+            "version": "==1.4.0"
+        },
+        "secretstorage": {
+            "hashes": [
+                "sha256:422d82c36172d88d6a0ed5afdec956514b189ddbfb72fefab0c8a1cee4eaf71f",
+                "sha256:fd666c51a6bf200643495a04abb261f83229dcb6fd8472ec393df7ffc8b6f195"
+            ],
+            "markers": "sys_platform == 'linux'",
+            "version": "==3.3.1"
+        },
         "six": {
             "hashes": [
-                "sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
-                "sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
+                "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
+                "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
             ],
-            "version": "==1.14.0"
+            "version": "==1.15.0"
         },
         "tqdm": {
             "hashes": [
-                "sha256:03d2366c64d44c7f61e74c700d9b202d57e9efe355ea5c28814c52bfe7a50b8c",
-                "sha256:be5ddeec77d78ba781ea41eacb2358a77f74cc2407f54b82222d7ee7dc8c8ccf"
+                "sha256:2c44efa73b8914dba7807aefd09653ac63c22b5b4ea34f7a80973f418f1a3089",
+                "sha256:c23ac707e8e8aabb825e4d91f8e17247f9cc14b0d64dd9e97be0781e9e525bba"
             ],
-            "version": "==4.44.1"
+            "version": "==4.58.0"
         },
         "twine": {
             "hashes": [
-                "sha256:0fb0bfa3df4f62076cab5def36b1a71a2e4acb4d1fa5c97475b048117b1a6446",
-                "sha256:d6c29c933ecfc74e9b1d9fa13aa1f87c5d5770e119f5a4ce032092f0ff5b14dc"
+                "sha256:34352fd52ec3b9d29837e6072d5a2a7c6fe4290e97bba46bb8d478b5c598f7ab",
+                "sha256:ba9ff477b8d6de0c89dd450e70b2185da190514e91c42cc62f96850025c10472"
             ],
             "index": "pypi",
-            "version": "==1.13.0"
+            "version": "==3.2.0"
         },
         "typed-ast": {
             "hashes": [
-                "sha256:132eae51d6ef3ff4a8c47c393a4ef5ebf0d1aecc96880eb5d6c8ceab7017cc9b",
-                "sha256:18141c1484ab8784006c839be8b985cfc82a2e9725837b0ecfa0203f71c4e39d",
-                "sha256:2baf617f5bbbfe73fd8846463f5aeafc912b5ee247f410700245d68525ec584a",
-                "sha256:3d90063f2cbbe39177e9b4d888e45777012652d6110156845b828908c51ae462",
-                "sha256:4304b2218b842d610aa1a1d87e1dc9559597969acc62ce717ee4dfeaa44d7eee",
-                "sha256:4983ede548ffc3541bae49a82675996497348e55bafd1554dc4e4a5d6eda541a",
-                "sha256:5315f4509c1476718a4825f45a203b82d7fdf2a6f5f0c8f166435975b1c9f7d4",
-                "sha256:6cdfb1b49d5345f7c2b90d638822d16ba62dc82f7616e9b4caa10b72f3f16649",
-                "sha256:7b325f12635598c604690efd7a0197d0b94b7d7778498e76e0710cd582fd1c7a",
-                "sha256:8d3b0e3b8626615826f9a626548057c5275a9733512b137984a68ba1598d3d2f",
-                "sha256:8f8631160c79f53081bd23446525db0bc4c5616f78d04021e6e434b286493fd7",
-                "sha256:912de10965f3dc89da23936f1cc4ed60764f712e5fa603a09dd904f88c996760",
-                "sha256:b010c07b975fe853c65d7bbe9d4ac62f1c69086750a574f6292597763781ba18",
-                "sha256:c908c10505904c48081a5415a1e295d8403e353e0c14c42b6d67f8f97fae6616",
-                "sha256:c94dd3807c0c0610f7c76f078119f4ea48235a953512752b9175f9f98f5ae2bd",
-                "sha256:ce65dee7594a84c466e79d7fb7d3303e7295d16a83c22c7c4037071b059e2c21",
-                "sha256:eaa9cfcb221a8a4c2889be6f93da141ac777eb8819f077e1d09fb12d00a09a93",
-                "sha256:f3376bc31bad66d46d44b4e6522c5c21976bf9bca4ef5987bb2bf727f4506cbb",
-                "sha256:f9202fa138544e13a4ec1a6792c35834250a85958fde1251b6a22e07d1260ae7"
+                "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1",
+                "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d",
+                "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6",
+                "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd",
+                "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37",
+                "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151",
+                "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07",
+                "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440",
+                "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70",
+                "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496",
+                "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea",
+                "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400",
+                "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc",
+                "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606",
+                "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc",
+                "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581",
+                "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412",
+                "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a",
+                "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2",
+                "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787",
+                "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f",
+                "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937",
+                "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64",
+                "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487",
+                "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b",
+                "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41",
+                "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a",
+                "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3",
+                "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166",
+                "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"
+            ],
+            "version": "==1.4.2"
+        },
+        "typing-extensions": {
+            "hashes": [
+                "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918",
+                "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c",
+                "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"
             ],
-            "version": "==1.3.5"
+            "version": "==3.7.4.3"
         },
         "urllib3": {
             "hashes": [
-                "sha256:2f3db8b19923a873b3e5256dc9c2dedfa883e33d87c690d9c7913e1f40673cdc",
-                "sha256:87716c2d2a7121198ebcb7ce7cccf6ce5e9ba539041cfbaeecfb641dc0bf6acc"
+                "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2",
+                "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"
             ],
-            "version": "==1.25.8"
+            "version": "==1.25.11"
         },
         "webencodings": {
             "hashes": [
diff -Nru apertium-apy-0.11.6/README.md apertium-apy-0.11.7/README.md
--- apertium-apy-0.11.6/README.md	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/README.md	2021-04-01 15:20:33.000000000 +0530
@@ -14,8 +14,8 @@
 Requirements
 ------------
 
-- Python 3.5+
-- Tornado 4.3-6.0.4 (`python3-tornado` on Debian)
+- Python 3.6+
+- Tornado 4.5.3 - 6.0.4 (`python3-tornado` on Debian/Ubuntu)
 
 Additional functionality is provided by installation
 of the following packages:
diff -Nru apertium-apy-0.11.6/servlet.py apertium-apy-0.11.7/servlet.py
--- apertium-apy-0.11.6/servlet.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/servlet.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 from apertium_apy.apy import main
 
diff -Nru apertium-apy-0.11.6/tests/sanity-test-apy.py apertium-apy-0.11.7/tests/sanity-test-apy.py
--- apertium-apy-0.11.6/tests/sanity-test-apy.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/tests/sanity-test-apy.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 
 import html.parser
 import json
diff -Nru apertium-apy-0.11.6/tests/test.py apertium-apy-0.11.7/tests/test.py
--- apertium-apy-0.11.6/tests/test.py	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/tests/test.py	2021-04-01 15:20:33.000000000 +0530
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/python3
 # coding=utf-8
 
 import io
@@ -7,6 +7,7 @@
 import mimetypes
 import os
 import shlex
+import shutil
 import subprocess
 import sys
 import time
@@ -41,7 +42,14 @@
 
 def setUpModule():  # noqa: N802
     global server_handle
-    coverage_cli_args = shlex.split('coverage run --rcfile {}'.format(os.path.join(base_path, '.coveragerc')))
+    coverage_cli_args = []
+    if shutil.which('coverage'):
+        coverage_cli_args = shlex.split('coverage run --rcfile {}'.format(os.path.join(base_path, '.coveragerc')))
+    else:
+        logging.warning("Couldn't find `coverage` executable, not running coverage tests!")
+        for _ in range(3):
+            time.sleep(1)
+            print('.')
     server_handle = subprocess.Popen(coverage_cli_args + [os.path.join(base_path, 'servlet.py')] + cli_args)  # TODO: print only on error?
 
     started = False
@@ -164,6 +172,36 @@
         self.assertTrue(response.items() >= expect.items(), '{} is missing {}'.format(response, expect))
 
 
+class TestPerWordHandler(BaseTestCase):
+    def test_per_word_nno(self):
+        response = self.fetch_json('/perWord', {'lang': 'nno', 'modes': 'morph tagger', 'q': 'og ikkje'})
+        expected = [
+            {
+                'input': 'og',
+                'tagger': 'og<cnjcoo><clb>',
+                'morph': [
+                    'og<cnjcoo>',
+                    'og<cnjcoo><clb>',
+                ],
+            },
+            {
+                'input': 'ikkje',
+                'tagger': 'ikkje<adv>',
+                'morph': [
+                    'ikkje<adv>',
+                ],
+            },
+            {
+                'input': '.',
+                'tagger': '.<sent><clb>',
+                'morph': [
+                    '.<sent><clb>',
+                ],
+            },
+        ]
+        self.assertEqual(response, expected)
+
+
 class TestTranslateHandler(BaseTestCase):
     def fetch_translation(self, query, pair, **kwargs):
         params = kwargs.get('params', {})
@@ -231,7 +269,7 @@
             'url': 'http://example.org/',
             'markUnknown': 'no',
         })
-        self.assertIn('Ámbito', response['responseData']['translatedText'])
+        self.assertIn('literatura', response['responseData']['translatedText'])
 
     def test_translate_invalid_webpage(self):
         response = self.fetch_json('/translatePage', params={
diff -Nru apertium-apy-0.11.6/.travis.yml apertium-apy-0.11.7/.travis.yml
--- apertium-apy-0.11.6/.travis.yml	2020-07-01 13:06:52.000000000 +0530
+++ apertium-apy-0.11.7/.travis.yml	1970-01-01 05:30:00.000000000 +0530
@@ -1,40 +0,0 @@
-language: python
-sudo: required
-dist: xenial
-cache:
-    pip: true
-    directories:
-        - /tmp/languages
-python:
-    - '3.5'
-    - '3.6'
-    - '3.7'
-    - 'nightly'
-install:
-    - pip install pipenv
-    - travis_retry pipenv install --dev --system
-    - make
-before_script:
-    - wget http://apertium.projectjj.com/apt/install-nightly.sh -O - | sudo bash
-    - sudo apt-get -f --allow-unauthenticated install apertium-all-dev
-    - sudo apt-get -f --allow-unauthenticated install apertium-sme-nob apertium-es-en
-    - git clone --depth 1 https://github.com/apertium/apertium-nno /tmp/languages/apertium-nno || echo 'Using cached apertium-nno'
-    - ( cd /tmp/languages/apertium-nno && ./autogen.sh && make -j2 )
-script:
-    - NONPAIRS=/tmp/languages make test
-after_success:
-    - coveralls
-notifications:
-    irc:
-        channels:
-            - secure: 'Ijxnr2SbP1iqWO1puvWTQfyzW2yav/OCSScy6F20MOtBLozYEeHn6ruzLVPa7r7/FibCBkS6tCYQx9DJeJwJaXAn81VEY1Fk9B5SeiMpm5VWjN83YIeJzpEkiIT1YfEU5nfgYyOyqzaVJbE6Ssae/SBsfkYrYqd8T1YajoWO/qNX/0VfLBtfHSR2ysx/IbXM3McMnqBRS+WGHBijSOIewp7SCnAJ+J7SYLLkwFIdE1eFELiDBLv0ciiPacShGy4A3pNZyJDeI/CFgs+gNEZEwiG4gjuW1sdZ1NfvktCTmpzH185Co1YaGVpzCDSlp+IuKyKGjbQE+n2q66V1fixVq9rL15MibzZU/846YLPLYkHOM8ZTMXB4VLtyz2/JgAe0M78QM+1nKD3QqN2tPIHCTohvwrw7RGVNZHh4LMkbSLfeuyXBM/znlWmBCS14bFvNK+jrZdVm1m3ltIrMZpC+E90EQ/KAoSONeF8l/sW+Q92lI/2jGVOuQmSJrlNUIjKb3cCGZU03AVveNa7Y5bP/LfP64xBfApPgmdl7LVLmFJbNP+N+0M13bTUHOuxqV0OLxYM7M9VUPW4PwGCRc+OvgrHa7hHzSPKYZFZ9Yvt8KB2PTZojxrYn+2179mVj78NkuOEs980MpsbKRwpSLYhCL7tDHclyb6vQAJTOFagkXO0='
-    on_failure: change
-    on_success: change
-matrix:
-    allow_failures:
-        - python: nightly
-
-    # The irc channel is encrypted for apertium/apertium-apy, so build notifications from forks won't show up on the IRC channel
-    # Encrypt with:
-    # $ gem install --user-install travis
-    # $ ~/.gem/ruby/*/bin/travis encrypt -r apertium/apertium-apy 'chat.freenode.net#apertium'
