Updated dependencies.

Also changed the base Docker image from Centos to Rocky Linux.
master
Pacman Ghost 2 years ago
parent 9121a89c13
commit bb4e036649
  1. 3
      .pylintrc
  2. 2
      Dockerfile
  3. 6
      asl_rulebook2/bin/prepare_pdf.py
  4. 2
      asl_rulebook2/extract/content.py
  5. 30
      asl_rulebook2/tests/test_extract.py
  6. 2
      asl_rulebook2/utils.py
  7. 2
      asl_rulebook2/webapp/asop.py
  8. 2
      asl_rulebook2/webapp/main.py
  9. 4
      asl_rulebook2/webapp/prepare.py
  10. 4
      asl_rulebook2/webapp/rule_info.py
  11. 3
      asl_rulebook2/webapp/run_server.py
  12. 4
      asl_rulebook2/webapp/startup.py
  13. 8
      asl_rulebook2/webapp/tests/test_asop.py
  14. 4
      asl_rulebook2/webapp/tests/test_content_sets.py
  15. 4
      asl_rulebook2/webapp/tests/test_doc.py
  16. 3
      asl_rulebook2/webapp/tests/test_prepare.py
  17. 11
      asl_rulebook2/webapp/tests/utils.py
  18. 11
      conftest.py
  19. 10
      requirements-dev.txt
  20. 12
      requirements.txt
  21. 11
      setup.py

@@ -148,7 +148,8 @@ disable=print-statement,
duplicate-code, duplicate-code,
no-else-return, no-else-return,
consider-using-enumerate, consider-using-enumerate,
too-many-lines too-many-lines,
consider-using-f-string
# Enable the message, report, category or checker with the given id(s). You can # Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option # either give multiple identifier separated by comma (,) or put this option

@@ -1,7 +1,7 @@
# NOTE: Use the run-container.sh script to build and launch this container. # NOTE: Use the run-container.sh script to build and launch this container.
# NOTE: Multi-stage builds require Docker >= 17.05. # NOTE: Multi-stage builds require Docker >= 17.05.
FROM centos:8 AS base FROM rockylinux:8 AS base
# update packages and install requirements # update packages and install requirements
RUN dnf -y upgrade-minimal && \ RUN dnf -y upgrade-minimal && \

@@ -32,10 +32,10 @@ def prepare_pdf( pdf_file,
"""Prepare the MMP eASLRB PDF.""" """Prepare the MMP eASLRB PDF."""
# load the targets # load the targets
with open( targets_fname, "r" ) as fp: with open( targets_fname, "r", encoding="utf-8" ) as fp:
targets = json.load( fp ) targets = json.load( fp )
if vo_notes_fname: if vo_notes_fname:
with open( vo_notes_fname, "r" ) as fp: with open( vo_notes_fname, "r", encoding="utf-8" ) as fp:
vo_notes_targets = json.load( fp ) vo_notes_targets = json.load( fp )
else: else:
vo_notes_targets = None vo_notes_targets = None
@@ -116,7 +116,7 @@ def prepare_pdf( pdf_file,
def _run_subprocess( args, caption, relinq ): def _run_subprocess( args, caption, relinq ):
"""Run an external process.""" """Run an external process."""
proc = subprocess.Popen( args ) proc = subprocess.Popen( args ) #pylint: disable=consider-using-with
try: try:
pass_no = 0 pass_no = 0
while True: while True:

@@ -783,7 +783,7 @@ class ExtractContent( ExtractBase ):
jsonval(caption[1]), jsonval(caption[0]) jsonval(caption[1]), jsonval(caption[0])
) )
chapters = [] chapters = []
for chapter in self._footnotes: for chapter in self._footnotes: #pylint: disable=consider-using-dict-items
footnotes = [] footnotes = []
for footnote in self._footnotes[chapter]: for footnote in self._footnotes[chapter]:
footnotes.append( "{}: {{\n \"captions\": {},\n \"content\": {}\n}}".format( footnotes.append( "{}: {{\n \"captions\": {},\n \"content\": {}\n}}".format(

@@ -32,7 +32,8 @@ def test_extract_index():
# check the results # check the results
fname = os.path.join( dname, "index.txt" ) fname = os.path.join( dname, "index.txt" )
assert open( fname, "r", encoding="utf-8" ).read() == buf with open( fname, "r", encoding="utf-8" ) as fp:
assert fp.read() == buf
# run the test # run the test
for_each_easlrb_version( do_test ) for_each_easlrb_version( do_test )
@@ -61,13 +62,17 @@ def test_extract_content():
# check the results # check the results
fname2 = os.path.join( dname, "targets.txt" ) fname2 = os.path.join( dname, "targets.txt" )
assert open( fname2, "r", encoding="utf-8" ).read() == targets_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == targets_buf
fname2 = os.path.join( dname, "chapters.txt" ) fname2 = os.path.join( dname, "chapters.txt" )
assert open( fname2, "r", encoding="utf-8" ).read() == chapters_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == chapters_buf
fname2 = os.path.join( dname, "footnotes.txt" ) fname2 = os.path.join( dname, "footnotes.txt" )
assert open( fname2, "r", encoding="utf-8" ).read() == footnotes_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == footnotes_buf
fname2 = os.path.join( dname, "vo-notes.txt" ) fname2 = os.path.join( dname, "vo-notes.txt" )
assert open( fname2, "r", encoding="utf-8" ).read() == vo_notes_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == vo_notes_buf
# run the test # run the test
for_each_easlrb_version( do_test ) for_each_easlrb_version( do_test )
@@ -99,15 +104,20 @@ def test_extract_all():
# check the results # check the results
fname2 = os.path.join( dname, "index.json" ) fname2 = os.path.join( dname, "index.json" )
assert open( fname2, "r", encoding="utf-8" ).read() == index_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == index_buf
fname2 = os.path.join( dname, "targets.json" ) fname2 = os.path.join( dname, "targets.json" )
assert open( fname2, "r", encoding="utf-8" ).read() == targets_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == targets_buf
fname2 = os.path.join( dname, "chapters.json" ) fname2 = os.path.join( dname, "chapters.json" )
assert open( fname2, "r", encoding="utf-8" ).read() == chapters_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == chapters_buf
fname2 = os.path.join( dname, "footnotes.json" ) fname2 = os.path.join( dname, "footnotes.json" )
assert open( fname2, "r", encoding="utf-8" ).read() == footnotes_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == footnotes_buf
fname2 = os.path.join( dname, "vo-notes.json" ) fname2 = os.path.join( dname, "vo-notes.json" )
assert open( fname2, "r", encoding="utf-8" ).read() == vo_notes_buf with open( fname2, "r", encoding="utf-8" ) as fp:
assert fp.read() == vo_notes_buf
# run the test # run the test
for_each_easlrb_version( do_test ) for_each_easlrb_version( do_test )

@@ -30,7 +30,7 @@ class TempFile:
else: else:
encoding = "utf-8" if "b" not in self.mode else None encoding = "utf-8" if "b" not in self.mode else None
assert self.temp_file is None assert self.temp_file is None
self.temp_file = tempfile.NamedTemporaryFile( self.temp_file = tempfile.NamedTemporaryFile( #pylint: disable=consider-using-with
mode = self.mode, mode = self.mode,
encoding = encoding, encoding = encoding,
suffix = self.extn, suffix = self.extn,

@@ -129,5 +129,5 @@ def _render_template( fname ):
"ASOP_BASE_URL": url_for( "get_asop_file", path="" ), "ASOP_BASE_URL": url_for( "get_asop_file", path="" ),
} }
args.update( _asop.get( "template_args", {} ) ) args.update( _asop.get( "template_args", {} ) )
with open( fname, "r" ) as fp: with open( fname, "r", encoding="utf-8" ) as fp:
return fname, render_template_string( fp.read(), **args ) return fname, render_template_string( fp.read(), **args )

@@ -69,7 +69,7 @@ def get_control_tests():
servicer = ControlTestsServicer( app ) servicer = ControlTestsServicer( app )
add_ControlTestsServicer_to_server( servicer, server ) add_ControlTestsServicer_to_server( servicer, server )
port_no = parse_int( get_port(), -1 ) # nb: have to get this again?! port_no = parse_int( get_port(), -1 ) # nb: have to get this again?!
if port_no <= 0: if port_no <= 0: #pylint: disable=consider-using-max-builtin
# NOTE: Requesting port 0 tells grpc to use any free port, which is usually OK, unless # NOTE: Requesting port 0 tells grpc to use any free port, which is usually OK, unless
# we're running inside a Docker container, in which case it needs to be pre-defined, # we're running inside a Docker container, in which case it needs to be pre-defined,
# so that the port can be mapped to an external port when the container is started. # so that the port can be mapped to an external port when the container is started.

@@ -169,9 +169,9 @@ def _do_prepare_data_files( args, download_url ):
with zipfile.ZipFile( zip_data, "w", zipfile.ZIP_DEFLATED ) as zip_file: with zipfile.ZipFile( zip_data, "w", zipfile.ZIP_DEFLATED ) as zip_file:
fname_stem = "ASL Rulebook" fname_stem = "ASL Rulebook"
zip_file.writestr( fname_stem+".pdf", pdf_data ) zip_file.writestr( fname_stem+".pdf", pdf_data )
for key in file_data: for key, fdata in file_data.items():
fname = "{}.{}".format( fname_stem, key ) fname = "{}.{}".format( fname_stem, key )
zip_file.writestr( fname, file_data[key] ) zip_file.writestr( fname, fdata )
zip_data = zip_data.getvalue() zip_data = zip_data.getvalue()
# notify the front-end that we're done # notify the front-end that we're done

@@ -184,8 +184,8 @@ def init_errata( startup_msgs, logger ):
logger.info( "- Loaded %s.", plural(len(sources),"source","sources") ) logger.info( "- Loaded %s.", plural(len(sources),"source","sources") )
# fixup all the errata entries with their real source # fixup all the errata entries with their real source
for ruleid in _errata: for ruleid, annos in _errata.items():
for anno in _errata[ruleid]: for anno in annos:
if "source" in anno: if "source" in anno:
anno["source"] = sources.get( anno["source"], anno["source"] ) anno["source"] = sources.get( anno["source"], anno["source"] )

@@ -77,7 +77,8 @@ def main( bind_addr, data_dir, force_init_delay, flask_debug ):
def _start_server(): def _start_server():
time.sleep( force_init_delay ) time.sleep( force_init_delay )
url = "http://{}:{}".format( flask_host, flask_port ) url = "http://{}:{}".format( flask_host, flask_port )
_ = urllib.request.urlopen( url ) with urllib.request.urlopen( url ) as fp:
_ = fp.read()
threading.Thread( target=_start_server, daemon=True ).start() threading.Thread( target=_start_server, daemon=True ).start()
# run the server # run the server

@@ -80,9 +80,7 @@ def init_webapp():
# NOTE: This is quite a slow process (~1 minute for a full data load), which is why we don't do it inline, # NOTE: This is quite a slow process (~1 minute for a full data load), which is why we don't do it inline,
# during the normal startup process. So, we start up using the original content, and if the user does # during the normal startup process. So, we start up using the original content, and if the user does
# a search, that's what they will see, but we fix it up in the background, and the new content will # a search, that's what they will see, but we fix it up in the background, and the new content will
# eventually start to be returned as search results. We could do this process once, and save the results # eventually start to be returned as search results.
# in a file, then reload everything at startup, which will obviously be much faster, but we then have to
# figure out when that file needs to be rebuolt :-/
if app.config.get( "BLOCKING_STARTUP_TASKS" ): if app.config.get( "BLOCKING_STARTUP_TASKS" ):
# NOTE: It's useful to do this synchronously when running the test suite, since if the tests # NOTE: It's useful to do this synchronously when running the test suite, since if the tests
# need the linkified ruleid's, they can't start until the fixup has finished (and if they don't # need the linkified ruleid's, they can't start until the fixup has finished (and if they don't

@@ -20,7 +20,7 @@ def test_asop_nav( webdriver, webapp ):
# load the ASOP # load the ASOP
fname = os.path.join( os.path.dirname(__file__), "fixtures/asop/asop/index.json" ) fname = os.path.join( os.path.dirname(__file__), "fixtures/asop/asop/index.json" )
with open( fname, "r" ) as fp: with open( fname, "r", encoding="utf-8" ) as fp:
asop_index = json.load( fp ) asop_index = json.load( fp )
# check the nav # check the nav
@@ -64,7 +64,7 @@ def test_asop_content( webdriver, webapp ):
fname = os.path.join( base_dir, fname ) fname = os.path.join( base_dir, fname )
if not os.path.isfile( fname ): if not os.path.isfile( fname ):
return None return None
with open( fname, "r" ) as fp: with open( fname, "r", encoding="utf-8" ) as fp:
return json.load( fp ) if as_json else fp.read() return json.load( fp ) if as_json else fp.read()
# load the ASOP index # load the ASOP index
@@ -121,6 +121,8 @@ def test_asop_content( webdriver, webapp ):
assert content[0:20] in expected_content assert content[0:20] in expected_content
# check each individual section # check each individual section
def check_title():
return find_child( "#asop .title" ).text == expected
for section_no, nav_section in enumerate( nav[chapter_no]["sections"] ): for section_no, nav_section in enumerate( nav[chapter_no]["sections"] ):
# click on the section in the nav pane # click on the section in the nav pane
@@ -130,7 +132,7 @@ def test_asop_content( webdriver, webapp ):
expected = expected_sections[ section_no ][ "caption" ] expected = expected_sections[ section_no ][ "caption" ]
if expected_chapter.get( "sniper_phase" ): if expected_chapter.get( "sniper_phase" ):
expected += "\u2020" expected += "\u2020"
wait_for( 2, lambda: find_child("#asop .title").text == expected ) wait_for( 2, check_title )
# check the preamble # check the preamble
# NOTE: The preamble is part of the parent chapter, and so should remain unchanged. # NOTE: The preamble is part of the parent chapter, and so should remain unchanged.

@@ -1,5 +1,7 @@
""" Test how content sets are handled. """ """ Test how content sets are handled. """
from selenium.webdriver.common.by import By
from asl_rulebook2.webapp.tests.utils import init_webapp, select_tabbed_page, get_curr_target, \ from asl_rulebook2.webapp.tests.utils import init_webapp, select_tabbed_page, get_curr_target, \
set_stored_msg_marker, get_last_error_msg, find_child, find_children, wait_for, has_class set_stored_msg_marker, get_last_error_msg, find_child, find_children, wait_for, has_class
from asl_rulebook2.webapp.tests.test_search import do_search from asl_rulebook2.webapp.tests.test_search import do_search
@@ -144,7 +146,7 @@ def _select_chapter( chapter_elem ):
find_child( ".title", chapter_elem ).click() find_child( ".title", chapter_elem ).click()
wait_for( 2, lambda: find_child( ".entries", chapter_elem ).is_displayed() ) wait_for( 2, lambda: find_child( ".entries", chapter_elem ).is_displayed() )
# make sure all other chapters are collapsed # make sure all other chapters are collapsed
parent = chapter_elem.find_element_by_xpath( ".." ) parent = chapter_elem.find_element( By.XPATH, ".." )
assert has_class( parent, "accordian" ) assert has_class( parent, "accordian" )
for elem in find_children( ".accordian-pane", parent ): for elem in find_children( ".accordian-pane", parent ):
is_expanded = find_child( ".entries", elem ).is_displayed() is_expanded = find_child( ".entries", elem ).is_displayed()

@@ -19,8 +19,8 @@ def test_doc( webapp, webdriver ):
def get_doc( path ): def get_doc( path ):
# get the specified documentation file # get the specified documentation file
url = "{}/{}".format( webapp.base_url, path ) url = "{}/{}".format( webapp.base_url, path )
resp = urllib.request.urlopen( url ).read() with urllib.request.urlopen( url ) as resp:
return resp.decode( "utf-8" ) return resp.read().decode( "utf-8" )
# test a valid documentation file # test a valid documentation file
resp = get_doc( "/doc/prepare.md" ) resp = get_doc( "/doc/prepare.md" )

@@ -88,7 +88,8 @@ def test_full_prepare( webapp, webdriver ):
assert zip_file.getinfo( "ASL Rulebook.pdf" ).file_size > 40*1000 assert zip_file.getinfo( "ASL Rulebook.pdf" ).file_size > 40*1000
for ftype in [ "index", "targets", "chapters", "footnotes" ]: for ftype in [ "index", "targets", "chapters", "footnotes" ]:
fname = os.path.join( dname, ftype+".json" ) fname = os.path.join( dname, ftype+".json" )
expected = json.load( open( fname, "r" ) ) with open( fname, "r", encoding="utf-8" ) as fp:
expected = json.load( fp )
fdata = zip_file.read( "ASL Rulebook.{}".format( ftype ) ) fdata = zip_file.read( "ASL Rulebook.{}".format( ftype ) )
assert json.loads( fdata ) == expected assert json.loads( fdata ) == expected

@@ -8,6 +8,7 @@ import re
import uuid import uuid
from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, TimeoutException from selenium.common.exceptions import NoSuchElementException, TimeoutException
from asl_rulebook2.utils import strip_html from asl_rulebook2.utils import strip_html
@@ -40,9 +41,9 @@ def init_webapp( webapp, webdriver, **options ):
_wait_for_webapp() _wait_for_webapp()
# make sure there were no errors or warnings # make sure there were no errors or warnings
startup_msgs = json.load( url = webapp.url_for( "get_startup_msgs" )
urllib.request.urlopen( webapp.url_for( "get_startup_msgs" ) ) with urllib.request.urlopen( url ) as resp:
) startup_msgs = json.load( resp )
errors = startup_msgs.pop( "error", [] ) errors = startup_msgs.pop( "error", [] )
errors = [ e[0] for e in errors ] errors = [ e[0] for e in errors ]
assert set( errors ) == set( expected_errors ) assert set( errors ) == set( expected_errors )
@@ -171,7 +172,7 @@ def find_child( sel, parent=None ):
try: try:
if parent is None: if parent is None:
parent = _webdriver parent = _webdriver
return parent.find_element_by_css_selector( sel ) return parent.find_element( By.CSS_SELECTOR, sel )
except NoSuchElementException: except NoSuchElementException:
return None return None
@@ -180,7 +181,7 @@ def find_children( sel, parent=None ):
try: try:
if parent is None: if parent is None:
parent = _webdriver parent = _webdriver
return parent.find_elements_by_css_selector( sel ) return parent.find_elements( By.CSS_SELECTOR, sel )
except NoSuchElementException: except NoSuchElementException:
return None return None

@@ -156,8 +156,9 @@ def _make_webapp():
def is_ready(): def is_ready():
"""Try to connect to the webapp server.""" """Try to connect to the webapp server."""
try: try:
resp = urllib.request.urlopen( app.url_for( "ping" ) ).read() url = app.url_for( "ping" )
assert resp == b"pong" with urllib.request.urlopen( url ) as resp:
assert resp.read() == b"pong"
return True return True
except URLError: except URLError:
return False return False
@@ -167,9 +168,9 @@ def _make_webapp():
# set up control of the remote webapp server # set up control of the remote webapp server
try: try:
resp = json.load( url = app.url_for( "get_control_tests" )
urllib.request.urlopen( app.url_for( "get_control_tests" ) ) with urllib.request.urlopen( url ) as resp:
) resp = json.load( resp )
except urllib.error.HTTPError as ex: except urllib.error.HTTPError as ex:
if ex.code == 404: if ex.code == 404:
raise RuntimeError( "Can't get the test control port - has remote test control been enabled?" ) from ex raise RuntimeError( "Can't get the test control port - has remote test control been enabled?" ) from ex

@@ -1,6 +1,6 @@
pytest==6.2.2 pytest==7.0.1
selenium==3.141.0 selenium==4.1.0
grpcio==1.36.1 grpcio==1.44.0
protobuf==3.15.6 protobuf==3.19.4
pylint==2.6.2 pylint==2.12.2
pytest-pylint==0.18.0 pytest-pylint==0.18.0

@@ -1,12 +1,12 @@
# python 3.8.7 # python 3.8.7
flask==1.1.2 flask==2.0.3
flask-socketio==5.1.1 flask-socketio==5.1.1
eventlet==0.33.0 eventlet==0.33.0
pyyaml==5.4.1 pyyaml==6.0
lxml==4.6.2 lxml==4.8.0
markdown==3.3.6 markdown==3.3.6
click==7.1.2 click==8.0.4
pdfminer.six==20201018 pdfminer.six==20211012
pikepdf==2.5.2 pikepdf==5.0.1

@@ -16,11 +16,12 @@ def parse_requirements( fname ):
"""Parse a requirements file.""" """Parse a requirements file."""
lines = [] lines = []
fname = os.path.join( os.path.dirname(__file__), fname ) fname = os.path.join( os.path.dirname(__file__), fname )
for line in open(fname,"r"): with open( fname, "r", encoding="utf-8" ) as fp:
line = line.strip() for line in fp:
if line == "" or line.startswith("#"): line = line.strip()
continue if line == "" or line.startswith("#"):
lines.append( line ) continue
lines.append( line )
return lines return lines
# --------------------------------------------------------------------- # ---------------------------------------------------------------------

Loading…
Cancel
Save