Linkify free-form ruleid's in content.

Branch: master
Author: Pacman Ghost (3 years ago)
Parent: 8f8d03d695
Commit: fe45cb6170
30 changed files:
  1. asl_rulebook2/webapp/asop.py (17)
  2. asl_rulebook2/webapp/content.py (97)
  3. asl_rulebook2/webapp/rule_info.py (6)
  4. asl_rulebook2/webapp/search.py (215)
  5. asl_rulebook2/webapp/startup.py (55)
  6. asl_rulebook2/webapp/static/ASOP.js (10)
  7. asl_rulebook2/webapp/static/ContentPane.js (4)
  8. asl_rulebook2/webapp/static/NavPane.js (7)
  9. asl_rulebook2/webapp/static/RuleInfo.js (37)
  10. asl_rulebook2/webapp/static/SearchPane.js (7)
  11. asl_rulebook2/webapp/static/css/ASOP.css (1)
  12. asl_rulebook2/webapp/static/css/RuleInfo.css (1)
  13. asl_rulebook2/webapp/static/css/SearchResult.css (1)
  14. asl_rulebook2/webapp/static/css/global.css (5)
  15. asl_rulebook2/webapp/static/utils.js (33)
  16. asl_rulebook2/webapp/tests/control_tests_servicer.py (2)
  17. asl_rulebook2/webapp/tests/fixtures/asop/asop/close-combat-2.html (6)
  18. asl_rulebook2/webapp/tests/fixtures/full/ASL Rulebook (Red Barricades).targets (1)
  19. asl_rulebook2/webapp/tests/fixtures/full/ASL Rulebook.index (4)
  20. asl_rulebook2/webapp/tests/fixtures/full/ASL Rulebook.targets (45)
  21. asl_rulebook2/webapp/tests/fixtures/full/annotations.json (2)
  22. asl_rulebook2/webapp/tests/fixtures/full/errata/demo.json (2)
  23. asl_rulebook2/webapp/tests/fixtures/full/q+a/demo.json (2)
  24. asl_rulebook2/webapp/tests/test_asop.py (17)
  25. asl_rulebook2/webapp/tests/test_content_sets.py (1)
  26. asl_rulebook2/webapp/tests/test_linkify_ruleids.py (193)
  27. asl_rulebook2/webapp/tests/test_qa.py (2)
  28. asl_rulebook2/webapp/tests/test_sr_filters.py (4)
  29. asl_rulebook2/webapp/tests/utils.py (6)
  30. conftest.py (6)

@ -5,11 +5,13 @@ import os
from flask import jsonify, render_template_string, send_from_directory, safe_join, url_for, abort
from asl_rulebook2.webapp import app
from asl_rulebook2.webapp.content import tag_ruleids
from asl_rulebook2.webapp.utils import load_data_file
_asop = None
_asop_dir = None
_asop_section_content = None
_footer = None
user_css_url = None
# ---------------------------------------------------------------------
@ -18,8 +20,8 @@ def init_asop( startup_msgs, logger ):
"""Initialize the ASOP."""
# initialize
global _asop, _asop_dir, _asop_section_content, user_css_url
_asop, _asop_section_content = {}, {}
global _asop, _asop_dir, _asop_section_content, _footer, user_css_url
_asop, _asop_section_content, _footer = {}, {}, ""
# get the data directory
data_dir = app.config.get( "DATA_DIR" )
@ -35,7 +37,7 @@ def init_asop( startup_msgs, logger ):
# load the ASOP index
fname = os.path.join( base_dir, "index.json" )
_asop = load_data_file( fname, "ASCOP index", False, logger, startup_msgs.error )
_asop = load_data_file( fname, "ASOP index", False, logger, startup_msgs.error )
if not _asop:
return None, None
@ -52,6 +54,10 @@ def init_asop( startup_msgs, logger ):
content = _render_template( section_id + ".html" )
_asop_section_content[ section_id ] = content
# load the ASOP footer
footer = _render_template( "footer.html" )
_footer = tag_ruleids( footer, None )
return _asop, _asop_section_content
# ---------------------------------------------------------------------
@ -72,10 +78,9 @@ def get_asop_intro():
@app.route( "/asop/footer" )
def get_asop_footer():
"""Return the ASOP footer."""
resp = _render_template( "footer.html" )
if not resp:
if not _footer:
abort( 404 )
return resp
return _footer
@app.route( "/asop/section/<section_id>" )
def get_asop_section( section_id ):

@ -1,6 +1,7 @@
""" Manage the content documents. """
import os
import re
import io
import glob
@ -10,9 +11,12 @@ from asl_rulebook2.webapp import app
from asl_rulebook2.webapp.utils import load_data_file, slugify
_content_sets = None
_target_index = None
_footnote_index = None
_chapter_resources = None
_tag_ruleid_regexes = None
# ---------------------------------------------------------------------
def load_content_sets( startup_msgs, logger ):
@ -28,8 +32,8 @@ def load_content_sets( startup_msgs, logger ):
# in the MMP eASLRB index, and have their own index.
# initialize
global _content_sets, _footnote_index, _chapter_resources
_content_sets, _footnote_index = {}, {}
global _content_sets, _target_index, _footnote_index, _chapter_resources
_content_sets, _target_index, _footnote_index = {}, {}, {}
_chapter_resources = { "background": {}, "icon": {} }
# get the data directory
@ -51,7 +55,11 @@ def load_content_sets( startup_msgs, logger ):
def load_content_doc( fname_stem, title, cdoc_id ):
# load the content doc files
content_doc = { "cdoc_id": cdoc_id, "title": title }
load_file( fname_stem+".targets", content_doc, "targets", startup_msgs.warning )
if load_file( fname_stem+".targets", content_doc, "targets", startup_msgs.warning ):
# update the target index
_target_index[ cdoc_id ] = {}
for ruleid, target in content_doc.get( "targets", {} ).items():
_target_index[ cdoc_id ][ ruleid ] = target
load_file( fname_stem+".chapters", content_doc, "chapters", startup_msgs.warning )
if load_file( fname_stem+".footnotes", content_doc, "footnotes", startup_msgs.warning ):
# update the footnote index
@ -142,6 +150,17 @@ def load_content_sets( startup_msgs, logger ):
# save the new content set
_content_sets[ content_set["cset_id"] ] = content_set
# generate a list of regex's that identify each ruleid
global _tag_ruleid_regexes
_tag_ruleid_regexes = {}
for cset_id, cset in _content_sets.items():
for cdoc_id, cdoc in cset["content_docs"].items():
for ruleid in cdoc.get( "targets", {} ):
# nb: we also want to detect things like A1.23-.45
_tag_ruleid_regexes[ ruleid ] = re.compile(
r"\b{}(-\.\d+)?\b".format( ruleid.replace( ".", "\\." ) )
)
return _content_sets
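As an aside, here is a minimal standalone sketch of the kind of pattern built above; the ruleid and test strings are hypothetical examples, not taken from the data files:

```python
import re

# build the same kind of pattern as load_content_sets() does for each known ruleid
ruleid = "A11.3"
regex = re.compile( r"\b{}(-\.\d+)?\b".format( ruleid.replace( ".", "\\." ) ) )

assert regex.search( "see A11.3 for details" )              # plain reference
assert regex.search( "sequential CC attack (A11.3-.34)" )   # range-style reference (the A1.23-.45 case)
assert not regex.search( "A11.34" )                         # a longer ruleid is not cut short
```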
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -158,6 +177,78 @@ def _dump_content_sets():
# ---------------------------------------------------------------------
def tag_ruleids( content, cset_id ):
"""Identify ruleid's in a piece of content and tag them.
There are a lot of free-form ruleid's in the content (e.g. Q+A or ASOP,) which we would
like to make clickable. We could do it in the front-end using regex's, but it gets
quite tricky to do this reliably (e.g. "AbtF SSR CG.1a"), so we do things a different way.
We already have a list of known ruleid's (i.e. the content set targets), so we look
specifically for those in the content, and mark them with a special <span>, which the front-end
can look for and convert into clickable links. It would be nice to detect ruleid's that
we don't know about, and mark them accordingly in the UI, but then we're back in regex hell,
so we can live without it.
"""
# NOTE: This function is quite expensive, so it's worth doing a quick check to see if there's
# any point looping through all the regex's, e.g. it's pointless doing this for all those
# numerous Q+A answers that just say "Yes." or "No." :-/
if not content:
return content
if all( not c.isdigit() for c in content ):
return content
# NOTE: To avoid excessive string operations, we identify all ruleid matches first,
# then fixup the string content in one pass.
# look for ruleid matches in the content
matches = []
for ruleid, regex in _tag_ruleid_regexes.items():
matches.extend(
( mo, ruleid )
for mo in regex.finditer( content )
)
# sort the matches by start position, longer matches first
matches.sort( key = lambda m: (
m[0].start(), -len( m[0].group() )
) )
# remove "duplicate" matches (e.g. "A1.2" when we've already matched "A1.23")
prev_match = [] # nb: we use [] instead of None to stop unsubscriptable-object warnings :-/
for match_no, match in enumerate( matches ):
if prev_match:
if match[0].start() == prev_match[0].start():
if match[0].group() == prev_match[0].group()[ : len(match[0].group()) ]:
# this is a "duplicate" match - delete it
matches[ match_no ] = None
continue
assert match[0].start() > prev_match[0].end()
prev_match = match
matches = [ m for m in matches if m ]
# tag the matches
for match in reversed( matches ):
mo = match[0]
buf = [
content[ : mo.start() ],
"<span data-ruleid='{}' class='auto-ruleid'".format( match[1] )
]
if cset_id:
buf.append( " data-csetid='{}'".format( cset_id ) )
buf.append( ">" )
buf.extend( [
mo.group(),
"</span>",
content[ mo.end() : ]
] )
content = "".join( buf )
return content
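For reference, assuming "A11.3" is a known target in a content set with ID "asl-rulebook", the tagging would transform a fragment roughly as follows (an illustrative sketch, not output captured from the webapp):

```python
before = "Declare first/next sequential CC attack (A11.3-.34)."
after = (
    "Declare first/next sequential CC attack ("
    "<span data-ruleid='A11.3' class='auto-ruleid' data-csetid='asl-rulebook'>A11.3-.34</span>)."
)
```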
# ---------------------------------------------------------------------
@app.route( "/content-docs" )
def get_content_docs():
"""Return the available content docs."""

@ -41,11 +41,11 @@ def init_qa( startup_msgs, logger ):
qa_entries = load_data_file( fname, "Q+A", False, logger, startup_msgs.warning )
if qa_entries is None:
return
for key, vals in qa_entries.items():
for key, entries in qa_entries.items():
if key in qa:
qa[ key ].extend( vals )
qa[ key ].extend( entries )
else:
qa[ key ] = vals
qa[ key ] = entries
n = sum( len(v) for v in qa_entries.values() )
logger.info( "- Loaded %s.", plural(n,"entry","entries") )

@ -7,6 +7,7 @@ import re
import itertools
import string
import copy
import time
import tempfile
import logging
import traceback
@ -16,10 +17,12 @@ import lxml.html
from asl_rulebook2.utils import plural
from asl_rulebook2.webapp import app
import asl_rulebook2.webapp.startup as webapp_startup
from asl_rulebook2.webapp.content import tag_ruleids
from asl_rulebook2.webapp.utils import make_config_path, make_data_path, split_strip
_sqlite_path = None
_fts_index= None
_fts_index = None
_logger = logging.getLogger( "search" )
@ -72,14 +75,19 @@ def search() :
_logger.info( "- %s: %s", key, val )
# run the search
try:
return _do_search( args )
except Exception as exc: #pylint: disable=broad-except
msg = str( exc )
if msg.startswith( "fts5: " ):
msg = msg[5:] # nb: this is a sqlite3.OperationalError
_logger.warning( "SEARCH ERROR: %s\n%s", args, traceback.format_exc() )
return jsonify( { "error": msg } )
# NOTE: We can't use the search index or the in-memory data structures if the "fix content" thread
# is still running (and possibly updating them). However, the tasks running in that thread
# relinquish the lock regularly, to give the user a chance to jump in and grab it here, if they
# want to do a search while that thread is still running.
with webapp_startup.fixup_content_lock:
try:
return _do_search( args )
except Exception as exc: #pylint: disable=broad-except
msg = str( exc )
if msg.startswith( "fts5: " ):
msg = msg[5:] # nb: this is a sqlite3.OperationalError
_logger.warning( "SEARCH ERROR: %s\n%s", args, traceback.format_exc() )
return jsonify( { "error": msg } )
def _do_search( args ):
@ -160,14 +168,11 @@ def _do_search( args ):
def _unload_index_sr( row ):
"""Unload an index search result from the database."""
index_entry = _fts_index["index"][ row[0] ] # nb: our copy of the index entry (must remain unchanged)
result = { "cset_id": row[2] } # nb: the index entry we will return to the caller
result = copy.deepcopy( index_entry ) # nb: the index entry we will return to the caller
result[ "cset_id" ] = row[2]
_get_result_col( result, "title", row[4] )
_get_result_col( result, "subtitle", row[5] )
_get_result_col( result, "content", row[6] )
if index_entry.get( "ruleids" ):
result["ruleids"] = index_entry["ruleids"]
if index_entry.get( "see_also" ):
result["see_also"] = index_entry["see_also"]
rulerefs = split_strip( row[7], _RULEREF_SEPARATOR ) if row[7] else []
assert len(rulerefs) == len(index_entry.get("rulerefs",[]))
if rulerefs:
@ -190,6 +195,8 @@ def _unload_qa_sr( row ):
result = copy.deepcopy( qa_entry ) # nb: the Q+A entry we will return to the caller (will be changed)
# replace the content in the Q+A entry we will return to the caller with the values
# from the search index (which will have search term highlighting)
if row[4]:
result["caption"] = row[4]
sr_content = split_strip( row[6], _QA_CONTENT_SEPERATOR ) if row[6] else []
qa_entry_content = qa_entry.get( "content", [] )
if len(sr_content) != len(qa_entry_content):
@ -216,7 +223,7 @@ def _unload_anno_sr( row, atype ):
def _unload_asop_entry_sr( row ):
"""Unload an ASOP entry search result from the database."""
section = _fts_index["asop-entry"][ row[0] ] # nb: our copy of the ASOP section (must remain unchanged)
section = _fts_index["asop-entry"][ row[0] ][0] # nb: our copy of the ASOP section (must remain unchanged)
result = copy.deepcopy( section ) # nb: the ASOP section we will return to the caller (will be changed)
_get_result_col( result, "content", row[6] )
return result
@ -458,6 +465,14 @@ def init_search( content_sets, qa, errata, user_anno, asop, asop_content, startu
def _init_content_sets( conn, curs, content_sets, logger ):
"""Add the content sets to the search index."""
def make_fields( index_entry ):
return {
"subtitle": index_entry.get( "subtitle" ),
"content": index_entry.get( "content" ),
}
# add the index entries to the search index
sr_type = "index"
for cset in content_sets.values():
logger.info( "- Adding index file: %s", cset["index_fname"] )
@ -469,12 +484,13 @@ def _init_content_sets( conn, curs, content_sets, logger ):
# will be this stripped content. We could go back to the original data to get the original HTML content,
# but that means we would lose the highlighting of search terms that SQLite gives us. We opt to insert
# the original content, since none of it should contain HTML, anyway.
fields = make_fields( index_entry )
curs.execute(
"INSERT INTO searchable"
" ( sr_type, cset_id, title, subtitle, content, rulerefs )"
" VALUES ( ?, ?, ?, ?, ?, ? )", (
sr_type, cset["cset_id"],
index_entry.get("title"), index_entry.get("subtitle"), index_entry.get("content"), rulerefs
index_entry.get("title"), fields["subtitle"], fields["content"], rulerefs
) )
_fts_index[sr_type][ curs.lastrowid ] = index_entry
index_entry["_fts_rowid"] = curs.lastrowid
@ -482,36 +498,66 @@ def _init_content_sets( conn, curs, content_sets, logger ):
logger.info( " - Added %s.", plural(nrows,"index entry","index entries"), )
assert len(_fts_index[sr_type]) == _get_row_count( conn, "searchable" )
# register a task to fixup the content
def fixup_index_entry( rowid, cset_id ):
index_entry = _fts_index[ sr_type ][ rowid ]
_tag_ruleids_in_field( index_entry, "subtitle", cset_id )
_tag_ruleids_in_field( index_entry, "content", cset_id )
return index_entry
from asl_rulebook2.webapp.startup import add_fixup_content_task
add_fixup_content_task( "index searchable content",
lambda: _fixup_searchable_content( sr_type, fixup_index_entry, make_fields )
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _init_qa( curs, qa, logger ):
"""Add the Q+A to the search index."""
def make_fields( qa_entry ):
buf = []
for content in qa_entry.get( "content", [] ):
buf2 = []
buf2.append( content.get( "question", _NO_QA_QUESTION ) )
# NOTE: We don't really want to index answers, since they are mostly not very useful (e.g. "Yes."),
# but we do so in order to get highlighting for those cases where they contain a search term.
for answer in content.get( "answers", [] ):
buf2.append( answer[0] )
buf.append( _QA_FIELD_SEPARATOR.join( buf2 ) )
return {
"title": qa_entry.get( "caption" ),
"content":_QA_CONTENT_SEPERATOR.join( buf ),
}
logger.info( "- Adding the Q+A." )
nrows = 0
sr_type = "qa"
for qa_entries in qa.values():
for qa_entry in qa_entries:
buf = []
for content in qa_entry.get( "content", [] ):
buf2 = []
buf2.append( content.get( "question", _NO_QA_QUESTION ) )
# NOTE: We don't really want to index answers, since they are mostly not very useful (e.g. "Yes."),
# but we do so in order to get highlighting for those cases where they contain a search term.
for answer in content.get( "answers", [] ):
buf2.append( answer[0] )
buf.append( _QA_FIELD_SEPARATOR.join( buf2 ) )
# NOTE: We munge all the questions and answers into one big searchable string, but we need to
# be able to separate that string back out into its component parts, so that we can return
# the Q+A entry to the front-end as a search result, but with highlighted search terms.
fields = make_fields( qa_entry )
curs.execute(
"INSERT INTO searchable ( sr_type, title, content ) VALUES ( ?, ?, ? )", (
sr_type, qa_entry.get("caption"), _QA_CONTENT_SEPERATOR.join(buf)
sr_type, fields["title"], fields["content"]
) )
_fts_index[sr_type][ curs.lastrowid ] = qa_entry
qa_entry["_fts_rowid"] = curs.lastrowid
nrows += 1
logger.info( " - Added %s.", plural(nrows,"Q+A entry","Q+A entries"), )
# register a task to fixup the content
def fixup_qa( rowid, cset_id ):
qa_entry = _fts_index[ sr_type ][ rowid ]
_tag_ruleids_in_field( qa_entry, "caption", cset_id )
for content in qa_entry.get( "content", [] ):
_tag_ruleids_in_field( content, "question", cset_id )
for answer in content.get( "answers", [] ):
_tag_ruleids_in_field( answer, 0, cset_id )
return qa_entry
from asl_rulebook2.webapp.startup import add_fixup_content_task
add_fixup_content_task( "Q+A searchable content",
lambda: _fixup_searchable_content( sr_type, fixup_qa, make_fields )
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _init_errata( curs, errata, logger ):
@ -527,32 +573,55 @@ def _init_user_anno( curs, user_anno, logger ):
logger.info( " - Added %s.", plural(nrows,"annotation","annotations"), )
def _do_init_anno( curs, anno, atype ):
"""Add the annotations to the search index."""
"""Add annotations to the search index."""
def make_fields( anno ):
return {
"content": anno.get( "content" ),
}
# add the annotations to the search index
nrows = 0
sr_type = atype
for ruleid in anno:
for a in anno[ruleid]:
fields = make_fields( a )
curs.execute(
"INSERT INTO searchable ( sr_type, content ) VALUES ( ?, ? )", (
sr_type, a.get("content")
sr_type, fields["content"]
) )
_fts_index[sr_type][ curs.lastrowid ] = a
a["_fts_rowid"] = curs.lastrowid
nrows += 1
# register a task to fixup the content
def fixup_anno( rowid, cset_id ):
anno = _fts_index[ sr_type ][ rowid ]
_tag_ruleids_in_field( anno, "content", cset_id )
return anno
from asl_rulebook2.webapp.startup import add_fixup_content_task
add_fixup_content_task( atype+" searchable content",
lambda: _fixup_searchable_content( sr_type, fixup_anno, make_fields )
)
return nrows
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def _init_asop( curs, asop, asop_content, logger ):
"""Add the ASOP to the search index."""
logger.info( "- Adding the ASOP." )
sr_type = "asop-entry"
fixup_chapters, fixup_sections = [], []
nentries = 0
for chapter in asop.get( "chapters", [] ):
fixup_chapters.append( chapter )
for section in chapter.get( "sections", [] ):
content = asop_content.get( section["section_id"] )
content = asop_content.get( section["section_id"] )
if not content:
continue
fixup_sections.append( section )
entries = _extract_section_entries( content )
# NOTE: The way we manage the FTS index for ASOP entries is a little different to normal,
# since they don't exist as individual entities (this is the only place where they do,
@ -564,11 +633,29 @@ def _init_asop( curs, asop, asop_content, logger ):
"INSERT INTO searchable ( sr_type, content ) VALUES ( ?, ? )", (
sr_type, entry
) )
_fts_index[sr_type][ curs.lastrowid ] = section
_fts_index[sr_type][ curs.lastrowid ] = [ section, entry ]
section[ "_fts_rowids" ].append( curs.lastrowid )
nentries += 1
logger.info( " - Added %s.", plural(nentries,"entry","entries") )
# register a task to fixup the content
def fixup_content():
_fixup_searchable_content( sr_type, fixup_entry, make_fields )
# we also need to fixup the in-memory data structures
cset_id = None
for chapter in fixup_chapters:
_tag_ruleids_in_field( chapter, "preamble", cset_id )
for section in fixup_sections:
_tag_ruleids_in_field( asop_content, section["section_id"], cset_id )
def fixup_entry( rowid, cset_id ):
entry = _fts_index[ sr_type ][ rowid ].pop()
entry = tag_ruleids( entry, cset_id )
return entry
def make_fields( entry ):
return { "content": entry }
from asl_rulebook2.webapp.startup import add_fixup_content_task
add_fixup_content_task( "ASOP searchable content", fixup_content )
def _extract_section_entries( content ):
"""Separate out each entry from the section's content."""
entries = []
@ -578,7 +665,8 @@ def _extract_section_entries( content ):
for elem in fragment.xpath( ".//div[contains(@class,'entry')]" ):
if "entry" not in elem.attrib["class"].split():
continue
entries.append( lxml.html.tostring( elem ) )
entry = lxml.html.tostring( elem )
entries.append( entry.decode( "utf-8" ) )
return entries
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -684,6 +772,65 @@ def load_search_config( startup_msgs, logger ):
# ---------------------------------------------------------------------
def _fixup_searchable_content( sr_type, fixup_row, make_fields ):
"""Fixup the searchable content for the specified search result type."""
# locate the rows we're going to fixup
# NOTE: The searchable table never changes after it has been built, so we don't need the lock.
conn = sqlite3.connect( _sqlite_path )
curs = conn.cursor()
query = curs.execute( "SELECT rowid, cset_id, title, subtitle, content FROM searchable WHERE sr_type=?",
( sr_type, )
)
content_rows = list( query.fetchall() )
# update the searchable content in each row
nrows = 0
last_commit_time = time.time()
for row in content_rows:
# NOTE: The fixup_row() callback will usually be using _tag_ruleids_in_field(), which manages
# the lock; otherwise the callback needs to do it itself. We don't want to invoke this callback
# inside the lock since it can be quite slow; _tag_ruleids_in_field() holds the lock for the
# minimum amount of time.
new_row = fixup_row( row[0], row[1] )
with webapp_startup.fixup_content_lock:
# NOTE: The make_fields() callback will usually be accessing the fields we want to fixup,
# so we need to protect them with the lock.
fields = make_fields( new_row )
# NOTE: We update the row inside the lock to prevent "database is locked" errors, if the user
# tries to do a search while this is happening.
query = "UPDATE searchable SET {} WHERE rowid={}".format(
", ".join( "{}=?".format( f ) for f in fields ),
row[0]
)
curs.execute( query, tuple(fields.values()) )
nrows += 1
# commit the changes regularly (so that they are available to the front-end)
if time.time() - last_commit_time >= 1:
conn.commit()
last_commit_time = time.time()
# commit the last block of updates
conn.commit()
return plural( nrows, "row", "rows" )
def _tag_ruleids_in_field( obj, key, cset_id ):
"""Tag ruleid's in an optional field."""
if isinstance( key, int ) or key in obj:
# NOTE: The data structures we use to manage all the in-memory objects never change after
# they have been loaded, so the only thread-safety we need to worry about is when we read
# the original value from an object, and when we update it with a new value. The actual process
# of tagging ruleid's in a piece of content is done outside the lock, since it's quite slow.
with webapp_startup.fixup_content_lock:
val = obj[key]
new_val = tag_ruleids( val, cset_id )
with webapp_startup.fixup_content_lock:
obj[key] = new_val
def _get_row_count( conn, table_name ):
"""Get the number of rows in a table."""
cur = conn.execute( "SELECT count(*) FROM {}".format( table_name ) )

@ -1,6 +1,10 @@
""" Manage the startup process. """
import time
import datetime
import threading
import logging
import traceback
from collections import defaultdict
from flask import jsonify
@ -12,11 +16,14 @@ from asl_rulebook2.webapp.rule_info import init_qa, init_errata, init_annotation
from asl_rulebook2.webapp.asop import init_asop
from asl_rulebook2.webapp.utils import parse_int
_capabilities = None
fixup_content_lock = threading.Lock()
_fixup_content_tasks = None
_logger = logging.getLogger( "startup" )
_startup_msgs = None
_capabilities = None
# ---------------------------------------------------------------------
def init_webapp():
@ -27,9 +34,10 @@ def init_webapp():
"""
# initialize
global _startup_msgs, _capabilities
global _startup_msgs, _capabilities, _fixup_content_tasks
_startup_msgs = StartupMsgs()
_capabilities = {}
_fixup_content_tasks = []
# initialize the webapp
content_sets = load_content_sets( _startup_msgs, _logger )
@ -52,6 +60,47 @@ def init_webapp():
_startup_msgs, _logger
)
# everything has been initialized - now we can go back and fixup content
# NOTE: This is quite a slow process (~1 minute for a full data load), which is why we don't do it inline,
# during the normal startup process. So, we start up using the original content, and if the user does
# a search, that's what they will see, but we fix it up in the background, and the new content will
# eventually start to be returned as search results. We could do this process once, and save the results
# in a file, then reload everything at startup, which will obviously be much faster, but we then have to
# figure out when that file needs to be rebuilt :-/
if app.config.get( "BLOCKING_FIXUP_CONTENT" ):
# NOTE: It's useful to do this synchronously when running the test suite, since if the tests
# need the linkified ruleid's, they can't start until the fixup has finished (and if they don't,
# it won't really matter, since there will be so little data that this process will be fast).
_do_fixup_content()
else:
threading.Thread( target = _do_fixup_content ).start()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def add_fixup_content_task( ctype, func ):
"""Register a function to fixup content after startup has finished."""
if app.config.get( "DISABLE_FIXUP_CONTENT" ):
return
_fixup_content_tasks.append( ( ctype, func ) )
def _do_fixup_content():
"""Run each task to fixup content."""
if not _fixup_content_tasks:
return
start_time = time.time()
for task_no, (ctype, func) in enumerate( _fixup_content_tasks ):
_logger.debug( "Fixing up %s (%d/%d)...", ctype, 1+task_no, len(_fixup_content_tasks) )
start_time2 = time.time()
try:
msg = func()
except Exception as ex: #pylint: disable=broad-except
_logger.error( "Couldn't fixup %s: %s\n%s", ctype, ex, traceback.format_exc() )
continue
elapsed_time = datetime.timedelta( seconds = int( time.time() - start_time2 ) )
_logger.debug( "- Finished fixing up %s (%s): %s", ctype, elapsed_time, msg )
elapsed_time = datetime.timedelta( seconds = int( time.time() - start_time ) )
_logger.info( "All fixup tasks completed (%s).", elapsed_time )
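For context, a minimal sketch of how a module hooks into this mechanism: it registers a callback via add_fixup_content_task(), the callback is run after startup has finished, and whatever it returns is logged as a status message. The task name and return value below are hypothetical; the real callers live in search.py.

```python
from asl_rulebook2.webapp.startup import add_fixup_content_task

def _fixup_demo_content():
    # ... tag ruleid's, update the search index, etc. ...
    return "2 rows"  # a short status message, included in the "finished fixing up" log line

add_fixup_content_task( "demo content", _fixup_demo_content )
```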
# ---------------------------------------------------------------------
@app.route( "/app-config" )

@ -1,5 +1,5 @@
import { gMainApp, gASOPChapterIndex, gASOPSectionIndex, gEventBus } from "./MainApp.js" ;
import { getURL, getASOPChapterIdFromSectionId, wrapMatches, isChildOf } from "./utils.js" ;
import { getURL, getASOPChapterIdFromSectionId, linkifyAutoRuleids, wrapMatches, isChildOf } from "./utils.js" ;
let gSectionContentOverrides = {} ;
@ -33,6 +33,9 @@ gMainApp.component( "asop", {
return ;
this.isActive = (tabId == "asop") ;
} ) ;
gEventBus.on( "show-target", (cdocId, ruleid) => { //eslint-disable-line no-unused-vars
this.isActive = false ;
} ) ;
// handle events in the nav pane
gEventBus.on( "asop-chapter-expanded", this.showASOPChapter ) ;
@ -43,6 +46,7 @@ gMainApp.component( "asop", {
gEventBus.on( "search", () => {
gSectionContentOverrides = {} ;
} ) ;
},
mounted() {
@ -51,6 +55,8 @@ gMainApp.component( "asop", {
},
updated() {
// make the ruleid's clickable
linkifyAutoRuleids( $( this.$el ) ) ;
// scroll to the top of the sections each time
if ( this.$refs.sections )
this.$refs.sections.scrollTop = 0 ;
@ -77,6 +83,7 @@ gMainApp.component( "asop", {
if ( ! isClick )
return ;
// prepare to show the ASOP chapter (with all sections combined)
this.isActive = true ;
this.title = this.makeTitle( chapter, chapter.caption ) ;
this.preamble = this.fixupContent( chapter.preamble ) ;
this.sections = chapter.sections ? Array( chapter.sections.length ) : [] ;
@ -146,6 +153,7 @@ gMainApp.component( "asop", {
doShowASOPSection( chapter, section, content ) {
// show the specified ASOP section
this.isActive = true ;
this.title = this.makeTitle( chapter, section.caption ) ;
this.preamble = this.fixupContent( chapter.preamble ) ;
let contentOverride = gSectionContentOverrides[ section.section_id ] ;

@ -51,6 +51,10 @@ gMainApp.component( "content-pane", {
// show the footnote in a notification balloon
let msg = this.makeFootnoteContent( footnotes ) ;
let $growl = showNotificationMsg( "footnote", msg ) ;
if ( ! $growl ) {
// NOTE: We get here when running the test suite (notifications are stored in a message buffer).
return ;
}
// adjust the width of the balloon (based on the available width)
// NOTE: The longest footnote is ~7K (A25.8), so we try to hit the max width at ~3K.

@ -1,5 +1,5 @@
import { gMainApp, gAppConfig, gContentDocs, gEventBus } from "./MainApp.js" ;
import { getJSON, getURL, getASOPChapterIdFromSectionId, showWarningMsg } from "./utils.js" ;
import { getJSON, getURL, linkifyAutoRuleids, getASOPChapterIdFromSectionId, showWarningMsg } from "./utils.js" ;
// --------------------------------------------------------------------
@ -172,7 +172,7 @@ gMainApp.component( "nav-pane-asop", {
@pane-expanded=onPaneExpanded @entry-clicked=onEntryClicked
/>
</accordian>
<div v-show=footer v-html=footer id="asop-footer" />
<div v-show=footer v-html=footer id="asop-footer" ref="footer" />
`,
created() {
@ -180,6 +180,9 @@ gMainApp.component( "nav-pane-asop", {
// get the ASOP footer
getURL( gGetASOPFooterUrl ).then( (resp) => { //eslint-disable-line no-undef
this.footer = resp ;
this.$nextTick( () => {
linkifyAutoRuleids( $( this.$refs.footer ) ) ;
} ) ;
} ).catch( (errorMsg) => {
console.log( "Couldn't get the ASOP footer: " + errorMsg ) ;
} ) ;

@ -1,5 +1,5 @@
import { gMainApp, gUrlParams } from "./MainApp.js" ;
import { makeImagesZoomable } from "./utils.js" ;
import { linkifyAutoRuleids, fixupSearchHilites, makeImagesZoomable } from "./utils.js" ;
// --------------------------------------------------------------------
@ -37,7 +37,26 @@ gMainApp.component( "rule-info", {
beforeUpdate() {
// hide the close button until the "enter" transition has completed
$( this.$refs.closeRuleInfoButton ).hide() ;
let $closeButton = $( this.$refs.closeRuleInfoButton ) ;
if ( this.ruleInfo.length == 0 )
$closeButton.hide() ;
else {
if ( $closeButton.css( "display" ) == "none" )
$closeButton.hide() ;
else {
// NOTE: If we're already visible, we don't get the transition, so we force
// post-transition processing manually.
this.$nextTick( () => {
this.onAfterEnterRuleInfoTransition() ;
this.$refs.ruleInfo.scrollTop = 0 ;
} ) ;
}
}
},
updated() {
// make the ruleid's clickable
linkifyAutoRuleids( $( this.$el ) ) ;
},
methods: {
@ -76,7 +95,7 @@ gMainApp.component( "qa-entry", {
template: `
<div class="qa rule-info">
<div class="caption"> {{qaEntry.caption}} </div>
<div class="caption" v-html=fixupHilites(qaEntry.caption) />
<div v-for="content in qaEntry.content" :key=content class="content">
<div v-if="content.question">
<!-- this is a normal question + one or more answers -->
@ -109,10 +128,17 @@ gMainApp.component( "qa-entry", {
},
methods: {
makeQAImageUrl( fname ) {
// return the URL to an image associated with a Q+A entry
return gGetQAImageUrl.replace( "FNAME", fname ) ; //eslint-disable-line no-undef
},
fixupHilites( val ) {
// convert search term highlights returned to us by the search engine to HTML
return fixupSearchHilites( val ) ;
},
},
} ) ;
@ -127,7 +153,10 @@ gMainApp.component( "annotation", {
template: `
<div class="anno rule-info">
<div :class=annoType class="caption" > {{anno.ruleid || '(no rule ID)'}} </div>
<div :class=annoType class="caption" >
<span v-if=anno.ruleid :data-ruleid=anno.ruleid class="auto-ruleid"> {{anno.ruleid}} </span>
<span v-else> (no rule ID) </span>
</div>
<div class="content">
<img :src=makeIconImageUrl() :title=anno.source class="icon" />
<div v-html=anno.content />

@ -1,6 +1,6 @@
import { gMainApp, gAppConfig, gEventBus } from "./MainApp.js" ;
import { postURL, findTargets, getPrimaryTarget, fixupSearchHilites, hideFootnotes } from "./utils.js" ;
import { gUserSettings, saveUserSettings } from "./UserSettings.js" ;
import { postURL, findTargets, getPrimaryTarget, linkifyAutoRuleids, fixupSearchHilites, hideFootnotes } from "./utils.js" ;
// --------------------------------------------------------------------
@ -197,6 +197,11 @@ gMainApp.component( "search-results", {
},
updated() {
// make the ruleid's clickable
linkifyAutoRuleids( $( this.$el ) ) ;
},
methods: {
onSearch( queryString ) {

@ -24,7 +24,6 @@
#asop[data-chapterid="pre-game"]>.title { background: #fff6e2 ; }
.asop .pre { margin-left: 30px ; font-family: monospace ; }
.asop .exc { font-style: italic ; color: #666 ; }
.asop .hilite { padding: 0 2px ; background: #ffa ; }
.asop .title { border: 1px solid #666 ; border-radius: 2px ; background: #eee ; font-size: 125% ; font-weight: bold ; padding: 2px 5px ; }

@ -23,7 +23,6 @@
.rule-info p { margin-top: 5px ; }
.rule-info br { margin-top: 3px ; }
.rule-info .quote { font-style: italic ; color: #406040 ; }
.rule-info .exc { font-style: italic ; color: #666 ; }
.qa { margin: 2px ; background: white ; padding: 5px ; }
.qa img.icon { float: left ; margin: 0 6px 0 0 ; height: 18px ; }

@ -1,5 +1,4 @@
#search-results .sr { margin-bottom: 8px ; padding-right: 5px; }
#search-results .sr .hilite { padding: 0 2px ; background: #ffa ; }
#search-results .index-sr .title { padding: 3px 6px ; font-weight: bold ; border-radius: 3px ; }
#search-results .index-sr .title img.icon { height: 15px ; float: left ; margin-top: 2px ; }

@ -9,6 +9,11 @@ input[type="text"] { height: 22px ; padding: 0 5px ; }
button { height: 24px ; padding: 0 5px !important ; }
ul, ol { margin-left: 15px ; }
/* content */
.hilite { padding: 0 2px ; background: #ffa ; }
.exc { font-style: italic ; color: #666 ; }
.auto-ruleid { color:red; border-bottom:1px dotted red; cursor: pointer ; }
/* notification balloons */
.growl { cursor: pointer ; }
.growl .growl-close { position: absolute ; top: 0 ; right: 6px ; }

@ -1,4 +1,4 @@
import { gTargetIndex, gChapterResources, gUrlParams } from "./MainApp.js" ;
import { gContentDocs, gTargetIndex, gChapterResources, gEventBus, gUrlParams } from "./MainApp.js" ;
// --------------------------------------------------------------------
@ -80,6 +80,37 @@ export function fixupSearchHilites( val )
.replace( _HILITE_REGEXES[1], "</span>" ) ;
}
export function linkifyAutoRuleids( $root )
{
if ( ! gTargetIndex )
return ; // nb: don't bother doing this during startup
// process each auto-detected ruleid
$root.find( "span.auto-ruleid" ).each( function() {
let ruleid = $(this).attr( "data-ruleid" ) ;
let csetId = $(this).attr( "data-csetid" ) ;
let targets = findTargets( ruleid, csetId ) ;
if ( ! targets || targets.length == 0 ) {
// nb: this would normally suggest an error, but there are things like e.g. "Chapter B Terrain Chart" :-/
return ;
} else if ( targets.length != 1 )
console.log( "WARNING: Found multiple targets for auto-ruleid: " + csetId + "/" + ruleid ) ;
let target = targets[0] ;
// add a label
// NOTE: We don't add labels to ruleref's in index search results because their destination
// is more-or-less indicated by the ruleref caption.
let caption = gContentDocs[ target.cdoc_id ].targets[ target.ruleid ].caption ;
$(this).attr( "title", caption ) ;
// make the ruleid clickable
$(this).on( "click", function() {
gEventBus.emit( "show-target", target.cdoc_id, target.ruleid ) ;
} ) ;
} ) ;
}
// --------------------------------------------------------------------
export function getChapterResource( rtype, chapterId )

@ -39,7 +39,7 @@ class ControlTestsServicer( BaseControlTestsServicer ):
# reset the webapp
ctx = None
self.setDataDir( SetDataDirRequest( fixturesDirName=None ), ctx )
# NOTE: The webapp has now been reset, but the client must reloaed the home page
# NOTE: The webapp has now been reset, but the client must reload the home page
# with "?reload=1", to force it to reload with the new settings.
return Empty()

@ -1,9 +1,5 @@
<div class="entry"> <div class=B> 8.21B </div> <ul>
<li> Declare first/next sequential CC attack (A11.3-.34) or, ATTACKER first (A11.12; G13.495), all simultaneous CC attacks, if no sequential CC exists.
<li> Declare first/next sequential CC attack (A11.3-.34) or, ...
</ul> </div>
{{CONTENT_REMOVED|safe}}
<div class="entry"> <div class=B> 8.25B </div> <ul>
<li> May Interrogate new Prisoners (E2.1; G1.621; G18.71).
</ul> </div>

@ -2,5 +2,4 @@
"O6.7": { "caption": "ENCIRCLEMENT", "page_no": 5 }
}

@ -5,7 +5,7 @@
},
{ "title": "CCPh",
"subtitle": "Close Combat Phase",
"subtitle": "Close Combat Phase (also check out A11)",
"content": "This rule has had an errata attached to it. Click through to see it.",
"ruleids": [ "A3.8" ],
"rulerefs": [
@ -16,6 +16,7 @@
{ "title": "CC",
"subtitle": "Close Combat",
"content": "You might also want to look at A3.8.",
"ruleids": [ "A11" ],
"rulerefs": [
{ "caption": "Armor Leader", "ruleids": [ "D3.44" ] },
@ -47,6 +48,7 @@
},
{ "title": "Encirclement",
"content": "Doesn't happen in RB Cellars (O6.7).",
"ruleids": [ "A7.7" ],
"rulerefs": [
{ "caption": "Cellars NA", "ruleids": [ "O6.7", "R4.7" ] },

@ -1,33 +1,68 @@
{
"A.2": { "caption": "ERRORS" },
"A1": { "caption": "PERSONNEL COUNTERS" },
"A1.31": { "caption": "DEPLOYMENT" },
"A2": { "caption": "THE MAPBOARD" },
"A2.51": { "caption": "OFFBOARD SETUP" },
"A3": { "caption": "BASIC SEQUENCE OF PLAY" },
"A3.4": { "caption": "DEFENSIVE FIRE PHASE (DFPh)" },
"A3.6": { "caption": "ROUT PHASE (RtPh)" },
"A3.8": { "caption": "CLOSE COMBAT PHASE (CCPh)" },
"A7.7": { "caption": "ENCIRCLEMENT" },
"A11": { "caption": "CLOSE COMBAT (CC)" },
"A11.12": { "caption": "MECHANICS" },
"A11.14": { "caption": "SMC" },
"A11.19": { "caption": "CONCEALMENT" },
"A11.3": { "caption": "SEQUENTIAL CC" },
"A11.6": { "caption": "CC vs AN AFV" },
"A24.3": { "caption": "WHITE PHOSPHORUS (WP)" },
"D5.2": { "caption": "BUTTONED UP (BU)" },
"A1": { "caption": "PERSONNEL COUNTERS" },
"A2": { "caption": "THE MAPBOARD" },
"A3": { "caption": "BASIC SEQUENCE OF PLAY" },
"A24.31": { "caption": "CASUALTIES" },
"B1": { "caption": "OPEN GROUND" },
"B2": { "caption": "SHELLHOLES" },
"B3": { "caption": "ROADS" },
"C1": { "caption": "OFFBOARD ARTILLERY (OBA)" },
"C2": { "caption": "GUN CLASSIFICATIONS" },
"C3": { "caption": "THE TO HIT PROCESS" },
"C3.22": { "caption": "CHANGING CA WITHOUT FIRE" },
"D1": { "caption": "VEHICLE COUNTERS" },
"D2": { "caption": "VEHICULAR MOVEMENT" },
"D3": { "caption": "AFV COMBAT" },
"D5.2": { "caption": "BUTTONED UP (BU)" },
"E1": { "caption": "NIGHT" },
"E1.23": { "caption": "RECON" },
"E1.41": { "caption": "CONTENTS" },
"E1.54": { "caption": "ROUTING" },
"E1.72": { "caption": "SNIPERS" },
"E1.74": { "caption": "TARGET ACQUISITION" },
"E1.76": { "caption": "MISTAKEN FIRE" },
"E1.8": { "caption": "GUNFLASHES" },
"E1.91": { "caption": "INITIAL USE" },
"E1.923": { "caption": "EFFECTS & DURATION" },
"E1.93": { "caption": "ILLUMINATING ROUNDS (IR)" },
"E1.933": { "caption": "EFFECTS & DURATION" },
"E1.95": { "caption": "TRIP FLARES" },
"E1.953": { "caption": "SEARCH & RECON" },
"E2": { "caption": "INTERROGATION" },
"E2.1": { "caption": "INCIDENCE" },
"E3": { "caption": "WEATHER" },
"E3.3": { "caption": "FOG/MIST" },
"E3.4": { "caption": "GUSTY" },
"E3.6": { "caption": "MUD" },
"E3.713": { "caption": "EC" },
"E3.72": { "caption": "GROUND SNOW" },
"E3.73": { "caption": "DEEP SNOW" },
"E3.74": { "caption": "EXTREME WINTER" },
"F1": { "caption": "OPEN GROUND" },
"F2": { "caption": "SCRUB" },
"F3": { "caption": "HAMMADA" },
"G1": { "caption": "THE JAPANESE" },
"G1.621": { "caption": "NO-QUARTER/PRISONERS" },
"G1.63": { "caption": "CONCEALMENT" },
"G1.664": { "caption": "PARATROOPERS" },
"G2": { "caption": "JUNGLE" },
"G3": { "caption": "BAMBOO" },
"G3.5": { "caption": "FORTIFICATIONS" },
"J1": { "caption": "MINIATURES" },
"J2": { "caption": "CONVERTING ASL RULES TO DELUXE ASL" },
"W1": { "caption": "KW TERRAIN" },

@ -1,7 +1,7 @@
[
{ "ruleid": "A24.3",
"content": "Mmmm, White Phosphorous. Is there anything it can't do...?"
"content": "Mmmm, White Phosphorus (A24.31). Is there anything it can't do...?"
}
]

@ -1,7 +1,7 @@
[
{ "ruleid": "A3.8",
"content": "The new eASLRB will, of course, be kept up-to-date, and there will be no need for errata :-), but other rulebooks might, so this is an example of how you would set them up."
"content": "The new eASLRB will, of course, be kept up-to-date, and there will be no need for errata (A.2) :-), but other rulebooks might, so this is an example of how you would set them up."
}
]

@ -40,7 +40,7 @@
{ "caption": "A24.31",
"ruleids": [ "A24.31" ],
"content": [
{ "question": "May a MMC attempt to throw WP grenades into its own location? <p> Into an adjacent location solely occupied by friendly units?",
{ "question": "May a MMC attempt to throw WP grenades (A24.3) into its own location? <p> Into an adjacent location solely occupied by friendly units?",
"answers": [
[ "Yes to both; A24.31", "ps" ]
]

@ -261,6 +261,23 @@ def test_asop_entries( webdriver, webapp ):
# ---------------------------------------------------------------------
def open_asop_chapter( chapter_id ):
"""Open the specified ASOP chapter."""
nav = _unload_nav( True )
for chapter in nav:
if chapter["chapter_id"] == chapter_id:
chapter["elem"].click()
wait_for( 2, lambda: find_child("#asop").get_attribute("data-chapterid") == chapter_id )
return chapter
assert False, "Can't find ASOP chapter: "+chapter_id
return None # nb: for pylint :-/
def open_asop_section( chapter_id, section_no ):
"""Open the specified ASOP section."""
chapter = open_asop_chapter( chapter_id )
chapter["sections"][ section_no ]["elem"].click()
wait_for( 2, lambda: find_child( "#asop .sections.single" ) )
def _unload_nav( include_elems ):
"""Unload the ASOP nav."""

@ -101,6 +101,7 @@ def test_chapters( webapp, webdriver ):
def do_test( chapter_no, entry_no, expected ):
"""Click on a chapter entry."""
select_tabbed_page( "nav", "chapters" )
elems = find_children( "#accordian-chapters .accordian-pane" )
chapter_elem = elems[ chapter_no ]
_select_chapter( chapter_elem )

@ -0,0 +1,193 @@
""" Test converting auto-detected ruleid's into links. """
from asl_rulebook2.webapp.tests.utils import init_webapp, refresh_webapp, select_tabbed_page, \
find_child, find_children, wait_for, wait_for_elem, get_curr_target
from asl_rulebook2.webapp.tests.test_search import do_search
from asl_rulebook2.webapp.tests.test_asop import open_asop_chapter, open_asop_section
# ---------------------------------------------------------------------
def test_index_entry( webdriver, webapp ):
"""Test ruleid's in index entries."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test ruleid's in an index entry's search result
results = _do_search( "CCPh", True )
assert len(results) == 1
sr_elem = find_child( "#search-results .sr" )
_check_ruleid( find_child(".subtitle",sr_elem), ("asl-rulebook","A11") )
# test ruleid's in an index entry's content
results = _do_search( "also want to", False )
assert len(results) == 1
sr_elem = find_child( "#search-results .sr" )
_check_ruleid( find_child(".content",sr_elem), ("asl-rulebook","A3.8") )
_dismiss_rule_info_popup()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def test_alternate_content_set( webdriver, webapp ):
"""Test ruleid's that reference another document."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test a ruleid that references the Red Barricades document
results = _do_search( "cellar", True )
assert len(results) == 1
sr_elem = find_child( "#search-results .sr" )
_check_ruleid( find_child(".content",sr_elem), ("asl-rulebook!red-barricades","O6.7") )
# ---------------------------------------------------------------------
def test_qa( webdriver, webapp ):
"""Testing ruleid's in Q+A entries."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test ruleid's in a Q+A entry's search result
results = _do_search( "wp", True )
assert len(results) == 3
sr_elem = find_children( "#search-results .sr" )[ 1 ]
_check_ruleid( find_child(".caption",sr_elem), ("asl-rulebook","A24.31") )
_dismiss_rule_info_popup()
_check_ruleid( find_child(".question",sr_elem), ("asl-rulebook","A24.3") )
_dismiss_rule_info_popup()
_check_ruleid( find_child(".answer",sr_elem), ("asl-rulebook","A24.31") )
_dismiss_rule_info_popup()
# test ruleid's in a Q+A entry in the rule info popup
expected = [
( ".caption", ("asl-rulebook","A24.31") ),
( ".question", ("asl-rulebook","A24.3") ),
( ".answer", ("asl-rulebook","A24.31") )
]
for sel, target in expected:
_do_search( "A24.31", False )
elems = find_children( "#rule-info .rule-info" )
assert len(elems) == 1
_check_ruleid( find_child(sel,elems[0]), target )
_dismiss_rule_info_popup()
# ---------------------------------------------------------------------
def test_errata( webdriver, webapp ):
"""Test ruleid's in errata."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test ruleid's in an errata's search result
results = _do_search( "errata", True )
assert len(results) == 2
sr_elem = find_children( "#search-results .sr" )[ 1 ]
_check_ruleid( find_child(".caption",sr_elem), ("asl-rulebook","A3.8") )
_dismiss_rule_info_popup()
_check_ruleid( find_child(".content",sr_elem), ("asl-rulebook","A.2") )
# test ruleid's in an errata in the rule info popup
expected = [
( ".caption", ("asl-rulebook","A3.8") ),
( ".content", ("asl-rulebook","A.2") )
]
for sel, target in expected:
_do_search( "errata", False )
sr_elem = find_child( "#rule-info .rule-info" )
_check_ruleid( find_child(sel,sr_elem), target )
_dismiss_rule_info_popup()
# ---------------------------------------------------------------------
def test_user_annotations( webdriver, webapp ):
"""Test ruleid's in user annotations."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test ruleid's in a user annotation's search result
results = _do_search( "is there anything", False )
assert len(results) == 1
sr_elem = find_child( "#search-results .sr" )
_check_ruleid( find_child(".caption",sr_elem), ("asl-rulebook","A24.3") )
_dismiss_rule_info_popup()
_check_ruleid( find_child(".content",sr_elem), ("asl-rulebook","A24.31") )
_dismiss_rule_info_popup()
# ---------------------------------------------------------------------
def test_asop( webdriver, webapp ):
"""Test ruleid's in ASOP entries."""
# initialize
webapp.control_tests.set_data_dir( "full" )
init_webapp( webapp, webdriver )
# test ruleid's in an ASOP entry's search result
results = _do_search( "first/next", False )
assert len(results) == 1
sr_elem = find_child( "#search-results .sr" )
_check_ruleid( find_child(".content",sr_elem), ("asl-rulebook","A11.3-.34") )
# click through to the ASOP section and check the ruleid there
find_child( ".caption", sr_elem ).click()
_check_ruleid( find_child("#asop .section"), ("asl-rulebook","A11.3-.34") )
# check the ruleid in the ASOP chapter
refresh_webapp( webdriver ) # nb: clear the ASOP overrides
select_tabbed_page( "nav", "asop" )
open_asop_chapter( "close-combat" )
sections = find_children( "#asop .section" )
assert len(sections) == 4
_check_ruleid( sections[1], ("asl-rulebook","A11.3-.34") )
# check the ruleid in the ASOP section
refresh_webapp( webdriver )
select_tabbed_page( "nav", "asop" )
open_asop_section( "close-combat", 1 )
section = find_child( "#asop .section" )
_check_ruleid( section, ("asl-rulebook","A11.3-.34") )
# ---------------------------------------------------------------------
def _do_search( query_string, dismiss_rule_info ):
"""Do a search."""
results = do_search( query_string )
if dismiss_rule_info:
_dismiss_rule_info_popup()
return results
def _dismiss_rule_info_popup():
"""Dismiss the rule info popup."""
elem = wait_for_elem( 2, "#rule-info" )
find_child( ".close-rule-info" ).click()
wait_for( 2, lambda: not elem.is_displayed() )
def _check_ruleid( elem, expected ):
"""Check the ruleid in the specified element."""
# check the ruleid
elems = find_children( "span.auto-ruleid", elem )
assert len(elems) == 1
elem = elems[0]
cset_id = elem.get_attribute( "data-csetid" )
if cset_id:
pos = expected[0].find( "!" )
assert cset_id == expected[0] if pos < 0 else expected[0][:pos]
ruleid = expected[1]
pos = ruleid.find( "-" )
if pos >= 0:
ruleid = ruleid[:pos]
assert elem.get_attribute( "data-ruleid" ) == ruleid
assert elem.text == expected[1]
# click on the ruleid and make sure we go to the right place
elem.click()
wait_for( 2, lambda: get_curr_target() == ( expected[0], ruleid ) )

@ -95,7 +95,7 @@ def test_missing_content( webapp, webdriver ):
# NOTE: Q+A captions are ignored by the search engine (since they usually contain just ruleid's),
# so search terms are *not* highlighted.
expected = {
"caption": "Missing content",
"caption": "((Missing)) content",
}
assert len(results) == 1
result = results[0]

@ -1,7 +1,7 @@
""" Test search result filtering. """
from asl_rulebook2.webapp.tests.test_search import do_search, unload_search_results
from asl_rulebook2.webapp.tests.utils import init_webapp, \
from asl_rulebook2.webapp.tests.utils import init_webapp, refresh_webapp, \
check_sr_filters, find_child
# ---------------------------------------------------------------------
@ -43,7 +43,7 @@ def test_sr_filtering( webdriver, webapp ):
check_sr_count( len(expected2) if isinstance(expected2,list) else 0, len(expected) )
# refresh the page
webdriver.refresh()
refresh_webapp( webdriver )
elem = find_child( sel )
assert not elem.is_selected()

@ -35,6 +35,7 @@ def init_webapp( webapp, webdriver, **options ):
}
# load the webapp
webapp.control_tests.set_app_config_val( "BLOCKING_FIXUP_CONTENT", True )
if get_pytest_option("webdriver") == "chrome" and get_pytest_option("headless"):
# FUDGE! Headless Chrome doesn't want to show the PDF in the browser,
# it downloads the file and saves it in the current directory :wtf:
@ -60,7 +61,10 @@ def init_webapp( webapp, webdriver, **options ):
# reset the user settings
webdriver.delete_all_cookies()
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def refresh_webapp( webdriver ):
"""Refresh the webapp."""
webdriver.refresh()
_wait_for_webapp()
def _wait_for_webapp():
"""Wait for the webapp to finish initialization."""

@ -114,6 +114,12 @@ def _make_webapp():
# check if we need to start a local webapp server
if not webapp_url:
# yup - make it so
# FUDGE! If the dev environment is configured to fixup content in a background thread, that thread will start
# when we start the webapp server. When we configure things before running a test, and reload the webapp,
# that thread will still be running, loading the old data into the search index and in-memory structures :-/
# We work around this by forcing an empty environment before starting the webapp server.
app.config.pop( "DATA_DIR", None )
app.config.pop( "INITIAL_QUERY_STRING", None ) # nb: this can also cause problems
# NOTE: We run the server thread as a daemon so that it won't prevent the tests from finishing
# when they're done. However, this makes it difficult to know when to shut the server down,
# and, in particular, clean up the gRPC service. We send an EndTests message at the end of each test,
