Compare commits

28 Commits
v0.2 ... master

Author SHA1 Message Date
Pacman Ghost 75d6678e18 Updated the pylint config. 2 years ago
Pacman Ghost a03f917f05 Updated links to point to pacman-ghost.com. 2 years ago
Pacman Ghost 04d52c85bd Don't warn about missing page numbers for publisher articles. 2 years ago
Pacman Ghost 757e9797dc Updated the version strings. 2 years ago
Pacman Ghost 838d3d1c1e Updated dependencies. 2 years ago
Pacman Ghost 1945c8d4e7 Updated dependencies. 2 years ago
Pacman Ghost a5f931ce51 Updated the READ-ME to point to the pre-loaded database. 2 years ago
Pacman Ghost 32b3ebdf5e Strip HTML when setting the browser titlebar. 2 years ago
Pacman Ghost 51ff9e960b Handle quoted words in author names. 2 years ago
Pacman Ghost 11c8f0dced Changed how we scroll to articles already on-screen. 2 years ago
Pacman Ghost 1446d97ac3 Allow the image zoom Javascript to be cached. 2 years ago
Pacman Ghost f080805e77 Fixed some issues when running the test suite against a remote backend server. 2 years ago
Pacman Ghost 49618b9d9c Minor documentation changes. 2 years ago
Pacman Ghost 20f03c2dc1 Added a watermark. 2 years ago
Pacman Ghost 01be3e9880 Minor UI changes. 2 years ago
Pacman Ghost c59e189998 Updated the version strings. 2 years ago
Pacman Ghost 7575d2c217 Use waitress to serve the webapp. 2 years ago
Pacman Ghost 81445487f5 Fixed a problem updating the UI after deleting something. 2 years ago
Pacman Ghost 197a665b10 Made the database reporting tools available in the webapp UI. 2 years ago
Pacman Ghost 189d72725c Update the browser URL after selecting menu items. 3 years ago
Pacman Ghost d81a02317f Got filtering working for standard Select droplists. 3 years ago
Pacman Ghost a0410f5960 Fixed an issue parsing quoted search phrases that contain special characters. 3 years ago
Pacman Ghost 49c608186c Allow publisher articles to have a publication date. 3 years ago
Pacman Ghost 95e662c9f6 Changed how data is transfered between the front- and back-end. 3 years ago
Pacman Ghost fdc287bb61 Allow articles to be associated with a publisher. 3 years ago
Pacman Ghost 41c5d261af Run the Docker container using the caller's UID/GID. 3 years ago
Pacman Ghost db1469023b Updated to a newer version of Flask-SQLAlchemy. 3 years ago
Pacman Ghost 425fdb00e2 Updated to Python 3.8.7. 3 years ago
96 Files Changed
1. .pylintrc (92)
2. Dockerfile (11)
3. README.md (64)
4. alembic/versions/702eeb219037_allow_articles_to_have_a_publication_.py (28)
5. alembic/versions/a33edb7272a2_allow_articles_to_be_associated_with_a_.py (40)
6. asl_articles/__init__.py (7)
7. asl_articles/articles.py (120)
8. asl_articles/authors.py (31)
9. asl_articles/config/constants.py (2)
10. asl_articles/config/site.cfg.example (2)
11. asl_articles/db_report.py (149)
12. asl_articles/images.py (2)
13. asl_articles/main.py (10)
14. asl_articles/models.py (5)
15. asl_articles/publications.py (66)
16. asl_articles/publishers.py (63)
17. asl_articles/scenarios.py (14)
18. asl_articles/search.py (84)
19. asl_articles/tags.py (7)
20. asl_articles/tests/__init__.py (3)
21. asl_articles/tests/fixtures/db-report.json (42)
22. asl_articles/tests/fixtures/docs/aslj-1.html (1)
23. asl_articles/tests/fixtures/docs/aslj-2.html (1)
24. asl_articles/tests/fixtures/docs/mmp.html (1)
25. asl_articles/tests/fixtures/publisher-article-dates.json (11)
26. asl_articles/tests/fixtures/publisher-articles.json (17)
27. asl_articles/tests/test_articles.py (190)
28. asl_articles/tests/test_authors.py (3)
29. asl_articles/tests/test_db_report.py (235)
30. asl_articles/tests/test_import_roar_scenarios.py (5)
31. asl_articles/tests/test_publications.py (20)
32. asl_articles/tests/test_publishers.py (10)
33. asl_articles/tests/test_scenarios.py (3)
34. asl_articles/tests/test_search.py (16)
35. asl_articles/tests/test_startup.py (4)
36. asl_articles/tests/test_tags.py (6)
37. asl_articles/tests/utils.py (52)
38. asl_articles/utils.py (28)
39. conftest.py (39)
40. doc/publication.png (BIN)
41. doc/publishers.png (BIN)
42. doc/search.png (BIN)
43. doc/tag.png (BIN)
44. requirements-dev.txt (10)
45. requirements.txt (15)
46. run-containers.sh (2)
47. run_server.py (35)
48. setup.py (14)
49. tools/find_broken_external_document_links.py (78)
50. tools/images_report.py (92)
51. tools/import_roar_scenarios.py (3)
52. web/Dockerfile (6)
53. web/package-lock.json (37267)
54. web/package.json (5)
55. web/public/favicon.ico (BIN)
56. web/public/images/check-db-links.png (BIN)
57. web/public/images/edit.png (BIN)
58. web/public/images/icons/article-grey.png (BIN)
59. web/public/images/icons/article.png (BIN)
60. web/public/images/icons/delete.png (BIN)
61. web/public/images/icons/edit.png (BIN)
62. web/public/images/icons/publication.png (BIN)
63. web/public/images/icons/publisher.png (BIN)
64. web/public/images/icons/technique.png (BIN)
65. web/public/images/icons/tips.png (BIN)
66. web/public/images/info.png (BIN)
67. web/public/images/link-error-bullet.png (BIN)
68. web/public/images/menu/article.png (BIN)
69. web/public/images/menu/db-report.png (BIN)
70. web/public/images/menu/publication.png (BIN)
71. web/public/images/menu/publisher.png (BIN)
72. web/public/images/menu/publishers.png (BIN)
73. web/public/images/menu/technique.png (BIN)
74. web/public/images/menu/tips.png (BIN)
75. web/public/images/open-link.png (BIN)
76. web/public/images/watermark.png (BIN)
77. web/src/App.css (20)
78. web/src/App.js (217)
79. web/src/ArticleSearchResult.css (1)
80. web/src/ArticleSearchResult.js (221)
81. web/src/ArticleSearchResult2.js (117)
82. web/src/DataCache.js (59)
83. web/src/DbReport.css (24)
84. web/src/DbReport.js (387)
85. web/src/PreviewableImage.js (22)
86. web/src/PublicationSearchResult.js (189)
87. web/src/PublicationSearchResult2.js (19)
88. web/src/PublisherSearchResult.js (165)
89. web/src/PublisherSearchResult2.js (4)
90. web/src/SearchForm.css (2)
91. web/src/SearchResults.css (14)
92. web/src/SearchResults.js (42)
93. web/src/constants.js (1)
94. web/src/index.css (8)
95. web/src/index.js (17)
96. web/src/utils.js (53)

@ -60,17 +60,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
raw-checker-failed,
disable=raw-checker-failed,
bad-inline-option,
locally-disabled,
file-ignored,
@ -78,74 +68,15 @@ disable=print-statement,
useless-suppression,
deprecated-pragma,
use-symbolic-message-instead,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
deprecated-operator-function,
deprecated-urllib-function,
xreadlines-attribute,
deprecated-sys-function,
exception-escape,
comprehension-escape,
bad-whitespace,
invalid-name,
wrong-import-position,
global-statement,
bad-continuation,
too-few-public-methods,
no-else-return
no-else-return,
consider-using-f-string,
use-implicit-booleaness-not-comparison,
duplicate-code,
unnecessary-lambda-assignment,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
@ -240,7 +171,7 @@ contextmanager-decorators=contextlib.contextmanager
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
# NOTE: We disable warnings for SQLAlchemy's query.filter/filter_by/join() methods.
generated-members=filter,join
generated-members=filter,join,session.query,session.add,session.commit
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
@ -262,7 +193,7 @@ ignore-on-opaque-inference=yes
# for classes with dynamically set attributes). This supports the use of
# qualified names.
# NOTE: We disable warnings for SQLAlchemy's Column class members e.g. ilike(), asc()
ignored-classes=optparse.Values,thread._local,_thread._local,Column
ignored-classes=optparse.Values,thread._local,_thread._local,scoped_session,Column
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
@ -307,13 +238,6 @@ max-line-length=120
# Maximum number of lines in a module.
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,
dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no

@ -1,11 +1,11 @@
# We do a multi-stage build (requires Docker >= 17.05) to install everything, then copy it all
# to the final target image.
FROM centos:8 AS base
FROM rockylinux:8.5 AS base
# update packages and install Python
RUN dnf -y upgrade-minimal && \
dnf install -y python36 && \
dnf install -y python38 && \
dnf clean all
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -13,13 +13,12 @@ RUN dnf -y upgrade-minimal && \
FROM base AS build
# set up a virtualenv
RUN python3.6 -m venv /opt/venv
RUN python3 -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"
RUN pip install --upgrade pip
# install the application requirements
COPY requirements.txt /tmp/
RUN pip install --upgrade pip
RUN pip install -r /tmp/requirements.txt
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -45,6 +44,10 @@ RUN rm -f asl_articles/config/debug.cfg
# copy the alembic files (so that users can upgrade their database)
COPY alembic alembic
# NOTE: We set these so that we can update the database outside the container.
ENV UID=$DOCKER_UID
ENV GID=$DOCKER_GID
# launch the web server
EXPOSE 5000
ENV DBCONN undefined

@ -1,28 +1,30 @@
This program provides a searchable interface to your ASL magazines and their articles.
It is written using React (Javascript) for the front-end and Flask (Python) for the back-end. For ease of use, it can be run using Docker containers.
### To create a new database
*NOTE: This requires the Python environment to have been set up (see the developer notes below).*
Go to the *alembic/* directory and change the database connection string in *alembic.ini* e.g.
```sqlalchemy.url = sqlite:////home/pacman-ghost/asl-articles.db```
Note that there are 3 forward slashes for the protocol; the 4th one is the start of the path to the database.
Run the following command to create the database (you must be in the *alembic/* directory):
[<img src="doc/publishers.png" height="150">](doc/publishers.png)
&nbsp;
[<img src="doc/publication.png" height="150">](doc/publication.png)
&nbsp;
[<img src="doc/search.png" height="150">](doc/search.png)
&nbsp;
[<img src="doc/tag.png" height="150">](doc/tag.png)
*NOTE: This project integrates with my other [asl-rulebook2](https://code.pacman-ghost.com/public/asl-rulebook2) project. Add a setting to your `site.cfg` e.g.*
```
ASLRB_BASE_URL = http://localhost:5020
```
```alembic upgrade head```
*and references to rules will be converted to clickable links that will open the ASLRB at that rule.*
### To run the application
Go to the project root directory and run the following command:
Get a copy of the pre-loaded database from the release page.
```./run-containers.sh /home/pacman-ghost/asl-articles.db```
Then go to the project root directory and run the following command:
```
./run-containers.sh -d /home/pacman-ghost/asl-articles.db
```
*NOTE: You will need Docker >= 17.05 (for multi-stage builds)*
*NOTE: You will need Docker >= 17.05 (for multi-stage builds), and `docker-compose`.*
Then open a browser and go to http://localhost:3002
@ -35,13 +37,15 @@ It is possible to configure publications and their articles so that clicking the
For security reasons, browsers don't allow *file://* links to PDFs; they must be served by a web server. This program supports this, but some things need to be set up first.
When you run the application, specify the top-level directory that contains your PDFs on the command line e.g.
```./run-containers.sh /home/pacman-ghost/asl-articles.db /home/pacman-ghost/asl-articles-docs/```
```
./run-containers.sh \
-d /home/pacman-ghost/asl-articles.db \
-e /home/pacman-ghost/asl-articles-docs/
```
Then, configure your document paths *relative to that directory*.
For example, say I have my files organized like this:
```
* /home/pacman-ghost/
+-- asl-articles.db
@ -63,14 +67,16 @@ The application is split over 2 Docker containers, one running a React front-end
##### Setting up the Flask (Python) back-end
Create a *virtualenv*, then go to the *asl_articles/* directory and install the requirements:
```pip install -e .[dev]```
```
pip install -e .[dev]
```
Copy *config/site.cfg.example* to *config/site.cfg*, and update it to point to your database.
Then run the server:
```./run-server.py```
```
./run-server.py
```
You can test if things are working by opening a browser and going to http://localhost:5000/ping
@ -79,9 +85,11 @@ You can test if things are working by opening a browser and going to http://loca
##### Setting up the React front-end
Go to the *web/* directory and install the requirements:
```npm install```
```
npm install
```
Then run the server:
```npm start```
```
npm start
```

@ -0,0 +1,28 @@
"""Allow articles to have a publication date.
Revision ID: 702eeb219037
Revises: a33edb7272a2
Create Date: 2021-11-16 20:41:37.454305
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '702eeb219037'
down_revision = 'a33edb7272a2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('article', sa.Column('article_date', sa.String(length=100), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('article', 'article_date')
# ### end Alembic commands ###

@ -0,0 +1,40 @@
"""Allow articles to be associated with a publisher.
Revision ID: a33edb7272a2
Revises: 21ec84874208
Create Date: 2021-10-22 20:10:50.440849
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a33edb7272a2'
down_revision = '21ec84874208'
branch_labels = None
depends_on = None
from alembic import context
is_sqlite = context.config.get_main_option( "sqlalchemy.url" ).startswith( "sqlite://" )
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('article', sa.Column('publ_id', sa.Integer(), nullable=True))
if is_sqlite:
op.execute( "PRAGMA foreign_keys = off" ) # nb: stop cascading deletes
with op.batch_alter_table('article') as batch_op:
batch_op.create_foreign_key('fk_article_publisher', 'publisher', ['publ_id'], ['publ_id'], ondelete='CASCADE')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
if is_sqlite:
op.execute( "PRAGMA foreign_keys = off" ) # nb: stop cascading deletes
with op.batch_alter_table('article') as batch_op:
batch_op.drop_constraint('fk_article_publisher', type_='foreignkey')
op.drop_column('article', 'publ_id')
# ### end Alembic commands ###

@ -40,8 +40,8 @@ def _on_startup():
return
# initialize the search index
_logger = logging.getLogger( "startup" )
asl_articles.search.init_search( db.session, _logger )
logger = logging.getLogger( "startup" )
asl_articles.search.init_search( db.session, logger )
# ---------------------------------------------------------------------
@ -75,7 +75,7 @@ _load_config( _cfg, _fname, "Debug" )
# initialize logging
_fname = os.path.join( config_dir, "logging.yaml" )
if os.path.isfile( _fname ):
with open( _fname, "r" ) as fp:
with open( _fname, "r", encoding="utf-8" ) as fp:
logging.config.dictConfig( yaml.safe_load( fp ) )
else:
# stop Flask from logging every request :-/
@ -112,6 +112,7 @@ import asl_articles.scenarios #pylint: disable=cyclic-import
import asl_articles.images #pylint: disable=cyclic-import
import asl_articles.tags #pylint: disable=cyclic-import
import asl_articles.docs #pylint: disable=cyclic-import
import asl_articles.db_report #pylint: disable=cyclic-import
import asl_articles.utils #pylint: disable=cyclic-import
# initialize

@ -9,19 +9,18 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db
from asl_articles.models import Article, Author, ArticleAuthor, Scenario, ArticleScenario, ArticleImage
from asl_articles.models import Publication
from asl_articles.authors import do_get_authors
from asl_articles.scenarios import do_get_scenarios
from asl_articles.tags import do_get_tags
from asl_articles.authors import get_author_vals
from asl_articles.scenarios import get_scenario_vals
import asl_articles.publications
import asl_articles.publishers
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response
_logger = logging.getLogger( "db" )
_FIELD_NAMES = [ "*article_title", "article_subtitle", "article_snippet", "article_pageno",
"article_url", "article_tags", "pub_id"
_FIELD_NAMES = [ "*article_title", "article_subtitle", "article_date", "article_snippet", "article_pageno",
"article_url", "article_tags", "pub_id", "publ_id"
]
# ---------------------------------------------------------------------
@ -34,9 +33,10 @@ def get_article( article_id ):
if not article:
abort( 404 )
_logger.debug( "- %s", article )
return jsonify( get_article_vals( article ) )
deep = request.args.get( "deep" )
return jsonify( get_article_vals( article, deep ) )
def get_article_vals( article, add_type=False ):
def get_article_vals( article, deep ):
"""Extract public fields from an Article record."""
authors = sorted( article.article_authors,
key = lambda a: a.seq_no
@ -45,21 +45,29 @@ def get_article_vals( article, add_type=False ):
key = lambda a: a.seq_no
)
vals = {
"_type": "article",
"article_id": article.article_id,
"article_title": article.article_title,
"article_subtitle": article.article_subtitle,
"article_image_id": article.article_id if article.article_image else None,
"article_authors": [ a.author_id for a in authors ],
"article_authors": [ get_author_vals( a.parent_author ) for a in authors ],
"article_date": article.article_date,
"article_snippet": article.article_snippet,
"article_pageno": article.article_pageno,
"article_url": article.article_url,
"article_scenarios": [ s.scenario_id for s in scenarios ],
"article_scenarios": [ get_scenario_vals( s.parent_scenario ) for s in scenarios ],
"article_tags": decode_tags( article.article_tags ),
"article_rating": article.article_rating,
"pub_id": article.pub_id,
"publ_id": article.publ_id,
}
if add_type:
vals[ "type" ] = "article"
if deep:
vals["_parent_pub"] = asl_articles.publications.get_publication_vals(
article.parent_pub, False, False
) if article.parent_pub else None
vals["_parent_publ"] = asl_articles.publishers.get_publisher_vals(
article.parent_publ, False, False
) if article.parent_publ else None
return vals
def get_article_sort_key( article ):
@ -79,38 +87,31 @@ def create_article():
log = ( _logger, "Create article:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# create the new article
vals[ "time_created" ] = datetime.datetime.now()
if not vals.get( "publ_id" ):
vals.pop( "article_date", None )
article = Article( **vals )
db.session.add( article )
db.session.flush()
new_article_id = article.article_id
_set_seqno( article, article.pub_id )
_save_authors( article, updated )
_save_scenarios( article, updated )
_save_image( article, updated )
_save_authors( article )
_save_scenarios( article )
_save_image( article )
db.session.commit()
_logger.debug( "- New ID: %d", new_article_id )
search.add_or_update_article( None, article, None )
# generate the response
extras = { "article_id": new_article_id }
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_article_vals( article, True )
return make_ok_response( record=vals, warnings=warnings )
def _set_seqno( article, pub_id ):
"""Set an article's seq#."""
@ -122,7 +123,7 @@ def _set_seqno( article, pub_id ):
else:
article.article_seqno = None
def _save_authors( article, updated_fields ):
def _save_authors( article ):
"""Save the article's authors."""
# delete the existing article-author rows
@ -132,8 +133,6 @@ def _save_authors( article, updated_fields ):
# add the article-author rows
authors = request.json.get( "article_authors", [] )
author_ids = []
new_authors = False
for seq_no,author in enumerate( authors ):
if isinstance( author, int ):
# this is an existing author
@ -146,19 +145,12 @@ def _save_authors( article, updated_fields ):
db.session.add( author )
db.session.flush()
author_id = author.author_id
new_authors = True
_logger.debug( "Created new author \"%s\": id=%d", author, author_id )
db.session.add(
ArticleAuthor( seq_no=seq_no, article_id=article.article_id, author_id=author_id )
)
author_ids.append( author_id )
# check if we created any new authors
if new_authors:
# yup - let the caller know about them
updated_fields[ "article_authors"] = author_ids
def _save_scenarios( article, updated_fields ):
def _save_scenarios( article ):
"""Save the article's scenarios."""
# delete the existing article-scenario rows
@ -168,8 +160,6 @@ def _save_scenarios( article, updated_fields ):
# add the article-scenario rows
scenarios = request.json.get( "article_scenarios", [] )
scenario_ids = []
new_scenarios = False
for seq_no,scenario in enumerate( scenarios ):
if isinstance( scenario, int ):
# this is an existing scenario
@ -182,19 +172,12 @@ def _save_scenarios( article, updated_fields ):
db.session.add( new_scenario )
db.session.flush()
scenario_id = new_scenario.scenario_id
new_scenarios = True
_logger.debug( "Created new scenario \"%s [%s]\": id=%d", scenario[1], scenario[0], scenario_id )
db.session.add(
ArticleScenario( seq_no=seq_no, article_id=article.article_id, scenario_id=scenario_id )
)
scenario_ids.append( scenario_id )
# check if we created any new scenarios
if new_scenarios:
# yup - let the caller know about them
updated_fields[ "article_scenarios"] = scenario_ids
def _save_image( article, updated ):
def _save_image( article ):
"""Save the article's image."""
# check if a new image was provided
@ -206,7 +189,7 @@ def _save_image( article, updated ):
ArticleImage.query.filter( ArticleImage.article_id == article.article_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the article to have no image.
updated[ "article_image_id" ] = None
article.article_image_id = None
return
# add the new image to the database
@ -216,7 +199,6 @@ def _save_image( article, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "article_image_id" ] = article.article_id
# ---------------------------------------------------------------------
@ -230,44 +212,31 @@ def update_article():
log = ( _logger, "Update article: id={}".format( article_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# update the article
article = Article.query.get( article_id )
if not article:
abort( 404 )
orig_pub = Publication.query.get( article.pub_id ) if article.pub_id else None
if vals["pub_id"] != article.pub_id:
_set_seqno( article, vals["pub_id"] )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( article, vals )
_save_authors( article, updated )
_save_scenarios( article, updated )
_save_image( article, updated )
if not vals.get( "publ_id" ):
article.article_date = None
_save_authors( article )
_save_scenarios( article )
_save_image( article )
db.session.commit()
search.add_or_update_article( None, article, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
pubs = []
if orig_pub and orig_pub.pub_id != article.pub_id:
pubs.append( asl_articles.publications.get_publication_vals( orig_pub, True ) )
if article.pub_id:
pub = Publication.query.get( article.pub_id )
pubs.append( asl_articles.publications.get_publication_vals( pub, True ) )
if pubs:
extras[ "_publications" ] = pubs
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_article_vals( article, True )
return make_ok_response( record=vals, warnings=warnings )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -310,11 +279,4 @@ def delete_article( article_id ):
search.delete_articles( [ article ] )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( extras=extras )
return make_ok_response()
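
The reworked article endpoints above replace the old `updated`/`extras` response with a single `record` payload, and `get_article_vals()` can now embed the parent publication/publisher when `deep` is set. A hedged usage sketch only (the URL path, port and article ID below are assumptions, not taken from the diff):
```
import json
import urllib.request

# assumed local dev server (per the README); the route path and ID are illustrative only
url = "http://localhost:5000/article/100?deep=1"
with urllib.request.urlopen( url ) as resp:
    article = json.load( resp )

# per the diff above, a deep response also carries the parent objects (or None)
print( article["article_title"] )
print( article.get( "_parent_pub" ), article.get( "_parent_publ" ) )
```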

@ -1,27 +1,38 @@
""" Handle author requests. """
from flask import jsonify
import logging
from flask import jsonify, abort
from asl_articles import app
from asl_articles.models import Author
_logger = logging.getLogger( "db" )
# ---------------------------------------------------------------------
@app.route( "/authors" )
def get_authors():
"""Get all authors."""
return jsonify( do_get_authors() )
return jsonify( {
author.author_id: get_author_vals( author )
for author in Author.query.all()
} )
def do_get_authors():
"""Get all authors."""
# ---------------------------------------------------------------------
# get all the authors
return {
r.author_id: _get_author_vals(r)
for r in Author.query #pylint: disable=not-an-iterable
}
@app.route( "/author/<author_id>" )
def get_author( author_id ):
"""Get an author."""
_logger.debug( "Get author: id=%s", author_id )
author = Author.query.get( author_id )
if not author:
abort( 404 )
vals = get_author_vals( author )
_logger.debug( "- %s", author )
return jsonify( vals )
def _get_author_vals( author ):
def get_author_vals( author ):
"""Extract public fields from an Author record."""
return {
"author_id": author.author_id,

@ -3,7 +3,7 @@
import os
APP_NAME = "ASL Articles"
APP_VERSION = "v0.1" # nb: also update setup.py
APP_VERSION = "v1.1" # nb: also update setup.py
APP_DESCRIPTION = "Searchable index of ASL articles."
BASE_DIR = os.path.abspath( os.path.join( os.path.split(__file__)[0], ".." ) )

@ -11,5 +11,5 @@ EXTERNAL_DOCS_BASEDIR = ...
; Base directory for user files.
USER_FILES_BASEDIR = ...
; Base URL for the eASLRB.
; Base URL for the eASLRB (e.g. http://localhost:5020).
ASLRB_BASE_URL = ...

@ -0,0 +1,149 @@
""" Generate the database report. """
import urllib.request
import urllib.error
import hashlib
from collections import defaultdict
from flask import request, jsonify, abort
from asl_articles import app, db
# ---------------------------------------------------------------------
@app.route( "/db-report/row-counts" )
def get_db_row_counts():
"""Get the database row counts."""
results = {}
for table_name in [
"publisher", "publication", "article", "author",
"publisher_image", "publication_image", "article_image",
"scenario"
]:
query = db.engine.execute( "SELECT count(*) FROM {}".format( table_name ) )
results[ table_name ] = query.scalar()
return jsonify( results )
# ---------------------------------------------------------------------
@app.route( "/db-report/links" )
def get_db_links():
"""Get all links in the database."""
# initialize
results = {}
def find_db_links( table_name, col_names ):
links = []
query = db.engine.execute( "SELECT * FROM {}".format( table_name ) )
for row in query:
url = row[ col_names[1] ]
if not url:
continue
obj_id = row[ col_names[0] ]
name = col_names[2]( row ) if callable( col_names[2] ) else row[ col_names[2] ]
links.append( [ obj_id, name, url ] )
results[ table_name ] = links
# find all links
find_db_links( "publisher", [
"publ_id", "publ_url", "publ_name"
] )
find_db_links( "publication", [
"pub_id", "pub_url", _get_pub_name
] )
find_db_links( "article", [
"article_id", "article_url", "article_title"
] )
return jsonify( results )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@app.route( "/db-report/check-link", methods=["POST"] )
def check_db_link():
"""Check if a link appears to be working."""
url = request.args.get( "url" )
try:
req = urllib.request.Request( url, method="HEAD" )
with urllib.request.urlopen( req ) as resp:
resp_code = resp.code
except urllib.error.URLError as ex:
resp_code = getattr( ex, "code", None )
if not resp_code:
resp_code = 400
if resp_code != 200:
abort( resp_code )
return "ok"
# ---------------------------------------------------------------------
@app.route( "/db-report/images" )
def get_db_images():
"""Analyze the images stored in the database."""
# initialize
results = {}
image_hashes = defaultdict( list )
def find_images( table_name, col_names, get_name ):
# find rows in the specified table that have images
sql = "SELECT {cols}, image_data" \
" FROM {table}_image LEFT JOIN {table}" \
" ON {table}_image.{id_col} = {table}.{id_col}".format(
cols = ",".join( "{}.{}".format( table_name, c ) for c in col_names ),
table = table_name,
id_col = col_names[0]
)
rows = [
dict( row )
for row in db.engine.execute( sql )
]
# save the image hashes
for row in rows:
image_hash = hashlib.md5( row["image_data"] ).hexdigest()
image_hashes[ image_hash ].append( [
table_name, row[col_names[0]], get_name(row)
] )
# save the image sizes
image_sizes = [
[ len(row["image_data"]), row[col_names[0]], get_name(row) ]
for row in rows
]
image_sizes.sort( key = lambda r: r[0], reverse=True )
results[ table_name ] = image_sizes
# look for images in each table
find_images( "publisher",
[ "publ_id", "publ_name" ],
lambda row: row["publ_name"]
)
find_images( "publication",
[ "pub_id", "pub_name", "pub_edition" ],
_get_pub_name
)
find_images( "article",
[ "article_id", "article_title" ],
lambda row: row["article_title"]
)
# look for duplicate images
results["duplicates"] = {}
for image_hash, images in image_hashes.items():
if len(images) == 1:
continue
results["duplicates"][ image_hash ] = images
return results
# ---------------------------------------------------------------------
def _get_pub_name( row ):
"""Get a publication's display name."""
name = row["pub_name"]
if row["pub_edition"]:
name += " ({})".format( row["pub_edition"] )
return name
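
The new `/db-report/check-link` endpoint above takes the URL to test as a query parameter on a POST request and signals a broken link via the upstream status code (or 400). A hedged usage sketch, assuming the back-end is reachable on localhost:5000 (the base URL is not stated in the diff):
```
import urllib.error
import urllib.parse
import urllib.request

def check_link( url, base="http://localhost:5000" ):
    """Return True if the db-report link check reports the URL as working."""
    qs = urllib.parse.urlencode( { "url": url } )
    req = urllib.request.Request( "{}/db-report/check-link?{}".format( base, qs ), method="POST" )
    try:
        with urllib.request.urlopen( req ) as resp:
            return resp.read() == b"ok"
    except urllib.error.HTTPError:
        return False
```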

@ -21,5 +21,5 @@ def get_image( image_type, image_id ):
abort( 404 )
return send_file(
io.BytesIO( img.image_data ),
attachment_filename = img.image_filename # nb: so that Flask can set the MIME type
download_name = img.image_filename # nb: so that Flask can set the MIME type
)

@ -1,7 +1,5 @@
""" Main handlers. """
from flask import request
from asl_articles import app
# ---------------------------------------------------------------------
@ -10,11 +8,3 @@ from asl_articles import app
def ping():
"""Let the caller know we're alive (for testing porpoises)."""
return "pong"
# ---------------------------------------------------------------------
@app.route( "/shutdown" )
def shutdown():
"""Shutdown the server (for testing porpoises)."""
request.environ.get( "werkzeug.server.shutdown" )()
return ""

@ -23,6 +23,7 @@ class Publisher( db.Model ):
#
publ_image = db.relationship( "PublisherImage", backref="parent_publ", passive_deletes=True )
publications = db.relationship( "Publication", backref="parent_publ", passive_deletes=True )
articles = db.relationship( "Article", backref="parent_publ", passive_deletes=True )
def __repr__( self ):
return "<Publisher:{}|{}>".format( self.publ_id, self.publ_name )
@ -62,6 +63,7 @@ class Article( db.Model ):
article_id = db.Column( db.Integer, primary_key=True )
article_title = db.Column( db.String(200), nullable=False )
article_subtitle = db.Column( db.String(200) )
article_date = db.Column( db.String(100) ) # nb: this is just a display string
article_snippet = db.Column( db.String(5000) )
article_seqno = db.Column( db.Integer )
article_pageno = db.Column( db.String(20) )
@ -71,6 +73,9 @@ class Article( db.Model ):
pub_id = db.Column( db.Integer,
db.ForeignKey( Publication.__table__.c.pub_id, ondelete="CASCADE" )
)
publ_id = db.Column( db.Integer,
db.ForeignKey( Publisher.__table__.c.publ_id, ondelete="CASCADE" )
)
# NOTE: time_created should be non-nullable, but getting this to work on both SQLite and Postgres
# is more trouble than it's worth :-/
time_created = db.Column( db.TIMESTAMP(timezone=True) )
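
The model changes above add a nullable `article_date` display string and a direct `publ_id` foreign key, plus an `articles` relationship on `Publisher`. A minimal sketch (assuming an active application/session context and the model names above) of the two equivalent ways to reach a publisher's directly-attached articles:
```
# assumes the models above and an active Flask-SQLAlchemy session
publ = Publisher.query.get( 2 )                                       # ID is illustrative only
direct_articles = publ.articles                                       # via the new relationship
also_direct = Article.query.filter_by( publ_id=publ.publ_id ).all()   # via the new FK column
```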

@ -10,7 +10,7 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db
from asl_articles.models import Publication, PublicationImage, Article
from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles.tags import do_get_tags
import asl_articles.publishers
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response
@ -24,14 +24,10 @@ _FIELD_NAMES = [ "*pub_name", "pub_edition", "pub_description", "pub_date", "pub
@app.route( "/publications" )
def get_publications():
"""Get all publications."""
return jsonify( do_get_publications() )
def do_get_publications():
"""Get all publications."""
# NOTE: The front-end maintains a cache of the publications, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publication.query.all()
return { r.pub_id: get_publication_vals(r,False) for r in results }
return jsonify( {
pub.pub_id: get_publication_vals( pub, False, False )
for pub in Publication.query.all()
} )
# ---------------------------------------------------------------------
@ -42,16 +38,20 @@ def get_publication( pub_id ):
pub = Publication.query.get( pub_id )
if not pub:
abort( 404 )
vals = get_publication_vals( pub, False )
vals = get_publication_vals( pub,
request.args.get( "include_articles" ),
request.args.get( "deep" )
)
# include the number of associated articles
query = Article.query.filter_by( pub_id = pub_id )
vals[ "nArticles" ] = query.count()
_logger.debug( "- %s ; #articles=%d", pub, vals["nArticles"] )
return jsonify( vals )
def get_publication_vals( pub, include_articles, add_type=False ):
def get_publication_vals( pub, include_articles, deep ):
"""Extract public fields from a Publication record."""
vals = {
"_type": "publication",
"pub_id": pub.pub_id,
"pub_name": pub.pub_name,
"pub_edition": pub.pub_edition,
@ -66,9 +66,11 @@ def get_publication_vals( pub, include_articles, add_type=False ):
}
if include_articles:
articles = sorted( pub.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a ) for a in articles ]
if add_type:
vals[ "type" ] = "publication"
vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
if deep:
vals[ "_parent_publ" ] = asl_articles.publishers.get_publisher_vals(
pub.parent_publ, False, False
) if pub.parent_publ else None
return vals
def get_publication_sort_key( pub ):
@ -96,30 +98,25 @@ def create_publication():
log = ( _logger, "Create publication:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# create the new publication
vals[ "time_created" ] = datetime.datetime.now()
pub = Publication( **vals )
db.session.add( pub )
_set_seqno( pub, pub.publ_id )
_save_image( pub, updated )
_save_image( pub )
db.session.commit()
_logger.debug( "- New ID: %d", pub.pub_id )
search.add_or_update_publication( None, pub, None )
# generate the response
extras = { "pub_id": pub.pub_id }
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publication_vals( pub, False, True )
return make_ok_response( record=vals, warnings=warnings )
def _set_seqno( pub, publ_id ):
"""Set a publication's seq#."""
@ -139,7 +136,7 @@ def _set_seqno( pub, publ_id ):
else:
pub.pub_seqno = None
def _save_image( pub, updated ):
def _save_image( pub ):
"""Save the publication's image."""
# check if a new image was provided
@ -151,7 +148,7 @@ def _save_image( pub, updated ):
PublicationImage.query.filter( PublicationImage.pub_id == pub.pub_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publication to have no image.
updated[ "pub_image_id" ] = None
pub.pub_image_id = None
return
# add the new image to the database
@ -161,7 +158,6 @@ def _save_image( pub, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "pub_image_id" ] = pub.pub_id
# ---------------------------------------------------------------------
@ -175,14 +171,12 @@ def update_publication():
log = ( _logger, "Update publication: id={}".format( pub_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
article_order = request.json.get( "article_order" )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# update the publication
pub = Publication.query.get( pub_id )
@ -192,7 +186,7 @@ def update_publication():
_set_seqno( pub, vals["publ_id"] )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( pub, vals )
_save_image( pub, updated )
_save_image( pub )
if article_order:
query = Article.query.filter( Article.pub_id == pub_id )
articles = { int(a.article_id): a for a in query }
@ -212,11 +206,8 @@ def update_publication():
search.add_or_update_publication( None, pub, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publication_vals( pub, False, True )
return make_ok_response( record=vals, warnings=warnings )
# ---------------------------------------------------------------------
@ -243,8 +234,5 @@ def delete_publication( pub_id ):
search.delete_articles( deleted_articles )
# generate the response
extras = { "deleteArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
extras = { "deletedArticles": deleted_articles }
return make_ok_response( extras=extras )

@ -8,7 +8,8 @@ from flask import request, jsonify, abort
from asl_articles import app, db
from asl_articles.models import Publisher, PublisherImage, Publication, Article
from asl_articles.publications import do_get_publications
from asl_articles.publications import get_publication_vals, get_publication_sort_key
from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, make_ok_response, apply_attrs
@ -21,14 +22,10 @@ _FIELD_NAMES = [ "*publ_name", "publ_description", "publ_url" ]
@app.route( "/publishers" )
def get_publishers():
"""Get all publishers."""
return jsonify( _do_get_publishers() )
def _do_get_publishers():
"""Get all publishers."""
# NOTE: The front-end maintains a cache of the publishers, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publisher.query.all()
return { r.publ_id: get_publisher_vals(r) for r in results }
return jsonify( {
publ.publ_id: get_publisher_vals( publ, False, False )
for publ in Publisher.query.all()
} )
# ---------------------------------------------------------------------
@ -40,7 +37,10 @@ def get_publisher( publ_id ):
publ = Publisher.query.get( publ_id )
if not publ:
abort( 404 )
vals = get_publisher_vals( publ )
vals = get_publisher_vals( publ,
request.args.get( "include_pubs" ),
request.args.get( "include_articles" )
)
# include the number of associated publications
query = Publication.query.filter_by( publ_id = publ_id )
vals[ "nPublications" ] = query.count()
@ -48,21 +48,28 @@ def get_publisher( publ_id ):
query = db.session.query( Article, Publication ) \
.filter( Publication.publ_id == publ_id ) \
.filter( Article.pub_id == Publication.pub_id )
vals[ "nArticles" ] = query.count()
nArticles = query.count()
nArticles2 = Article.query.filter_by( publ_id = publ_id ).count()
vals[ "nArticles" ] = nArticles + nArticles2
_logger.debug( "- %s ; #publications=%d ; #articles=%d", publ, vals["nPublications"], vals["nArticles"] )
return jsonify( vals )
def get_publisher_vals( publ, add_type=False ):
def get_publisher_vals( publ, include_pubs, include_articles ):
"""Extract public fields from a Publisher record."""
vals = {
"_type": "publisher",
"publ_id": publ.publ_id,
"publ_name": publ.publ_name,
"publ_description": publ.publ_description,
"publ_url": publ.publ_url,
"publ_image_id": publ.publ_id if publ.publ_image else None,
}
if add_type:
vals[ "type" ] = "publisher"
if include_pubs:
pubs = sorted( publ.publications, key=get_publication_sort_key )
vals[ "publications" ] = [ get_publication_vals( p, False, False ) for p in pubs ]
if include_articles:
articles = sorted( publ.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
return vals
# ---------------------------------------------------------------------
@ -76,24 +83,22 @@ def create_publisher():
log = ( _logger, "Create publisher:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# create the new publisher
vals[ "time_created" ] = datetime.datetime.now()
publ = Publisher( **vals )
db.session.add( publ )
_save_image( publ, updated )
_save_image( publ )
db.session.commit()
_logger.debug( "- New ID: %d", publ.publ_id )
search.add_or_update_publisher( None, publ, None )
# generate the response
extras = { "publ_id": publ.publ_id }
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publisher_vals( publ, True, True )
return make_ok_response( record=vals, warnings=warnings )
def _save_image( publ, updated ):
def _save_image( publ ):
"""Save the publisher's image."""
# check if a new image was provided
@ -105,7 +110,7 @@ def _save_image( publ, updated ):
PublisherImage.query.filter( PublisherImage.publ_id == publ.publ_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publisher to have no image.
updated[ "publ_image_id" ] = None
publ.publ_image_id = None
return
# add the new image to the database
@ -115,7 +120,6 @@ def _save_image( publ, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "publ_image_id" ] = publ.publ_id
# ---------------------------------------------------------------------
@ -129,23 +133,21 @@ def update_publisher():
log = ( _logger, "Update publisher: id={}".format( publ_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# update the publisher
publ = Publisher.query.get( publ_id )
if not publ:
abort( 404 )
_save_image( publ, updated )
_save_image( publ )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( publ, vals )
db.session.commit()
search.add_or_update_publisher( None, publ, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publisher_vals( publ, True, True )
return make_ok_response( record=vals, warnings=warnings )
# ---------------------------------------------------------------------
@ -179,7 +181,4 @@ def delete_publisher( publ_id ):
search.delete_articles( deleted_articles )
extras = { "deletedPublications": deleted_pubs, "deletedArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
extras[ "publications" ] = do_get_publications()
return make_ok_response( extras=extras )
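
With the publisher endpoints reworked as above, a publisher's article count now combines articles reached through its publications with articles attached directly via `publ_id`, and the embedded publications/articles are opt-in. A hedged sketch of calling the single-publisher endpoint (the URL path, port and ID are assumptions):
```
import json
import urllib.request

url = "http://localhost:5000/publisher/2?include_pubs=1&include_articles=1"
with urllib.request.urlopen( url ) as resp:
    publ = json.load( resp )
print( publ["publ_name"], publ["nPublications"], publ["nArticles"] )
```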

@ -10,16 +10,12 @@ from asl_articles.models import Scenario
@app.route( "/scenarios" )
def get_scenarios():
"""Get all scenarios."""
return jsonify( do_get_scenarios() )
return jsonify( {
scenario.scenario_id: get_scenario_vals( scenario )
for scenario in Scenario.query.all()
} )
def do_get_scenarios():
"""Get all scenarios."""
return {
s.scenario_id: _get_scenario_vals( s )
for s in Scenario.query #pylint: disable=not-an-iterable
}
def _get_scenario_vals( scenario ):
def get_scenario_vals( scenario ):
"""Extract public fields from a scenario record."""
return {
"scenario_id": scenario.scenario_id,

@ -30,7 +30,7 @@ _SQLITE_FTS_SPECIAL_CHARS = "+-#':/.@$"
# NOTE: The column order defined here is important, since we have to access row results by column index.
_SEARCHABLE_COL_NAMES = [ "name", "name2", "description", "authors", "scenarios", "tags" ]
_get_publisher_vals = lambda p: get_publisher_vals( p, True )
_get_publisher_vals = lambda p: get_publisher_vals( p, True, True )
_get_publication_vals = lambda p: get_publication_vals( p, True, True )
_get_article_vals = lambda a: get_article_vals( a, True )
@ -120,7 +120,7 @@ def search():
def search_publishers():
"""Return all publishers."""
publs = sorted( Publisher.query.all(), key=lambda p: p.publ_name.lower() )
results = [ get_publisher_vals( p, True ) for p in publs ]
results = [ get_publisher_vals( p, True, True ) for p in publs ]
return jsonify( results )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -131,7 +131,7 @@ def search_publisher( publ_id ):
publ = Publisher.query.get( publ_id )
if not publ:
return jsonify( [] )
results = [ get_publisher_vals( publ, True ) ]
results = [ get_publisher_vals( publ, True, True ) ]
pubs = sorted( publ.publications, key=get_publication_sort_key, reverse=True )
for pub in pubs:
results.append( get_publication_vals( pub, True, True ) )
@ -161,13 +161,13 @@ def search_article( article_id ):
article = Article.query.get( article_id )
if not article:
return jsonify( [] )
article = get_article_vals( article, True )
_create_aslrb_links( article )
results = [ article ]
if article["pub_id"]:
pub = Publication.query.get( article["pub_id"] )
if pub:
results.append( get_publication_vals( pub, True, True ) )
vals = get_article_vals( article, True )
_create_aslrb_links( vals )
results = [ vals ]
if article.parent_pub:
results.append( get_publication_vals( article.parent_pub, True, True ) )
if article.parent_publ:
results.append( get_publisher_vals( article.parent_publ, True, True ) )
return jsonify( results )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -301,7 +301,7 @@ def _do_fts_search( fts_query_string, col_names, results=None ): #pylint: disabl
# prepare the result for the front-end
result = globals()[ "_get_{}_vals".format( owner_type ) ]( obj )
result[ "type" ] = owner_type
result[ "_type" ] = owner_type
result[ "rank" ] = row[1]
# return highlighted versions of the content to the caller
@ -393,7 +393,15 @@ def _make_fts_query_string( query_string, search_aliases ): #pylint: disable=too
return "({})".format( " OR ".join( quote(v) for v in val ) )
def quote( val ):
"""Quote a string, if necessary."""
if not val.startswith( '"' ) or not val.endswith( '"' ):
# NOTE: We used to check for fully-quoted values i.e.
# not ( startswith " and endswith " )
# which becomes:
# not startswith " or not endswith "
# but this doesn't work with quoted multi-word phrases that contain special characters
# e.g. "J. R. Tracy", since we see that the first phrase ("J.) is not fully-quoted,
# and so we wrap it in quotes :-/ Instead, if we see a quote at either end of the word,
# we treat it as part of a quoted phrase (either single- or multi-word), and use it verbatim.
if not val.startswith( '"' ) and not val.endswith( '"' ):
if any( ch in val for ch in _SQLITE_FTS_SPECIAL_CHARS+" " ):
val = '"{}"'.format( val )
return val.replace( "'", "''" )
@ -402,20 +410,24 @@ def _make_fts_query_string( query_string, search_aliases ): #pylint: disable=too
if is_raw_query:
return [ val.strip() ]
tokens = []
DQUOTE_MARKER = "<!~!>"
for word in val.split():
# FUDGE! It's difficult to figure out if we have a multi-word quoted phrase when the query string
# contains nested quotes, so we hack around this by temporarily removing the inner quotes.
word = word.replace( '""', DQUOTE_MARKER )
if len(tokens) > 0:
if tokens[-1].startswith( '"' ) and not tokens[-1].endswith( '"' ):
# the previous token is a quoted phrase, continue it
# the previous token is the start of a quoted phrase - continue it
tokens[-1] += " " + word
continue
if not tokens[-1].startswith( '"' ) and word.endswith( '"' ):
tokens.append( quote( word[:-1] ) )
continue
tokens.append( quote( word ) )
if len(tokens) > 0 and tokens[-1].startswith( '"' ) and not tokens[-1].endswith( '"' ):
# we have an unterminated quoted phrase, terminate it
tokens[-1] += '"'
return [ t for t in tokens if t ]
return [
t.replace( DQUOTE_MARKER, '""' )
for t in tokens if t
]
# split the query string into parts (alias replacement texts, and everything else)
parts, pos = [], 0
@ -560,7 +572,7 @@ def _find_aslrb_ruleids( val ): #pylint: disable=too-many-branches
# ---------------------------------------------------------------------
def init_search( session, logger ):
def init_search( session, logger, test_mode=False ):
"""Initialize the search engine."""
# initialize the database
@ -604,20 +616,22 @@ def init_search( session, logger ):
for article in session.query( Article ).order_by( Article.time_created.desc() ):
add_or_update_article( dbconn, article, session )
# configure the searcg engine
# configure the search engine
global _search_aliases
_search_aliases = {}
global _search_weights
_search_weights = {}
fname = os.path.join( asl_articles.config_dir, "search.cfg" )
if os.path.isfile( fname ):
# load the search aliases
_logger.debug( "Loading search aliases: %s", fname )
cfg = AppConfigParser( fname )
global _search_aliases
_search_aliases = _load_search_aliases(
cfg.get_section( "Search aliases" ),
cfg.get_section( "Search aliases 2" )
)
# load the search weights
_logger.debug( "Loading search weights:" )
global _search_weights
for row in cfg.get_section( "Search weights" ):
if row[0] not in _SEARCHABLE_COL_NAMES:
asl_articles.startup.log_startup_msg( "warning",
@ -638,20 +652,22 @@ def init_search( session, logger ):
# NOTE: These should really be stored in the database, but the UI would be so insanely hairy,
# we just keep them in a text file and let the user manage them manually :-/
global _author_aliases
_author_aliases = {}
fname = os.path.join( asl_articles.config_dir, "author-aliases.cfg" )
if os.path.isfile( fname ):
_logger.debug( "Loading author aliases: %s", fname )
cfg = AppConfigParser( fname )
_author_aliases = _load_author_aliases( cfg.get_section("Author aliases"), session, False )
# NOTE: We load the test aliases here as well (the test suite can't mock them because
# they might be running in a different process).
fname = os.path.join( os.path.split(__file__)[0], "tests/fixtures/author-aliases.cfg" )
if os.path.isfile( fname ):
_logger.debug( "Loading test author aliases: %s", fname )
cfg = AppConfigParser( fname )
_author_aliases.update(
_load_author_aliases( cfg.get_section("Author aliases"), session, True )
)
if test_mode:
# NOTE: We load the test aliases here as well (since the test suite can't mock them,
# because we might be running in a different process).
fname = os.path.join( os.path.split(__file__)[0], "tests/fixtures/author-aliases.cfg" )
if os.path.isfile( fname ):
_logger.debug( "Loading test author aliases: %s", fname )
cfg = AppConfigParser( fname )
_author_aliases.update(
_load_author_aliases( cfg.get_section("Author aliases"), session, True )
)
def _load_search_aliases( aliases, aliases2 ):
"""Load the search aliases."""
@ -819,3 +835,11 @@ def _make_publication_key( pub ):
def _make_article_key( article ):
"""Generate the owner key for an Article."""
return "article:{}".format( article.article_id if isinstance(article,Article) else article )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@app.route( "/init-search-for-test" )
def init_search_for_test():
"""Re-initialize the search engine (for testing porpoises)."""
init_search( db.session, logging.getLogger("search"), test_mode=True )
return "ok"

@ -13,12 +13,7 @@ from asl_articles.utils import decode_tags
@app.route( "/tags" )
def get_tags():
"""Get all tags."""
return jsonify( do_get_tags() )
def do_get_tags():
"""Get all tags."""
# get all the tags
# NOTE: This is pretty inefficient, since an article/publication's tags are munged into one big string
# and stored in a single column, so we need to manually unpack everything, but we'll see how it goes...
tags = defaultdict( int )
@ -36,4 +31,4 @@ def do_get_tags():
key = lambda v: ( -v[1], v[0] ) # sort by # instances, then name
)
return tags
return jsonify( tags )

@ -0,0 +1,3 @@
""" Module definitions. """
pytest_options = None

@ -0,0 +1,42 @@
{
"publisher": [
{ "publ_id": 1, "publ_name": "Avalon Hill", "publ_url": "http://{FLASK}/ping" },
{ "publ_id": 2, "publ_name": "Multiman Publishing", "publ_url": "http://{FLASK}/unknown" }
],
"publication": [
{ "pub_id": 10, "pub_name": "ASL Journal", "pub_edition": "1", "publ_id": 1, "pub_url": "/aslj-1.html" },
{ "pub_id": 11, "pub_name": "ASL Journal", "pub_edition": "2", "publ_id": 1, "pub_url": "/aslj-2.html" },
{ "pub_id": 20, "pub_name": "MMP News", "publ_id": 2 }
],
"article": [
{ "article_id": 100, "article_title": "ASLJ article 1", "pub_id": 10 },
{ "article_id": 101, "article_title": "ASLJ article 2", "pub_id": 10 },
{ "article_id": 110, "article_title": "ASLJ article 3", "pub_id": 11 },
{ "article_id": 200, "article_title": "MMP article", "pub_id": 20, "article_url": "/mmp.html" },
{ "article_id": 299, "article_title": "MMP publisher article", "publ_id": 2, "article_url": "/unknown" }
],
"article_author": [
{ "seq_no": 1, "article_id": 100, "author_id": 1000 },
{ "seq_no": 2, "article_id": 100, "author_id": 1001 },
{ "seq_no": 1, "article_id": 299, "author_id": 1000 }
],
"author": [
{ "author_id": 1000, "author_name": "Joe Blow" },
{ "author_id": 1001, "author_name": "Fred Nerk" },
{ "author_id": 1999, "author_name": "Alan Smithee" }
],
"article_scenario": [
{ "seq_no": 1, "article_id": 100, "scenario_id": 2000 },
{ "seq_no": 1, "article_id": 299, "scenario_id": 2001 }
],
"scenario": [
{ "scenario_id": 2000, "scenario_display_id": "ASL 1", "scenario_name": "The Guards Counterattack" },
{ "scenario_id": 2001, "scenario_display_id": "ASL 5", "scenario_name": "Hill 621" }
]
}

@ -0,0 +1 @@
Multiman Publishing.

@ -0,0 +1,11 @@
{
"publisher": [
{ "publ_id": 1, "publ_name": "Avalon Hill" }
],
"publication": [
{ "pub_id": 20, "pub_name": "ASL Journal", "publ_id": 1 }
]
}

@ -0,0 +1,17 @@
{
"publisher": [
{ "publ_id": 1, "publ_name": "Avalon Hill" },
{ "publ_id": 2, "publ_name": "Multiman Publishing" },
{ "publ_id": 3, "publ_name": "Le Franc Tireur" }
],
"publication": [
{ "pub_id": 20, "pub_name": "MMP News", "publ_id": 2 }
],
"article": [
{ "article_id": 200, "article_title": "MMP Today", "pub_id": 20 }
]
}

@ -5,6 +5,7 @@ import urllib.request
import urllib.error
import json
import base64
import re
from asl_articles.search import SEARCH_ALL_ARTICLES
from asl_articles.tests.utils import init_tests, select_main_menu_option, select_sr_menu_option, \
@ -277,8 +278,9 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
btn = find_child( ".row.image .remove-image", dlg )
assert btn.is_displayed()
# make sure the article's image is correct
resp = urllib.request.urlopen( image_url ).read()
assert resp == open( expected, "rb" ).read()
with urllib.request.urlopen( image_url ) as resp:
with open( expected, "rb" ) as fp:
assert resp.read() == fp.read()
else:
# make sure there is no image
img = find_child( ".row.image img.image", dlg )
@ -289,7 +291,8 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
# make sure the article's image is not available
url = flask_app.url_for( "get_image", image_type="article", image_id=article_id )
try:
resp = urllib.request.urlopen( url )
with urllib.request.urlopen( url ):
pass
assert False, "Should never get here!"
except urllib.error.HTTPError as ex:
assert ex.code == 404
@ -349,7 +352,8 @@ def test_parent_publisher( webdriver, flask_app, dbconn ):
# check that the parent publication was updated in the database
article_id = sr.get_attribute( "testing--article_id" )
url = flask_app.url_for( "get_article", article_id=article_id )
article = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
article = json.load( resp )
if expected_parent:
if article["pub_id"] != expected_parent[0]:
return None
@ -387,6 +391,157 @@ def test_parent_publisher( webdriver, flask_app, dbconn ):
# ---------------------------------------------------------------------
def test_publisher_articles( webdriver, flask_app, dbconn ): #pylint: disable=too-many-statements
"""Test articles that are associated with a publisher (not publication)."""
# initialize
init_tests( webdriver, flask_app, dbconn, fixtures="publisher-articles.json" )
def check_parent_in_sr( sr, pub, publ ):
"""Check the article's parent publication/publisher in a search result."""
if pub:
elem = wait_for( 2, lambda: find_child( ".header a.publication", sr ) )
assert elem.is_displayed()
assert elem.text == pub
assert re.search( r"^http://.+?/publication/\d+", elem.get_attribute( "href" ) )
elif publ:
elem = wait_for( 2, lambda: find_child( ".header a.publisher", sr ) )
assert elem.is_displayed()
assert elem.text == publ
assert re.search( r"^http://.+?/publisher/\d+", elem.get_attribute( "href" ) )
else:
assert False, "At least one publication/publisher must be specified."
def check_parent_in_dlg( dlg, pub, publ ):
"""Check the article's parent publication/publication in the edit dialog."""
if pub:
select = find_child( ".row.publication .react-select", dlg )
assert select.is_displayed()
assert select.text == pub
elif publ:
select = find_child( ".row.publisher .react-select", dlg )
assert select.is_displayed()
assert select.text == publ
else:
assert False, "At least one publication/publisher must be specified."
# create an article associated with LFT
create_article( {
"title": "test article",
"publisher": "Le Franc Tireur"
} )
results = wait_for( 2, get_search_results )
assert len(results) == 1
sr = results[0]
check_parent_in_sr( sr, None, "Le Franc Tireur" )
# open the article's dialog
select_sr_menu_option( sr, "edit" )
dlg = wait_for_elem( 2, "#article-form" )
check_parent_in_dlg( dlg, None, "Le Franc Tireur" )
# change the article to be associated with an MMP publication
find_child( ".row.publisher label.parent-mode" ).click()
select = wait_for_elem( 2, ".row.publication .react-select" )
ReactSelect( select ).select_by_name( "MMP News" )
find_child( "button.ok", dlg ).click()
results = wait_for( 2, get_search_results )
assert len(results) == 1
sr = results[0]
check_parent_in_sr( sr, "MMP News", None )
# open the article's dialog
select_sr_menu_option( sr, "edit" )
dlg = wait_for_elem( 2, "#article-form" )
check_parent_in_dlg( dlg, "MMP News", None )
# change the article to be associated with MMP (publisher)
find_child( ".row.publication label.parent-mode" ).click()
select = wait_for_elem( 2, ".row.publisher .react-select" )
ReactSelect( select ).select_by_name( "Multiman Publishing" )
find_child( "button.ok", dlg ).click()
results = wait_for( 2, get_search_results )
assert len(results) == 1
sr = results[0]
check_parent_in_sr( sr, None, "Multiman Publishing" )
# show the MMP publisher
results = do_search( "multiman" )
assert len(results) == 1
sr = results[0]
collapsibles = find_children( ".collapsible", sr )
assert len(collapsibles) == 2
items = find_children( "li a", collapsibles[1] )
assert len(items) == 1
item = items[0]
assert item.text == "test article"
assert re.search( r"^http://.+?/article/\d+", item.get_attribute( "href" ) )
# delete the MMP publisher
# NOTE: There are 2 MMP articles, the one that is in the "MMP News" publication,
# and the test article we created above that is associated with the publisher.
select_sr_menu_option( sr, "delete" )
check_ask_dialog( ( "Delete this publisher?", "2 articles will also be deleted" ), "ok" )
query = dbconn.execute( "SELECT count(*) FROM article" )
assert query.scalar() == 0
# ---------------------------------------------------------------------
def test_publisher_article_dates( webdriver, flask_app, dbconn ):
"""Test "published" dates for publisher articles."""
# initialize
init_tests( webdriver, flask_app, dbconn, disable_constraints=False, fixtures="publisher-article-dates.json" )
# initialize
article_title, article_date = "test article", "1st January, 2000"
article_sr = None
def check_article_date( has_date ):
# check the article's publication date
def do_check():
elem = find_child( ".article_date", article_sr )
article_id = article_sr.get_attribute( "testing--article_id" )
row = get_article_row( dbconn, article_id, ["article_date"] )
if has_date:
return elem.text == article_date and row[0] == article_date
else:
return not elem and not row[0]
wait_for( 2, do_check )
# create an article associated with a publication
create_article( {
"title": article_title,
"publication": "ASL Journal",
"snippet": "This is a test article.",
"pageno": 42,
"authors": [ "+Joe Blow" ]
} )
article_sr = wait_for( 2, lambda: find_search_result( article_title ) )
check_article_date( False )
# change the article to be associated with a publisher
edit_article( article_sr, {
"publisher": "Avalon Hill"
}, expected_constraints = [
"The article date was not specified."
], accept_constraints=True )
check_article_date( False )
# give the article a published date
edit_article( article_sr, {
"article_date": article_date
} )
check_article_date( True )
# change the article back to the publication
edit_article( article_sr, {
"publication": "ASL Journal"
} )
check_article_date( False )
# ---------------------------------------------------------------------
def test_unicode( webdriver, flask_app, dbconn ):
"""Test Unicode content."""
@ -539,7 +694,10 @@ def test_article_ratings( webdriver, flask_app, dbconn ):
# ---------------------------------------------------------------------
def create_article( vals, toast_type="info", expected_error=None, expected_constraints=None, dlg=None ):
def create_article( vals, toast_type="info",
expected_error=None, expected_constraints=None, accept_constraints=False,
dlg=None
):
"""Create a new article."""
# initialize
@ -559,7 +717,9 @@ def create_article( vals, toast_type="info", expected_error=None, expected_const
return dlg # nb: the dialog is left on-screen
elif expected_constraints:
# we were expecting constraint warnings, confirm them
check_constraint_warnings( "Do you want to create this article?", expected_constraints, "cancel" )
check_constraint_warnings( "Do you want to create this article?",
expected_constraints, "ok" if accept_constraints else "cancel"
)
return dlg # nb: the dialog is left on-screen
else:
# we were expecting the create to work, confirm this
@ -571,7 +731,9 @@ def create_article( vals, toast_type="info", expected_error=None, expected_const
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def edit_article( sr, vals, toast_type="info", expected_error=None, expected_constraints=None ): #pylint: disable=too-many-branches
def edit_article( sr, vals, toast_type="info",
expected_error=None, expected_constraints=None, accept_constraints=False
): #pylint: disable=too-many-branches
"""Edit a article's details."""
# initialize
@ -593,7 +755,9 @@ def edit_article( sr, vals, toast_type="info", expected_error=None, expected_con
return dlg # nb: the dialog is left on-screen
elif expected_constraints:
# we were expecting constraint warnings, confirm them
check_constraint_warnings( "Do you want to update this article?", expected_constraints, "cancel" )
check_constraint_warnings( "Do you want to update this article?",
expected_constraints, "ok" if accept_constraints else "cancel"
)
return dlg # nb: the dialog is left on-screen
else:
# we were expecting the update to work, confirm this
@ -612,8 +776,14 @@ def _update_values( dlg, vals ):
change_image( dlg, val )
else:
remove_image( dlg )
elif key == "publication":
select = ReactSelect( find_child( ".row.publication .react-select", dlg ) )
elif key in ("publication", "publisher"):
row = find_child( ".row.{}".format( key ), dlg )
select = ReactSelect( find_child( ".react-select", row ) )
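# nb: if the target droplist is not currently visible, the dialog is in the other
# "parent mode" (publication vs. publisher), so we click the other row's parent-mode
# toggle and wait for our droplist to appear before selecting the value.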
if not select.select.is_displayed():
key2 = "publisher" if key == "publication" else "publication"
row2 = find_child( ".row.{}".format( key2 ), dlg )
find_child( "label.parent-mode", row2 ).click()
wait_for( 2, select.select.is_displayed )
select.select_by_name( val )
elif key in ["authors","scenarios","tags"]:
select = ReactSelect( find_child( ".row.{} .react-select".format(key), dlg ) )

@ -92,5 +92,6 @@ def _check_authors( flask_app, all_authors, expected ):
# check the authors in the database
url = flask_app.url_for( "get_authors" )
authors = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
authors = json.load( resp )
assert set( a["author_name"] for a in authors.values() ) == all_authors

@ -0,0 +1,235 @@
""" Test the database reports. """
import os
import itertools
import re
import pytest
from asl_articles.search import SEARCH_ALL
from asl_articles.tests.test_publishers import edit_publisher
from asl_articles.tests.test_publications import edit_publication
from asl_articles.tests.test_articles import edit_article
from asl_articles.tests.utils import init_tests, \
select_main_menu_option, select_sr_menu_option, check_ask_dialog, \
do_search, find_search_result, get_search_results, \
wait_for, wait_for_elem, find_child, find_children
from asl_articles.tests import pytest_options
# ---------------------------------------------------------------------
def test_db_report( webdriver, flask_app, dbconn ):
"""Test the database report."""
# initialize
init_tests( webdriver, flask_app, dbconn, fixtures="db-report.json" )
# check the initial report
row_counts, links, dupe_images, image_sizes = _get_db_report()
assert row_counts == {
"publishers": 2, "publications": 3, "articles": 5,
"authors": 3, "scenarios": 2
}
assert links == {
"publishers": [ 2, [] ],
"publications": [ 2, [] ],
"articles": [ 2, [] ],
}
assert dupe_images == []
assert image_sizes == {}
# add some images
do_search( SEARCH_ALL )
publ_sr = find_search_result( "Avalon Hill", wait=2 )
fname = os.path.join( os.path.split(__file__)[0], "fixtures/images/1.gif" )
edit_publisher( publ_sr, { "image": fname } )
results = get_search_results()
pub_sr = find_search_result( "ASL Journal (1)", results )
fname = os.path.join( os.path.split(__file__)[0], "fixtures/images/2.gif" )
edit_publication( pub_sr, { "image": fname } )
article_sr = find_search_result( "ASLJ article 1", results )
fname = os.path.join( os.path.split(__file__)[0], "fixtures/images/3.gif" )
edit_article( article_sr, { "image": fname } )
article_sr = find_search_result( "ASLJ article 2", results )
fname = os.path.join( os.path.split(__file__)[0], "fixtures/images/3.gif" )
edit_article( article_sr, { "image": fname } )
# check the updated report
row_counts, _, dupe_images, image_sizes = _get_db_report()
assert row_counts == {
"publishers": 2, "publisher_images": 1,
"publications": 3, "publication_images": 1,
"articles": 5, "article_images": 2,
"authors": 3, "scenarios": 2
}
assert dupe_images == [
[ "f0457ea742376e76ff276ce62c7a8540", "/images/article/100",
( "ASLJ article 1", "/article/100" ),
( "ASLJ article 2", "/article/101" ),
]
]
assert image_sizes == {
"publishers": [
( "Avalon Hill", "/publisher/1", "/images/publisher/1" ),
],
"publications": [
( "ASL Journal (1)", "/publication/10", "/images/publication/10" ),
],
"articles": [
( "ASLJ article 1", "/article/100", "/images/article/100" ),
( "ASLJ article 2", "/article/101", "/images/article/101" ),
]
}
# delete all the publishers (and associated objects), then check the updated report
do_search( SEARCH_ALL )
publ_sr = find_search_result( "Avalon Hill", wait=2 )
select_sr_menu_option( publ_sr, "delete" )
check_ask_dialog( "Delete this publisher?", "ok" )
results = get_search_results()
publ_sr = find_search_result( "Multiman Publishing", results )
select_sr_menu_option( publ_sr, "delete" )
check_ask_dialog( "Delete this publisher?", "ok" )
row_counts, links, dupe_images, image_sizes = _get_db_report()
assert row_counts == {
"publishers": 0, "publications": 0, "articles": 0,
"authors": 3, "scenarios": 2
}
assert links == {
"publishers": [ 0, [] ],
"publications": [ 0, [] ],
"articles": [ 0, [] ],
}
assert dupe_images == []
assert image_sizes == {}
# ---------------------------------------------------------------------
# NOTE: This test may not work if we are running against Docker containers, because:
# - external URL's are created that point to the back-end's $/ping endpoint.
# - the front-end container realizes that these URL's need to be checked by the backend,
# so it sends them to the $/db-report/check-link endpoint.
# - these URL's may not resolve because they were generated using gAppRef.makeFlaskUrl(),
# which will work if the front-end container is sending a request to the back-end
# container, but may not work from inside the back-end container, because the port number
# being used by Flask *inside* the container may not be the same as *outside* the container.
# The problem is generating a URL that can be used as an external URL that will work everywhere.
# We could specify it as a parameter, but that's more trouble than it's worth.
@pytest.mark.skipif( pytest_options.flask_url is not None, reason="Testing against a remote Flask server." )
def test_check_db_links( webdriver, flask_app, dbconn ):
"""Test checking links in the database."""
# initialize
init_tests( webdriver, flask_app, dbconn, docs="docs/", fixtures="db-report.json" )
# check the initial report
_, links, _, _ = _get_db_report()
assert links == {
"publishers": [ 2, [] ],
"publications": [ 2, [] ],
"articles": [ 2, [] ],
}
# check the links
btn = find_child( "#db-report button.check-links" )
btn.click()
status = find_child( "#db-report .db-links .status-msg" )
wait_for( 10, lambda: status.text == "Checked 6 links." )
# check the updated report
_, links, _, _ = _get_db_report()
assert links == {
"publishers": [ 2, [
( "Multiman Publishing", "/publisher/2", "HTTP 404: http://{FLASK}/unknown" )
] ],
"publications": [ 2, [] ],
"articles": [ 2, [
( "MMP publisher article", "/article/299", "HTTP 404: /unknown" )
] ],
}
# ---------------------------------------------------------------------
def _get_db_report(): #pylint: disable=too-many-locals
"""Generate the database report."""
# generate the report
select_main_menu_option( "db-report" )
wait_for_elem( 2, "#db-report .db-images" )
# unload the row counts
row_counts = {}
table = find_child( "#db-report .db-row-counts" )
for row in find_children( "tr", table ):
cells = find_children( "td", row )
mo = re.search( r"^(\d+)( \((\d+) images?\))?$", cells[1].text )
key = cells[0].text.lower()[:-1]
row_counts[ key ] = int( mo.group(1) )
if mo.group( 3 ):
row_counts[ key[:-1] + "_images" ] = int( mo.group(3) )
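# e.g. a row of ( "Articles:", "5 (2 images)" ) yields row_counts["articles"] = 5
# and row_counts["article_images"] = 2 (cf. the counts asserted in test_db_report above).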
# unload the links
links = {}
table = find_child( "#db-report .db-links" )
last_key = None
for row in find_children( "tr", table ):
cells = find_children( "td", row )
if len(cells) == 2:
last_key = cells[0].text.lower()[:-1]
links[ last_key ] = [ int( cells[1].text ) , [] ]
else:
mo = re.search( r"^(.+) \((.+)\)$", cells[0].text )
tags = find_children( "a", cells[0] )
url = _fixup_url( tags[0].get_attribute( "href" ) )
links[ last_key ][1].append( ( mo.group(1), url, mo.group(2) ) )
# unload duplicate images
dupe_images = []
for row in find_children( "#db-report .dupe-analysis .dupe-image" ):
elem = find_child( ".caption .hash", row )
mo = re.search( r"^\(md5:(.+)\)$", elem.text )
image_hash = mo.group(1)
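# e.g. a caption of "(md5:f0457ea742376e76ff276ce62c7a8540)" yields that value as the
# image hash (cf. the expected dupe_images in test_db_report above).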
image_url = _fixup_url( find_child( "img", row ).get_attribute( "src" ) )
parents = []
for entry in find_children( ".collapsible li", row ):
url = _fixup_url( find_child( "a", entry ).get_attribute( "href" ) )
parents.append( ( entry.text, url ) )
dupe_images.append( list( itertools.chain(
[ image_hash, image_url ], parents
) ) )
# unload the image sizes
tab_ctrl = find_child( "#db-report .db-images .react-tabs" )
image_sizes = {}
for tab in find_children( ".react-tabs__tab", tab_ctrl ):
key = tab.text.lower()
tab_id = tab.get_attribute( "id" )
tab.click()
sel = ".react-tabs__tab-panel[aria-labelledby='{}'].react-tabs__tab-panel--selected".format( tab_id )
tab_page = wait_for( 2,
lambda: find_child( sel, tab_ctrl ) #pylint: disable=cell-var-from-loop
)
parents = []
for row_no, row in enumerate( find_children( "table.image-sizes tr", tab_page ) ):
if row_no == 0:
continue
cells = find_children( "td", row )
image_url = _fixup_url( find_child( "img", cells[0] ).get_attribute( "src" ) )
url = _fixup_url( find_child( "a", cells[2] ).get_attribute( "href" ) )
parents.append( ( cells[2].text, url, image_url ) )
if parents:
image_sizes[ key ] = parents
else:
assert tab_page.text == "No images found."
return row_counts, links, dupe_images, image_sizes
# ---------------------------------------------------------------------
def _fixup_url( url ):
"""Fixup a URL to make it independent of its server."""
url = re.sub( r"^http://[^/]+", "", url )
pos = url.find( "?" )
if pos >= 0:
url = url[:pos]
return url

@ -8,7 +8,7 @@ from asl_articles.models import Scenario
from asl_articles.tests.utils import init_tests
sys.path.append( os.path.join( os.path.split(__file__)[0], "../../tools/" ) )
from import_roar_scenarios import import_roar_scenarios
from import_roar_scenarios import import_roar_scenarios #pylint: disable=import-error,wrong-import-order
# ---------------------------------------------------------------------
@ -18,7 +18,8 @@ def test_import_roar_scenarios( dbconn ):
# initialize
session = init_tests( None, None, dbconn )
roar_fname = os.path.join( os.path.split(__file__)[0], "fixtures/roar-scenarios.json" )
roar_data = json.load( open( roar_fname, "r" ) )
with open( roar_fname, "r", encoding="utf-8" ) as fp:
roar_data = json.load( fp )
# do the first import
_do_import( dbconn, session, roar_fname,

@ -246,8 +246,9 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
btn = find_child( ".row.image .remove-image", dlg )
assert btn.is_displayed()
# make sure the publication's image is correct
resp = urllib.request.urlopen( image_url ).read()
assert resp == open( expected, "rb" ).read()
with urllib.request.urlopen( image_url ) as resp:
with open( expected, "rb" ) as fp:
assert resp.read() == fp.read()
else:
# make sure there is no image
img = find_child( ".row.image img.image", dlg )
@ -258,7 +259,8 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
# make sure the publication's image is not available
url = flask_app.url_for( "get_image", image_type="publication", image_id=pub_id )
try:
resp = urllib.request.urlopen( url )
with urllib.request.urlopen( url ):
pass
assert False, "Should never get here!"
except urllib.error.HTTPError as ex:
assert ex.code == 404
@ -318,7 +320,8 @@ def test_parent_publisher( webdriver, flask_app, dbconn ):
# check that the parent publisher was updated in the database
pub_id = sr.get_attribute( "testing--pub_id" )
url = flask_app.url_for( "get_publication", pub_id=pub_id )
pub = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
pub = json.load( resp )
if expected_parent:
if pub["publ_id"] != expected_parent[0]:
return None
@ -672,8 +675,11 @@ def test_default_image( webdriver, flask_app, dbconn ):
f: os.path.join( os.path.split(__file__)[0], "fixtures/images/"+f )
for f in images
}
def read_image_data( fname ):
with open( fname, "rb" ) as fp:
return fp.read()
image_data = {
f: open( image_fnames[f], "rb" ).read()
f: read_image_data( image_fnames[f] )
for f in images
}
@ -690,8 +696,8 @@ def test_default_image( webdriver, flask_app, dbconn ):
if img:
assert expected
image_url = img.get_attribute( "src" )
resp = urllib.request.urlopen( image_url ).read()
assert resp == image_data[ expected ]
with urllib.request.urlopen( image_url ) as resp:
assert resp.read() == image_data[ expected ]
else:
assert not expected

@ -176,8 +176,9 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
btn = find_child( ".row.image .remove-image", dlg )
assert btn.is_displayed()
# make sure the publisher's image is correct
resp = urllib.request.urlopen( image_url ).read()
assert resp == open(expected,"rb").read()
with urllib.request.urlopen( image_url ) as resp:
with open( expected, "rb" ) as fp:
assert resp.read() == fp.read()
else:
# make sure there is no image
img = find_child( ".row.image img.image", dlg )
@ -188,7 +189,8 @@ def test_images( webdriver, flask_app, dbconn ): #pylint: disable=too-many-state
# make sure the publisher's image is not available
url = flask_app.url_for( "get_image", image_type="publisher", image_id=publ_id )
try:
resp = urllib.request.urlopen( url )
with urllib.request.urlopen( url ):
pass
assert False, "Should never get here!"
except urllib.error.HTTPError as ex:
assert ex.code == 404
@ -399,7 +401,7 @@ def test_publication_lists( webdriver, flask_app, dbconn ):
publ_sr = find_search_result( publ_name, results )
pubs = find_child( ".collapsible", publ_sr )
if pub_name:
# check that the publisher appears in the publisher's search result
# check that the publication appears in the publisher's search result
assert find_child( ".caption", pubs ).text == "Publications:"
pubs = find_children( "li", pubs )
assert len(pubs) == 1

@ -104,7 +104,8 @@ def _check_scenarios( flask_app, all_scenarios, expected ):
# check the scenarios in the database
url = flask_app.url_for( "get_scenarios" )
scenarios = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
scenarios = json.load( resp )
assert set( _make_scenario_display_name(a) for a in scenarios.values() ) == all_scenarios
def _make_scenario_display_name( scenario ):

@ -583,6 +583,22 @@ def test_make_fts_query_string():
# 'foo AND "xyz 123" AND bar'
# )
# test some quoted phrases that wrap special characters
do_test( 'Mr. Jones', '"Mr." AND Jones' )
do_test( '"Mr. Jones"', '"Mr. Jones"' )
do_test( 'foo "Mr. Jones" bar', 'foo AND "Mr. Jones" AND bar' )
# test nested quoted phrases
# NOTE: This is important since searching for an author wraps their name in double quotes,
# so we need to be able to handle a quoted phrase (e.g. a nickname) within the name.
do_test( 'Joseph "Joey" Blow', 'Joseph AND "Joey" AND Blow' )
do_test( 'Joseph "Joey Joe" Blow', 'Joseph AND "Joey Joe" AND Blow' )
do_test( 'Joseph ""Joey"" Blow', 'Joseph AND ""Joey"" AND Blow' )
# NOTE: This one doesn't work properly, but no-one is going to be doing this :-/
# do_test( 'Joseph ""Joey Joe"" Blow', 'Joseph AND ""Joey Joe"" AND Blow' )
do_test( '"Joseph ""Joey"" Blow"', '"Joseph ""Joey"" Blow"' )
do_test( '"Joseph ""Joey Joe"" Blow"', '"Joseph ""Joey Joe"" Blow"' )
# test some incorrectly quoted phrases
do_test( '"', '' )
do_test( ' " " " ', '' )

@ -1,11 +1,15 @@
""" Test the startup process. """
import pytest
import asl_articles.startup
from asl_articles.tests.utils import init_tests, wait_for, find_child, set_toast_marker, check_toast
from asl_articles.tests import pytest_options
# ---------------------------------------------------------------------
@pytest.mark.skipif( pytest_options.flask_url is not None, reason="Testing against a remote Flask server." )
def test_startup_messages( webdriver, flask_app, dbconn ):
"""Test startup messages."""

@ -145,10 +145,12 @@ def _check_tags( flask_app, expected ): #pylint: disable=too-many-locals
if sr.text.startswith( "publication" ):
pub_id = sr.get_attribute( "testing--pub_id" )
url = flask_app.url_for( "get_publication", pub_id=pub_id )
pub = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
pub = json.load( resp )
assert expected[ pub["pub_name"] ] == fixup_tags( pub["pub_tags"] )
elif sr.text.startswith( "article" ):
article_id = sr.get_attribute( "testing--article_id" )
url = flask_app.url_for( "get_article", article_id=article_id )
article = json.load( urllib.request.urlopen( url ) )
with urllib.request.urlopen( url ) as resp:
article = json.load( resp )
assert expected[ article["article_title"] ] == fixup_tags( article["article_tags"] )

@ -1,12 +1,12 @@
""" Helper utilities for the test suite. """
import os
import urllib.request
import json
import time
import itertools
import uuid
import base64
import logging
import sqlalchemy
import sqlalchemy.orm
@ -19,7 +19,6 @@ from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException, TimeoutException
from asl_articles import search
from asl_articles.utils import to_bool
import asl_articles.models
@ -35,6 +34,7 @@ def init_tests( webdriver, flask_app, dbconn, **kwargs ):
global _webdriver, _flask_app
_webdriver = webdriver
_flask_app = flask_app
fixtures_dir = os.path.join( os.path.dirname( __file__ ), "fixtures/" )
# initialize the database
fixtures = kwargs.pop( "fixtures", None )
@ -46,6 +46,20 @@ def init_tests( webdriver, flask_app, dbconn, **kwargs ):
assert fixtures is None
session = None
# re-initialize the search engine
if flask_app:
url = flask_app.url_for( "init_search_for_test" )
with urllib.request.urlopen( url ) as resp:
_ = resp.read()
# initialize the documents directory
dname = kwargs.pop( "docs", None )
if dname:
flask_app.config[ "EXTERNAL_DOCS_BASEDIR" ] = os.path.join( fixtures_dir, dname )
else:
if flask_app:
flask_app.config.pop( "EXTERNAL_DOCS_BASEDIR", None )
# never highlight search results unless explicitly enabled
if "no_sr_hilite" not in kwargs:
kwargs[ "no_sr_hilite" ] = 1
@ -57,7 +71,10 @@ def init_tests( webdriver, flask_app, dbconn, **kwargs ):
if to_bool( kwargs.pop( "disable_confirm_discard_changes", True ) ):
kwargs[ "disable_confirm_discard_changes" ] = 1
webdriver.get( webdriver.make_url( "", **kwargs ) )
wait_for_elem( 2, "#search-form" )
# FUDGE! Since we switched from running the test Flask server with app.run() to make_server().serve_forever(),
# stopping and starting the server seems to be much quicker, but refreshing the page can be slower when
# running multiple tests :shrug:
wait_for_elem( 10, "#search-form" )
return session
@ -70,7 +87,8 @@ def load_fixtures( session, fname ):
if fname:
dname = os.path.join( os.path.split(__file__)[0], "fixtures/" )
fname = os.path.join( dname, fname )
data = json.load( open( fname, "r" ) )
with open( fname, "r", encoding="utf-8" ) as fp:
data = json.load( fp )
else:
data = {}
@ -86,9 +104,6 @@ def load_fixtures( session, fname ):
session.bulk_insert_mappings( model, data[table_name] )
session.commit()
# rebuild the search index
search.init_search( session, logging.getLogger("search") )
# ---------------------------------------------------------------------
def do_search( query ):
@ -123,13 +138,15 @@ def get_search_result_names( results=None ):
results = get_search_results()
return [ find_child( ".name", r ).text for r in results ]
def find_search_result( name, results=None ):
def find_search_result( name, results=None, wait=0 ):
"""Find a search result."""
if not results:
results = get_search_results()
results = [ r for r in results if find_child( ".name", r ).text == name ]
assert len(results) == 1
return results[0]
def find_sr():
matches = [
r for r in results or get_search_results()
if find_child( ".name", r ).text == name
]
return matches[0] if len(matches) == 1 else None
return wait_for( wait, find_sr )
def check_search_result( sr, check, expected ):
"""Check a search result in the UI."""
@ -306,21 +323,21 @@ def wait_for_not_elem( timeout, sel ):
def find_child( sel, parent=None ):
"""Find a child element."""
try:
return (parent if parent else _webdriver).find_element_by_css_selector( sel )
return (parent if parent else _webdriver).find_element( By.CSS_SELECTOR, sel )
except NoSuchElementException:
return None
def find_children( sel, parent=None ):
"""Find child elements."""
try:
return (parent if parent else _webdriver).find_elements_by_css_selector( sel )
return (parent if parent else _webdriver).find_elements( By.CSS_SELECTOR, sel )
except NoSuchElementException:
return None
def find_parent_by_class( elem, class_name ):
"""Find a parent element with the specified class."""
while True:
elem = elem.find_element_by_xpath( ".." )
elem = elem.find_element( By.XPATH, ".." )
if not elem:
return None
classes = set( elem.get_attribute( "class" ).split() )
@ -485,7 +502,8 @@ def call_with_retry( func, expected_exceptions, max_retries=10, delay=0.1 ):
def change_image( dlg, fname ):
"""Click on an image to change it."""
# NOTE: This is a bit tricky since we started overlaying the image with the "remove image" icon :-/
data = base64.b64encode( open( fname, "rb" ).read() )
with open( fname, "rb" ) as fp:
data = base64.b64encode( fp.read() )
data = "{}|{}".format( os.path.split(fname)[1], data.decode("ascii") )
elem = find_child( ".row.image img.image", dlg )
_webdriver.execute_script( "arguments[0].scrollTo( 0, 0 )", find_child( ".MuiDialogContent-root", dlg ) )

@ -39,19 +39,17 @@ def get_request_args( vals, arg_names, log=None ):
def clean_request_args( vals, fields, warnings, logger ):
"""Clean incoming data."""
cleaned = {}
for f in fields:
if f.endswith( "_url" ):
continue # nb: don't clean URL's
f = _parse_arg_name( f )[ 0 ]
if isinstance( vals[f], str ):
val2 = clean_html( vals[f] )
if val2 != vals[f]:
vals[f] = val2
cleaned[f] = val2
logger.debug( "Cleaned HTML: %s => %s", f, val2 )
warnings.append( "Some values had HTML cleaned up." )
return cleaned
if not isinstance( vals[f], str ):
continue
val2 = clean_html( vals[f] )
if val2 != vals[f]:
vals[f] = val2
logger.debug( "Cleaned HTML: %s => %s", f, val2 )
warnings.append( "Some values had HTML cleaned up." )
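# nb: unlike the previous version, cleaned values are written back into "vals" in place,
# and no separate dict of cleaned values is returned.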
def _parse_arg_name( arg_name ):
"""Parse a request argument name."""
@ -59,15 +57,15 @@ def _parse_arg_name( arg_name ):
return ( arg_name[1:], True ) # required argument
return ( arg_name, False ) # optional argument
def make_ok_response( extras=None, updated=None, warnings=None ):
def make_ok_response( extras=None, record=None, warnings=None ):
"""Generate a Flask 'success' response."""
resp = { "status": "OK" }
if extras:
resp.update( extras )
if updated:
resp[ "updated" ] = updated
if record:
resp["record"] = record
if warnings:
resp[ "warnings" ] = list( set( warnings ) ) # nb: remove duplicate messages
resp["warnings"] = list( set( warnings ) ) # nb: remove duplicate messages
return jsonify( resp )
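A hypothetical call site (the handler and variable names here are illustrative, not from this codebase) showing how a save handler might return the new "record" payload:

def save_article( article_vals, warnings ):
    """Hypothetical save handler showing the new-style response (illustrative only)."""
    return make_ok_response( record=article_vals, warnings=warnings )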
# ---------------------------------------------------------------------
@ -85,7 +83,9 @@ def clean_html( val, allow_tags=None, safe_attrs=None ): #pylint: disable=too-ma
# fixup smart quotes and dashes
def replace_chars( val, ch, targets ):
for t in targets:
if isinstance( t, typing.Pattern ):
# FUDGE! pylint is incorrectly flagging isinstance() when checking against typing.XXX.
# https://github.com/PyCQA/pylint/issues/3537
if isinstance( t, typing.Pattern ): #pylint: disable=isinstance-second-argument-not-valid-type
val = t.sub( ch, val )
else:
assert isinstance( t, str )

@ -9,6 +9,7 @@ from urllib.error import URLError
import pytest
import flask
import werkzeug
import sqlalchemy
from flask_sqlalchemy import SQLAlchemy
import alembic
@ -17,10 +18,12 @@ import alembic.config
import asl_articles
from asl_articles import app
from asl_articles.utils import to_bool
from asl_articles.tests import utils
from asl_articles import tests as asl_articles_tests
_FLASK_SERVER_URL = ( "localhost", 5001 ) # nb: for the test Flask server we spin up
_pytest_options = None
# ---------------------------------------------------------------------
def pytest_addoption( parser ):
@ -60,6 +63,15 @@ def pytest_addoption( parser ):
help="Database connection string."
)
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def pytest_configure( config ):
"""Called after command-line options have been parsed."""
global _pytest_options
_pytest_options = config.option
# notify the test suite about the pytest options
asl_articles_tests.pytest_options = _pytest_options
# ---------------------------------------------------------------------
@pytest.fixture( scope="session" )
@ -88,25 +100,28 @@ def flask_app( request ):
# the *configured* database connection string (since it will fail to start if there's a problem).
asl_articles._disable_db_startup = True #pylint: disable=protected-access
# yup - make it so
server = werkzeug.serving.make_server(
_FLASK_SERVER_URL[0], _FLASK_SERVER_URL[1],
app, threaded=True
)
thread = threading.Thread(
target = lambda: app.run(
host=_FLASK_SERVER_URL[0], port=_FLASK_SERVER_URL[1],
use_reloader=False
)
target = server.serve_forever,
daemon=True
)
thread.start()
# wait for the server to start up
def is_ready():
"""Try to connect to the Flask server."""
try:
resp = urllib.request.urlopen( app.url_for( "ping" ) ).read()
assert resp == b"pong"
url = app.url_for( "ping" )
with urllib.request.urlopen( url ) as resp:
assert resp.read() == b"pong"
return True
except URLError:
return False
except Exception as ex: #pylint: disable=broad-except
assert False, "Unexpected exception: {}".format( ex )
utils.wait_for( 5, is_ready )
asl_articles_tests.utils.wait_for( 5, is_ready )
# return the server to the caller
try:
@ -114,7 +129,7 @@ def flask_app( request ):
finally:
# shutdown the local Flask server
if not flask_url:
urllib.request.urlopen( app.url_for("shutdown") ).read()
server.shutdown()
thread.join()
# ---------------------------------------------------------------------
@ -131,14 +146,12 @@ def webdriver( request ):
options = wb.FirefoxOptions()
if headless:
options.add_argument( "--headless" ) #pylint: disable=no-member
driver = wb.Firefox(
options = options,
service_log_path = os.path.join( tempfile.gettempdir(), "geckodriver.log" )
)
driver = wb.Firefox( options=options )
elif driver == "chrome":
options = wb.ChromeOptions()
if headless:
options.add_argument( "--headless" ) #pylint: disable=no-member
options.add_argument( "--disable-gpu" )
driver = wb.Chrome( options=options )
else:
raise RuntimeError( "Unknown webdriver: {}".format( driver ) )

Binary file not shown. (after: 122 KiB)

Binary file not shown. (after: 124 KiB)

Binary file not shown. (after: 116 KiB)

Binary file not shown. (after: 109 KiB)

@ -1,5 +1,5 @@
pytest==5.2.2
selenium==3.141.0
pylint==2.4.3
pylint-flask-sqlalchemy==0.1.0
pytest-pylint==0.14.1
pytest==7.1.2
selenium==4.2.0
pylint==2.14.1
pylint-flask-sqlalchemy==0.2.0
pytest-pylint==0.18.0

@ -1,8 +1,9 @@
# python 3.7.5
# python 3.10.4
flask==1.1.1
flask-sqlalchemy==2.4.1
psycopg2-binary==2.8.4
alembic==1.3.1
pyyaml==5.1.2
lxml==4.4.2
flask==2.1.2
flask-sqlalchemy==2.5.1
psycopg2-binary==2.9.3
alembic==1.8.0
pyyaml==6.0
lxml==4.9.0
waitress==2.1.2

@ -38,6 +38,8 @@ export AUTHOR_ALIASES=
export ENABLE_TESTS=
NO_BUILD=
export BUILD_NETWORK=
export DOCKER_UID=$(id -u)
export DOCKER_GID=$(id -g)
# parse the command-line arguments
if [ $# -eq 0 ]; then

@ -28,6 +28,9 @@ for fspec in ["config","static","templates"] :
# initialize
from asl_articles import app
flask_host = app.config.get( "FLASK_HOST", "localhost" )
flask_port = app.config.get( "FLASK_PORT_NO", 5000 )
flask_debug = app.config.get( "FLASK_DEBUG", False )
# FUDGE! Startup can take some time (e.g. because we have to build the search index over a large database),
# and since we do that on first request, it's annoying to have started the server up, if we don't do that
@ -37,25 +40,35 @@ from asl_articles import app
def _force_init():
time.sleep( 5 )
try:
# figoure out the URL for the request we're going to make
# figure out the URL for the request we're going to make
with app.test_request_context() as req:
url = url_for( "ping" )
host = req.request.host_url
# FUDGE! There doesn't seem to be a way to get the port number Flask is listening on :-/
port = app.config.get( "FLASK_PORT_NO", 5000 )
if host.endswith( "/" ):
host = host[:-1]
url = "{}:{}{}".format( host, port, url )
url = "{}:{}{}".format( host, flask_port, url )
# make the request
_ = urllib.request.urlopen( url ).read()
with urllib.request.urlopen( url ) as resp:
_ = resp.read()
except Exception as ex: #pylint: disable=broad-except
print( "WARNING: Startup ping failed: {}".format( ex ) )
threading.Thread( target=_force_init ).start()
# run the server
app.run(
host = app.config.get( "FLASK_HOST", "localhost" ),
port = app.config.get( "FLASK_PORT_NO" ),
debug = app.config.get( "FLASK_DEBUG", False ),
extra_files = extra_files
)
if flask_debug:
# NOTE: It's useful to run the webapp using the Flask development server, since it will
# automatically reload itself when the source files change.
app.run(
host=flask_host, port=flask_port,
debug=flask_debug,
extra_files=extra_files
)
else:
import waitress
# FUDGE! Browsers tend to send a max. of 6-8 concurrent requests per server, so we increase
# the number of worker threads to avoid task queue warnings :-/
nthreads = app.config.get( "WAITRESS_THREADS", 8 )
waitress.serve( app,
host=flask_host, port=flask_port,
threads=nthreads
)

@ -16,20 +16,22 @@ def parse_requirements( fname ):
"""Parse a requirements file."""
lines = []
fname = os.path.join( os.path.split(__file__)[0], fname )
for line in open(fname,"r"):
line = line.strip()
if line == "" or line.startswith("#"):
continue
lines.append( line )
with open( fname, "r", encoding="utf-8" ) as fp:
for line in fp:
line = line.strip()
if line == "" or line.startswith("#"):
continue
lines.append( line )
return lines
# ---------------------------------------------------------------------
setup(
name = "asl-articles",
version = "0.1", # nb: also update constants.py
version = "1.1", # nb: also update constants.py
description = "Searchable index of ASL articles.",
license = "AGPLv3",
url = "https://code.pacman-ghost.com/public/asl-articles",
packages = find_packages(),
install_requires = parse_requirements( "requirements.txt" ),
extras_require = {

@ -1,78 +0,0 @@
#!/usr/bin/env python3
""" Check the database for broken external document links. """
import sys
import os
import urllib.request
import sqlalchemy
from sqlalchemy import text
# ---------------------------------------------------------------------
def main():
"""Check the database for broken external document links."""
# parse the command line arguments
if len(sys.argv) != 3:
print( "Usage: {} <dbconn> <url-base>".format( os.path.split(__file__)[0] ) )
print( " dbconn: database connection string e.g. \"sqlite:///~/asl-articles.db\"" )
print( " url-base: Base URL for external documents e.g. http://localhost:3000/api/docs" )
sys.exit( 0 )
dbconn = sys.argv[1]
url_base = sys.argv[2]
# connect to the database
engine = sqlalchemy.create_engine( dbconn )
conn = engine.connect()
def pub_name( row ):
name = row["pub_name"]
if row["pub_edition"]:
name += " ({})".format( row["pub_edition"] )
return name
# look for broken links
find_broken_links( conn, url_base, "publisher", [
"publ_id", "publ_url", "publ_name"
] )
find_broken_links( conn, url_base, "publication", [
"pub_id", "pub_url", pub_name
] )
find_broken_links( conn, url_base, "article", [
"article_id", "article_url", "article_title"
] )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
def find_broken_links( conn, url_base, table_name, col_names ):
"""Look for broken links."""
def check_url( url, row_id, name ):
if not url.startswith( ( "http://", "https://" ) ):
url = os.path.join( url_base, url )
url = url.replace( " ", "%20" ).replace( "#", "%23" )
#print( "Checking {}: {}".format( name, url ), file=sys.stderr )
try:
buf = urllib.request.urlopen( url ).read()
except urllib.error.HTTPError:
buf = ""
if not buf:
print( "Broken link for \"{}\" (id={}): {}".format( name, row_id, url ))
# check each row in the specified table
query = conn.execute( text( "SELECT * FROM {}".format( table_name ) ) )
for row in query:
url = row[ col_names[1] ]
if not url:
continue
row_id = row[ col_names[0] ]
name = col_names[2]( row ) if callable( col_names[2] ) else row[ col_names[2] ]
check_url( url, row_id, name )
# ---------------------------------------------------------------------
if __name__ == "__main__":
main()

@ -1,92 +0,0 @@
#!/usr/bin/env python3
""" Geenrate a report on images in the database. """
import sys
import os
import hashlib
from collections import defaultdict
import sqlalchemy
from sqlalchemy import text
# ---------------------------------------------------------------------
def main():
"""Report on images in the database."""
# parse the command line arguments
if len(sys.argv) != 2:
print( "Usage: {} <dbconn> <url-base>".format( os.path.split(__file__)[0] ) )
print( " dbconn: database connection string e.g. \"sqlite:///~/asl-articles.db\"" )
sys.exit( 0 )
dbconn = sys.argv[1]
# connect to the database
engine = sqlalchemy.create_engine( dbconn )
conn = engine.connect()
# initialize
image_hashes = defaultdict( list )
def find_images( conn, table_name, col_names, get_name ):
# find rows in the specified table that have images
sql = "SELECT {cols}, image_data" \
" FROM {table}_image LEFT JOIN {table}" \
" ON {table}_image.{id_col} = {table}.{id_col}".format(
cols = ",".join( "{}.{}".format( table_name, c ) for c in col_names ),
table=table_name, id_col=col_names[0]
)
rows = [ dict(row) for row in conn.execute( text( sql ) ) ]
# save the image hashes
for row in rows:
image_hash = hashlib.md5( row["image_data"] ).hexdigest()
name = get_name( row )
image_hashes[ image_hash ].append( name )
# output the results
rows = [
[ len(row["image_data"]), row[col_names[0]], get_name(row) ]
for row in rows
]
rows.sort( key = lambda r: r[0], reverse=True )
print( "=== {}s ({}) ===".format( table_name, len(rows) ) )
print()
print( "{:>6} {:>5}".format( "size", "ID" ) )
for row in rows:
print( "{:-6.1f} | {:5} | {}".format( row[0]/1024, row[1], row[2] ) )
print()
def get_pub_name( row ):
name = row["pub_name"]
if row["pub_edition"]:
name += " ({})".format( row["pub_edition"] )
return name
# look for images in each table
find_images( conn, "publisher",
[ "publ_id", "publ_name" ],
lambda r: r["publ_name"]
)
find_images( conn, "publication",
[ "pub_id", "pub_name", "pub_edition" ],
get_pub_name
)
find_images( conn, "article",
[ "article_id", "article_title" ],
lambda r: r["article_title"]
)
# report on any duplicate images
for image_hash,images in image_hashes.items():
if len(images) == 1:
continue
print( "Found duplicate images ({}):".format( image_hash ) )
for image in images:
print( "- {}".format( image ) )
# ---------------------------------------------------------------------
if __name__ == "__main__":
main()

@ -62,7 +62,8 @@ def import_roar_scenarios( dbconn, roar_data, progress=None ):
# load the ROAR scenarios
if isinstance( roar_data, str ):
log_progress( "Loading scenarios: {}", roar_data )
roar_data = json.load( open( roar_data, "r" ) )
with open( roar_data, "r", encoding="utf-8" ) as fp:
roar_data = json.load( fp )
else:
assert isinstance( roar_data, dict )
log_progress( "- Last updated: {}".format( roar_data.get("_lastUpdated_","(unknown)") ) )

@ -1,10 +1,10 @@
# NOTE: Multi-stage builds require Docker v17.05 or later.
# create the build environment
FROM node:8.16.2-alpine AS build
FROM node:18-alpine3.15 AS build
WORKDIR /app
ENV PATH /app/node_modules/.bin:$PATH
RUN npm install react-scripts@3.2.0 -g
RUN npm install react-scripts@5.0.1 --location=global
COPY package.json /app/package.json
RUN npm install
COPY . /app/
@ -14,7 +14,7 @@ RUN if [ -n "$ENABLE_TESTS" ]; then echo -e "\nREACT_APP_TEST_MODE=1" >>/app/.en
RUN npm run build
# create the final target image
FROM nginx:1.17.5-alpine
FROM nginx:1.21.6-alpine
COPY docker/nginx-default.conf /etc/nginx/conf.d/default.conf
COPY --from=build /app/build /usr/share/nginx/html
EXPOSE 80

web/package-lock.json (generated, 37267 changed lines): diff suppressed because it is too large.

@ -6,18 +6,21 @@
"@material-ui/core": "^4.7.0",
"@reach/menu-button": "^0.7.2",
"axios": "^0.19.0",
"babel-runtime": "^6.26.0",
"http-proxy-middleware": "^0.20.0",
"jquery": "^3.4.1",
"lodash.clone": "^4.5.0",
"lodash.clonedeep": "^4.5.0",
"lodash.isequal": "^4.5.0",
"query-string": "^7.1.1",
"react": "^16.11.0",
"react-dom": "^16.11.0",
"react-drag-listview": "^0.1.6",
"react-draggable": "^4.1.0",
"react-router-dom": "^5.1.2",
"react-scripts": "3.2.0",
"react-scripts": "5.0.1",
"react-select": "^3.0.8",
"react-tabs": "^3.2.3",
"react-toastify": "^5.4.1"
},
"scripts": {

Binary file not shown. (before: 105 KiB, after: 9.4 KiB)

Binary file not shown. (after: 1.7 KiB)

Binary file not shown. (before: 6.8 KiB, after: 3.0 KiB)

Binary file not shown. (before: 3.4 KiB)

Binary file not shown. (before: 4.2 KiB)

Binary file not shown. (before: 11 KiB)

Binary file not shown. (before: 3.0 KiB)

Binary file not shown. (before: 9.5 KiB)

Binary file not shown. (before: 15 KiB)

Binary file not shown. (before: 14 KiB)

Binary file not shown. (before: 7.7 KiB)

Binary file not shown. (before: 7.7 KiB)

Binary file not shown. (after: 584 B)

Binary file not shown. (after: 1.2 KiB)

Binary file not shown. (after: 2.1 KiB)

Binary file not shown. (after: 2.5 KiB)

Binary file not shown. (after: 1.8 KiB)

Binary file not shown. (after: 2.9 KiB)

Binary file not shown. (after: 2.3 KiB)

Binary file not shown. (after: 8.1 KiB)

Binary file not shown. (before: 2.4 KiB, after: 1.3 KiB)

Binary file not shown. (after: 50 KiB)

@ -1,5 +1,6 @@
#header { position: absolute ; top: 5px ; left: 5px ; right: 5px ; height: 65px ; }
#search-results { position: absolute ; top: 95px ; bottom: 5px ; left: 5px ; right: 5px ; overflow: auto ; }
#search-results, #db-report { position: absolute ; top: 95px ; bottom: 5px ; left: 5px ; right: 5px ; overflow: auto ; }
#db-report { z-index: 10 ; }
#header { border: 1px solid #ccc ; background: #eee ; border-top-right-radius: 10px ; padding: 5px 5px 10px 5px ; }
#header .logo { float: left ; height: 70px ; }
@ -7,15 +8,24 @@
#menu-button--app { position: absolute ; top: 10px ; right: 10px ;
width: 30px ; height: 30px ;
background: url("/images/main-menu.png") transparent no-repeat ; background-size: 100% ; border: none ;
background: url("/public/images/main-menu.png") transparent no-repeat ; background-size: 100% ; border: none ;
cursor: pointer ;
}
[data-reach-menu] { z-index: 999 ; }
[data-reach-menu-list] { padding: 5px ; }
[data-reach-menu-item] { display: flex ; height: 1.25em ; padding: 5px ; }
[data-reach-menu-item][data-selected] { background: #90caf9 ; color: black ; }
[data-reach-menu-item][data-selected] { background: #b0e0ff ; color: black ; }
[data-reach-menu-list] .divider { height: 0 ; margin: 2px 0 ; border-top: 1px solid #aaa ; }
[data-reach-menu-list] img { height: 100% ; margin-top: -0.1em ; margin-right: 0.5em ; }
#watermark {
position: fixed ; right: 0 ; bottom: 0 ;
height: 30% ;
opacity: 0 ; z-index: -999 ;
transition: opacity 5s ;
}
#watermark img { height: 100% ; }
.MuiDialogTitle-root { padding: 10px 16px 6px 16px !important ; }
.MuiDialogContent-root>div { margin-bottom: 1em ; }
.MuiDialogContent-root p { margin-top: 0.5em ; }
@ -39,4 +49,8 @@
img#loading { position: fixed ; top: 50% ; left: 50% ; margin-top: -16px ; margin-left: -16px ; }
.collapsible .caption img { height: 0.75em ; margin-left: 0.25em ; }
.collapsible .count { font-size: 80% ; font-style: italic ; color: #666 ; }
.collapsible .more { font-size: 80% ; font-style: italic ; color: #666 ; cursor: pointer ; }
.monospace { margin-top: 0.5em ; font-family: monospace ; font-style: italic ; font-size: 80% ; }

@ -10,10 +10,12 @@ import { SearchResults } from "./SearchResults" ;
import { PublisherSearchResult } from "./PublisherSearchResult" ;
import { PublicationSearchResult } from "./PublicationSearchResult" ;
import { ArticleSearchResult } from "./ArticleSearchResult" ;
import { DbReport } from "./DbReport";
import ModalForm from "./ModalForm";
import AskDialog from "./AskDialog" ;
import { DataCache } from "./DataCache" ;
import { PreviewableImage } from "./PreviewableImage" ;
import { makeSmartBulletList } from "./utils.js" ;
import { makeSmartBulletList, isLink } from "./utils.js" ;
import { APP_NAME } from "./constants.js" ;
import "./App.css" ;
@ -34,13 +36,17 @@ export class App extends React.Component
this.state = {
searchResults: [],
searchSeqNo: 0,
showDbReport: false,
modalForm: null,
askDialog: null,
startupTasks: [ "caches.publishers", "caches.publications", "caches.authors", "caches.scenarios", "caches.tags" ],
startupTasks: [ "dummy" ], // FUDGE! We need at least one startup task.
} ;
gAppRef = this ;
this.setWindowTitle( null ) ;
// initialize the data cache
this.dataCache = new DataCache() ;
// initialize
this.args = queryString.parse( window.location.search ) ;
this._storeMsgs = this.isTestMode() && this.args.store_msgs ;
@ -88,24 +94,28 @@ export class App extends React.Component
const menu = ( <Menu id="app">
<MenuButton />
<MenuList>
<MenuItem id="menu-show-publishers" onSelect={ () => this._showPublishers() } >
<img src="/images/icons/publisher.png" alt="Show publishers." /> Show publishers
<MenuItem id="menu-show-publishers" onSelect={ () => this._showPublishers(true) } >
<img src="/images/menu/publishers.png" alt="Show publishers." /> Show publishers
</MenuItem>
<MenuItem id="menu-search-technique" onSelect={ () => this._showTechniqueArticles() } >
<img src="/images/icons/technique.png" alt="Show technique articles." /> Show technique
<MenuItem id="menu-search-technique" onSelect={ () => this._showTechniqueArticles(true) } >
<img src="/images/menu/technique.png" alt="Show technique articles." /> Show technique
</MenuItem>
<MenuItem id="menu-search-tips" onSelect={ () => this._showTipsArticles() } >
<img src="/images/icons/tips.png" alt="Show tip articles." /> Show tips
<MenuItem id="menu-search-tips" onSelect={ () => this._showTipsArticles(true) } >
<img src="/images/menu/tips.png" alt="Show tip articles." /> Show tips
</MenuItem>
<div className="divider" />
<MenuItem id="menu-new-publisher" onSelect={ () => PublisherSearchResult.onNewPublisher( this._onNewPublisher.bind(this) ) } >
<img src="/images/icons/publisher.png" alt="New publisher." /> New publisher
<MenuItem id="menu-new-publisher" onSelect={PublisherSearchResult.onNewPublisher} >
<img src="/images/menu/publisher.png" alt="New publisher." /> New publisher
</MenuItem>
<MenuItem id="menu-new-publication" onSelect={PublicationSearchResult.onNewPublication} >
<img src="/images/menu/publication.png" alt="New publication." /> New publication
</MenuItem>
<MenuItem id="menu-new-publication" onSelect={ () => PublicationSearchResult.onNewPublication( this._onNewPublication.bind(this) ) } >
<img src="/images/icons/publication.png" alt="New publication." /> New publication
<MenuItem id="menu-new-article" onSelect={ArticleSearchResult.onNewArticle} >
<img src="/images/menu/article.png" alt="New article." /> New article
</MenuItem>
<MenuItem id="menu-new-article" onSelect={ () => ArticleSearchResult.onNewArticle( this._onNewArticle.bind(this) ) } >
<img src="/images/icons/article.png" alt="New article." /> New article
<div className="divider" />
<MenuItem id="menu-db-report" onSelect={ () => this._showDbReport(true) } >
<img src="/images/menu/db-report.png" alt="Database report." /> DB report
</MenuItem>
</MenuList>
</Menu> ) ;
@ -119,11 +129,13 @@ export class App extends React.Component
<SearchForm onSearch={this.onSearch.bind(this)} ref={this._searchFormRef} />
</div>
{menu}
<SearchResults ref={this._searchResultsRef}
seqNo = {this.state.searchSeqNo}
searchResults = {this.state.searchResults}
type = {this.props.type}
/>
{ this.state.showDbReport
? <DbReport />
: <SearchResults ref={this._searchResultsRef}
seqNo = {this.state.searchSeqNo}
searchResults = {this.state.searchResults}
/>
}
</div> ) ;
}
return ( <div> {content}
@ -145,6 +157,7 @@ export class App extends React.Component
{ this._fakeUploads && <div>
<textarea id="_stored_msg-upload_" ref="_stored_msg-upload_" defaultValue="" hidden={true} />
</div> }
<div id="watermark" ref="watermark"> <img src="/images/watermark.png" alt="watermark" /> </div>
</div> ) ;
}
@ -157,30 +170,16 @@ export class App extends React.Component
// check if the server started up OK
let on_startup_ok = () => {
// the backend server started up OK, continue our startup process
// initialize the caches
// NOTE: We maintain caches of key objects, so that we can quickly populate droplists. The backend server returns
// updated lists after any operation that could change them (create/update/delete), which is simpler and less error-prone
// than trying to manually keep our caches in sync. It's less efficient, but it won't happen too often, there won't be
// too many entries, and the database server is local.
this.caches = {} ;
[ "publishers", "publications", "authors", "scenarios", "tags" ].forEach( type => {
axios.get( this.makeFlaskUrl( "/" + type ) )
.then( resp => {
this.caches[ type ] = resp.data ;
this._onStartupTask( "caches." + type ) ;
} )
.catch( err => {
this.showErrorToast( <div> Couldn't load the {type}: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
this._onStartupTask( "dummy" ) ;
}
let on_startup_failure = () => {
// the backend server had problems during startup; we hide the spinner
// and leave the error message(s) on-screen.
document.getElementById( "loading" ).style.display = "none" ;
}
axios.get( this.makeFlaskUrl( "/startup-messages" ) )
.then( resp => {
axios.get(
this.makeFlaskUrl( "/startup-messages" )
).then( resp => {
// show any messages logged by the backend server as it started up
[ "info", "warning", "error" ].forEach( msgType => {
if ( resp.data[ msgType ] ) {
@@ -200,8 +199,7 @@ export class App extends React.Component
on_startup_failure() ;
else
on_startup_ok() ;
} )
.catch( err => {
} ).catch( err => {
let errorMsg = err.toString() ;
if ( errorMsg.indexOf( "502" ) !== -1 || errorMsg.indexOf( "504" ) !== -1 )
this.showErrorToast( <div> Couldn't connect to the backend Flask server. </div> ) ;
@@ -248,18 +246,16 @@ export class App extends React.Component
_doSearch( url, args, onDone ) {
// do the search
this.setWindowTitle( null ) ;
this.setState( { searchResults: "(loading)" } ) ;
this.setState( { searchResults: "(loading)", showDbReport: false } ) ;
args.no_hilite = this._disableSearchResultHighlighting ;
axios.post(
this.makeFlaskUrl( url ), args
)
.then( resp => {
).then( resp => {
ReactDOM.findDOMNode( this._searchResultsRef.current ).scrollTo( 0, 0 ) ;
this.setState( { searchResults: resp.data, searchSeqNo: this.state.searchSeqNo+1 } ) ;
if ( onDone )
onDone() ;
} )
.catch( err => {
} ).catch( err => {
this.showErrorResponse( "The search query failed", err ) ;
this.setState( { searchResults: null, searchSeqNo: this.state.searchSeqNo+1 } ) ;
} ) ;
@@ -272,53 +268,77 @@ export class App extends React.Component
args = {} ;
this._doSearch( url, args, onDone ) ;
}
_showPublishers() {
this.runSpecialSearch( "/search/publishers", null,
() => { this.setWindowTitle( "All publishers" ) }
)
}
_showTechniqueArticles() {
this.runSpecialSearch( "/search/tag/technique", {randomize:1},
() => { this.setWindowTitle( "Technique" ) }
)
}
_showTipsArticles() {
this.runSpecialSearch( "/search/tag/tips", {randomize:1},
() => { this.setWindowTitle( "Tips" ) }
)
}
_onNewPublisher( publ_id, vals ) { this._addNewSearchResult( vals, "publisher", "publ_id", publ_id ) ; }
_onNewPublication( pub_id, vals ) { this._addNewSearchResult( vals, "publication", "pub_id", pub_id ) ; }
_onNewArticle( article_id, vals ) { this._addNewSearchResult( vals, "article", "article_id", article_id ) ; }
_addNewSearchResult( vals, srType, idName, idVal ) {
// add the new search result to the start of the search results
// NOTE: This isn't really the right thing to do, since the new object might not actually be
// a result for the current search, but it's nice to give the user some visual feedback.
vals.type = srType ;
vals[ idName ] = idVal ;
let newSearchResults = [ vals ] ;
_showPublishers( pushState ) {
this.runSpecialSearch( "/search/publishers", null, () => {
this.setWindowTitle( "All publishers" ) ;
if ( pushState )
window.history.pushState( null, document.title, "/publishers"+window.location.search ) ;
} )
}
_showTechniqueArticles( pushState ) {
this.runSpecialSearch( "/search/tag/technique", {randomize:1}, () => {
this.setWindowTitle( "Technique" ) ;
if ( pushState )
window.history.pushState( null, document.title, "/technique"+window.location.search ) ;
} )
}
_showTipsArticles( pushState ) {
this.runSpecialSearch( "/search/tag/tips", {randomize:1}, () => {
this.setWindowTitle( "Tips" ) ;
if ( pushState )
window.history.pushState( null, document.title, "/tips"+window.location.search ) ;
} )
}
_showDbReport( pushState ) {
this.setState( { showDbReport: true, searchResults: [] } ) ;
this._searchFormRef.current.setState( { queryString: "" } ) ;
this.setWindowTitle( "Database report" ) ;
if ( pushState )
window.history.pushState( null, document.title, "/report"+window.location.search ) ;
}
prependSearchResult( sr ) {
// add a new entry to the start of the search results
// NOTE: We do this after creating a new object, and while it isn't really the right thing
// to do (since the new object might not actually be a result for the current search), it's nice
// to give the user some visual feedback.
let newSearchResults = [ sr ] ;
newSearchResults.push( ...this.state.searchResults ) ;
this.setState( { searchResults: newSearchResults } ) ;
}
updatePublications( pubs ) {
// update the cache
let pubs2 = {} ;
for ( let i=0 ; i < pubs.length ; ++i ) {
const pub = pubs[ i ] ;
this.caches.publications[ pub.pub_id ] = pub ;
pubs2[ pub.pub_id ] = pub ;
}
// update the UI
updatePublisher( publ_id ) {
// update the specified publisher in the UI
this._doUpdateSearchResult(
(sr) => ( sr._type === "publisher" && sr.publ_id === publ_id ),
this.makeFlaskUrl( "/publisher/" + publ_id, {include_pubs:1,include_articles:1} )
) ;
this.forceFlaskImageReload( "publisher", publ_id ) ;
}
updatePublication( pub_id ) {
// update the specified publication in the UI
this._doUpdateSearchResult(
(sr) => ( sr._type === "publication" && sr.pub_id === pub_id ),
this.makeFlaskUrl( "/publication/" + pub_id, {include_articles:1,deep:1} )
) ;
this.forceFlaskImageReload( "publication", pub_id ) ;
}
_doUpdateSearchResult( srCheck, url ) {
// find the target search result in the UI
let newSearchResults = this.state.searchResults ;
for ( let i=0 ; i < newSearchResults.length ; ++i ) {
if ( newSearchResults[i].type === "publication" && pubs2[ newSearchResults[i].pub_id ] ) {
newSearchResults[i] = pubs2[ newSearchResults[i].pub_id ] ;
newSearchResults[i].type = "publication" ;
if ( srCheck( newSearchResults[i] ) ) {
// found it - get the latest details from the backend
axios.get( url ).then( resp => {
newSearchResults[i] = resp.data ;
this.setState( { searchResults: newSearchResults } ) ;
} ).catch( err => {
this.showErrorResponse( "Can't get the updated search result details", err ) ;
} ) ;
break ; // nb: we assume there's only 1 instance
}
}
this.setState( { searchResults: newSearchResults } ) ;
}
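// NOTE: updatePublication() and updatePublisher() above are thin wrappers around _doUpdateSearchResult(),
// which takes a predicate that identifies the search result to refresh, plus the backend URL that returns
// its latest details. A hypothetical helper for articles (for illustration only; it assumes a matching
// /article/<id> endpoint) would follow the same pattern:
//   updateArticle( article_id ) {
//       this._doUpdateSearchResult(
//           (sr) => ( sr._type === "article" && sr.article_id === article_id ),
//           this.makeFlaskUrl( "/article/" + article_id )
//       ) ;
//   }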
showModalForm( formId, title, titleColor, content, buttons ) {
@@ -451,18 +471,6 @@ export class App extends React.Component
console.log( " " + detail ) ;
}
makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
// NOTE: react-select uses the "value" field to determine which choices have already been selected
// and thus should not be shown in the droplist of available choices.
let tagList = [] ;
if ( tags )
tags.map( tag => tagList.push( { value: tag, label: tag } ) ) ;
// create another list for all known tags
let allTags = this.caches.tags.map( tag => { return { value: tag[0], label: tag[0] } } ) ;
return [ tagList, allTags ] ;
}
makeAppUrl( url ) {
// FUDGE! The test suite needs any URL parameters to be passed on to the next page if a link is clicked.
if ( this.isTestMode() )
@@ -483,9 +491,13 @@ export class App extends React.Component
}
makeExternalDocUrl( url ) {
// generate a URL for an external document
if ( isLink( url ) )
return url ;
if ( url.substr( 0, 2 ) === "$/" )
url = url.substr( 2 ) ;
return this.makeFlaskUrl( "/docs/" + encodeURIComponent(url) ) ;
if ( url[0] === "/" )
url = url.substr( 1 ) ;
return this.makeFlaskUrl( "/docs/" + encodeURIComponent( url ) ) ;
}
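// NOTE: Illustrative examples only (the exact prefix depends on makeFlaskUrl()'s base URL, and this
// assumes isLink() recognizes http/https URL's):
//   makeExternalDocUrl( "https://example.com/doc.pdf" )  => returned unchanged (external link)
//   makeExternalDocUrl( "$/some-doc.html" )              => <flask-base>/docs/some-doc.html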
makeFlaskImageUrl( type, imageId ) {
@@ -528,15 +540,22 @@ export class App extends React.Component
}
_onStartupComplete() {
// startup has completed, we're ready to go
this.refs.watermark.style.opacity = 0.2 ;
if ( this.props.warning )
this.showWarningToast( this.props.warning ) ;
if ( this.props.doSearch )
this.props.doSearch() ;
else if ( this.props.type === "report" )
this._showDbReport() ;
// NOTE: We could preload the DataCache here (i.e. where it won't affect startup time),
// but it would happen on every page load (e.g. /article/NNN or /publication/NNN),
// which would probably hurt more than it helps, since the data isn't needed if the user
// is only searching for stuff (i.e. most of the time).
}
setWindowTitleFromSearchResults( srType, idField, idVal, nameField ) {
for ( let sr of Object.entries( this.state.searchResults ) ) {
if ( sr[1].type === srType && String(sr[1][idField]) === idVal ) {
if ( sr[1]._type === srType && String(sr[1][idField]) === idVal ) {
this.setWindowTitle( typeof nameField === "function" ? nameField(sr[1]) : sr[1][nameField] ) ;
return ;
}
@@ -544,6 +563,10 @@ export class App extends React.Component
this.setWindowTitle( null ) ;
}
setWindowTitle( caption ) {
if ( caption ) {
let doc = new DOMParser().parseFromString( caption, "text/html" ) ;
caption = doc.body.textContent ;
}
document.title = caption ? APP_NAME + " - " + caption : APP_NAME ;
}
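// NOTE: e.g. setWindowTitle( "<b>Some</b> publication" ) sets document.title to APP_NAME + " - Some publication";
// the DOMParser round-trip above strips any HTML markup that came back with a search result's display name.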

@@ -1,5 +1,6 @@
#article-form .row label.top { width: 6.5em ; }
#article-form .row label { width: 5.75em ; }
#article-form .row label.parent-mode { cursor: pointer ; }
#article-form .row.snippet { flex-direction: column ; align-items: initial ; margin-top: -0.5em ; }
#article-form .row.snippet textarea { min-height: 6em ; }

@@ -3,11 +3,12 @@ import { Link } from "react-router-dom" ;
import { Menu, MenuList, MenuButton, MenuItem } from "@reach/menu-button" ;
import { ArticleSearchResult2 } from "./ArticleSearchResult2.js" ;
import "./ArticleSearchResult.css" ;
import { PublisherSearchResult } from "./PublisherSearchResult.js" ;
import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ;
import { RatingStars } from "./RatingStars.js" ;
import { gAppRef } from "./App.js" ;
import { makeScenarioDisplayName, applyUpdatedVals, removeSpecialFields, makeCommaList, isLink } from "./utils.js" ;
import { makeScenarioDisplayName, updateRecord, makeCommaList } from "./utils.js" ;
const axios = require( "axios" ) ;
@@ -24,58 +25,41 @@ export class ArticleSearchResult extends React.Component
const display_snippet = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "article_snippet!" ] || this.props.data.article_snippet
) ;
const pub = gAppRef.caches.publications[ this.props.data.pub_id ] ;
const parent_pub = this.props.data._parent_pub ;
const parent_publ = this.props.data._parent_publ ;
const image_url = gAppRef.makeFlaskImageUrl( "article", this.props.data.article_image_id ) ;
// prepare the article's URL
let article_url = this.props.data.article_url ;
if ( article_url ) {
if ( ! isLink( article_url ) )
article_url = gAppRef.makeExternalDocUrl( article_url ) ;
} else if ( pub && pub.pub_url ) {
article_url = gAppRef.makeExternalDocUrl( pub.pub_url ) ;
if ( article_url )
article_url = gAppRef.makeExternalDocUrl( article_url ) ;
else if ( parent_pub && parent_pub.pub_url ) {
article_url = gAppRef.makeExternalDocUrl( parent_pub.pub_url ) ;
if ( article_url.substr( article_url.length-4 ) === ".pdf" && this.props.data.article_pageno )
article_url += "#page=" + this.props.data.article_pageno ;
}
// prepare the authors
let authors = [] ;
if ( this.props.data[ "authors!" ] ) {
// the backend has provided us with a list of author names (possibly highlighted) - use them directly
for ( let i=0 ; i < this.props.data["authors!"].length ; ++i ) {
const author_id = this.props.data.article_authors[ i ] ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) }
dangerouslySetInnerHTML = {{ __html: this.props.data["authors!"][i] }}
/> ) ;
}
} else {
// we only have a list of author ID's (the normal case) - figure out what the corresponding names are
for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
const author_id = this.props.data.article_authors[ i ] ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) }
dangerouslySetInnerHTML = {{ __html: gAppRef.caches.authors[ author_id ].author_name }}
/> ) ;
}
const author_names_hilite = this.props.data[ "authors!" ] ;
for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
const author = this.props.data.article_authors[ i ] ;
const author_name = author_names_hilite ? author_names_hilite[i] : author.author_name ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author.author_id ) }
dangerouslySetInnerHTML = {{ __html: author_name }}
/> ) ;
}
// prepare the scenarios
let scenarios = [] ;
if ( this.props.data[ "scenarios!" ] ) {
// the backend has provided us with a list of scenarios (possibly highlighted) - use them directly
this.props.data[ "scenarios!" ].forEach( (scenario,i) =>
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( scenario ) }}
/> )
) ;
} else {
// we only have a list of scenario ID's (the normal case) - figure out what the corresponding names are
this.props.data.article_scenarios.forEach( (scenario,i) =>
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( gAppRef.caches.scenarios[scenario] ) }}
/> )
) ;
const scenario_names_hilite = this.props.data[ "scenarios!" ] ;
for ( let i=0 ; i < this.props.data.article_scenarios.length ; ++i ) {
const scenario = this.props.data.article_scenarios[ i ] ;
const scenario_display_name = scenario_names_hilite ? scenario_names_hilite[i] : makeScenarioDisplayName(scenario) ;
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: scenario_display_name }}
/> ) ;
}
// prepare the tags
@@ -107,17 +91,18 @@ export class ArticleSearchResult extends React.Component
<MenuButton className="sr-menu" />
<MenuList>
<MenuItem className="edit" onSelect={ () => this.onEditArticle() } >
<img src="/images/icons/edit.png" alt="Edit." /> Edit
<img src="/images/edit.png" alt="Edit." /> Edit
</MenuItem>
<MenuItem className="delete" onSelect={ () => this.onDeleteArticle() } >
<img src="/images/icons/delete.png" alt="Delete." /> Delete
<img src="/images/delete.png" alt="Delete." /> Delete
</MenuItem>
</MenuList>
</Menu> ) ;
// NOTE: The "title" field is also given the CSS class "name" so that the normal CSS will apply to it.
// Some tests also look for a generic ".name" class name when checking search results.
const pub_display_name = pub ? PublicationSearchResult.makeDisplayName( pub ) : null ;
const pub_display_name = parent_pub ? PublicationSearchResult.makeDisplayName( parent_pub ) : null ;
const publ_display_name = parent_publ ? PublisherSearchResult.makeDisplayName( parent_publ ) : null ;
return ( <div className="search-result article"
ref = { r => gAppRef.setTestAttribute( r, "article_id", this.props.data.article_id ) }
>
@@ -129,6 +114,12 @@ export class ArticleSearchResult extends React.Component
dangerouslySetInnerHTML = {{ __html: pub_display_name }}
/>
}
{ publ_display_name &&
<Link className="publisher" title="Show this publisher."
to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) }
dangerouslySetInnerHTML = {{ __html: publ_display_name }}
/>
}
<RatingStars rating={this.props.data.article_rating} title="Rate this article."
onChange = { this.onRatingChange.bind( this ) }
/>
@@ -144,13 +135,22 @@ export class ArticleSearchResult extends React.Component
{ display_subtitle && <div className="subtitle" dangerouslySetInnerHTML={{ __html: display_subtitle }} /> }
</div>
<div className="content">
{ image_url && <PreviewableImage url={image_url} className="image" alt="Article." /> }
{ image_url && <PreviewableImage url={image_url} noActivate={true} className="image" alt="Article." /> }
<div className="snippet" dangerouslySetInnerHTML={{__html: display_snippet}} />
</div>
<div className="footer">
{ authors.length > 0 && <div className="authors"> By {makeCommaList(authors)} </div> }
{ scenarios.length > 0 && <div className="scenarios"> Scenarios: {makeCommaList(scenarios)} </div> }
{ tags.length > 0 && <div className="tags"> Tags: {tags} </div> }
{ authors.length > 0 &&
<div className="authors"> By {makeCommaList(authors)} </div>
}
{ this.props.data.article_date &&
<div> <label>Published:</label> <span className="article_date"> {this.props.data.article_date} </span> </div>
}
{ scenarios.length > 0 &&
<div className="scenarios"> Scenarios: {makeCommaList(scenarios)} </div>
}
{ tags.length > 0 &&
<div className="tags"> Tags: {tags} </div>
}
</div>
</div> ) ;
}
@@ -170,61 +170,71 @@ export class ArticleSearchResult extends React.Component
} ) ;
}
static onNewArticle( notify ) {
ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/article/create", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.scenarios = resp.data.scenarios ;
gAppRef.caches.tags = resp.data.tags ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.article_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ;
if ( resp.data._publication )
gAppRef.updatePublications( [ resp.data._publication ] ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewArticle() {
gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/article/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
// update the UI
const newArticle = resp.data.record ;
gAppRef.prependSearchResult( newArticle ) ;
if ( newArticle._parent_pub )
gAppRef.updatePublication( newArticle._parent_pub.pub_id ) ;
else if ( newArticle._parent_publ )
gAppRef.updatePublisher( newArticle._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditArticle() {
ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.article_id = this.props.data.article_id ;
axios.post( gAppRef.makeFlaskUrl( "/article/update", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.scenarios = resp.data.scenarios ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ;
if ( resp.data._publications )
gAppRef.updatePublications( resp.data._publications ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.article_id = this.props.data.article_id ;
axios.post(
gAppRef.makeFlaskUrl( "/article/update" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
// update the UI
const article = resp.data.record ;
const orig_parent_pub = this.props.data._parent_pub ;
const orig_parent_publ = this.props.data._parent_publ ;
updateRecord( this.props.data, article ) ;
if ( article._parent_pub )
gAppRef.updatePublication( article._parent_pub.pub_id ) ;
else if ( article._parent_publ )
gAppRef.updatePublisher( article._parent_publ.publ_id ) ;
if ( orig_parent_pub )
gAppRef.updatePublication( orig_parent_pub.pub_id ) ;
if ( orig_parent_publ )
gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
// update the UI
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
}
onDeleteArticle() {
@@ -236,21 +246,22 @@ export class ArticleSearchResult extends React.Component
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the article on the server
axios.get( gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id, {list:1} ) )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.tags = resp.data.tags ;
axios.get(
gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "tags" ] ) ;
// update the UI
this.props.onDelete( "article_id", this.props.data.article_id ) ;
if ( this.props.data._parent_pub )
gAppRef.updatePublication( this.props.data._parent_pub.pub_id ) ;
else if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was deleted. </div> ) ;
if ( resp.data._publication )
gAppRef.updatePublications( [ resp.data._publication ] ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},

@@ -5,7 +5,7 @@ import { NEW_ARTICLE_PUB_PRIORITY_CUTOFF } from "./constants.js" ;
import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ;
import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, isNumeric } from "./utils.js" ;
import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, isNumeric } from "./utils.js" ;
// --------------------------------------------------------------------
@@ -18,6 +18,26 @@ export class ArticleSearchResult2
let refs = {} ;
const isNew = Object.keys( vals ).length === 0 ;
// set the parent mode
let parentMode = vals.publ_id ? "publisher" : "publication" ;
let publicationParentRowRef = null ;
let publisherParentRowRef = null ;
let articleDateRef = null ;
function onPublicationParent() {
parentMode = "publication" ;
publicationParentRowRef.style.display = "flex" ;
publisherParentRowRef.style.display = "none" ;
articleDateRef.style.display = "none" ;
refs.pub_id.focus() ;
}
function onPublisherParent() {
parentMode = "publisher" ;
publicationParentRowRef.style.display = "none" ;
publisherParentRowRef.style.display = "flex" ;
articleDateRef.style.display = "flex" ;
refs.publ_id.focus() ;
}
// prepare to save the initial values
let initialVals = null ;
function onReady() {
@@ -54,13 +74,14 @@ export class ArticleSearchResult2
} ;
// initialize the publications
let publications = [ { value: null, label: <i>(none)</i> } ] ;
let publications = [ { value: null, label: <i>(none)</i>, textLabel: "" } ] ;
let mostRecentPub = null ;
for ( let p of Object.entries(gAppRef.caches.publications) ) {
for ( let p of Object.entries( gAppRef.dataCache.data.publications ) ) {
const pub_display_name = PublicationSearchResult.makeDisplayName( p[1] ) ;
const pub = {
value: p[1].pub_id,
label: <span dangerouslySetInnerHTML={{__html: pub_display_name}} />,
textLabel: pub_display_name,
} ;
publications.push( pub ) ;
if ( mostRecentPub === null || p[1].time_created > mostRecentPub[1] )
@@ -86,27 +107,41 @@ export class ArticleSearchResult2
}
}
// initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i>, textLabel: "" } ] ;
let currPubl = publishers[0] ;
for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( {
value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />,
textLabel: p[1].publ_name,
} ) ;
if ( p[1].publ_id === vals.publ_id )
currPubl = publishers[ publishers.length-1 ] ;
}
sortSelectableOptions( publishers ) ;
// initialize the authors
let allAuthors = [] ;
for ( let a of Object.entries(gAppRef.caches.authors) )
for ( let a of Object.entries( gAppRef.dataCache.data.authors ) )
allAuthors.push( { value: a[1].author_id, label: a[1].author_name } );
allAuthors.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currAuthors = [] ;
if ( vals.article_authors ) {
currAuthors = vals.article_authors.map( a => {
return { value: a, label: gAppRef.caches.authors[a].author_name }
return { value: a.author_id, label: a.author_name }
} ) ;
}
// initialize the scenarios
let allScenarios = [] ;
for ( let s of Object.entries(gAppRef.caches.scenarios) )
for ( let s of Object.entries( gAppRef.dataCache.data.scenarios ) )
allScenarios.push( { value: s[1].scenario_id, label: makeScenarioDisplayName(s[1]) } ) ;
allScenarios.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currScenarios = [] ;
if ( vals.article_scenarios ) {
currScenarios = vals.article_scenarios.map( s => {
return { value: s, label: makeScenarioDisplayName(gAppRef.caches.scenarios[s]) }
return { value: s.scenario_id, label: makeScenarioDisplayName(s) }
} ) ;
}
function onScenarioCreated( val ) {
@@ -119,11 +154,12 @@ export class ArticleSearchResult2
}
// initialize the tags
const tags = gAppRef.makeTagLists( vals.article_tags ) ;
const tags = makeTagLists( vals.article_tags ) ;
// prepare the form content
/* eslint-disable jsx-a11y/img-redundant-alt */
const content = <div>
<div style={{display:"flex"}}>
<div className="image-container">
<div className="row image">
<img src={imageUrl} className="image"
@@ -144,19 +180,40 @@ export class ArticleSearchResult2
/>
</div>
</div>
<div className="row title"> <label className="top"> Title: </label>
<input type="text" defaultValue={vals.article_title} autoFocus ref={r => refs.article_title=r} />
</div>
<div className="row subtitle"> <label className="top"> Subtitle: </label>
<input type="text" defaultValue={vals.article_subtitle} ref={r => refs.article_subtitle=r} />
</div>
<div className="row publication"> <label className="select top"> Publication: </label>
<Select className="react-select" classNamePrefix="react-select" options={publications} isSearchable={true}
defaultValue = {currPub}
ref = { r => refs.pub_id=r }
/>
<input className="pageno" type="text" defaultValue={vals.article_pageno} ref={r => refs.article_pageno=r} title="Page number." />
<div style={{flexGrow:1}}>
<div className="row title"> <label className="top"> Title: </label>
<input type="text" defaultValue={vals.article_title} autoFocus ref={r => refs.article_title=r} />
</div>
<div className="row subtitle"> <label className="top"> Subtitle: </label>
<input type="text" defaultValue={vals.article_subtitle} ref={r => refs.article_subtitle=r} />
</div>
<div className="row publication" style={{display:parentMode==="publication"?"flex":"none"}} ref={r => publicationParentRowRef=r} >
<label className="select top parent-mode"
title = "Click to associate this article with a publisher."
onClick = {onPublisherParent}
> Publication: </label>
<Select className="react-select" classNamePrefix="react-select" options={publications} isSearchable={true} getOptionValue={o => o.textLabel}
defaultValue = {currPub}
ref = { r => refs.pub_id=r }
/>
<input className="pageno" type="text" defaultValue={vals.article_pageno} ref={r => refs.article_pageno=r} title="Page number." />
</div>
<div className="row publisher" style={{display:parentMode==="publisher"?"flex":"none"}} ref={r => publisherParentRowRef=r} >
<label className="select top parent-mode"
title="Click to associate this article with a publication."
onClick = {onPublicationParent}
> Publisher: </label>
<Select className="react-select" classNamePrefix="react-select" options={publishers} isSearchable={true} getOptionValue={o => o.textLabel}
defaultValue = {currPubl}
ref = { r => refs.publ_id=r }
/>
</div>
<div className="row article_date" style={{display:parentMode==="publisher"?"flex":"none"}}ref={r => articleDateRef=r} >
<label className="select top"> Date: </label>
<input className="article_date" type="text" defaultValue={vals.article_date} ref={r => refs.article_date=r} />
</div>
</div>
</div>
<div className="row snippet"> <label> Snippet: </label>
<textarea defaultValue={vals.article_snippet} ref={r => refs.article_snippet=r} />
</div>
@@ -190,9 +247,13 @@ export class ArticleSearchResult2
function unloadVals() {
let newVals = {} ;
for ( let r in refs ) {
if ( r === "pub_id" )
newVals[ r ] = refs[r].state.value && refs[r].state.value.value ;
else if ( r === "article_authors" ) {
if ( r === "pub_id" ) {
if ( parentMode === "publication" )
newVals[ r ] = refs[r].state.value && refs[r].state.value.value ;
} else if ( r === "publ_id" ) {
if ( parentMode === "publisher" )
newVals[ r ] = refs[r].state.value && refs[r].state.value.value ;
} else if ( r === "article_authors" ) {
let vals = unloadCreatableSelect( refs[r] ) ;
newVals.article_authors = [] ;
vals.forEach( v => {
@@ -212,7 +273,7 @@ export class ArticleSearchResult2
} ) ;
} else if ( r === "article_tags" ) {
let vals = unloadCreatableSelect( refs[r] ) ;
newVals[ r ] = vals.map( v => v.label ) ;
newVals[ r ] = vals.map( v => v.label ) ;
} else
newVals[ r ] = refs[r].value.trim() ;
}
@@ -233,10 +294,12 @@ export class ArticleSearchResult2
[ () => newVals.article_title === "", "Please give it a title.", refs.article_title ],
] ;
const optional = [
[ () => newVals.pub_id === null, "No publication was specified.", refs.pub_id ],
[ () => newVals.article_pageno === "" && newVals.pub_id !== null, "No page number was specified.", refs.article_pageno ],
[ () => parentMode === "publication" && newVals.pub_id === null, "No publication was specified.", refs.pub_id ],
[ () => parentMode === "publisher" && newVals.publ_id === null, "No publisher was specified.", refs.pub_id ],
[ () => parentMode === "publication" && newVals.article_pageno === "" && newVals.pub_id !== null, "No page number was specified.", refs.article_pageno ],
[ () => newVals.article_pageno !== "" && newVals.pub_id === null, "A page number was specified but no publication.", refs.pub_id ],
[ () => newVals.article_pageno !== "" && !isNumeric(newVals.article_pageno), "The page number is not numeric.", refs.article_pageno ],
[ () => newVals.publ_id && newVals.article_date === "", "The article date was not specified.", refs.article_date ],
[ () => newVals.article_snippet === "", "No snippet was provided.", refs.article_snippet ],
[ () => newVals.article_authors.length === 0, "No authors were specified.", refs.article_authors ],
[ () => newVals.article_tags && newVals.article_tags.length === 1 && newVals.article_tags[0] === "tips", "This tip has no other tags." ],
@@ -264,7 +327,7 @@ export class ArticleSearchResult2
// show the form
const title = ( <div style={{display:"flex"}}>
<img src="/images/icons/article-grey.png" alt="Dialog icon." />
<img src="/images/menu/article.png" alt="Dialog icon." />
{isNew ? "New article" : "Edit article"}
</div> ) ;
gAppRef.showModalForm( "article-form",

@@ -0,0 +1,59 @@
import React from "react" ;
import { gAppRef } from "./App.js" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
export class DataCache
{
constructor() {
// initialize
this.data = {} ;
}
get( keys, onOK ) {
// initialize
if ( onOK === undefined )
onOK = () => {} ;
let nOK = 0 ;
function onPartialOK() {
if ( ++nOK === keys.length ) {
onOK() ;
}
}
// refresh each key
for ( let key of keys ) {
// check if we already have the data in the cache
if ( this.data[ key ] !== undefined ) {
onPartialOK() ;
} else {
// nope - get the specified data from the backend
axios.get(
gAppRef.makeFlaskUrl( "/" + key )
).then( resp => {
// got it - update the cache
this.data[ key ] = resp.data ;
onPartialOK() ;
} ).catch( err => {
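// NOTE: If any key fails to load, onOK() is never invoked, since nOK can't reach keys.length.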
gAppRef.showErrorToast(
<div> Couldn't load the {key}: <div className="monospace"> {err.toString()} </div> </div>
) ;
} ) ;
}
}
}
refresh( keys, onOK ) {
// refresh the specified keys
for ( let key of keys )
delete this.data[ key ] ;
this.get( keys, onOK ) ;
}
}
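// NOTE: A minimal usage sketch (hypothetical caller, using the same cache keys as the components above):
//   gAppRef.dataCache.get( [ "authors", "tags" ], () => {
//       // both caches are now populated and can be read synchronously
//       const nAuthors = Object.keys( gAppRef.dataCache.data.authors ).length ;
//       console.log( nAuthors + " authors loaded." ) ;
//   } ) ;
// get() only hits the backend for keys that aren't already cached; refresh() deletes them first, forcing a re-fetch.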

@@ -0,0 +1,24 @@
#db-report {
border: 1px solid #ccc ; border-radius: 8px ;
padding: 0.5em ;
}
#db-report .section { margin-top: 1em ; }
#db-report .section:first-of-type { margin-top: 0 ; }
#db-report h2 { border: 1px solid #ccc ; padding: 0.1em 0.2em ; background: #eee ; margin-bottom: 0.25em ; font-size: 125% ; }
#db-report h2 img.loading { height: 0.75em ; margin-left: 0.25em ; }
#db-report .db-row-counts .images { font-size: 80% ; font-style: italic ; }
#db-report .db-links .check-links-frame { display: inline-block ; position: absolute ; right: 1em ; text-align: center ; }
#db-report .db-links button.check-links { margin-bottom: 0.2em ; padding: 0.25em 0.5em ; }
#db-report .db-links .check-links-frame .status-msg { font-size: 60% ; font-style: italic ; }
#db-report .db-links .link-errors { font-size: 80% ; list-style-image: url("/public/images/link-error-bullet.png") ; }
#db-report .db-links .link-errors .status { font-family: monospace ; font-style: italic ; }
#db-report .db-images .dupe-analysis .collapsible { margin-bottom: 0.5em ; }
#db-report .db-images .dupe-analysis .hash { font-family: monospace ; font-size: 80% ; font-style: italic ; }
#db-report .db-images .image-sizes th { text-align: left ; font-weight: normal ; font-style: italic ; }
#db-report .db-images .image-sizes img { height: 0.9em ; }
#db-report .db-images .react-tabs__tab-list { margin-bottom: 0 ; font-weight: bold ; }
#db-report .db-images .react-tabs__tab-panel { border: 1px solid #aaa ; border-top-width: 0 ; padding: 0.25em 0.5em ; }

@@ -0,0 +1,387 @@
import React from "react" ;
import { Link } from "react-router-dom" ;
import { Tabs, TabList, TabPanel, Tab } from "react-tabs" ;
import "react-tabs/style/react-tabs.css" ;
import "./DbReport.css" ;
import { PreviewableImage } from "./PreviewableImage" ;
import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, isLink } from "./utils.js" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
export class DbReport extends React.Component
{
// render the component
render() {
return ( <div id="db-report">
<div className="section"> <DbRowCounts /> </div>
<div className="section"> <DbLinks /> </div>
<div className="section"> <DbImages /> </div>
</div>
) ;
}
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class DbRowCounts extends React.Component
{
constructor( props ) {
// initialize
super( props ) ;
this.state = {
dbRowCounts: null,
} ;
// get the database row counts
axios.get(
gAppRef.makeFlaskUrl( "/db-report/row-counts" )
).then( resp => {
this.setState( { dbRowCounts: resp.data } ) ;
} ).catch( err => {
gAppRef.showErrorResponse( "Can't get the database row counts", err ) ;
} ) ;
}
render() {
// initialize
const dbRowCounts = this.state.dbRowCounts ;
// render the table rows
function makeRowCountRow( tableName ) {
const tableName2 = tableName[0].toUpperCase() + tableName.substring(1) ;
let nRows ;
if ( dbRowCounts ) {
nRows = dbRowCounts[ tableName ] ;
const nImages = dbRowCounts[ tableName+"_image" ] ;
if ( nImages > 0 )
nRows = ( <span>
{nRows} <span className="images">({pluralString(nImages,"image")})</span>
</span>
) ;
}
return ( <tr key={tableName}>
<td style={{paddingRight:"0.5em",fontWeight:"bold"}}> {tableName2}s: </td>
<td> {nRows} </td>
</tr>
) ;
}
let tableRows = [ "publisher", "publication", "article", "author", "scenario" ].map(
(tableName) => makeRowCountRow( tableName )
) ;
// render the component
return ( <div className="db-row-counts">
<h2> Content { !dbRowCounts && <img src="/images/loading.gif" className="loading" alt="Loading..." /> } </h2>
<table><tbody>{tableRows}</tbody></table>
</div>
) ;
}
}
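// NOTE: makeRowCountRow() above expects the /db-report/row-counts response to be a flat map of row counts
// keyed by table name, with an optional "<table>_image" count for tables that can have an image attached,
// e.g. (illustrative values only): { "publisher": 3, "publisher_image": 2, "publication": 12, "article": 150, ... }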
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class DbLinks extends React.Component
{
constructor( props ) {
// initialize
super( props ) ;
this.state = {
dbLinks: null,
linksToCheck: null, currLinkToCheck: null, isFirstLinkCheck: true,
checkLinksInProgress: false, checkLinksStatusMsg: null,
linkErrors: {},
} ;
// initialize
this._getLinksToCheck() ;
}
render() {
// initialize
const dbLinks = this.state.dbLinks ;
// render the table rows
let tableRows = [] ;
for ( let key of [ "publisher", "publication", "article" ] ) {
const nDbLinks = dbLinks && dbLinks[key] ? dbLinks[key].length : null ;
const key2 = key[0].toUpperCase() + key.substring(1) + "s" ;
tableRows.push( <tr key={key}>
<td style={{paddingRight:"0.5em",fontWeight:"bold"}}> {key2}: </td>
<td style={{width:"100%"}}> {nDbLinks} </td>
</tr>
) ;
if ( this.state.linkErrors[ key ] ) {
// NOTE: Showing all the errors at once (i.e. not in a collapsible list) could get unwieldy
// if there are a lot of them, but this shouldn't happen often, and if it does, the user
// is likely to stop the check, fix the problem, then try again.
let rows = [] ;
for ( let linkError of this.state.linkErrors[ key ] ) {
const url = gAppRef.makeAppUrl( "/" + linkError[0][0] + "/" + linkError[0][1] ) ;
const targetUrl = linkError[0][3] ;
const target = isLink( targetUrl )
? <a href={targetUrl}>{targetUrl}</a>
: targetUrl ;
let errorMsg = linkError[1] && linkError[1] + ": " ;
rows.push( <li key={linkError[0]}>
<Link to={url} dangerouslySetInnerHTML={{__html:linkError[0][2]}} />
<span className="status"> ({errorMsg}{target}) </span>
</li>
) ;
}
tableRows.push( <tr key={key+"-errors"}>
<td colSpan="2">
<ul className="link-errors"> {rows} </ul>
</td>
</tr>
) ;
}
}
// render the component
const nLinksToCheck = this.state.linksToCheck ? this.state.linksToCheck.length - this.state.currLinkToCheck : null ;
const imageUrl = this.state.checkLinksInProgress ? "/images/loading.gif" : "/images/check-db-links.png" ;
return ( <div className="db-links">
<h2> Links { !dbLinks && <img src="/images/loading.gif" className="loading" alt="Loading..." /> } </h2>
{ this.state.linksToCheck && this.state.linksToCheck.length > 0 && (
<div className="check-links-frame">
<button className="check-links" style={{display:"flex"}} onClick={() => this.checkDbLinks()} >
<img src={imageUrl} style={{height:"1em",marginTop:"0.15em",marginRight:"0.5em"}} alt="Check database links." />
{ this.state.checkLinksInProgress ? "Stop checking" : "Check links (" + nLinksToCheck + ")" }
</button>
<div className="status-msg"> {this.state.checkLinksStatusMsg} </div>
</div>
) }
<table className="db-links" style={{width:"100%"}}><tbody>{tableRows}</tbody></table>
</div>
) ;
}
checkDbLinks() {
// start/stop checking links
const inProgress = ! this.state.checkLinksInProgress ;
this.setState( { checkLinksInProgress: inProgress } ) ;
if ( inProgress )
this._checkNextLink() ;
}
_checkNextLink( force ) {
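// NOTE: "force" is passed when we re-enter below, after re-fetching the links, so that we don't
// reset the UI and re-fetch a second time.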
// check if this is the start of a new run
if ( this.state.currLinkToCheck === 0 && !force ) {
// yup - reset the UI
this.setState( { linkErrors: {} } ) ;
// NOTE: If the user is checking the links *again*, it could be because some links were flagged
// during the first run and have since been fixed, and they want to check everything again.
// In this case, we need to re-fetch the links from the database.
if ( ! this.state.isFirstLinkCheck ) {
this._getLinksToCheck(
() => { this._checkNextLink( true ) ; },
() => { this.setState( { checkLinksInProgress: false } ) ; }
) ;
return ;
}
}
// check if this is the end of a run
if ( this.state.currLinkToCheck >= this.state.linksToCheck.length ) {
// yup - reset the UI
this.setState( {
checkLinksStatusMsg: "Checked " + pluralString( this.state.linksToCheck.length, "link" ) + ".",
currLinkToCheck: 0, // nb: to allow the user to check again
checkLinksInProgress: false,
isFirstLinkCheck: false,
} ) ;
return ;
}
// get the next link to check
const linkToCheck = this.state.linksToCheck[ this.state.currLinkToCheck ] ;
this.setState( { currLinkToCheck: this.state.currLinkToCheck + 1 } ) ;
let continueCheckLinks = () => {
// update the UI
this.setState( { checkLinksStatusMsg:
"Checked " + this.state.currLinkToCheck + " of " + pluralString( this.state.linksToCheck.length, "link" ) + "..."
} ) ;
// check the next link
if ( this.state.checkLinksInProgress )
this._checkNextLink() ;
}
// check the next link
let url = linkToCheck[3] ;
if ( url.substr( 0, 14 ) === "http://{FLASK}" )
url = gAppRef.makeFlaskUrl( url.substr( 14 ) ) ;
// NOTE: Because of CORS, we have to proxy URL's that don't belong to us via the backend :-/
let req = isLink( url )
? axios.post( gAppRef.makeFlaskUrl( "/db-report/check-link", {url:url} ) )
: axios.head( gAppRef.makeExternalDocUrl( url ) ) ;
req.then( resp => {
// the link worked - continue checking links
continueCheckLinks() ;
} ).catch( err => {
// the link failed - record the error
let newLinkErrors = this.state.linkErrors ;
if ( newLinkErrors[ linkToCheck[0] ] === undefined )
newLinkErrors[ linkToCheck[0] ] = [] ;
const errorMsg = err.response ? "HTTP " + err.response.status : null ;
newLinkErrors[ linkToCheck[0] ].push( [ linkToCheck, errorMsg ] ) ;
this.setState( { linkErrors: newLinkErrors } ) ;
// continue checking links
continueCheckLinks() ;
} ) ;
}
_getLinksToCheck( onOK, onError ) {
// get the links in the database
axios.get(
gAppRef.makeFlaskUrl( "/db-report/links" )
).then( resp => {
const dbLinks = resp.data ;
// flatten the links to a list
let linksToCheck = [] ;
for ( let key of [ "publisher", "publication", "article" ] ) {
for ( let row of dbLinks[key] ) {
linksToCheck.push( [
key, row[0], row[1], row[2]
] ) ;
}
}
this.setState( {
dbLinks: resp.data,
linksToCheck: linksToCheck,
currLinkToCheck: 0,
} ) ;
if ( onOK )
onOK() ;
} ).catch( err => {
gAppRef.showErrorResponse( "Can't get the database links", err ) ;
if ( onError )
onError() ;
} ) ;
}
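// NOTE: Each entry in linksToCheck is a 4-tuple: [ record type, record ID, display name, URL ],
// e.g. (illustrative values only) [ "publication", 17, "Some Publication #1", "https://example.com/page" ].
// _checkNextLink() reads the URL from index 3, and keys its linkErrors map by the record type at index 0.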
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
class DbImages extends React.Component
{
constructor( props ) {
// initialize
super( props ) ;
this.state = {
dbImages: null,
} ;
// get the database images
axios.get(
gAppRef.makeFlaskUrl( "/db-report/images" )
).then( resp => {
this.setState( { dbImages: resp.data } ) ;
} ).catch( err => {
gAppRef.showErrorResponse( "Can't get the database images", err ) ;
} ) ;
}
render() {
// initialize
const dbImages = this.state.dbImages ;
// render any duplicate images
let dupeImages = [] ;
if ( dbImages ) {
for ( let hash in dbImages.duplicates ) {
let parents = [] ;
for ( let row of dbImages.duplicates[hash] ) {
const url = gAppRef.makeAppUrl( "/" + row[0] + "/" + row[1] ) ;
parents.push(
<Link key={row} to={url} dangerouslySetInnerHTML={{__html:row[2]}} />
) ;
}
// NOTE: We just use the first row's image since, presumably, they will all be the same.
const row = dbImages.duplicates[hash][ 0 ] ;
const imageUrl = gAppRef.makeFlaskImageUrl( row[0], row[1] ) ;
const caption = ( <span>
Found a duplicate image <span className="hash">(md5:{hash})</span>
</span>
) ;
dupeImages.push( <div className="dupe-image" style={{display:"flex"}} key={hash} >
<PreviewableImage url={imageUrl} style={{width:"3em",marginTop:"0.1em",marginRight:"0.5em"}} />
{ makeCollapsibleList( caption, parents, 5, {flexGrow:1}, hash ) }
</div>
) ;
}
}
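// NOTE: dbImages.duplicates is keyed by each image's MD5 hash, and lists the records sharing that image
// as [ record type, record ID, display name ] rows, e.g. (illustrative values only):
//   { "0a1b2c...": [ [ "publication", 3, "Some Publication #1" ], [ "publication", 4, "Some Publication #2" ] ] }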
// render the image sizes
let tabList = [] ;
let tabPanels = [] ;
if ( dbImages ) {
function toKB( n ) { return ( n / 1024 ).toFixed( 1 ) ; }
for ( let key of [ "publisher", "publication", "article" ] ) {
const tableName2 = key[0].toUpperCase() + key.substring(1) ;
tabList.push(
<Tab key={key}> {tableName2+"s"} </Tab>
) ;
let rows = [] ;
for ( let row of dbImages[key] ) {
const url = gAppRef.makeAppUrl( "/" + key + "/" + row[1] ) ;
// NOTE: Loading every image will be expensive, but we assume we're talking to a local server.
// Otherwise, we could use a generic "preview" image, and expand it out to the real image
// when the user clicks on it.
const imageUrl = gAppRef.makeFlaskImageUrl( key, row[1] ) ;
rows.push( <tr key={row}>
<td> <PreviewableImage url={imageUrl} /> </td>
<td> {toKB(row[0])} </td>
<td> <Link to={url} dangerouslySetInnerHTML={{__html:row[2]}} /> </td>
</tr>
) ;
}
tabPanels.push( <TabPanel key={key}>
{ rows.length === 0 ? "No images found." :
<table className="image-sizes"><tbody>
<tr><th style={{width:"1.25em"}}/><th style={{paddingRight:"0.5em"}}> Size (KB) </th><th> {tableName2} </th></tr>
{rows}
</tbody></table>
}
</TabPanel>
) ;
}
}
const imageSizes = tabList.length > 0 && ( <Tabs>
<TabList> {tabList} </TabList>
{tabPanels}
</Tabs>
) ;
// render the component
return ( <div className="db-images">
<h2> Images { !dbImages && <img src="/images/loading.gif" className="loading" alt="Loading..." /> } </h2>
{ dupeImages.length > 0 &&
<div className="dupe-analysis"> {dupeImages} </div>
}
{imageSizes}
</div>
) ;
}
}

@@ -12,13 +12,16 @@ export class PreviewableImage extends React.Component
render() {
return ( <a href={this.props.url} className="preview" target="_blank" rel="noopener noreferrer">
<img src={this.props.url} className={this.props.className} alt={this.props.altText} />
<img src={this.props.url} className={this.props.className} style={this.props.style} alt={this.props.altText} />
</a> ) ;
}
static initPreviewableImages() {
// load the imageZoom script
$.getScript( "/jQuery/imageZoom/jquery.imageZoom.js" ) ;
$.getScript( {
url: "/jQuery/imageZoom/jquery.imageZoom.js",
cache: true,
} ) ;
// load the imageZoom CSS
let cssNode = document.createElement( "link" ) ;
cssNode.type = "text/css" ;
@@ -63,6 +66,21 @@ export class PreviewableImage extends React.Component
return buf.join( "" ) ;
}
componentDidMount() {
if ( this.props.noActivate ) {
// NOTE: We normally want PreviewableImage's to automatically activate themselves, but there is
// a common case where we don't want this to happen: when raw HTML is received from the backend
// and inserted like that into the page.
// In this case, <img> tags are fixed up by adjustHtmlForPreviewableImages() as raw HTML (i.e. not
// as a PreviewableImage instance), and so the page still needs to call activatePreviewableImages()
// to activate these. Since it's probably not a good idea to activate an image twice, in this case
// PreviewableImage instances should be created as "manually activated".
return ;
}
let $elem = $( ReactDOM.findDOMNode( this ) ) ;
$elem.imageZoom() ;
}
static activatePreviewableImages( rootNode ) {
// locate images marked as previewable and activate them
let $elems = $( ReactDOM.findDOMNode( rootNode ) ).find( "a.preview" ) ;

@@ -6,7 +6,7 @@ import { PublicationSearchResult2 } from "./PublicationSearchResult2.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLICATION_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields, isLink } from "./utils.js" ;
import { makeCollapsibleList, pluralString, updateRecord } from "./utils.js" ;
const axios = require( "axios" ) ;
@@ -21,12 +21,12 @@ export class PublicationSearchResult extends React.Component
const display_description = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "pub_description!" ] || this.props.data.pub_description
) ;
const publ = gAppRef.caches.publishers[ this.props.data.publ_id ] ;
const image_url = PublicationSearchResult.makeImageUrl( this.props.data ) ;
const parent_publ = this.props.data._parent_publ ;
const image_url = PublicationSearchResult._makeImageUrl( this.props.data ) ;
// prepare the publication's URL
let pub_url = this.props.data.pub_url ;
if ( pub_url && ! isLink(pub_url) )
if ( pub_url )
pub_url = gAppRef.makeExternalDocUrl( pub_url ) ;
// prepare the tags
@@ -58,21 +58,17 @@ export class PublicationSearchResult extends React.Component
if ( this.props.data.articles ) {
for ( let i=0 ; i < this.props.data.articles.length ; ++i ) {
const article = this.props.data.articles[ i ] ;
if ( this.props.onArticleClick ) {
// forward clicks on the article to the parent
articles.push( <div
dangerouslySetInnerHTML = {{__html: article.article_title}}
onClick = { () => this.props.onArticleClick( article.article_id ) }
style = {{ cursor: "pointer" }}
title = "Go to this article."
/> ) ;
} else {
// handle clicks on the article normally
articles.push( <Link title="Show this article."
to = { gAppRef.makeAppUrl( "/article/" + article.article_id ) }
dangerouslySetInnerHTML = {{ __html: article.article_title }}
/> ) ;
}
let onArticleClick = (evt) => {
// NOTE: We let the parent take a look at clicks first, so that it can scroll
// to the article if it's already on-screen.
if ( this.props.onArticleClick && this.props.onArticleClick( article.article_id ) )
evt.preventDefault() ;
} ;
articles.push( <Link key={article.article_id} title="Show this article."
to = { gAppRef.makeAppUrl( "/article/" + article.article_id ) }
onClick = {onArticleClick}
dangerouslySetInnerHTML = {{ __html: article.article_title }}
/> ) ;
}
}
@@ -81,10 +77,10 @@ export class PublicationSearchResult extends React.Component
<MenuButton className="sr-menu" />
<MenuList>
<MenuItem className="edit" onSelect={ () => this.onEditPublication() } >
<img src="/images/icons/edit.png" alt="Edit." /> Edit
<img src="/images/edit.png" alt="Edit." /> Edit
</MenuItem>
<MenuItem className="delete" onSelect={ () => this.onDeletePublication() } >
<img src="/images/icons/delete.png" alt="Delete." /> Delete
<img src="/images/delete.png" alt="Delete." /> Delete
</MenuItem>
</MenuList>
</Menu> ) ;
@@ -94,10 +90,10 @@ export class PublicationSearchResult extends React.Component
>
<div className="header">
{menu}
{ publ &&
{ parent_publ &&
<Link className="publisher" title="Show this publisher."
to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) }
dangerouslySetInnerHTML={{ __html: publ.publ_name }}
dangerouslySetInnerHTML={{ __html: parent_publ.publ_name }}
/>
}
<Link className="name" title="Show this publication."
@@ -111,7 +107,7 @@ export class PublicationSearchResult extends React.Component
}
</div>
<div className="content">
{ image_url && <PreviewableImage url={image_url} className="image" alt="Publication." /> }
{ image_url && <PreviewableImage url={image_url} noActivate={true} className="image" alt="Publication." /> }
<div className="description" dangerouslySetInnerHTML={{__html: display_description}} />
{ makeCollapsibleList( "Articles", articles, PUBLICATION_EXCESS_ARTICLE_THRESHOLD, {float:"left",marginBottom:"0.25em"} ) }
</div>
@@ -126,61 +122,69 @@ export class PublicationSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ;
}
static onNewPublication( notify ) {
PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/publication/create", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.pub_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ;
gAppRef.closeModalForm() ;
// NOTE: The parent publisher will update itself in the UI to show this new publication,
// since we've just received an updated copy of the publications.
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewPublication() {
gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/publication/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
// update the UI
const newPub = resp.data.record ;
gAppRef.prependSearchResult( newPub ) ;
if ( newPub._parent_publ )
gAppRef.updatePublisher( newPub._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditPublication() {
// get the articles for this publication
let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list
PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => {
// send the updated details to the server
newVals.pub_id = this.props.data.pub_id ;
if ( articles )
newVals.article_order = articles.map( a => a.article_id ) ;
axios.post( gAppRef.makeFlaskUrl( "/publication/update", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
// NOTE: The parent publisher will update itself in the UI to show this updated publication,
// since we've just received an updated copy of the publications.
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
// get the articles for this publication
let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list
PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => {
// send the updated details to the server
newVals.pub_id = this.props.data.pub_id ;
if ( articles )
newVals.article_order = articles.map( a => a.article_id ) ;
axios.post(
gAppRef.makeFlaskUrl( "/publication/update" ), newVals
).then( resp => {
// update the UI
gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
// update the UI
const pub = resp.data.record ;
const orig_parent_publ = this.props.data._parent_publ ;
updateRecord( this.props.data, pub ) ;
if ( pub._parent_publ )
gAppRef.updatePublisher( pub._parent_publ.publ_id ) ;
if ( orig_parent_publ )
gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
// update the UI
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
@@ -208,22 +212,23 @@ export class PublicationSearchResult extends React.Component
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the publication on the server
axios.get( gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id, {list:1} ) )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
axios.get(
gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "publications", "tags" ] ) ;
// update the UI
this.props.onDelete( "pub_id", this.props.data.pub_id ) ;
resp.data.deleteArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ;
resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id, true ) ;
} ) ;
if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was deleted. </div> ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},
@@ -231,11 +236,11 @@ export class PublicationSearchResult extends React.Component
} ) ;
}
// get the publication details
axios.get( gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id ) )
.then( resp => {
axios.get(
gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id )
).then( resp => {
doDelete( resp.data.nArticles ) ;
} )
.catch( err => {
} ).catch( err => {
doDelete( err ) ;
} ) ;
}
@@ -253,15 +258,13 @@ export class PublicationSearchResult extends React.Component
}
_makeDisplayName( allowAlternateContent ) { return PublicationSearchResult.makeDisplayName( this.props.data, allowAlternateContent ) ; }
static makeImageUrl( vals ) {
static _makeImageUrl( vals ) {
let image_url = gAppRef.makeFlaskImageUrl( "publication", vals.pub_image_id ) ;
if ( ! image_url ) {
// check if the parent publisher has an image
if ( vals.publ_id ) {
const publ = gAppRef.caches.publishers[ vals.publ_id ] ;
if ( publ )
image_url = gAppRef.makeFlaskImageUrl( "publisher", publ.publ_image_id ) ;
}
const parent_publ = vals._parent_publ ;
if ( parent_publ )
image_url = gAppRef.makeFlaskImageUrl( "publisher", parent_publ.publ_image_id ) ;
}
return image_url ;
}

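The hunks above replace direct reads of gAppRef.caches.* with gAppRef.dataCache.get(...) / dataCache.refresh(...) calls that take a completion callback. A minimal sketch of what such a cache wrapper could look like, assuming one backend endpoint per key (the endpoint URLs and internal layout below are illustrative, not taken from App.js):

const axios = require( "axios" ) ;

class DataCache {

    constructor() {
        // cached backend data, keyed by name, e.g. { publishers: {...}, publications: {...}, tags: [...] }
        this.data = {} ;
    }

    get( keys, onReady ) {
        // ensure the given keys are loaded, then invoke the callback
        const missing = keys.filter( k => this.data[k] === undefined ) ;
        if ( missing.length === 0 ) {
            if ( onReady )
                onReady() ;
            return ;
        }
        this.refresh( missing, onReady ) ;
    }

    refresh( keys, onReady ) {
        // always re-fetch the given keys from the backend
        Promise.all(
            keys.map( k => axios.get( "/" + k ) ) // nb: hypothetical endpoint per key
        ).then( resps => {
            resps.forEach( ( resp, i ) => { this.data[ keys[i] ] = resp.data ; } ) ;
            if ( onReady )
                onReady() ;
        } ) ;
    }

}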
@@ -4,7 +4,7 @@ import CreatableSelect from "react-select/creatable" ;
import ReactDragListView from "react-drag-listview/lib/index.js" ;
import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ;
import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, ciCompare, isNumeric } from "./utils.js" ;
import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, ciCompare, isNumeric } from "./utils.js" ;
// --------------------------------------------------------------------
@@ -60,12 +60,13 @@ export class PublicationSearchResult2
} ;
// initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i> } ] ;
let publishers = [ { value: null, label: <i>(none)</i>, textLabel: "" } ] ;
let currPubl = publishers[0] ;
for ( let p of Object.entries(gAppRef.caches.publishers) ) {
for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( {
value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />,
textLabel: p[1].publ_name
} ) ;
if ( p[1].publ_id === vals.publ_id )
currPubl = publishers[ publishers.length-1 ] ;
@@ -76,7 +77,7 @@ export class PublicationSearchResult2
// NOTE: As a convenience, we provide a droplist of known publication names (without edition #'s),
// to make it easier to add a new edition of an existing publication.
let publications = {} ;
for ( let p of Object.entries(gAppRef.caches.publications) )
for ( let p of Object.entries( gAppRef.dataCache.data.publications ) )
publications[ p[1].pub_name ] = p[1] ;
let publications2 = [] ;
for ( let pub_name in publications ) {
@@ -93,7 +94,7 @@ export class PublicationSearchResult2
}
// initialize the tags
const tags = gAppRef.makeTagLists( vals.pub_tags ) ;
const tags = makeTagLists( vals.pub_tags ) ;
// initialize the articles
function make_article_display_name( article ) {
@@ -153,7 +154,7 @@ export class PublicationSearchResult2
<input className="pub_date" type="text" defaultValue={vals.pub_date} ref={r => refs.pub_date=r} />
</div>
<div className="row publisher"> <label className="select top"> Publisher: </label>
<Select className="react-select" classNamePrefix="react-select" options={publishers} isSearchable={true}
<Select className="react-select" classNamePrefix="react-select" options={publishers} isSearchable={true} getOptionValue={o => o.textLabel}
defaultValue = {currPubl}
ref = { r => refs.publ_id=r }
/>
@@ -206,7 +207,7 @@ export class PublicationSearchResult2
function checkForDupe( vals ) {
// check for an existing publication name/edition
for ( let pub of Object.entries(gAppRef.caches.publications) ) {
for ( let pub of Object.entries( gAppRef.dataCache.data.publications ) ) {
if ( ciCompare( pub[1].pub_name, vals.pub_name ) !== 0 )
continue ;
if ( ! pub[1].pub_edition && ! vals.pub_edition )
@@ -278,7 +279,7 @@ export class PublicationSearchResult2
// show the form
const isNew = Object.keys( vals ).length === 0 ;
const title = ( <div style={{display:"flex"}}>
<img src="/images/icons/publication.png" alt="Dialog icon." />
<img src="/images/menu/publication.png" alt="Dialog icon." />
{isNew ? "New publication" : "Edit publication"}
</div> ) ;
gAppRef.showModalForm( "publication-form",

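The publisher droplist above renders each option label as HTML, which react-select's default filter (it matches against the stringified label and value) can't search usefully. Storing a plain-text textLabel on every option and pointing getOptionValue at it is what lets typing filter the list. A stripped-down sketch of the same idea; the PublisherPicker component and its props are illustrative, the option fields follow the diff:

import React from "react" ;
import Select from "react-select" ;

function PublisherPicker( { publishers, currPublId, onChange } ) {
    // build the options: label is the visible (HTML) name, textLabel is what filtering matches on
    let options = [ { value: null, label: <i>(none)</i>, textLabel: "" } ] ;
    for ( let publ of Object.values( publishers ) ) {
        options.push( {
            value: publ.publ_id,
            label: <span dangerouslySetInnerHTML={{ __html: publ.publ_name }} />,
            textLabel: publ.publ_name,
        } ) ;
    }
    const currOption = options.find( o => o.value === currPublId ) || options[ 0 ] ;
    return ( <Select className="react-select" classNamePrefix="react-select"
        options = {options}
        defaultValue = {currOption}
        isSearchable = {true}
        getOptionValue = { o => o.textLabel } // filter/match on the plain-text name
        onChange = { o => onChange( o.value ) }
    /> ) ;
}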
@@ -5,9 +5,9 @@ import { PublisherSearchResult2 } from "./PublisherSearchResult2.js"
import "./PublisherSearchResult.css" ;
import { PublicationSearchResult } from "./PublicationSearchResult.js"
import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLISHER_EXCESS_PUBLICATION_THRESHOLD } from "./constants.js" ;
import { PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, PUBLISHER_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields } from "./utils.js" ;
import { makeCollapsibleList, pluralString, updateRecord } from "./utils.js" ;
const axios = require( "axios" ) ;
@@ -19,18 +19,19 @@ export class PublisherSearchResult extends React.Component
render() {
// prepare the basic details
const display_name = this.props.data[ "publ_name!" ] || this.props.data.publ_name ;
const display_name = this._makeDisplayName() ;
const display_description = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "publ_description!" ] || this.props.data.publ_description
) ;
const image_url = gAppRef.makeFlaskImageUrl( "publisher", this.props.data.publ_image_id ) ;
// prepare the publisher's URL
let publ_url = this.props.data.publ_url ;
if ( publ_url )
publ_url = gAppRef.makeExternalDocUrl( publ_url ) ;
// prepare the publications
let pubs = [] ;
for ( let pub of Object.entries(gAppRef.caches.publications) ) {
if ( pub[1].publ_id === this.props.data.publ_id )
pubs.push( pub[1] ) ;
}
let pubs = this.props.data.publications ;
pubs.sort( (lhs,rhs) => {
if ( lhs.pub_seqno && rhs.pub_seqno )
return rhs.pub_seqno - lhs.pub_seqno ;
@@ -46,15 +47,25 @@ export class PublisherSearchResult extends React.Component
dangerouslySetInnerHTML = {{ __html: PublicationSearchResult.makeDisplayName(p) }}
/> ) ;
// prepare any associated articles
let articles = [] ;
if ( this.props.data.articles ) {
articles = this.props.data.articles.map( a => <Link title="Show this article."
to = { gAppRef.makeAppUrl( "/article/" + a.article_id ) }
dangerouslySetInnerHTML = {{ __html: a.article_title }}
/> ) ;
articles.reverse() ;
}
// prepare the menu
const menu = ( <Menu>
<MenuButton className="sr-menu" />
<MenuList>
<MenuItem className="edit" onSelect={ () => this.onEditPublisher() } >
<img src="/images/icons/edit.png" alt="Edit." /> Edit
<img src="/images/edit.png" alt="Edit." /> Edit
</MenuItem>
<MenuItem className="delete" onSelect={ () => this.onDeletePublisher() } >
<img src="/images/icons/delete.png" alt="Delete." /> Delete
<img src="/images/delete.png" alt="Delete." /> Delete
</MenuItem>
</MenuList>
</Menu> ) ;
@@ -68,17 +79,19 @@ export class PublisherSearchResult extends React.Component
to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) }
dangerouslySetInnerHTML={{ __html: display_name }}
/>
{ this.props.data.publ_url &&
<a href={this.props.data.publ_url} className="open-link" target="_blank" rel="noopener noreferrer">
{ publ_url &&
<a href={publ_url} className="open-link" target="_blank" rel="noopener noreferrer">
<img src="/images/open-link.png" alt="Open publisher." title="Go to this publisher." />
</a>
}
</div>
<div className="content">
{ image_url && <PreviewableImage url={image_url} className="image" alt="Publisher." /> }
{ image_url && <PreviewableImage url={image_url} noActivate={true} className="image" alt="Publisher." /> }
<div className="description" dangerouslySetInnerHTML={{__html: display_description}} />
{ makeCollapsibleList( "Publications", pubs, PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, {float:"left",marginBottom:"0.25em"} ) }
{ makeCollapsibleList( "Publications", pubs, PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, {float:"left"} ) }
{ makeCollapsibleList( "Articles", articles, PUBLISHER_EXCESS_ARTICLE_THRESHOLD, {clear:"both",float:"left"} ) }
</div>
<div className="footer" />
</div> ) ;
}
@@ -86,53 +99,60 @@ export class PublisherSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ;
}
static onNewPublisher( notify ) {
PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/publisher/create", {list:1} ), newVals )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.publ_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewPublisher() {
gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/publisher/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers" ] ) ;
// update the UI
const newPubl = resp.data.record ;
gAppRef.prependSearchResult( newPubl ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditPublisher() {
PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.publ_id = this.props.data.publ_id ;
axios.post( gAppRef.makeFlaskUrl( "/publisher/update", {list:1} ), newVals )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.publ_id = this.props.data.publ_id ;
axios.post(
gAppRef.makeFlaskUrl( "/publisher/update" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers" ], () => {
// update the UI
const publ = resp.data.record ;
updateRecord( this.props.data, publ ) ;
for ( let pub of publ.publications )
gAppRef.updatePublication( pub.pub_id ) ;
// update the UI
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
}
onDeletePublisher() {
@@ -158,31 +178,30 @@ export class PublisherSearchResult extends React.Component
}
let content = ( <div>
Delete this publisher?
<div style={{margin:"0.5em 0 0.5em 2em",fontStyle:"italic"}} dangerouslySetInnerHTML={{__html: this.props.data.publ_name}} />
<div style={{margin:"0.5em 0 0.5em 2em",fontStyle:"italic"}} dangerouslySetInnerHTML={{__html: this._makeDisplayName()}} />
{warning}
</div> ) ;
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the publisher on the server
axios.get( gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id, {list:1} ) )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
gAppRef.caches.publications = resp.data.publications ; // nb: because of cascading deletes
axios.get(
gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers", "publications" ] ) ;
// update the UI
this.props.onDelete( "publ_id", this.props.data.publ_id ) ;
resp.data.deletedPublications.forEach( pub_id => {
this.props.onDelete( "pub_id", pub_id ) ;
this.props.onDelete( "pub_id", pub_id, true ) ;
} ) ;
resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ;
this.props.onDelete( "article_id", article_id, true ) ;
} ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was deleted. </div> ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},
@@ -190,13 +209,19 @@ export class PublisherSearchResult extends React.Component
} ) ;
} ;
// get the publisher details
axios.get( gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id ) )
.then( resp => {
axios.get(
gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id )
).then( resp => {
doDelete( resp.data.nPublications, resp.data.nArticles ) ;
} )
.catch( err => {
} ).catch( err => {
doDelete( err ) ;
} ) ;
}
static makeDisplayName( vals ) {
// return the publisher's display name
return vals["publ_name!"] || vals.publ_name ;
}
_makeDisplayName() { return PublisherSearchResult.makeDisplayName( this.props.data ) ; }
}

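The new makeDisplayName() helper prefers the "publ_name!" field, following the convention (described in the removed removeSpecialFields() comment further down) that field names ending in "!" carry backend-supplied copies of a value with search terms highlighted. A small illustration with made-up values:

// a search for "multi" might come back with both the raw and the highlighted name
const hit = {
    publ_name: "Multi-Man Publishing",
    "publ_name!": "<span class='hilite'>Multi</span>-Man Publishing", // nb: markup is illustrative
} ;
PublisherSearchResult.makeDisplayName( hit ) ; // => the highlighted "publ_name!" variant
PublisherSearchResult.makeDisplayName( { publ_name: "Avalon Hill" } ) ; // => "Avalon Hill"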
@@ -84,7 +84,7 @@ export class PublisherSearchResult2
function checkForDupe( publName ) {
// check for an existing publisher
for ( let publ of Object.entries(gAppRef.caches.publishers) ) {
for ( let publ of Object.entries( gAppRef.dataCache.data.publishers ) ) {
if ( ciCompare( publName, publ[1].publ_name ) === 0 )
return true ;
}
@@ -136,7 +136,7 @@ export class PublisherSearchResult2
// show the form
const isNew = Object.keys( vals ).length === 0 ;
const title = ( <div style={{display:"flex"}}>
<img src="/images/icons/publisher.png" alt="Dialog icon." />
<img src="/images/menu/publisher.png" alt="Dialog icon." />
{isNew ? "New publisher" : "Edit publisher"}
</div> ) ;
gAppRef.showModalForm( "publisher-form",

@@ -2,5 +2,5 @@
#search-form .caption { line-height: 22px ; }
#search-form .query { flex: 1 ; min-width: 5em ; max-width: 30em ; margin: 0 0.25em 0 0.5em ; }
#search-form button[type="submit"] { width: 28px ; height: 28px ;
background: url("/images/search.png") transparent no-repeat 2px 2px ; background-size: 20px ;
background: url("/public/images/search.png") transparent no-repeat 2px 2px ; background-size: 20px ;
}

@@ -6,14 +6,14 @@
.search-result button.sr-menu {
width: 1em ; height: 1em ; float: right ; margin-right: -3px ;
background: url("/images/menu.png") transparent no-repeat ; background-size: 100% ; border: none ;
background: url("/public/images/menu.png") transparent no-repeat ; background-size: 100% ; border: none ;
cursor: pointer ;
}
.search-result .header { padding: 2px 5px ; border-top-right-radius: 5px ; }
.search-result .header a { text-decoration: none ; }
.search-result .header a.open-link { margin-left: 0.5em ; }
.search-result .header a.open-link img { height: 1.2em ; margin-bottom: -0.2em ; }
.search-result .header a.open-link img { height: 1em ; margin-bottom: -0.15em ; }
.search-result.publisher .header { border: 1px solid #c0c0c0 ; background: #eabe51 ; }
.search-result.publisher .header a.name { color: inherit ; text-decoration: none ; }
@@ -24,7 +24,7 @@
.search-result.article .header .subtitle { font-size: 80% ; font-style: italic ; color: #333 ; }
.search-result.article .header .subtitle i { color: #666 ; }
.search-result.publication .header .publisher , .search-result.article .header .publication {
.search-result.publication .header .publisher , .search-result.article .header .publication, .search-result.article .header .publisher {
float: right ; margin-right: 0.5em ; font-size: 80% ; font-style: italic ; color: #444 ;
}
@@ -32,14 +32,12 @@
.search-result .content p:not(:first-child) { margin-top: 0.25em ; }
.search-result .content ul p, .search-result .content ol p { margin-top: 0.1em ; }
.search-result .content i i { color: #666 ; }
.search-result .content a.aslrb { color: #804040 ; text-decoration: none ; border-bottom: 1px dotted #804040 ; }
.search-result .content a.aslrb { color: red ; text-decoration: none ; }
.search-result .content a.aslrb:hover { background: #ffffcc ; }
.search-result .content .image { float: left ; margin: 0.25em 0.5em 0.5em 0 ; max-height: 8em ; max-width: 6em ; }
.search-result .content .collapsible { margin-top:0.5em ; font-size: 90% ; color: #333 ; }
.search-result .content .collapsible { margin-top: 0.5em ; font-size: 90% ; color: #333 ; }
.search-result .content .collapsible a { color: #333 ; text-decoration: none ; }
.search-result .content .collapsible .caption img { height: 0.75em ; margin-left: 0.25em ; }
.search-result .content .collapsible .count { font-size: 80% ; font-style: italic ; color: #666 ; }
.search-result .content .collapsible ul { margin: 0 0 0 1em ; }
.search-result .content .collapsible .more { font-size: 80% ; font-style: italic ; color: #666 ; cursor: pointer ; }
.search-result .footer { clear: both ; padding: 0 5px ; font-size: 80% ; font-style: italic ; color: #666 ; }
.search-result .footer a { color: #666 ; text-decoration: none ; }

@@ -34,47 +34,63 @@ export class SearchResults extends React.Component
// track articles
let articleRefs = {} ;
function scrollToArticle( article_id ) {
// NOTE: If the user has clicked on an article that has been listed as part of a publication,
// we just scroll to that article (since articles are also returned as part of the search results
// when searching for a publication).
// NOTE: We could do the same thing when clicking on a publication that has been listed as part
// of a publisher, but in this case, it's probably better UX to show the publication's page,
// along with its articles.
const node = ReactDOM.findDOMNode( articleRefs[article_id] ) ;
if ( node )
if ( node ) {
node.scrollIntoView() ;
else
document.location = gAppRef.makeAppUrl( "/article/" + article_id ) ;
return true ;
}
}
// render the search results
results = [] ;
this.props.searchResults.forEach( sr => {
if ( sr.type === "publisher" ) {
if ( sr._type === "publisher" ) {
results.push( <PublisherSearchResult key={"publisher:"+sr.publ_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
onDelete = { (n,v,i) => this.onDeleteSearchResult( n, v, i ) }
/> ) ;
} else if ( sr.type === "publication" ) {
} else if ( sr._type === "publication" ) {
results.push( <PublicationSearchResult key={"publication:"+sr.pub_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
onArticleClick = { this.props.type === "publication" ? (a) => scrollToArticle(a) : null }
onDelete = { (n,v,i) => this.onDeleteSearchResult( n, v, i ) }
onArticleClick = { (a) => scrollToArticle(a) }
/> ) ;
} else if ( sr.type === "article" ) {
} else if ( sr._type === "article" ) {
results.push( <ArticleSearchResult key={"article:"+sr.article_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
onDelete = { (n,v,i) => this.onDeleteSearchResult( n, v, i ) }
ref = { r => articleRefs[sr.article_id] = r }
/> ) ;
} else {
gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr.type ) ;
gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr._type ) ;
}
} ) ;
}
return <div id="search-results" seqno={this.props.seqNo}> {results} </div> ;
}
onDeleteSearchResult( idName, idVal ) {
onDeleteSearchResult( idName, idVal, ignoreNotFound ) {
// look for the specified search result
for ( let i=0 ; i < this.props.searchResults.length ; ++i ) {
const sr = this.props.searchResults[ i ] ;
if ( sr[idName] === idVal ) {
// found it - remove it from the UI
this.props.searchResults.splice( i, 1 ) ;
this.forceUpdate() ;
return ;
}
}
gAppRef.logInternalError( "Tried to delete an unknown search result.", idName+" = "+idVal ) ;
// the search result wasn't found
// NOTE: This would normally indicate an internal coding error, but there is one case where
// it can happen: when a publisher or publication is deleted, we want to also delete all
// their child objects, but they may not necessarily be on-screen.
if ( ! ignoreNotFound ) {
gAppRef.logInternalError(
"Tried to delete an unknown search result.", idName+" = "+idVal
) ;
}
}
}

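The extra ignoreNotFound argument supports cascading deletes: when a publisher or publication is deleted, the server also deletes its child objects, and those children may not be among the on-screen search results. The delete handlers above use it roughly like this (simplified from PublisherSearchResult.onDeletePublisher()):

// the publisher itself should be on-screen - log an internal error if it isn't
this.props.onDelete( "publ_id", this.props.data.publ_id ) ;
// its children might not be on-screen, so suppress the "unknown search result" error for them
resp.data.deletedPublications.forEach( pub_id =>
    this.props.onDelete( "pub_id", pub_id, true )
) ;
resp.data.deletedArticles.forEach( article_id =>
    this.props.onDelete( "article_id", article_id, true )
) ;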
@@ -3,6 +3,7 @@ export const APP_NAME = "ASL Articles" ;
export const MAX_IMAGE_UPLOAD_SIZE = ( 1 * 1024*1024 ) ;
export const PUBLISHER_EXCESS_PUBLICATION_THRESHOLD = 5 ;
export const PUBLISHER_EXCESS_ARTICLE_THRESHOLD = 5 ;
export const PUBLICATION_EXCESS_ARTICLE_THRESHOLD = 8 ;
export const NEW_ARTICLE_PUB_PRIORITY_CUTOFF = ( 24 * 60 * 60 ) ;

@@ -2,15 +2,15 @@
body {
padding: 5px ;
font-size: 16px ;
font-family: Arial, Helvetica, sans-serif ; font-size: 16px ;
-webkit-font-smoothing: antialiased ;
-moz-osx-font-smoothing: grayscale ;
}
h1:not(:first-child), h2:not(:first-child), h3:not(:first-child), h4:not(:first-child), h5:not(:first-child), h6:not(:first-child) { margin-top: 0.25em ; }
ul, ol { margin: 0 0 0 1.25em ; }
ul { list-style-image: url("/images/bullet.png") }
ul ul, ol ul { list-style-image: url("/images/bullet2.png") }
ul { list-style-image: url("/public/images/bullet.png") }
ul ul, ol ul { list-style-image: url("/public/images/bullet2.png") }
input[type="text"] { height: 22px ; border: 1px solid #c5c5c5 ; padding: 2px 5px ; }
label { height: 1.25em ; margin-top: -3px ; }
textarea { padding: 2px 5px ; resize: vertical ; }
@@ -18,7 +18,7 @@ pre { font-size: 90% ; }
blockquote {
margin: .5em 1em .75em 1em ; padding: 5px 5px 5px 15px ;
border: 1px solid #ddd ; background: #fffff0 ;
background-image: url( "/images/blockquote.png" ) ; background-position: 2px 5px ; background-repeat: no-repeat ;
background-image: url( "/public/images/blockquote.png" ) ; background-position: 2px 5px ; background-repeat: no-repeat ;
font-style: italic ;
}

@@ -5,19 +5,21 @@ import { App, gAppRef } from "./App" ;
import { PublicationSearchResult } from "./PublicationSearchResult" ;
import "./index.css" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
ReactDOM.render(
<BrowserRouter>
<Switch>
<Route path="/publishers" render={ (props) => <App {...props} type="publishers" key="publishers"
doSearch = { () => gAppRef._showPublishers() }
doSearch = { () => gAppRef._showPublishers(false) }
/> } />
<Route path="/technique" render={ (props) => <App {...props} type="technique" key="technique"
doSearch = { () => gAppRef._showTechniqueArticles() }
doSearch = { () => gAppRef._showTechniqueArticles(false) }
/> } />
<Route path="/tips" render={ (props) => <App {...props} type="tips" key="tips"
doSearch = { () => gAppRef._showTipsArticles() }
doSearch = { () => gAppRef._showTipsArticles(false) }
/> } />
<Route path="/publisher/:publId" render={ (props) => <App {...props} type="publisher" key={"publ:"+props.match.params.publId}
doSearch = { () => gAppRef.runSpecialSearch( "/search/publisher/"+gAppRef.props.match.params.publId, null,
@@ -38,8 +40,12 @@ ReactDOM.render(
<Route path="/author/:authorId" render={ (props) => <App {...props} type="author" key={"author:"+props.match.params.authorId}
doSearch = { () => gAppRef.runSpecialSearch( "/search/author/"+gAppRef.props.match.params.authorId, null,
() => {
const author = gAppRef.caches.authors[ gAppRef.props.match.params.authorId ] ;
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" )
axios.get(
gAppRef.makeFlaskUrl( "/author/" + gAppRef.props.match.params.authorId )
).then( resp => {
const author = resp.data ;
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" )
} ) ;
}
) }
/> } />
@@ -48,6 +54,7 @@ ReactDOM.render(
() => gAppRef.setWindowTitle( gAppRef.props.match.params.tag )
) }
/> } />
<Route path="/report" render={ (props) => <App {...props} type="report" key="report" /> } />
<Route path="/" exact component={App} />
<Route path="/" render={ (props) => <App {...props} warning="Unknown URL." type="home" key="unknown-url" /> } />
</Switch>

@@ -76,6 +76,8 @@ export function confirmDiscardChanges( oldVals, newVals, accept ) {
}
}
// --------------------------------------------------------------------
export function sortSelectableOptions( options ) {
options.sort( (lhs,rhs) => {
lhs = ReactDOMServer.renderToStaticMarkup( lhs.label ) ;
@@ -100,32 +102,21 @@ export function unloadCreatableSelect( sel ) {
return vals2 ;
}
// --------------------------------------------------------------------
export function applyUpdatedVals( vals, newVals, updated, refs ) {
// NOTE: After the user has edited an object, we send the new values to the server to store in
// the database, but the server will sometimes return modified values back e.g. because unsafe HTML
// was removed, or the ID's of newly-created authors. This function applies these new values back
// into the original table of values.
for ( let r in refs )
vals[ r ] = (updated && updated[r] !== undefined) ? updated[r] : newVals[r] ;
// NOTE: We sometimes want to force an entry into the vals that doesn't have
// an associated ref (i.e. UI element) e.g. XXX_image_id.
for ( let key in updated )
vals[ key ] = updated[ key ] ;
}
export function removeSpecialFields( vals ) {
// NOTE: This removes special fields sent to us by the backend containing content that has search terms highlighted.
// We only really need to remove author names for articles, since the backend sends us these (possibly highlighted)
// as well as the ID's, but they could be incorrect after the user has edited an article. However, for consistency,
// we remove all these special fields for everything.
let keysToDelete = [] ;
for ( let key in vals ) {
if ( key[ key.length-1 ] === "!" )
keysToDelete.push( key ) ;
export function makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
// NOTE: react-select uses the "value" field to determine which choices have already been selected
// and thus should not be shown in the droplist of available choices.
let tagList = [] ;
if ( tags ) {
tags.map(
(tag) => tagList.push( { value: tag, label: tag } )
) ;
}
keysToDelete.forEach( k => delete vals[k] ) ;
// create another list for all known tags
let allTags = gAppRef.dataCache.data.tags.map(
(tag) => { return { value: tag[0], label: tag[0] } }
) ;
return [ tagList, allTags ] ;
}
// --------------------------------------------------------------------
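makeTagLists() replaces the old gAppRef.makeTagLists() and feeds the tag editors in the edit forms. A hedged sketch of how its two return values would typically be wired into a CreatableSelect (the TagEditor component is illustrative; options/defaultValue/isMulti are standard react-select props):

import CreatableSelect from "react-select/creatable" ;
import { makeTagLists } from "./utils.js" ;

function TagEditor( { tags } ) { // e.g. tags = [ "scenarios", "strategy" ]
    // tagList = the currently-selected tags, allTags = every known tag offered as a choice
    const [ tagList, allTags ] = makeTagLists( tags ) ;
    return ( <CreatableSelect className="react-select" classNamePrefix="react-select"
        options = {allTags}
        defaultValue = {tagList}
        isMulti = {true}
    /> ) ;
}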
@@ -170,7 +161,13 @@ export function parseScenarioDisplayName( displayName ) {
// --------------------------------------------------------------------
export function makeCollapsibleList( caption, vals, maxItems, style ) {
export function updateRecord( rec, newVals ) {
// update a record with new values
for ( let key in newVals )
rec[ key ] = newVals[ key ] ;
}
export function makeCollapsibleList( caption, vals, maxItems, style, listKey ) {
if ( ! vals || vals.length === 0 )
return null ;
let items=[], excessItems=[] ;
@@ -191,7 +188,7 @@ export function makeCollapsibleList( caption, vals, maxItems, style ) {
excessItemsMoreRef.style.display = show ? "none" : "block" ;
}
if ( excessItems.length === 0 )
caption = <span> {caption+":"} </span> ;
caption = <span> {caption}: </span> ;
else
caption = <span> {caption} <span className="count"> ({vals.length}) </span> </span> ;
let onClick, style2 ;
@@ -199,7 +196,7 @@ export function makeCollapsibleList( caption, vals, maxItems, style ) {
onClick = flipExcessItems ;
style2 = { cursor: "pointer" } ;
}
return ( <div className="collapsible" style={style}>
return ( <div className="collapsible" style={style} key={listKey}>
<div className="caption" onClick={onClick} style={style2} >
{caption}
{ excessItems.length > 0 && <img src="/images/collapsible-down.png" ref={r => flipButtonRef=r} alt="Show/hide extra items." /> }

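makeCollapsibleList() also gains an optional listKey parameter, which ends up as the React key on the outer collapsible div. The calls shown earlier pass only the first four arguments, but a component that renders several lists side by side (as PublisherSearchResult now does for publications and articles) could use it to give each list a stable key, e.g. (the key strings are hypothetical):

<div className="content">
    { makeCollapsibleList( "Publications", pubs, PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, {float:"left"}, "publ-pubs" ) }
    { makeCollapsibleList( "Articles", articles, PUBLISHER_EXCESS_ARTICLE_THRESHOLD, {clear:"both",float:"left"}, "publ-articles" ) }
</div>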