Changed how data is transferred between the front- and back-end.

master
Pacman Ghost 3 years ago
parent fdc287bb61
commit 95e662c9f6
  1. 110
      asl_articles/articles.py
  2. 31
      asl_articles/authors.py
  3. 66
      asl_articles/publications.py
  4. 56
      asl_articles/publishers.py
  5. 14
      asl_articles/scenarios.py
  6. 20
      asl_articles/search.py
  7. 7
      asl_articles/tags.py
  8. 2
      asl_articles/tests/test_publishers.py
  9. 24
      asl_articles/utils.py
  10. 118
      web/src/App.js
  11. 190
      web/src/ArticleSearchResult.js
  12. 18
      web/src/ArticleSearchResult2.js
  13. 59
      web/src/DataCache.js
  14. 153
      web/src/PublicationSearchResult.js
  15. 10
      web/src/PublicationSearchResult2.js
  16. 120
      web/src/PublisherSearchResult.js
  17. 2
      web/src/PublisherSearchResult2.js
  18. 10
      web/src/SearchResults.js
  19. 10
      web/src/index.js
  20. 47
      web/src/utils.js

@ -9,11 +9,10 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db from asl_articles import app, db
from asl_articles.models import Article, Author, ArticleAuthor, Scenario, ArticleScenario, ArticleImage from asl_articles.models import Article, Author, ArticleAuthor, Scenario, ArticleScenario, ArticleImage
from asl_articles.models import Publication from asl_articles.authors import get_author_vals
from asl_articles.authors import do_get_authors from asl_articles.scenarios import get_scenario_vals
from asl_articles.scenarios import do_get_scenarios
from asl_articles.tags import do_get_tags
import asl_articles.publications import asl_articles.publications
import asl_articles.publishers
from asl_articles import search from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \ from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response apply_attrs, make_ok_response
@ -34,9 +33,10 @@ def get_article( article_id ):
if not article: if not article:
abort( 404 ) abort( 404 )
_logger.debug( "- %s", article ) _logger.debug( "- %s", article )
return jsonify( get_article_vals( article ) ) deep = request.args.get( "deep" )
return jsonify( get_article_vals( article, deep ) )
def get_article_vals( article, add_type=False ): def get_article_vals( article, deep ):
"""Extract public fields from an Article record.""" """Extract public fields from an Article record."""
authors = sorted( article.article_authors, authors = sorted( article.article_authors,
key = lambda a: a.seq_no key = lambda a: a.seq_no
@ -45,22 +45,28 @@ def get_article_vals( article, add_type=False ):
key = lambda a: a.seq_no key = lambda a: a.seq_no
) )
vals = { vals = {
"_type": "article",
"article_id": article.article_id, "article_id": article.article_id,
"article_title": article.article_title, "article_title": article.article_title,
"article_subtitle": article.article_subtitle, "article_subtitle": article.article_subtitle,
"article_image_id": article.article_id if article.article_image else None, "article_image_id": article.article_id if article.article_image else None,
"article_authors": [ a.author_id for a in authors ], "article_authors": [ get_author_vals( a.parent_author ) for a in authors ],
"article_snippet": article.article_snippet, "article_snippet": article.article_snippet,
"article_pageno": article.article_pageno, "article_pageno": article.article_pageno,
"article_url": article.article_url, "article_url": article.article_url,
"article_scenarios": [ s.scenario_id for s in scenarios ], "article_scenarios": [ get_scenario_vals( s.parent_scenario ) for s in scenarios ],
"article_tags": decode_tags( article.article_tags ), "article_tags": decode_tags( article.article_tags ),
"article_rating": article.article_rating, "article_rating": article.article_rating,
"pub_id": article.pub_id, "pub_id": article.pub_id,
"publ_id": article.publ_id, "publ_id": article.publ_id,
} }
if add_type: if deep:
vals[ "type" ] = "article" vals["_parent_pub"] = asl_articles.publications.get_publication_vals(
article.parent_pub, False, False
) if article.parent_pub else None
vals["_parent_publ"] = asl_articles.publishers.get_publisher_vals(
article.parent_publ, False, False
) if article.parent_publ else None
return vals return vals
def get_article_sort_key( article ): def get_article_sort_key( article ):
@ -80,13 +86,11 @@ def create_article():
log = ( _logger, "Create article:" ) log = ( _logger, "Create article:" )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them. # NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings ) cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags ) vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# create the new article # create the new article
vals[ "time_created" ] = datetime.datetime.now() vals[ "time_created" ] = datetime.datetime.now()
@ -95,23 +99,16 @@ def create_article():
db.session.flush() db.session.flush()
new_article_id = article.article_id new_article_id = article.article_id
_set_seqno( article, article.pub_id ) _set_seqno( article, article.pub_id )
_save_authors( article, updated ) _save_authors( article )
_save_scenarios( article, updated ) _save_scenarios( article )
_save_image( article, updated ) _save_image( article )
db.session.commit() db.session.commit()
_logger.debug( "- New ID: %d", new_article_id ) _logger.debug( "- New ID: %d", new_article_id )
search.add_or_update_article( None, article, None ) search.add_or_update_article( None, article, None )
# generate the response # generate the response
extras = { "article_id": new_article_id } vals = get_article_vals( article, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
def _set_seqno( article, pub_id ): def _set_seqno( article, pub_id ):
"""Set an article's seq#.""" """Set an article's seq#."""
@ -123,7 +120,7 @@ def _set_seqno( article, pub_id ):
else: else:
article.article_seqno = None article.article_seqno = None
def _save_authors( article, updated_fields ): def _save_authors( article ):
"""Save the article's authors.""" """Save the article's authors."""
# delete the existing article-author rows # delete the existing article-author rows
@ -133,8 +130,6 @@ def _save_authors( article, updated_fields ):
# add the article-author rows # add the article-author rows
authors = request.json.get( "article_authors", [] ) authors = request.json.get( "article_authors", [] )
author_ids = []
new_authors = False
for seq_no,author in enumerate( authors ): for seq_no,author in enumerate( authors ):
if isinstance( author, int ): if isinstance( author, int ):
# this is an existing author # this is an existing author
@ -147,19 +142,12 @@ def _save_authors( article, updated_fields ):
db.session.add( author ) db.session.add( author )
db.session.flush() db.session.flush()
author_id = author.author_id author_id = author.author_id
new_authors = True
_logger.debug( "Created new author \"%s\": id=%d", author, author_id ) _logger.debug( "Created new author \"%s\": id=%d", author, author_id )
db.session.add( db.session.add(
ArticleAuthor( seq_no=seq_no, article_id=article.article_id, author_id=author_id ) ArticleAuthor( seq_no=seq_no, article_id=article.article_id, author_id=author_id )
) )
author_ids.append( author_id )
# check if we created any new authors def _save_scenarios( article ):
if new_authors:
# yup - let the caller know about them
updated_fields[ "article_authors"] = author_ids
def _save_scenarios( article, updated_fields ):
"""Save the article's scenarios.""" """Save the article's scenarios."""
# delete the existing article-scenario rows # delete the existing article-scenario rows
@ -169,8 +157,6 @@ def _save_scenarios( article, updated_fields ):
# add the article-scenario rows # add the article-scenario rows
scenarios = request.json.get( "article_scenarios", [] ) scenarios = request.json.get( "article_scenarios", [] )
scenario_ids = []
new_scenarios = False
for seq_no,scenario in enumerate( scenarios ): for seq_no,scenario in enumerate( scenarios ):
if isinstance( scenario, int ): if isinstance( scenario, int ):
# this is an existing scenario # this is an existing scenario
@ -183,19 +169,12 @@ def _save_scenarios( article, updated_fields ):
db.session.add( new_scenario ) db.session.add( new_scenario )
db.session.flush() db.session.flush()
scenario_id = new_scenario.scenario_id scenario_id = new_scenario.scenario_id
new_scenarios = True
_logger.debug( "Created new scenario \"%s [%s]\": id=%d", scenario[1], scenario[0], scenario_id ) _logger.debug( "Created new scenario \"%s [%s]\": id=%d", scenario[1], scenario[0], scenario_id )
db.session.add( db.session.add(
ArticleScenario( seq_no=seq_no, article_id=article.article_id, scenario_id=scenario_id ) ArticleScenario( seq_no=seq_no, article_id=article.article_id, scenario_id=scenario_id )
) )
scenario_ids.append( scenario_id )
# check if we created any new scenarios
if new_scenarios:
# yup - let the caller know about them
updated_fields[ "article_scenarios"] = scenario_ids
def _save_image( article, updated ): def _save_image( article ):
"""Save the article's image.""" """Save the article's image."""
# check if a new image was provided # check if a new image was provided
@ -207,7 +186,7 @@ def _save_image( article, updated ):
ArticleImage.query.filter( ArticleImage.article_id == article.article_id ).delete() ArticleImage.query.filter( ArticleImage.article_id == article.article_id ).delete()
if image_data == "{remove}": if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the article to have no image. # NOTE: The front-end sends this if it wants the article to have no image.
updated[ "article_image_id" ] = None article.article_image_id = None
return return
# add the new image to the database # add the new image to the database
@ -217,7 +196,6 @@ def _save_image( article, updated ):
db.session.add( img ) db.session.add( img )
db.session.flush() db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) ) _logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "article_image_id" ] = article.article_id
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -231,44 +209,29 @@ def update_article():
log = ( _logger, "Update article: id={}".format( article_id ) ) log = ( _logger, "Update article: id={}".format( article_id ) )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them. # NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings ) cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags ) vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# update the article # update the article
article = Article.query.get( article_id ) article = Article.query.get( article_id )
if not article: if not article:
abort( 404 ) abort( 404 )
orig_pub = Publication.query.get( article.pub_id ) if article.pub_id else None
if vals["pub_id"] != article.pub_id: if vals["pub_id"] != article.pub_id:
_set_seqno( article, vals["pub_id"] ) _set_seqno( article, vals["pub_id"] )
vals[ "time_updated" ] = datetime.datetime.now() vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( article, vals ) apply_attrs( article, vals )
_save_authors( article, updated ) _save_authors( article )
_save_scenarios( article, updated ) _save_scenarios( article )
_save_image( article, updated ) _save_image( article )
db.session.commit() db.session.commit()
search.add_or_update_article( None, article, None ) search.add_or_update_article( None, article, None )
# generate the response # generate the response
extras = {} vals = get_article_vals( article, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
pubs = []
if orig_pub and orig_pub.pub_id != article.pub_id:
pubs.append( asl_articles.publications.get_publication_vals( orig_pub, True ) )
if article.pub_id:
pub = Publication.query.get( article.pub_id )
pubs.append( asl_articles.publications.get_publication_vals( pub, True ) )
if pubs:
extras[ "_publications" ] = pubs
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -311,11 +274,4 @@ def delete_article( article_id ):
search.delete_articles( [ article ] ) search.delete_articles( [ article ] )
# generate the response # generate the response
extras = {} return make_ok_response()
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( extras=extras )

@ -1,27 +1,38 @@
""" Handle author requests. """ """ Handle author requests. """
from flask import jsonify import logging
from flask import jsonify, abort
from asl_articles import app from asl_articles import app
from asl_articles.models import Author from asl_articles.models import Author
_logger = logging.getLogger( "db" )
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@app.route( "/authors" ) @app.route( "/authors" )
def get_authors(): def get_authors():
"""Get all authors.""" """Get all authors."""
return jsonify( do_get_authors() ) return jsonify( {
author.author_id: get_author_vals( author )
for author in Author.query.all()
} )
def do_get_authors(): # ---------------------------------------------------------------------
"""Get all authors."""
# get all the authors @app.route( "/author/<author_id>" )
return { def get_author( author_id ):
r.author_id: _get_author_vals(r) """Get an author."""
for r in Author.query #pylint: disable=not-an-iterable _logger.debug( "Get author: id=%s", author_id )
} author = Author.query.get( author_id )
if not author:
abort( 404 )
vals = get_author_vals( author )
_logger.debug( "- %s", author )
return jsonify( vals )
def _get_author_vals( author ): def get_author_vals( author ):
"""Extract public fields from an Author record.""" """Extract public fields from an Author record."""
return { return {
"author_id": author.author_id, "author_id": author.author_id,

@ -10,7 +10,7 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db from asl_articles import app, db
from asl_articles.models import Publication, PublicationImage, Article from asl_articles.models import Publication, PublicationImage, Article
from asl_articles.articles import get_article_vals, get_article_sort_key from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles.tags import do_get_tags import asl_articles.publishers
from asl_articles import search from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \ from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response apply_attrs, make_ok_response
@ -24,14 +24,10 @@ _FIELD_NAMES = [ "*pub_name", "pub_edition", "pub_description", "pub_date", "pub
@app.route( "/publications" ) @app.route( "/publications" )
def get_publications(): def get_publications():
"""Get all publications.""" """Get all publications."""
return jsonify( do_get_publications() ) return jsonify( {
pub.pub_id: get_publication_vals( pub, False, False )
def do_get_publications(): for pub in Publication.query.all()
"""Get all publications.""" } )
# NOTE: The front-end maintains a cache of the publications, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publication.query.all()
return { r.pub_id: get_publication_vals(r,False) for r in results }
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -42,16 +38,20 @@ def get_publication( pub_id ):
pub = Publication.query.get( pub_id ) pub = Publication.query.get( pub_id )
if not pub: if not pub:
abort( 404 ) abort( 404 )
vals = get_publication_vals( pub, False ) vals = get_publication_vals( pub,
request.args.get( "include_articles" ),
request.args.get( "deep" )
)
# include the number of associated articles # include the number of associated articles
query = Article.query.filter_by( pub_id = pub_id ) query = Article.query.filter_by( pub_id = pub_id )
vals[ "nArticles" ] = query.count() vals[ "nArticles" ] = query.count()
_logger.debug( "- %s ; #articles=%d", pub, vals["nArticles"] ) _logger.debug( "- %s ; #articles=%d", pub, vals["nArticles"] )
return jsonify( vals ) return jsonify( vals )
def get_publication_vals( pub, include_articles, add_type=False ): def get_publication_vals( pub, include_articles, deep ):
"""Extract public fields from a Publication record.""" """Extract public fields from a Publication record."""
vals = { vals = {
"_type": "publication",
"pub_id": pub.pub_id, "pub_id": pub.pub_id,
"pub_name": pub.pub_name, "pub_name": pub.pub_name,
"pub_edition": pub.pub_edition, "pub_edition": pub.pub_edition,
@ -66,9 +66,11 @@ def get_publication_vals( pub, include_articles, add_type=False ):
} }
if include_articles: if include_articles:
articles = sorted( pub.articles, key=get_article_sort_key ) articles = sorted( pub.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a ) for a in articles ] vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
if add_type: if deep:
vals[ "type" ] = "publication" vals[ "_parent_publ" ] = asl_articles.publishers.get_publisher_vals(
pub.parent_publ, False, False
) if pub.parent_publ else None
return vals return vals
def get_publication_sort_key( pub ): def get_publication_sort_key( pub ):
@ -96,30 +98,25 @@ def create_publication():
log = ( _logger, "Create publication:" ) log = ( _logger, "Create publication:" )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them. # NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings ) cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags ) vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# create the new publication # create the new publication
vals[ "time_created" ] = datetime.datetime.now() vals[ "time_created" ] = datetime.datetime.now()
pub = Publication( **vals ) pub = Publication( **vals )
db.session.add( pub ) db.session.add( pub )
_set_seqno( pub, pub.publ_id ) _set_seqno( pub, pub.publ_id )
_save_image( pub, updated ) _save_image( pub )
db.session.commit() db.session.commit()
_logger.debug( "- New ID: %d", pub.pub_id ) _logger.debug( "- New ID: %d", pub.pub_id )
search.add_or_update_publication( None, pub, None ) search.add_or_update_publication( None, pub, None )
# generate the response # generate the response
extras = { "pub_id": pub.pub_id } vals = get_publication_vals( pub, False, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
def _set_seqno( pub, publ_id ): def _set_seqno( pub, publ_id ):
"""Set a publication's seq#.""" """Set a publication's seq#."""
@ -139,7 +136,7 @@ def _set_seqno( pub, publ_id ):
else: else:
pub.pub_seqno = None pub.pub_seqno = None
def _save_image( pub, updated ): def _save_image( pub ):
"""Save the publication's image.""" """Save the publication's image."""
# check if a new image was provided # check if a new image was provided
@ -151,7 +148,7 @@ def _save_image( pub, updated ):
PublicationImage.query.filter( PublicationImage.pub_id == pub.pub_id ).delete() PublicationImage.query.filter( PublicationImage.pub_id == pub.pub_id ).delete()
if image_data == "{remove}": if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publication to have no image. # NOTE: The front-end sends this if it wants the publication to have no image.
updated[ "pub_image_id" ] = None pub.pub_image_id = None
return return
# add the new image to the database # add the new image to the database
@ -161,7 +158,6 @@ def _save_image( pub, updated ):
db.session.add( img ) db.session.add( img )
db.session.flush() db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) ) _logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "pub_image_id" ] = pub.pub_id
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -175,14 +171,12 @@ def update_publication():
log = ( _logger, "Update publication: id={}".format( pub_id ) ) log = ( _logger, "Update publication: id={}".format( pub_id ) )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
article_order = request.json.get( "article_order" ) article_order = request.json.get( "article_order" )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them. # NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings ) cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags ) vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# update the publication # update the publication
pub = Publication.query.get( pub_id ) pub = Publication.query.get( pub_id )
@ -192,7 +186,7 @@ def update_publication():
_set_seqno( pub, vals["publ_id"] ) _set_seqno( pub, vals["publ_id"] )
vals[ "time_updated" ] = datetime.datetime.now() vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( pub, vals ) apply_attrs( pub, vals )
_save_image( pub, updated ) _save_image( pub )
if article_order: if article_order:
query = Article.query.filter( Article.pub_id == pub_id ) query = Article.query.filter( Article.pub_id == pub_id )
articles = { int(a.article_id): a for a in query } articles = { int(a.article_id): a for a in query }
@ -212,11 +206,8 @@ def update_publication():
search.add_or_update_publication( None, pub, None ) search.add_or_update_publication( None, pub, None )
# generate the response # generate the response
extras = {} vals = get_publication_vals( pub, False, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -243,8 +234,5 @@ def delete_publication( pub_id ):
search.delete_articles( deleted_articles ) search.delete_articles( deleted_articles )
# generate the response # generate the response
extras = { "deleteArticles": deleted_articles } extras = { "deletedArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( extras=extras ) return make_ok_response( extras=extras )

@ -8,7 +8,7 @@ from flask import request, jsonify, abort
from asl_articles import app, db from asl_articles import app, db
from asl_articles.models import Publisher, PublisherImage, Publication, Article from asl_articles.models import Publisher, PublisherImage, Publication, Article
from asl_articles.publications import do_get_publications from asl_articles.publications import get_publication_vals, get_publication_sort_key
from asl_articles.articles import get_article_vals, get_article_sort_key from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles import search from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, make_ok_response, apply_attrs from asl_articles.utils import get_request_args, clean_request_args, make_ok_response, apply_attrs
@ -22,14 +22,10 @@ _FIELD_NAMES = [ "*publ_name", "publ_description", "publ_url" ]
@app.route( "/publishers" ) @app.route( "/publishers" )
def get_publishers(): def get_publishers():
"""Get all publishers.""" """Get all publishers."""
return jsonify( _do_get_publishers() ) return jsonify( {
publ.publ_id: get_publisher_vals( publ, False, False )
def _do_get_publishers(): for publ in Publisher.query.all()
"""Get all publishers.""" } )
# NOTE: The front-end maintains a cache of the publishers, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publisher.query.all()
return { r.publ_id: get_publisher_vals(r,False) for r in results }
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -41,8 +37,10 @@ def get_publisher( publ_id ):
publ = Publisher.query.get( publ_id ) publ = Publisher.query.get( publ_id )
if not publ: if not publ:
abort( 404 ) abort( 404 )
include_articles = request.args.get( "include_articles" ) vals = get_publisher_vals( publ,
vals = get_publisher_vals( publ, include_articles ) request.args.get( "include_pubs" ),
request.args.get( "include_articles" )
)
# include the number of associated publications # include the number of associated publications
query = Publication.query.filter_by( publ_id = publ_id ) query = Publication.query.filter_by( publ_id = publ_id )
vals[ "nPublications" ] = query.count() vals[ "nPublications" ] = query.count()
@ -56,20 +54,22 @@ def get_publisher( publ_id ):
_logger.debug( "- %s ; #publications=%d ; #articles=%d", publ, vals["nPublications"], vals["nArticles"] ) _logger.debug( "- %s ; #publications=%d ; #articles=%d", publ, vals["nPublications"], vals["nArticles"] )
return jsonify( vals ) return jsonify( vals )
def get_publisher_vals( publ, include_articles, add_type=False ): def get_publisher_vals( publ, include_pubs, include_articles ):
"""Extract public fields from a Publisher record.""" """Extract public fields from a Publisher record."""
vals = { vals = {
"_type": "publisher",
"publ_id": publ.publ_id, "publ_id": publ.publ_id,
"publ_name": publ.publ_name, "publ_name": publ.publ_name,
"publ_description": publ.publ_description, "publ_description": publ.publ_description,
"publ_url": publ.publ_url, "publ_url": publ.publ_url,
"publ_image_id": publ.publ_id if publ.publ_image else None, "publ_image_id": publ.publ_id if publ.publ_image else None,
} }
if include_pubs:
pubs = sorted( publ.publications, key=get_publication_sort_key )
vals[ "publications" ] = [ get_publication_vals( p, False, False ) for p in pubs ]
if include_articles: if include_articles:
articles = sorted( publ.articles, key=get_article_sort_key ) articles = sorted( publ.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ] vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
if add_type:
vals[ "type" ] = "publisher"
return vals return vals
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -83,24 +83,22 @@ def create_publisher():
log = ( _logger, "Create publisher:" ) log = ( _logger, "Create publisher:" )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# create the new publisher # create the new publisher
vals[ "time_created" ] = datetime.datetime.now() vals[ "time_created" ] = datetime.datetime.now()
publ = Publisher( **vals ) publ = Publisher( **vals )
db.session.add( publ ) db.session.add( publ )
_save_image( publ, updated ) _save_image( publ )
db.session.commit() db.session.commit()
_logger.debug( "- New ID: %d", publ.publ_id ) _logger.debug( "- New ID: %d", publ.publ_id )
search.add_or_update_publisher( None, publ, None ) search.add_or_update_publisher( None, publ, None )
# generate the response # generate the response
extras = { "publ_id": publ.publ_id } vals = get_publisher_vals( publ, True, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
def _save_image( publ, updated ): def _save_image( publ ):
"""Save the publisher's image.""" """Save the publisher's image."""
# check if a new image was provided # check if a new image was provided
@ -112,7 +110,7 @@ def _save_image( publ, updated ):
PublisherImage.query.filter( PublisherImage.publ_id == publ.publ_id ).delete() PublisherImage.query.filter( PublisherImage.publ_id == publ.publ_id ).delete()
if image_data == "{remove}": if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publisher to have no image. # NOTE: The front-end sends this if it wants the publisher to have no image.
updated[ "publ_image_id" ] = None publ.publ_image_id = None
return return
# add the new image to the database # add the new image to the database
@ -122,7 +120,6 @@ def _save_image( publ, updated ):
db.session.add( img ) db.session.add( img )
db.session.flush() db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) ) _logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "publ_image_id" ] = publ.publ_id
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -136,23 +133,21 @@ def update_publisher():
log = ( _logger, "Update publisher: id={}".format( publ_id ) ) log = ( _logger, "Update publisher: id={}".format( publ_id ) )
) )
warnings = [] warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger ) clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# update the publisher # update the publisher
publ = Publisher.query.get( publ_id ) publ = Publisher.query.get( publ_id )
if not publ: if not publ:
abort( 404 ) abort( 404 )
_save_image( publ, updated ) _save_image( publ )
vals[ "time_updated" ] = datetime.datetime.now() vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( publ, vals ) apply_attrs( publ, vals )
db.session.commit() db.session.commit()
search.add_or_update_publisher( None, publ, None ) search.add_or_update_publisher( None, publ, None )
# generate the response # generate the response
extras = {} vals = get_publisher_vals( publ, True, True )
if request.args.get( "list" ): return make_ok_response( record=vals, warnings=warnings )
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
# --------------------------------------------------------------------- # ---------------------------------------------------------------------
@ -186,7 +181,4 @@ def delete_publisher( publ_id ):
search.delete_articles( deleted_articles ) search.delete_articles( deleted_articles )
extras = { "deletedPublications": deleted_pubs, "deletedArticles": deleted_articles } extras = { "deletedPublications": deleted_pubs, "deletedArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
extras[ "publications" ] = do_get_publications()
return make_ok_response( extras=extras ) return make_ok_response( extras=extras )

@ -10,16 +10,12 @@ from asl_articles.models import Scenario
@app.route( "/scenarios" ) @app.route( "/scenarios" )
def get_scenarios(): def get_scenarios():
"""Get all scenarios.""" """Get all scenarios."""
return jsonify( do_get_scenarios() ) return jsonify( {
scenario.scenario_id: get_scenario_vals( scenario )
for scenario in Scenario.query.all()
} )
def do_get_scenarios(): def get_scenario_vals( scenario ):
"""Get all scenarios."""
return {
s.scenario_id: _get_scenario_vals( s )
for s in Scenario.query #pylint: disable=not-an-iterable
}
def _get_scenario_vals( scenario ):
"""Extract public fields from a scenario record.""" """Extract public fields from a scenario record."""
return { return {
"scenario_id": scenario.scenario_id, "scenario_id": scenario.scenario_id,

@ -161,17 +161,13 @@ def search_article( article_id ):
article = Article.query.get( article_id ) article = Article.query.get( article_id )
if not article: if not article:
return jsonify( [] ) return jsonify( [] )
article = get_article_vals( article, True ) vals = get_article_vals( article, True )
_create_aslrb_links( article ) _create_aslrb_links( vals )
results = [ article ] results = [ vals ]
if article["pub_id"]: if article.parent_pub:
pub = Publication.query.get( article["pub_id"] ) results.append( get_publication_vals( article.parent_pub, True, True ) )
if pub: if article.parent_publ:
results.append( get_publication_vals( pub, True, True ) ) results.append( get_publisher_vals( article.parent_publ, True, True ) )
if article["publ_id"]:
publ = Publisher.query.get( article["publ_id"] )
if publ:
results.append( get_publisher_vals( publ, True, True ) )
return jsonify( results ) return jsonify( results )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -305,7 +301,7 @@ def _do_fts_search( fts_query_string, col_names, results=None ): #pylint: disabl
# prepare the result for the front-end # prepare the result for the front-end
result = globals()[ "_get_{}_vals".format( owner_type ) ]( obj ) result = globals()[ "_get_{}_vals".format( owner_type ) ]( obj )
result[ "type" ] = owner_type result[ "_type" ] = owner_type
result[ "rank" ] = row[1] result[ "rank" ] = row[1]
# return highlighted versions of the content to the caller # return highlighted versions of the content to the caller

@ -13,12 +13,7 @@ from asl_articles.utils import decode_tags
@app.route( "/tags" ) @app.route( "/tags" )
def get_tags(): def get_tags():
"""Get all tags.""" """Get all tags."""
return jsonify( do_get_tags() )
def do_get_tags():
"""Get all tags."""
# get all the tags
# NOTE: This is pretty inefficient, since an article/publication's tags are munged into one big string # NOTE: This is pretty inefficient, since an article/publication's tags are munged into one big string
# and stored in a single column, so we need to manually unpack everything, but we'll see how it goes... # and stored in a single column, so we need to manually unpack everything, but we'll see how it goes...
tags = defaultdict( int ) tags = defaultdict( int )
@ -36,4 +31,4 @@ def do_get_tags():
key = lambda v: ( -v[1], v[0] ) # sort by # instances, then name key = lambda v: ( -v[1], v[0] ) # sort by # instances, then name
) )
return tags return jsonify( tags )

@ -399,7 +399,7 @@ def test_publication_lists( webdriver, flask_app, dbconn ):
publ_sr = find_search_result( publ_name, results ) publ_sr = find_search_result( publ_name, results )
pubs = find_child( ".collapsible", publ_sr ) pubs = find_child( ".collapsible", publ_sr )
if pub_name: if pub_name:
# check that the publisher appears in the publisher's search result # check that the publication appears in the publisher's search result
assert find_child( ".caption", pubs ).text == "Publications:" assert find_child( ".caption", pubs ).text == "Publications:"
pubs = find_children( "li", pubs ) pubs = find_children( "li", pubs )
assert len(pubs) == 1 assert len(pubs) == 1

@ -39,19 +39,17 @@ def get_request_args( vals, arg_names, log=None ):
def clean_request_args( vals, fields, warnings, logger ): def clean_request_args( vals, fields, warnings, logger ):
"""Clean incoming data.""" """Clean incoming data."""
cleaned = {}
for f in fields: for f in fields:
if f.endswith( "_url" ): if f.endswith( "_url" ):
continue # nb: don't clean URL's continue # nb: don't clean URL's
f = _parse_arg_name( f )[ 0 ] f = _parse_arg_name( f )[ 0 ]
if isinstance( vals[f], str ): if not isinstance( vals[f], str ):
val2 = clean_html( vals[f] ) continue
if val2 != vals[f]: val2 = clean_html( vals[f] )
vals[f] = val2 if val2 != vals[f]:
cleaned[f] = val2 vals[f] = val2
logger.debug( "Cleaned HTML: %s => %s", f, val2 ) logger.debug( "Cleaned HTML: %s => %s", f, val2 )
warnings.append( "Some values had HTML cleaned up." ) warnings.append( "Some values had HTML cleaned up." )
return cleaned
def _parse_arg_name( arg_name ): def _parse_arg_name( arg_name ):
"""Parse a request argument name.""" """Parse a request argument name."""
@ -59,15 +57,15 @@ def _parse_arg_name( arg_name ):
return ( arg_name[1:], True ) # required argument return ( arg_name[1:], True ) # required argument
return ( arg_name, False ) # optional argument return ( arg_name, False ) # optional argument
def make_ok_response( extras=None, updated=None, warnings=None ): def make_ok_response( extras=None, record=None, warnings=None ):
"""Generate a Flask 'success' response.""" """Generate a Flask 'success' response."""
resp = { "status": "OK" } resp = { "status": "OK" }
if extras: if extras:
resp.update( extras ) resp.update( extras )
if updated: if record:
resp[ "updated" ] = updated resp["record"] = record
if warnings: if warnings:
resp[ "warnings" ] = list( set( warnings ) ) # nb: remove duplicate messages resp["warnings"] = list( set( warnings ) ) # nb: remove duplicate messages
return jsonify( resp ) return jsonify( resp )
# --------------------------------------------------------------------- # ---------------------------------------------------------------------

@ -12,6 +12,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult" ;
import { ArticleSearchResult } from "./ArticleSearchResult" ; import { ArticleSearchResult } from "./ArticleSearchResult" ;
import ModalForm from "./ModalForm"; import ModalForm from "./ModalForm";
import AskDialog from "./AskDialog" ; import AskDialog from "./AskDialog" ;
import { DataCache } from "./DataCache" ;
import { PreviewableImage } from "./PreviewableImage" ; import { PreviewableImage } from "./PreviewableImage" ;
import { makeSmartBulletList } from "./utils.js" ; import { makeSmartBulletList } from "./utils.js" ;
import { APP_NAME } from "./constants.js" ; import { APP_NAME } from "./constants.js" ;
@ -36,11 +37,14 @@ export class App extends React.Component
searchSeqNo: 0, searchSeqNo: 0,
modalForm: null, modalForm: null,
askDialog: null, askDialog: null,
startupTasks: [ "caches.publishers", "caches.publications", "caches.authors", "caches.scenarios", "caches.tags" ], startupTasks: [ "dummy" ], // FUDGE! We need at least one startup task.
} ; } ;
gAppRef = this ; gAppRef = this ;
this.setWindowTitle( null ) ; this.setWindowTitle( null ) ;
// initialize the data cache
this.dataCache = new DataCache() ;
// initialize // initialize
this.args = queryString.parse( window.location.search ) ; this.args = queryString.parse( window.location.search ) ;
this._storeMsgs = this.isTestMode() && this.args.store_msgs ; this._storeMsgs = this.isTestMode() && this.args.store_msgs ;
@ -98,13 +102,13 @@ export class App extends React.Component
<img src="/images/icons/tips.png" alt="Show tip articles." /> Show tips <img src="/images/icons/tips.png" alt="Show tip articles." /> Show tips
</MenuItem> </MenuItem>
<div className="divider" /> <div className="divider" />
<MenuItem id="menu-new-publisher" onSelect={ () => PublisherSearchResult.onNewPublisher( this._onNewPublisher.bind(this) ) } > <MenuItem id="menu-new-publisher" onSelect={PublisherSearchResult.onNewPublisher} >
<img src="/images/icons/publisher.png" alt="New publisher." /> New publisher <img src="/images/icons/publisher.png" alt="New publisher." /> New publisher
</MenuItem> </MenuItem>
<MenuItem id="menu-new-publication" onSelect={ () => PublicationSearchResult.onNewPublication( this._onNewPublication.bind(this) ) } > <MenuItem id="menu-new-publication" onSelect={PublicationSearchResult.onNewPublication} >
<img src="/images/icons/publication.png" alt="New publication." /> New publication <img src="/images/icons/publication.png" alt="New publication." /> New publication
</MenuItem> </MenuItem>
<MenuItem id="menu-new-article" onSelect={ () => ArticleSearchResult.onNewArticle( this._onNewArticle.bind(this) ) } > <MenuItem id="menu-new-article" onSelect={ArticleSearchResult.onNewArticle} >
<img src="/images/icons/article.png" alt="New article." /> New article <img src="/images/icons/article.png" alt="New article." /> New article
</MenuItem> </MenuItem>
</MenuList> </MenuList>
@ -122,7 +126,6 @@ export class App extends React.Component
<SearchResults ref={this._searchResultsRef} <SearchResults ref={this._searchResultsRef}
seqNo = {this.state.searchSeqNo} seqNo = {this.state.searchSeqNo}
searchResults = {this.state.searchResults} searchResults = {this.state.searchResults}
type = {this.props.type}
/> />
</div> ) ; </div> ) ;
} }
@ -157,30 +160,16 @@ export class App extends React.Component
// check if the server started up OK // check if the server started up OK
let on_startup_ok = () => { let on_startup_ok = () => {
// the backend server started up OK, continue our startup process // the backend server started up OK, continue our startup process
// initialize the caches this._onStartupTask( "dummy" ) ;
// NOTE: We maintain caches of key objects, so that we can quickly populate droplists. The backend server returns
// updated lists after any operation that could change them (create/update/delete), which is simpler and less error-prone
// than trying to manually keep our caches in sync. It's less efficient, but it won't happen too often, there won't be
// too many entries, and the database server is local.
this.caches = {} ;
[ "publishers", "publications", "authors", "scenarios", "tags" ].forEach( type => {
axios.get( this.makeFlaskUrl( "/" + type ) )
.then( resp => {
this.caches[ type ] = resp.data ;
this._onStartupTask( "caches." + type ) ;
} )
.catch( err => {
this.showErrorToast( <div> Couldn't load the {type}: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} }
let on_startup_failure = () => { let on_startup_failure = () => {
// the backend server had problems during startup; we hide the spinner // the backend server had problems during startup; we hide the spinner
// and leave the error message(s) on-screen. // and leave the error message(s) on-screen.
document.getElementById( "loading" ).style.display = "none" ; document.getElementById( "loading" ).style.display = "none" ;
} }
axios.get( this.makeFlaskUrl( "/startup-messages" ) ) axios.get(
.then( resp => { this.makeFlaskUrl( "/startup-messages" )
).then( resp => {
// show any messages logged by the backend server as it started up // show any messages logged by the backend server as it started up
[ "info", "warning", "error" ].forEach( msgType => { [ "info", "warning", "error" ].forEach( msgType => {
if ( resp.data[ msgType ] ) { if ( resp.data[ msgType ] ) {
@ -200,8 +189,7 @@ export class App extends React.Component
on_startup_failure() ; on_startup_failure() ;
else else
on_startup_ok() ; on_startup_ok() ;
} ) } ).catch( err => {
.catch( err => {
let errorMsg = err.toString() ; let errorMsg = err.toString() ;
if ( errorMsg.indexOf( "502" ) !== -1 || errorMsg.indexOf( "504" ) !== -1 ) if ( errorMsg.indexOf( "502" ) !== -1 || errorMsg.indexOf( "504" ) !== -1 )
this.showErrorToast( <div> Couldn't connect to the backend Flask server. </div> ) ; this.showErrorToast( <div> Couldn't connect to the backend Flask server. </div> ) ;
@ -252,14 +240,12 @@ export class App extends React.Component
args.no_hilite = this._disableSearchResultHighlighting ; args.no_hilite = this._disableSearchResultHighlighting ;
axios.post( axios.post(
this.makeFlaskUrl( url ), args this.makeFlaskUrl( url ), args
) ).then( resp => {
.then( resp => {
ReactDOM.findDOMNode( this._searchResultsRef.current ).scrollTo( 0, 0 ) ; ReactDOM.findDOMNode( this._searchResultsRef.current ).scrollTo( 0, 0 ) ;
this.setState( { searchResults: resp.data, searchSeqNo: this.state.searchSeqNo+1 } ) ; this.setState( { searchResults: resp.data, searchSeqNo: this.state.searchSeqNo+1 } ) ;
if ( onDone ) if ( onDone )
onDone() ; onDone() ;
} ) } ).catch( err => {
.catch( err => {
this.showErrorResponse( "The search query failed", err ) ; this.showErrorResponse( "The search query failed", err ) ;
this.setState( { searchResults: null, searchSeqNo: this.state.searchSeqNo+1 } ) ; this.setState( { searchResults: null, searchSeqNo: this.state.searchSeqNo+1 } ) ;
} ) ; } ) ;
@ -288,37 +274,47 @@ export class App extends React.Component
) )
} }
_onNewPublisher( publ_id, vals ) { this._addNewSearchResult( vals, "publisher", "publ_id", publ_id ) ; } prependSearchResult( sr ) {
_onNewPublication( pub_id, vals ) { this._addNewSearchResult( vals, "publication", "pub_id", pub_id ) ; } // add a new entry to the start of the search results
_onNewArticle( article_id, vals ) { this._addNewSearchResult( vals, "article", "article_id", article_id ) ; } // NOTE: We do this after creating a new object, and while it isn't really the right thing
_addNewSearchResult( vals, srType, idName, idVal ) { // to do (since the new object might not actually be a result for the current search), it's nice
// add the new search result to the start of the search results // to give the user some visual feedback.
// NOTE: This isn't really the right thing to do, since the new object might not actually be let newSearchResults = [ sr ] ;
// a result for the current search, but it's nice to give the user some visual feedback.
vals.type = srType ;
vals[ idName ] = idVal ;
let newSearchResults = [ vals ] ;
newSearchResults.push( ...this.state.searchResults ) ; newSearchResults.push( ...this.state.searchResults ) ;
this.setState( { searchResults: newSearchResults } ) ; this.setState( { searchResults: newSearchResults } ) ;
} }
updatePublications( pubs ) { updatePublisher( publ_id ) {
// update the cache // update the specified publisher in the UI
let pubs2 = {} ; this._doUpdateSearchResult(
for ( let i=0 ; i < pubs.length ; ++i ) { (sr) => ( sr._type === "publisher" && sr.publ_id === publ_id ),
const pub = pubs[ i ] ; this.makeFlaskUrl( "/publisher/" + publ_id, {include_pubs:1,include_articles:1} )
this.caches.publications[ pub.pub_id ] = pub ; ) ;
pubs2[ pub.pub_id ] = pub ; this.forceFlaskImageReload( "publisher", publ_id ) ;
} }
// update the UI updatePublication( pub_id ) {
// update the specified publication in the UI
this._doUpdateSearchResult(
(sr) => ( sr._type === "publication" && sr.pub_id === pub_id ),
this.makeFlaskUrl( "/publication/" + pub_id, {include_articles:1,deep:1} )
) ;
this.forceFlaskImageReload( "publication", pub_id ) ;
}
_doUpdateSearchResult( srCheck, url ) {
// find the target search result in the UI
let newSearchResults = this.state.searchResults ; let newSearchResults = this.state.searchResults ;
for ( let i=0 ; i < newSearchResults.length ; ++i ) { for ( let i=0 ; i < newSearchResults.length ; ++i ) {
if ( newSearchResults[i].type === "publication" && pubs2[ newSearchResults[i].pub_id ] ) { if ( srCheck( newSearchResults[i] ) ) {
newSearchResults[i] = pubs2[ newSearchResults[i].pub_id ] ; // found it - get the latest details from the backend
newSearchResults[i].type = "publication" ; axios.get( url ).then( resp => {
newSearchResults[i] = resp.data ;
this.setState( { searchResults: newSearchResults } ) ;
} ).catch( err => {
this.showErrorResponse( "Can't get the updated search result details", err ) ;
} ) ;
break ; // nb: we assume there's only 1 instance
} }
} }
this.setState( { searchResults: newSearchResults } ) ;
} }
showModalForm( formId, title, titleColor, content, buttons ) { showModalForm( formId, title, titleColor, content, buttons ) {
@ -451,18 +447,6 @@ export class App extends React.Component
console.log( " " + detail ) ; console.log( " " + detail ) ;
} }
makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
// NOTE: react-select uses the "value" field to determine which choices have already been selected
// and thus should not be shown in the droplist of available choices.
let tagList = [] ;
if ( tags )
tags.map( tag => tagList.push( { value: tag, label: tag } ) ) ;
// create another list for all known tags
let allTags = this.caches.tags.map( tag => { return { value: tag[0], label: tag[0] } } ) ;
return [ tagList, allTags ] ;
}
makeAppUrl( url ) { makeAppUrl( url ) {
// FUDGE! The test suite needs any URL parameters to passed on to the next page if a link is clicked. // FUDGE! The test suite needs any URL parameters to passed on to the next page if a link is clicked.
if ( this.isTestMode() ) if ( this.isTestMode() )
@ -532,11 +516,15 @@ export class App extends React.Component
this.showWarningToast( this.props.warning ) ; this.showWarningToast( this.props.warning ) ;
if ( this.props.doSearch ) if ( this.props.doSearch )
this.props.doSearch() ; this.props.doSearch() ;
// NOTE: We could preload the DataCache here (i.e. where it won't affect startup time),
// but it will happen on every page load (e.g. /article/NNN or /publication/NNN),
// which would probably hurt more than it helps (since the data isn't needed if the user
// is only searching for stuff i.e. most of the time).
} }
setWindowTitleFromSearchResults( srType, idField, idVal, nameField ) { setWindowTitleFromSearchResults( srType, idField, idVal, nameField ) {
for ( let sr of Object.entries( this.state.searchResults ) ) { for ( let sr of Object.entries( this.state.searchResults ) ) {
if ( sr[1].type === srType && String(sr[1][idField]) === idVal ) { if ( sr[1]._type === srType && String(sr[1][idField]) === idVal ) {
this.setWindowTitle( typeof nameField === "function" ? nameField(sr[1]) : sr[1][nameField] ) ; this.setWindowTitle( typeof nameField === "function" ? nameField(sr[1]) : sr[1][nameField] ) ;
return ; return ;
} }

@ -8,7 +8,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ; import { PreviewableImage } from "./PreviewableImage.js" ;
import { RatingStars } from "./RatingStars.js" ; import { RatingStars } from "./RatingStars.js" ;
import { gAppRef } from "./App.js" ; import { gAppRef } from "./App.js" ;
import { makeScenarioDisplayName, applyUpdatedVals, removeSpecialFields, makeCommaList, isLink } from "./utils.js" ; import { makeScenarioDisplayName, updateRecord, makeCommaList, isLink } from "./utils.js" ;
const axios = require( "axios" ) ; const axios = require( "axios" ) ;
@ -25,8 +25,8 @@ export class ArticleSearchResult extends React.Component
const display_snippet = PreviewableImage.adjustHtmlForPreviewableImages( const display_snippet = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "article_snippet!" ] || this.props.data.article_snippet this.props.data[ "article_snippet!" ] || this.props.data.article_snippet
) ; ) ;
const pub = gAppRef.caches.publications[ this.props.data.pub_id ] ; const parent_pub = this.props.data._parent_pub ;
const publ = gAppRef.caches.publishers[ this.props.data.publ_id ] ; const parent_publ = this.props.data._parent_publ ;
const image_url = gAppRef.makeFlaskImageUrl( "article", this.props.data.article_image_id ) ; const image_url = gAppRef.makeFlaskImageUrl( "article", this.props.data.article_image_id ) ;
// prepare the article's URL // prepare the article's URL
@ -34,50 +34,33 @@ export class ArticleSearchResult extends React.Component
if ( article_url ) { if ( article_url ) {
if ( ! isLink( article_url ) ) if ( ! isLink( article_url ) )
article_url = gAppRef.makeExternalDocUrl( article_url ) ; article_url = gAppRef.makeExternalDocUrl( article_url ) ;
} else if ( pub && pub.pub_url ) { } else if ( parent_pub && parent_pub.pub_url ) {
article_url = gAppRef.makeExternalDocUrl( pub.pub_url ) ; article_url = gAppRef.makeExternalDocUrl( parent_pub.pub_url ) ;
if ( article_url.substr( article_url.length-4 ) === ".pdf" && this.props.data.article_pageno ) if ( article_url.substr( article_url.length-4 ) === ".pdf" && this.props.data.article_pageno )
article_url += "#page=" + this.props.data.article_pageno ; article_url += "#page=" + this.props.data.article_pageno ;
} }
// prepare the authors // prepare the authors
let authors = [] ; let authors = [] ;
if ( this.props.data[ "authors!" ] ) { const author_names_hilite = this.props.data[ "authors!" ] ;
// the backend has provided us with a list of author names (possibly highlighted) - use them directly for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
for ( let i=0 ; i < this.props.data["authors!"].length ; ++i ) { const author = this.props.data.article_authors[ i ] ;
const author_id = this.props.data.article_authors[ i ] ; const author_name = author_names_hilite ? author_names_hilite[i] : author.author_name ;
authors.push( <Link key={i} className="author" title="Show articles from this author." authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) } to = { gAppRef.makeAppUrl( "/author/" + author.author_id ) }
dangerouslySetInnerHTML = {{ __html: this.props.data["authors!"][i] }} dangerouslySetInnerHTML = {{ __html: author_name }}
/> ) ; /> ) ;
}
} else {
// we only have a list of author ID's (the normal case) - figure out what the corresponding names are
for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
const author_id = this.props.data.article_authors[ i ] ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) }
dangerouslySetInnerHTML = {{ __html: gAppRef.caches.authors[ author_id ].author_name }}
/> ) ;
}
} }
// prepare the scenarios // prepare the scenarios
let scenarios = [] ; let scenarios = [] ;
if ( this.props.data[ "scenarios!" ] ) { const scenario_names_hilite = this.props.data[ "scenarios!" ] ;
// the backend has provided us with a list of scenarios (possibly highlighted) - use them directly for ( let i=0 ; i < this.props.data.article_scenarios.length ; ++i ) {
this.props.data[ "scenarios!" ].forEach( (scenario,i) => const scenario = this.props.data.article_scenarios[ i ] ;
scenarios.push( <span key={i} className="scenario" const scenario_display_name = scenario_names_hilite ? scenario_names_hilite[i] : makeScenarioDisplayName(scenario) ;
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( scenario ) }} scenarios.push( <span key={i} className="scenario"
/> ) dangerouslySetInnerHTML = {{ __html: scenario_display_name }}
) ; /> ) ;
} else {
// we only have a list of scenario ID's (the normal case) - figure out what the corresponding names are
this.props.data.article_scenarios.forEach( (scenario,i) =>
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( gAppRef.caches.scenarios[scenario] ) }}
/> )
) ;
} }
// prepare the tags // prepare the tags
@ -119,8 +102,8 @@ export class ArticleSearchResult extends React.Component
// NOTE: The "title" field is also given the CSS class "name" so that the normal CSS will apply to it. // NOTE: The "title" field is also given the CSS class "name" so that the normal CSS will apply to it.
// Some tests also look for a generic ".name" class name when checking search results. // Some tests also look for a generic ".name" class name when checking search results.
const pub_display_name = pub ? PublicationSearchResult.makeDisplayName( pub ) : null ; const pub_display_name = parent_pub ? PublicationSearchResult.makeDisplayName( parent_pub ) : null ;
const publ_display_name = publ ? PublisherSearchResult.makeDisplayName( publ ) : null ; const publ_display_name = parent_publ ? PublisherSearchResult.makeDisplayName( parent_publ ) : null ;
return ( <div className="search-result article" return ( <div className="search-result article"
ref = { r => gAppRef.setTestAttribute( r, "article_id", this.props.data.article_id ) } ref = { r => gAppRef.setTestAttribute( r, "article_id", this.props.data.article_id ) }
> >
@ -179,61 +162,71 @@ export class ArticleSearchResult extends React.Component
} ) ; } ) ;
} }
static onNewArticle( notify ) { static onNewArticle() {
ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => { gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
axios.post( gAppRef.makeFlaskUrl( "/article/create", {list:1} ), newVals ) ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => {
.then( resp => { axios.post(
// update the caches gAppRef.makeFlaskUrl( "/article/create" ), newVals
gAppRef.caches.authors = resp.data.authors ; ).then( resp => {
gAppRef.caches.scenarios = resp.data.scenarios ; gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
gAppRef.caches.tags = resp.data.tags ; // update the UI
// unload any updated values const newArticle = resp.data.record ;
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ; gAppRef.prependSearchResult( newArticle ) ;
// update the UI with the new details if ( newArticle._parent_pub )
notify( resp.data.article_id, newVals ) ; gAppRef.updatePublication( newArticle._parent_pub.pub_id ) ;
if ( resp.data.warnings ) else if ( newArticle._parent_publ )
gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ; gAppRef.updatePublisher( newArticle._parent_publ.publ_id ) ;
else // update the UI
gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ; if ( resp.data.warnings )
if ( resp.data._publication ) gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ;
gAppRef.updatePublications( [ resp.data._publication ] ) ; else
gAppRef.closeModalForm() ; gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ;
} ) gAppRef.closeModalForm() ;
.catch( err => { } ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ; } ) ;
} ) ; } ) ;
} }
onEditArticle() { onEditArticle() {
ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => { gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
// send the updated details to the server ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => {
newVals.article_id = this.props.data.article_id ; // send the updated details to the server
axios.post( gAppRef.makeFlaskUrl( "/article/update", {list:1} ), newVals ) newVals.article_id = this.props.data.article_id ;
.then( resp => { axios.post(
// update the caches gAppRef.makeFlaskUrl( "/article/update" ), newVals
gAppRef.caches.authors = resp.data.authors ; ).then( resp => {
gAppRef.caches.scenarios = resp.data.scenarios ; gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
gAppRef.caches.tags = resp.data.tags ; // update the UI
// update the UI with the new details const article = resp.data.record ;
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ; const orig_parent_pub = this.props.data._parent_pub ;
removeSpecialFields( this.props.data ) ; const orig_parent_publ = this.props.data._parent_publ ;
if ( newVals.imageData ) updateRecord( this.props.data, article ) ;
gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ; if ( article._parent_pub )
this.forceUpdate() ; gAppRef.updatePublication( article._parent_pub.pub_id ) ;
PreviewableImage.activatePreviewableImages( this ) ; else if ( article._parent_publ )
if ( resp.data.warnings ) gAppRef.updatePublisher( article._parent_publ.publ_id ) ;
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ; if ( orig_parent_pub )
else gAppRef.updatePublication( orig_parent_pub.pub_id ) ;
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ; if ( orig_parent_publ )
if ( resp.data._publications ) gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
gAppRef.updatePublications( resp.data._publications ) ; // update the UI
gAppRef.closeModalForm() ; if ( newVals.imageData )
} ) gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ;
.catch( err => { this.forceUpdate() ;
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ; PreviewableImage.activatePreviewableImages( this ) ;
} ) ; // update the UI
} ); if ( resp.data.warnings )
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
} }
onDeleteArticle() { onDeleteArticle() {
@ -245,21 +238,22 @@ export class ArticleSearchResult extends React.Component
gAppRef.ask( content, "ask", { gAppRef.ask( content, "ask", {
"OK": () => { "OK": () => {
// delete the article on the server // delete the article on the server
axios.get( gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id, {list:1} ) ) axios.get(
.then( resp => { gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id )
// update the caches ).then( resp => {
gAppRef.caches.authors = resp.data.authors ; gAppRef.dataCache.refresh( [ "authors", "tags" ] ) ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI // update the UI
this.props.onDelete( "article_id", this.props.data.article_id ) ; this.props.onDelete( "article_id", this.props.data.article_id ) ;
if ( this.props.data._parent_pub )
gAppRef.updatePublication( this.props.data._parent_pub.pub_id ) ;
else if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings ) if ( resp.data.warnings )
gAppRef.showWarnings( "The article was deleted.", resp.data.warnings ) ; gAppRef.showWarnings( "The article was deleted.", resp.data.warnings ) ;
else else
gAppRef.showInfoToast( <div> The article was deleted. </div> ) ; gAppRef.showInfoToast( <div> The article was deleted. </div> ) ;
if ( resp.data._publication ) } ).catch( err => {
gAppRef.updatePublications( [ resp.data._publication ] ) ;
} )
.catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the article: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorToast( <div> Couldn't delete the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ; } ) ;
}, },

@ -5,7 +5,7 @@ import { NEW_ARTICLE_PUB_PRIORITY_CUTOFF } from "./constants.js" ;
import { PublicationSearchResult } from "./PublicationSearchResult.js" ; import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { gAppRef } from "./App.js" ; import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ; import { ImageFileUploader } from "./FileUploader.js" ;
import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, isNumeric } from "./utils.js" ; import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, isNumeric } from "./utils.js" ;
// -------------------------------------------------------------------- // --------------------------------------------------------------------
@ -73,7 +73,7 @@ export class ArticleSearchResult2
// initialize the publications // initialize the publications
let publications = [ { value: null, label: <i>(none)</i> } ] ; let publications = [ { value: null, label: <i>(none)</i> } ] ;
let mostRecentPub = null ; let mostRecentPub = null ;
for ( let p of Object.entries(gAppRef.caches.publications) ) { for ( let p of Object.entries( gAppRef.dataCache.data.publications ) ) {
const pub_display_name = PublicationSearchResult.makeDisplayName( p[1] ) ; const pub_display_name = PublicationSearchResult.makeDisplayName( p[1] ) ;
const pub = { const pub = {
value: p[1].pub_id, value: p[1].pub_id,
@ -106,7 +106,7 @@ export class ArticleSearchResult2
// initialize the publishers // initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i> } ] ; let publishers = [ { value: null, label: <i>(none)</i> } ] ;
let currPubl = publishers[0] ; let currPubl = publishers[0] ;
for ( let p of Object.entries(gAppRef.caches.publishers) ) { for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( { publishers.push( {
value: p[1].publ_id, value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} /> label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />
@ -118,25 +118,25 @@ export class ArticleSearchResult2
// initialize the authors // initialize the authors
let allAuthors = [] ; let allAuthors = [] ;
for ( let a of Object.entries(gAppRef.caches.authors) ) for ( let a of Object.entries( gAppRef.dataCache.data.authors ) )
allAuthors.push( { value: a[1].author_id, label: a[1].author_name } ); allAuthors.push( { value: a[1].author_id, label: a[1].author_name } );
allAuthors.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ; allAuthors.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currAuthors = [] ; let currAuthors = [] ;
if ( vals.article_authors ) { if ( vals.article_authors ) {
currAuthors = vals.article_authors.map( a => { currAuthors = vals.article_authors.map( a => {
return { value: a, label: gAppRef.caches.authors[a].author_name } return { value: a.author_id, label: a.author_name }
} ) ; } ) ;
} }
// initialize the scenarios // initialize the scenarios
let allScenarios = [] ; let allScenarios = [] ;
for ( let s of Object.entries(gAppRef.caches.scenarios) ) for ( let s of Object.entries( gAppRef.dataCache.data.scenarios ) )
allScenarios.push( { value: s[1].scenario_id, label: makeScenarioDisplayName(s[1]) } ) ; allScenarios.push( { value: s[1].scenario_id, label: makeScenarioDisplayName(s[1]) } ) ;
allScenarios.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ; allScenarios.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currScenarios = [] ; let currScenarios = [] ;
if ( vals.article_scenarios ) { if ( vals.article_scenarios ) {
currScenarios = vals.article_scenarios.map( s => { currScenarios = vals.article_scenarios.map( s => {
return { value: s, label: makeScenarioDisplayName(gAppRef.caches.scenarios[s]) } return { value: s.scenario_id, label: makeScenarioDisplayName(s) }
} ) ; } ) ;
} }
function onScenarioCreated( val ) { function onScenarioCreated( val ) {
@ -149,7 +149,7 @@ export class ArticleSearchResult2
} }
// initialize the tags // initialize the tags
const tags = gAppRef.makeTagLists( vals.article_tags ) ; const tags = makeTagLists( vals.article_tags ) ;
// prepare the form content // prepare the form content
/* eslint-disable jsx-a11y/img-redundant-alt */ /* eslint-disable jsx-a11y/img-redundant-alt */
@ -260,7 +260,7 @@ export class ArticleSearchResult2
} ) ; } ) ;
} else if ( r === "article_tags" ) { } else if ( r === "article_tags" ) {
let vals = unloadCreatableSelect( refs[r] ) ; let vals = unloadCreatableSelect( refs[r] ) ;
newVals[ r ] = vals.map( v => v.label ) ; newVals[ r ] = vals.map( v => v.label ) ;
} else } else
newVals[ r ] = refs[r].value.trim() ; newVals[ r ] = refs[r].value.trim() ;
} }

@ -0,0 +1,59 @@
import React from "react" ;
import { gAppRef } from "./App.js" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
export class DataCache
{
constructor() {
// initialize
this.data = {} ;
}
get( keys, onOK ) {
// initialize
if ( onOK === undefined )
onOK = () => {} ;
let nOK = 0 ;
function onPartialOK() {
if ( ++nOK === keys.length ) {
onOK() ;
}
}
// refresh each key
for ( let key of keys ) {
// check if we already have the data in the cache
if ( this.data[ key ] !== undefined ) {
onPartialOK() ;
} else {
// nope - get the specified data from the backend
axios.get(
gAppRef.makeFlaskUrl( "/" + key )
).then( resp => {
// got it - update the cache
this.data[ key ] = resp.data ;
onPartialOK() ;
} ).catch( err => {
gAppRef.showErrorToast(
<div> Couldn't load the {key}: <div className="monospace"> {err.toString()} </div> </div>
) ;
} ) ;
}
}
}
refresh( keys, onOK ) {
// refresh the specified keys
for ( let key of keys )
delete this.data[ key ] ;
this.get( keys, onOK ) ;
}
}

@ -6,7 +6,7 @@ import { PublicationSearchResult2 } from "./PublicationSearchResult2.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ; import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLICATION_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ; import { PUBLICATION_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ; import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields, isLink } from "./utils.js" ; import { makeCollapsibleList, pluralString, updateRecord, isLink } from "./utils.js" ;
const axios = require( "axios" ) ; const axios = require( "axios" ) ;
@ -21,8 +21,8 @@ export class PublicationSearchResult extends React.Component
const display_description = PreviewableImage.adjustHtmlForPreviewableImages( const display_description = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "pub_description!" ] || this.props.data.pub_description this.props.data[ "pub_description!" ] || this.props.data.pub_description
) ; ) ;
const publ = gAppRef.caches.publishers[ this.props.data.publ_id ] ; const parent_publ = this.props.data._parent_publ ;
const image_url = PublicationSearchResult.makeImageUrl( this.props.data ) ; const image_url = PublicationSearchResult._makeImageUrl( this.props.data ) ;
// prepare the publication's URL // prepare the publication's URL
let pub_url = this.props.data.pub_url ; let pub_url = this.props.data.pub_url ;
@ -94,10 +94,10 @@ export class PublicationSearchResult extends React.Component
> >
<div className="header"> <div className="header">
{menu} {menu}
{ publ && { parent_publ &&
<Link className="publisher" title="Show this publisher." <Link className="publisher" title="Show this publisher."
to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) } to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) }
dangerouslySetInnerHTML={{ __html: publ.publ_name }} dangerouslySetInnerHTML={{ __html: parent_publ.publ_name }}
/> />
} }
<Link className="name" title="Show this publication." <Link className="name" title="Show this publication."
@ -126,61 +126,69 @@ export class PublicationSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ; PreviewableImage.activatePreviewableImages( this ) ;
} }
static onNewPublication( notify ) { static onNewPublication() {
PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => { gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
axios.post( gAppRef.makeFlaskUrl( "/publication/create", {list:1} ), newVals ) PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => {
.then( resp => { axios.post(
// update the caches gAppRef.makeFlaskUrl( "/publication/create" ), newVals
gAppRef.caches.publications = resp.data.publications ; ).then( resp => {
gAppRef.caches.tags = resp.data.tags ; gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
// unload any updated values // update the UI
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ; const newPub = resp.data.record ;
// update the UI with the new details gAppRef.prependSearchResult( newPub ) ;
notify( resp.data.pub_id, newVals ) ; if ( newPub._parent_publ )
if ( resp.data.warnings ) gAppRef.updatePublisher( newPub._parent_publ.publ_id ) ;
gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ; // update the UI
else if ( resp.data.warnings )
gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ; gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ;
gAppRef.closeModalForm() ; else
// NOTE: The parent publisher will update itself in the UI to show this new publication, gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ;
// since we've just received an updated copy of the publications. gAppRef.closeModalForm() ;
} ) } ) ;
.catch( err => { } ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ; } ) ;
} ) ; } ) ;
} }
onEditPublication() { onEditPublication() {
// get the articles for this publication gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list // get the articles for this publication
PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => { let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list
// send the updated details to the server PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => {
newVals.pub_id = this.props.data.pub_id ; // send the updated details to the server
if ( articles ) newVals.pub_id = this.props.data.pub_id ;
newVals.article_order = articles.map( a => a.article_id ) ; if ( articles )
axios.post( gAppRef.makeFlaskUrl( "/publication/update", {list:1} ), newVals ) newVals.article_order = articles.map( a => a.article_id ) ;
.then( resp => { axios.post(
// update the caches gAppRef.makeFlaskUrl( "/publication/update" ), newVals
gAppRef.caches.publications = resp.data.publications ; ).then( resp => {
gAppRef.caches.tags = resp.data.tags ; // update the UI
// update the UI with the new details gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ; // update the UI
removeSpecialFields( this.props.data ) ; const pub = resp.data.record ;
if ( newVals.imageData ) const orig_parent_publ = this.props.data._parent_publ ;
gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ; updateRecord( this.props.data, pub ) ;
this.forceUpdate() ; if ( pub._parent_publ )
PreviewableImage.activatePreviewableImages( this ) ; gAppRef.updatePublisher( pub._parent_publ.publ_id ) ;
if ( resp.data.warnings ) if ( orig_parent_publ )
gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ; gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
else // update the UI
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ; if ( newVals.imageData )
gAppRef.closeModalForm() ; gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ;
// NOTE: The parent publisher will update itself in the UI to show this updated publication, this.forceUpdate() ;
// since we've just received an updated copy of the publications. PreviewableImage.activatePreviewableImages( this ) ;
} ) // update the UI
.catch( err => { if ( resp.data.warnings )
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ; } ) ;
} ) ; } ) ;
} }
@ -208,22 +216,23 @@ export class PublicationSearchResult extends React.Component
gAppRef.ask( content, "ask", { gAppRef.ask( content, "ask", {
"OK": () => { "OK": () => {
// delete the publication on the server // delete the publication on the server
axios.get( gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id, {list:1} ) ) axios.get(
.then( resp => { gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id )
// update the caches ).then( resp => {
gAppRef.caches.publications = resp.data.publications ; gAppRef.dataCache.refresh( [ "publications", "tags" ] ) ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI // update the UI
this.props.onDelete( "pub_id", this.props.data.pub_id ) ; this.props.onDelete( "pub_id", this.props.data.pub_id ) ;
resp.data.deleteArticles.forEach( article_id => { resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ; this.props.onDelete( "article_id", article_id ) ;
} ) ; } ) ;
if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings ) if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was deleted.", resp.data.warnings ) ; gAppRef.showWarnings( "The publication was deleted.", resp.data.warnings ) ;
else else
gAppRef.showInfoToast( <div> The publication was deleted. </div> ) ; gAppRef.showInfoToast( <div> The publication was deleted. </div> ) ;
} ) } ).catch( err => {
.catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publication: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorToast( <div> Couldn't delete the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ; } ) ;
}, },
@ -231,11 +240,11 @@ export class PublicationSearchResult extends React.Component
} ) ; } ) ;
} }
// get the publication details // get the publication details
axios.get( gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id ) ) axios.get(
.then( resp => { gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id )
).then( resp => {
doDelete( resp.data.nArticles ) ; doDelete( resp.data.nArticles ) ;
} ) } ).catch( err => {
.catch( err => {
doDelete( err ) ; doDelete( err ) ;
} ) ; } ) ;
} }
@ -253,15 +262,13 @@ export class PublicationSearchResult extends React.Component
} }
_makeDisplayName( allowAlternateContent ) { return PublicationSearchResult.makeDisplayName( this.props.data, allowAlternateContent ) ; } _makeDisplayName( allowAlternateContent ) { return PublicationSearchResult.makeDisplayName( this.props.data, allowAlternateContent ) ; }
static makeImageUrl( vals ) { static _makeImageUrl( vals ) {
let image_url = gAppRef.makeFlaskImageUrl( "publication", vals.pub_image_id ) ; let image_url = gAppRef.makeFlaskImageUrl( "publication", vals.pub_image_id ) ;
if ( ! image_url ) { if ( ! image_url ) {
// check if the parent publisher has an image // check if the parent publisher has an image
if ( vals.publ_id ) { const parent_publ = vals._parent_publ ;
const publ = gAppRef.caches.publishers[ vals.publ_id ] ; if ( parent_publ )
if ( publ ) image_url = gAppRef.makeFlaskImageUrl( "publisher", parent_publ.publ_image_id ) ;
image_url = gAppRef.makeFlaskImageUrl( "publisher", publ.publ_image_id ) ;
}
} }
return image_url ; return image_url ;
} }

@ -4,7 +4,7 @@ import CreatableSelect from "react-select/creatable" ;
import ReactDragListView from "react-drag-listview/lib/index.js" ; import ReactDragListView from "react-drag-listview/lib/index.js" ;
import { gAppRef } from "./App.js" ; import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ; import { ImageFileUploader } from "./FileUploader.js" ;
import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, ciCompare, isNumeric } from "./utils.js" ; import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, ciCompare, isNumeric } from "./utils.js" ;
// -------------------------------------------------------------------- // --------------------------------------------------------------------
@ -62,7 +62,7 @@ export class PublicationSearchResult2
// initialize the publishers // initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i> } ] ; let publishers = [ { value: null, label: <i>(none)</i> } ] ;
let currPubl = publishers[0] ; let currPubl = publishers[0] ;
for ( let p of Object.entries(gAppRef.caches.publishers) ) { for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( { publishers.push( {
value: p[1].publ_id, value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} /> label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />
@ -76,7 +76,7 @@ export class PublicationSearchResult2
// NOTE: As a convenience, we provide a droplist of known publication names (without edition #'s), // NOTE: As a convenience, we provide a droplist of known publication names (without edition #'s),
// to make it easier to add a new edition of an existing publication. // to make it easier to add a new edition of an existing publication.
let publications = {} ; let publications = {} ;
for ( let p of Object.entries(gAppRef.caches.publications) ) for ( let p of Object.entries( gAppRef.dataCache.data.publications ) )
publications[ p[1].pub_name ] = p[1] ; publications[ p[1].pub_name ] = p[1] ;
let publications2 = [] ; let publications2 = [] ;
for ( let pub_name in publications ) { for ( let pub_name in publications ) {
@ -93,7 +93,7 @@ export class PublicationSearchResult2
} }
// initialize the tags // initialize the tags
const tags = gAppRef.makeTagLists( vals.pub_tags ) ; const tags = makeTagLists( vals.pub_tags ) ;
// initialize the articles // initialize the articles
function make_article_display_name( article ) { function make_article_display_name( article ) {
@ -206,7 +206,7 @@ export class PublicationSearchResult2
function checkForDupe( vals ) { function checkForDupe( vals ) {
// check for an existing publication name/edition // check for an existing publication name/edition
for ( let pub of Object.entries(gAppRef.caches.publications) ) { for ( let pub of Object.entries( gAppRef.dataCache.data.publications ) ) {
if ( ciCompare( pub[1].pub_name, vals.pub_name ) !== 0 ) if ( ciCompare( pub[1].pub_name, vals.pub_name ) !== 0 )
continue ; continue ;
if ( ! pub[1].pub_edition && ! vals.pub_edition ) if ( ! pub[1].pub_edition && ! vals.pub_edition )

@ -7,7 +7,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult.js"
import { PreviewableImage } from "./PreviewableImage.js" ; import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, PUBLISHER_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ; import { PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, PUBLISHER_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ; import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields } from "./utils.js" ; import { makeCollapsibleList, pluralString, updateRecord } from "./utils.js" ;
const axios = require( "axios" ) ; const axios = require( "axios" ) ;
@ -26,11 +26,7 @@ export class PublisherSearchResult extends React.Component
const image_url = gAppRef.makeFlaskImageUrl( "publisher", this.props.data.publ_image_id ) ; const image_url = gAppRef.makeFlaskImageUrl( "publisher", this.props.data.publ_image_id ) ;
// prepare the publications // prepare the publications
let pubs = [] ; let pubs = this.props.data.publications ;
for ( let pub of Object.entries(gAppRef.caches.publications) ) {
if ( pub[1].publ_id === this.props.data.publ_id )
pubs.push( pub[1] ) ;
}
pubs.sort( (lhs,rhs) => { pubs.sort( (lhs,rhs) => {
if ( lhs.pub_seqno && rhs.pub_seqno ) if ( lhs.pub_seqno && rhs.pub_seqno )
return rhs.pub_seqno - lhs.pub_seqno ; return rhs.pub_seqno - lhs.pub_seqno ;
@ -98,53 +94,60 @@ export class PublisherSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ; PreviewableImage.activatePreviewableImages( this ) ;
} }
static onNewPublisher( notify ) { static onNewPublisher() {
PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => { gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
axios.post( gAppRef.makeFlaskUrl( "/publisher/create", {list:1} ), newVals ) PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => {
.then( resp => { axios.post(
// update the cached publishers gAppRef.makeFlaskUrl( "/publisher/create" ), newVals
gAppRef.caches.publishers = resp.data.publishers ; ).then( resp => {
// unload any updated values gAppRef.dataCache.refresh( [ "publishers" ] ) ;
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ; // update the UI
// update the UI with the new details const newPubl = resp.data.record ;
notify( resp.data.publ_id, newVals ) ; gAppRef.prependSearchResult( newPubl ) ;
if ( resp.data.warnings ) // update the UI
gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ; if ( resp.data.warnings )
else gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ;
gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ; else
gAppRef.closeModalForm() ; gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ;
} ) gAppRef.closeModalForm() ;
.catch( err => { } ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ; } ) ;
} ) ; } ) ;
} }
onEditPublisher() { onEditPublisher() {
PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => { gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
// send the updated details to the server PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => {
newVals.publ_id = this.props.data.publ_id ; // send the updated details to the server
axios.post( gAppRef.makeFlaskUrl( "/publisher/update", {list:1} ), newVals ) newVals.publ_id = this.props.data.publ_id ;
.then( resp => { axios.post(
// update the cached publishers gAppRef.makeFlaskUrl( "/publisher/update" ), newVals
gAppRef.caches.publishers = resp.data.publishers ; ).then( resp => {
// update the UI with the new details gAppRef.dataCache.refresh( [ "publishers" ], () => {
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ; // update the UI
removeSpecialFields( this.props.data ) ; const publ = resp.data.record ;
if ( newVals.imageData ) updateRecord( this.props.data, publ ) ;
gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ; for ( let pub of publ.publications )
this.forceUpdate() ; gAppRef.updatePublication( pub.pub_id ) ;
PreviewableImage.activatePreviewableImages( this ) ; // update the UI
if ( resp.data.warnings ) if ( newVals.imageData )
gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ; gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ;
else this.forceUpdate() ;
gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ; PreviewableImage.activatePreviewableImages( this ) ;
gAppRef.closeModalForm() ; // update the UI
} ) if ( resp.data.warnings )
.catch( err => { gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ;
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ; else
} ) ; gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ;
} ); gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
} }
onDeletePublisher() { onDeletePublisher() {
@ -176,11 +179,10 @@ export class PublisherSearchResult extends React.Component
gAppRef.ask( content, "ask", { gAppRef.ask( content, "ask", {
"OK": () => { "OK": () => {
// delete the publisher on the server // delete the publisher on the server
axios.get( gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id, {list:1} ) ) axios.get(
.then( resp => { gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id )
// update the cached publishers ).then( resp => {
gAppRef.caches.publishers = resp.data.publishers ; gAppRef.dataCache.refresh( [ "publishers", "publications" ] ) ;
gAppRef.caches.publications = resp.data.publications ; // nb: because of cascading deletes
// update the UI // update the UI
this.props.onDelete( "publ_id", this.props.data.publ_id ) ; this.props.onDelete( "publ_id", this.props.data.publ_id ) ;
resp.data.deletedPublications.forEach( pub_id => { resp.data.deletedPublications.forEach( pub_id => {
@ -189,12 +191,12 @@ export class PublisherSearchResult extends React.Component
resp.data.deletedArticles.forEach( article_id => { resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ; this.props.onDelete( "article_id", article_id ) ;
} ) ; } ) ;
// update the UI
if ( resp.data.warnings ) if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was deleted.", resp.data.warnings ) ; gAppRef.showWarnings( "The publisher was deleted.", resp.data.warnings ) ;
else else
gAppRef.showInfoToast( <div> The publisher was deleted. </div> ) ; gAppRef.showInfoToast( <div> The publisher was deleted. </div> ) ;
} ) } ).catch( err => {
.catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ; gAppRef.showErrorToast( <div> Couldn't delete the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ; } ) ;
}, },
@ -202,11 +204,11 @@ export class PublisherSearchResult extends React.Component
} ) ; } ) ;
} ; } ;
// get the publisher details // get the publisher details
axios.get( gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id ) ) axios.get(
.then( resp => { gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id )
).then( resp => {
doDelete( resp.data.nPublications, resp.data.nArticles ) ; doDelete( resp.data.nPublications, resp.data.nArticles ) ;
} ) } ).catch( err => {
.catch( err => {
doDelete( err ) ; doDelete( err ) ;
} ) ; } ) ;
} }

@ -84,7 +84,7 @@ export class PublisherSearchResult2
function checkForDupe( publName ) { function checkForDupe( publName ) {
// check for an existing publisher // check for an existing publisher
for ( let publ of Object.entries(gAppRef.caches.publishers) ) { for ( let publ of Object.entries( gAppRef.dataCache.data.publishers ) ) {
if ( ciCompare( publName, publ[1].publ_name ) === 0 ) if ( ciCompare( publName, publ[1].publ_name ) === 0 )
return true ; return true ;
} }

@ -43,22 +43,22 @@ export class SearchResults extends React.Component
// render the search results // render the search results
results = [] ; results = [] ;
this.props.searchResults.forEach( sr => { this.props.searchResults.forEach( sr => {
if ( sr.type === "publisher" ) { if ( sr._type === "publisher" ) {
results.push( <PublisherSearchResult key={"publisher:"+sr.publ_id} data={sr} results.push( <PublisherSearchResult key={"publisher:"+sr.publ_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) } onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
/> ) ; /> ) ;
} else if ( sr.type === "publication" ) { } else if ( sr._type === "publication" ) {
results.push( <PublicationSearchResult key={"publication:"+sr.pub_id} data={sr} results.push( <PublicationSearchResult key={"publication:"+sr.pub_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) } onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
onArticleClick = { this.props.type === "publication" ? (a) => scrollToArticle(a) : null } onArticleClick = { (a) => scrollToArticle(a) }
/> ) ; /> ) ;
} else if ( sr.type === "article" ) { } else if ( sr._type === "article" ) {
results.push( <ArticleSearchResult key={"article:"+sr.article_id} data={sr} results.push( <ArticleSearchResult key={"article:"+sr.article_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) } onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
ref = { r => articleRefs[sr.article_id] = r } ref = { r => articleRefs[sr.article_id] = r }
/> ) ; /> ) ;
} else { } else {
gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr.type ) ; gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr._type ) ;
} }
} ) ; } ) ;
} }

@ -5,6 +5,8 @@ import { App, gAppRef } from "./App" ;
import { PublicationSearchResult } from "./PublicationSearchResult" ; import { PublicationSearchResult } from "./PublicationSearchResult" ;
import "./index.css" ; import "./index.css" ;
const axios = require( "axios" ) ;
// -------------------------------------------------------------------- // --------------------------------------------------------------------
ReactDOM.render( ReactDOM.render(
@ -38,8 +40,12 @@ ReactDOM.render(
<Route path="/author/:authorId" render={ (props) => <App {...props} type="author" key={"author:"+props.match.params.authorId} <Route path="/author/:authorId" render={ (props) => <App {...props} type="author" key={"author:"+props.match.params.authorId}
doSearch = { () => gAppRef.runSpecialSearch( "/search/author/"+gAppRef.props.match.params.authorId, null, doSearch = { () => gAppRef.runSpecialSearch( "/search/author/"+gAppRef.props.match.params.authorId, null,
() => { () => {
const author = gAppRef.caches.authors[ gAppRef.props.match.params.authorId ] ; axios.get(
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" ) gAppRef.makeFlaskUrl( "/author/" + gAppRef.props.match.params.authorId )
).then( resp => {
const author = resp.data ;
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" )
} ) ;
} }
) } ) }
/> } /> /> } />

@ -76,6 +76,8 @@ export function confirmDiscardChanges( oldVals, newVals, accept ) {
} }
} }
// --------------------------------------------------------------------
export function sortSelectableOptions( options ) { export function sortSelectableOptions( options ) {
options.sort( (lhs,rhs) => { options.sort( (lhs,rhs) => {
lhs = ReactDOMServer.renderToStaticMarkup( lhs.label ) ; lhs = ReactDOMServer.renderToStaticMarkup( lhs.label ) ;
@ -100,32 +102,21 @@ export function unloadCreatableSelect( sel ) {
return vals2 ; return vals2 ;
} }
// -------------------------------------------------------------------- export function makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
export function applyUpdatedVals( vals, newVals, updated, refs ) { // NOTE: react-select uses the "value" field to determine which choices have already been selected
// NOTE: After the user has edited an object, we send the new values to the server to store in // and thus should not be shown in the droplist of available choices.
// the database, but the server will sometimes return modified values back e.g. because unsafe HTML let tagList = [] ;
// was removed, or the ID's of newly-created authors. This function applies these new values back if ( tags ) {
// into the original table of values. tags.map(
for ( let r in refs ) (tag) => tagList.push( { value: tag, label: tag } )
vals[ r ] = (updated && updated[r] !== undefined) ? updated[r] : newVals[r] ; ) ;
// NOTE: We sometimes want to force an entry into the vals that doesn't have
// an associated ref (i.e. UI element) e.g. XXX_image_id.
for ( let key in updated )
vals[ key ] = updated[ key ] ;
}
export function removeSpecialFields( vals ) {
// NOTE: This removes special fields sent to us by the backend containing content that has search terms highlighted.
// We only really need to remove author names for articles, since the backend sends us these (possibly highlighted)
// as well as the ID's, but they could be incorrect after the user has edited an article. However, for consistency,
// we remove all these special fields for everything.
let keysToDelete = [] ;
for ( let key in vals ) {
if ( key[ key.length-1 ] === "!" )
keysToDelete.push( key ) ;
} }
keysToDelete.forEach( k => delete vals[k] ) ; // create another list for all known tags
let allTags = gAppRef.dataCache.data.tags.map(
(tag) => { return { value: tag[0], label: tag[0] } }
) ;
return [ tagList, allTags ] ;
} }
// -------------------------------------------------------------------- // --------------------------------------------------------------------
@ -170,6 +161,12 @@ export function parseScenarioDisplayName( displayName ) {
// -------------------------------------------------------------------- // --------------------------------------------------------------------
export function updateRecord( rec, newVals ) {
// update a record with new values
for ( let key in newVals )
rec[ key ] = newVals[ key ] ;
}
export function makeCollapsibleList( caption, vals, maxItems, style ) { export function makeCollapsibleList( caption, vals, maxItems, style ) {
if ( ! vals || vals.length === 0 ) if ( ! vals || vals.length === 0 )
return null ; return null ;

Loading…
Cancel
Save