Changed how data is transferred between the front-end and back-end.

branch: master
author: Pacman Ghost, 3 years ago
parent: fdc287bb61
commit: 95e662c9f6
  1. asl_articles/articles.py (110)
  2. asl_articles/authors.py (31)
  3. asl_articles/publications.py (66)
  4. asl_articles/publishers.py (56)
  5. asl_articles/scenarios.py (14)
  6. asl_articles/search.py (20)
  7. asl_articles/tags.py (7)
  8. asl_articles/tests/test_publishers.py (2)
  9. asl_articles/utils.py (24)
  10. web/src/App.js (118)
  11. web/src/ArticleSearchResult.js (190)
  12. web/src/ArticleSearchResult2.js (18)
  13. web/src/DataCache.js (59)
  14. web/src/PublicationSearchResult.js (153)
  15. web/src/PublicationSearchResult2.js (10)
  16. web/src/PublisherSearchResult.js (120)
  17. web/src/PublisherSearchResult2.js (2)
  18. web/src/SearchResults.js (10)
  19. web/src/index.js (10)
  20. web/src/utils.js (47)
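
The gist of the change: create/update/delete responses no longer carry an "updated" field map plus refreshed cache lists in the extras; each one now returns the affected object itself under a single "record" key, with related objects nested inside it, and the front-end replaces its eagerly-loaded caches with a lazily-populated DataCache. A rough sketch of the new response envelope (all values below are illustrative, not taken from the repo):

// Illustrative only: roughly what e.g. POST /article/update now sends back.
const response = {
    status: "OK",
    record: {                   // the full updated record, e.g. get_article_vals( article, True )
        _type: "article",       // records now carry their own type marker
        article_id: 123,        // hypothetical id
        // ...remaining public fields, including nested _parent_pub / _parent_publ...
    },
    warnings: [ "Some values had HTML cleaned up." ],   // optional, duplicates removed
} ;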

@ -9,11 +9,10 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db
from asl_articles.models import Article, Author, ArticleAuthor, Scenario, ArticleScenario, ArticleImage
from asl_articles.models import Publication
from asl_articles.authors import do_get_authors
from asl_articles.scenarios import do_get_scenarios
from asl_articles.tags import do_get_tags
from asl_articles.authors import get_author_vals
from asl_articles.scenarios import get_scenario_vals
import asl_articles.publications
import asl_articles.publishers
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response
@ -34,9 +33,10 @@ def get_article( article_id ):
if not article:
abort( 404 )
_logger.debug( "- %s", article )
return jsonify( get_article_vals( article ) )
deep = request.args.get( "deep" )
return jsonify( get_article_vals( article, deep ) )
def get_article_vals( article, add_type=False ):
def get_article_vals( article, deep ):
"""Extract public fields from an Article record."""
authors = sorted( article.article_authors,
key = lambda a: a.seq_no
@ -45,22 +45,28 @@ def get_article_vals( article, add_type=False ):
key = lambda a: a.seq_no
)
vals = {
"_type": "article",
"article_id": article.article_id,
"article_title": article.article_title,
"article_subtitle": article.article_subtitle,
"article_image_id": article.article_id if article.article_image else None,
"article_authors": [ a.author_id for a in authors ],
"article_authors": [ get_author_vals( a.parent_author ) for a in authors ],
"article_snippet": article.article_snippet,
"article_pageno": article.article_pageno,
"article_url": article.article_url,
"article_scenarios": [ s.scenario_id for s in scenarios ],
"article_scenarios": [ get_scenario_vals( s.parent_scenario ) for s in scenarios ],
"article_tags": decode_tags( article.article_tags ),
"article_rating": article.article_rating,
"pub_id": article.pub_id,
"publ_id": article.publ_id,
}
if add_type:
vals[ "type" ] = "article"
if deep:
vals["_parent_pub"] = asl_articles.publications.get_publication_vals(
article.parent_pub, False, False
) if article.parent_pub else None
vals["_parent_publ"] = asl_articles.publishers.get_publisher_vals(
article.parent_publ, False, False
) if article.parent_publ else None
return vals
def get_article_sort_key( article ):
@ -80,13 +86,11 @@ def create_article():
log = ( _logger, "Create article:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# create the new article
vals[ "time_created" ] = datetime.datetime.now()
@ -95,23 +99,16 @@ def create_article():
db.session.flush()
new_article_id = article.article_id
_set_seqno( article, article.pub_id )
_save_authors( article, updated )
_save_scenarios( article, updated )
_save_image( article, updated )
_save_authors( article )
_save_scenarios( article )
_save_image( article )
db.session.commit()
_logger.debug( "- New ID: %d", new_article_id )
search.add_or_update_article( None, article, None )
# generate the response
extras = { "article_id": new_article_id }
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_article_vals( article, True )
return make_ok_response( record=vals, warnings=warnings )
def _set_seqno( article, pub_id ):
"""Set an article's seq#."""
@ -123,7 +120,7 @@ def _set_seqno( article, pub_id ):
else:
article.article_seqno = None
def _save_authors( article, updated_fields ):
def _save_authors( article ):
"""Save the article's authors."""
# delete the existing article-author rows
@ -133,8 +130,6 @@ def _save_authors( article, updated_fields ):
# add the article-author rows
authors = request.json.get( "article_authors", [] )
author_ids = []
new_authors = False
for seq_no,author in enumerate( authors ):
if isinstance( author, int ):
# this is an existing author
@ -147,19 +142,12 @@ def _save_authors( article, updated_fields ):
db.session.add( author )
db.session.flush()
author_id = author.author_id
new_authors = True
_logger.debug( "Created new author \"%s\": id=%d", author, author_id )
db.session.add(
ArticleAuthor( seq_no=seq_no, article_id=article.article_id, author_id=author_id )
)
author_ids.append( author_id )
# check if we created any new authors
if new_authors:
# yup - let the caller know about them
updated_fields[ "article_authors"] = author_ids
def _save_scenarios( article, updated_fields ):
def _save_scenarios( article ):
"""Save the article's scenarios."""
# delete the existing article-scenario rows
@ -169,8 +157,6 @@ def _save_scenarios( article, updated_fields ):
# add the article-scenario rows
scenarios = request.json.get( "article_scenarios", [] )
scenario_ids = []
new_scenarios = False
for seq_no,scenario in enumerate( scenarios ):
if isinstance( scenario, int ):
# this is an existing scenario
@ -183,19 +169,12 @@ def _save_scenarios( article, updated_fields ):
db.session.add( new_scenario )
db.session.flush()
scenario_id = new_scenario.scenario_id
new_scenarios = True
_logger.debug( "Created new scenario \"%s [%s]\": id=%d", scenario[1], scenario[0], scenario_id )
db.session.add(
ArticleScenario( seq_no=seq_no, article_id=article.article_id, scenario_id=scenario_id )
)
scenario_ids.append( scenario_id )
# check if we created any new scenarios
if new_scenarios:
# yup - let the caller know about them
updated_fields[ "article_scenarios"] = scenario_ids
def _save_image( article, updated ):
def _save_image( article ):
"""Save the article's image."""
# check if a new image was provided
@ -207,7 +186,7 @@ def _save_image( article, updated ):
ArticleImage.query.filter( ArticleImage.article_id == article.article_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the article to have no image.
updated[ "article_image_id" ] = None
article.article_image_id = None
return
# add the new image to the database
@ -217,7 +196,6 @@ def _save_image( article, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "article_image_id" ] = article.article_id
# ---------------------------------------------------------------------
@ -231,44 +209,29 @@ def update_article():
log = ( _logger, "Update article: id={}".format( article_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("article_tags"), warnings )
vals[ "article_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "article_tags" ):
updated[ "article_tags" ] = decode_tags( vals["article_tags"] )
# update the article
article = Article.query.get( article_id )
if not article:
abort( 404 )
orig_pub = Publication.query.get( article.pub_id ) if article.pub_id else None
if vals["pub_id"] != article.pub_id:
_set_seqno( article, vals["pub_id"] )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( article, vals )
_save_authors( article, updated )
_save_scenarios( article, updated )
_save_image( article, updated )
_save_authors( article )
_save_scenarios( article )
_save_image( article )
db.session.commit()
search.add_or_update_article( None, article, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "scenarios" ] = do_get_scenarios()
extras[ "tags" ] = do_get_tags()
pubs = []
if orig_pub and orig_pub.pub_id != article.pub_id:
pubs.append( asl_articles.publications.get_publication_vals( orig_pub, True ) )
if article.pub_id:
pub = Publication.query.get( article.pub_id )
pubs.append( asl_articles.publications.get_publication_vals( pub, True ) )
if pubs:
extras[ "_publications" ] = pubs
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_article_vals( article, True )
return make_ok_response( record=vals, warnings=warnings )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -311,11 +274,4 @@ def delete_article( article_id ):
search.delete_articles( [ article ] )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "authors" ] = do_get_authors()
extras[ "tags" ] = do_get_tags()
if article.pub_id:
pub = Publication.query.get( article.pub_id )
extras[ "_publication" ] = asl_articles.publications.get_publication_vals( pub, True )
return make_ok_response( extras=extras )
return make_ok_response()
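
For reference, a sketch of the article payload that get_article_vals() now produces (all ids, names and values here are hypothetical): authors and scenarios come back as nested objects instead of bare ids, and the parent publication/publisher are embedded when "deep" is requested.

// Illustrative shape only; ids, names and values are made up.
const articleRecord = {
    _type: "article",
    article_id: 123,
    article_title: "Example article",
    article_authors: [ { author_id: 7, author_name: "Some Author" } ],   // full objects, not ids
    article_scenarios: [ { scenario_id: 42 /* ...other scenario fields... */ } ],
    article_tags: [ "tag1", "tag2" ],
    pub_id: 5,
    publ_id: null,
    _parent_pub: { _type: "publication", pub_id: 5, pub_name: "Example publication" /* ... */ },  // only when deep
    _parent_publ: null,                                                  // only when deep
} ;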

@ -1,27 +1,38 @@
""" Handle author requests. """
from flask import jsonify
import logging
from flask import jsonify, abort
from asl_articles import app
from asl_articles.models import Author
_logger = logging.getLogger( "db" )
# ---------------------------------------------------------------------
@app.route( "/authors" )
def get_authors():
"""Get all authors."""
return jsonify( do_get_authors() )
return jsonify( {
author.author_id: get_author_vals( author )
for author in Author.query.all()
} )
def do_get_authors():
"""Get all authors."""
# ---------------------------------------------------------------------
# get all the authors
return {
r.author_id: _get_author_vals(r)
for r in Author.query #pylint: disable=not-an-iterable
}
@app.route( "/author/<author_id>" )
def get_author( author_id ):
"""Get an author."""
_logger.debug( "Get author: id=%s", author_id )
author = Author.query.get( author_id )
if not author:
abort( 404 )
vals = get_author_vals( author )
_logger.debug( "- %s", author )
return jsonify( vals )
def _get_author_vals( author ):
def get_author_vals( author ):
"""Extract public fields from an Author record."""
return {
"author_id": author.author_id,

@ -10,7 +10,7 @@ from sqlalchemy.sql.expression import func
from asl_articles import app, db
from asl_articles.models import Publication, PublicationImage, Article
from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles.tags import do_get_tags
import asl_articles.publishers
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, clean_tags, encode_tags, decode_tags, \
apply_attrs, make_ok_response
@ -24,14 +24,10 @@ _FIELD_NAMES = [ "*pub_name", "pub_edition", "pub_description", "pub_date", "pub
@app.route( "/publications" )
def get_publications():
"""Get all publications."""
return jsonify( do_get_publications() )
def do_get_publications():
"""Get all publications."""
# NOTE: The front-end maintains a cache of the publications, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publication.query.all()
return { r.pub_id: get_publication_vals(r,False) for r in results }
return jsonify( {
pub.pub_id: get_publication_vals( pub, False, False )
for pub in Publication.query.all()
} )
# ---------------------------------------------------------------------
@ -42,16 +38,20 @@ def get_publication( pub_id ):
pub = Publication.query.get( pub_id )
if not pub:
abort( 404 )
vals = get_publication_vals( pub, False )
vals = get_publication_vals( pub,
request.args.get( "include_articles" ),
request.args.get( "deep" )
)
# include the number of associated articles
query = Article.query.filter_by( pub_id = pub_id )
vals[ "nArticles" ] = query.count()
_logger.debug( "- %s ; #articles=%d", pub, vals["nArticles"] )
return jsonify( vals )
def get_publication_vals( pub, include_articles, add_type=False ):
def get_publication_vals( pub, include_articles, deep ):
"""Extract public fields from a Publication record."""
vals = {
"_type": "publication",
"pub_id": pub.pub_id,
"pub_name": pub.pub_name,
"pub_edition": pub.pub_edition,
@ -66,9 +66,11 @@ def get_publication_vals( pub, include_articles, add_type=False ):
}
if include_articles:
articles = sorted( pub.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a ) for a in articles ]
if add_type:
vals[ "type" ] = "publication"
vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
if deep:
vals[ "_parent_publ" ] = asl_articles.publishers.get_publisher_vals(
pub.parent_publ, False, False
) if pub.parent_publ else None
return vals
def get_publication_sort_key( pub ):
@ -96,30 +98,25 @@ def create_publication():
log = ( _logger, "Create publication:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# create the new publication
vals[ "time_created" ] = datetime.datetime.now()
pub = Publication( **vals )
db.session.add( pub )
_set_seqno( pub, pub.publ_id )
_save_image( pub, updated )
_save_image( pub )
db.session.commit()
_logger.debug( "- New ID: %d", pub.pub_id )
search.add_or_update_publication( None, pub, None )
# generate the response
extras = { "pub_id": pub.pub_id }
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publication_vals( pub, False, True )
return make_ok_response( record=vals, warnings=warnings )
def _set_seqno( pub, publ_id ):
"""Set a publication's seq#."""
@ -139,7 +136,7 @@ def _set_seqno( pub, publ_id ):
else:
pub.pub_seqno = None
def _save_image( pub, updated ):
def _save_image( pub ):
"""Save the publication's image."""
# check if a new image was provided
@ -151,7 +148,7 @@ def _save_image( pub, updated ):
PublicationImage.query.filter( PublicationImage.pub_id == pub.pub_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publication to have no image.
updated[ "pub_image_id" ] = None
pub.pub_image_id = None
return
# add the new image to the database
@ -161,7 +158,6 @@ def _save_image( pub, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "pub_image_id" ] = pub.pub_id
# ---------------------------------------------------------------------
@ -175,14 +171,12 @@ def update_publication():
log = ( _logger, "Update publication: id={}".format( pub_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
article_order = request.json.get( "article_order" )
# NOTE: Tags are stored in the database using \n as a separator, so we need to encode *after* cleaning them.
cleaned_tags = clean_tags( vals.get("pub_tags"), warnings )
vals[ "pub_tags" ] = encode_tags( cleaned_tags )
if cleaned_tags != vals.get( "pub_tags" ):
updated[ "pub_tags" ] = decode_tags( vals["pub_tags"] )
# update the publication
pub = Publication.query.get( pub_id )
@ -192,7 +186,7 @@ def update_publication():
_set_seqno( pub, vals["publ_id"] )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( pub, vals )
_save_image( pub, updated )
_save_image( pub )
if article_order:
query = Article.query.filter( Article.pub_id == pub_id )
articles = { int(a.article_id): a for a in query }
@ -212,11 +206,8 @@ def update_publication():
search.add_or_update_publication( None, pub, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publication_vals( pub, False, True )
return make_ok_response( record=vals, warnings=warnings )
# ---------------------------------------------------------------------
@ -243,8 +234,5 @@ def delete_publication( pub_id ):
search.delete_articles( deleted_articles )
# generate the response
extras = { "deleteArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publications" ] = do_get_publications()
extras[ "tags" ] = do_get_tags()
extras = { "deletedArticles": deleted_articles }
return make_ok_response( extras=extras )
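
The publication endpoints grow matching "include_articles" and "deep" flags; the front-end (see App.updatePublication() further below) uses them to re-fetch a single publication after a related object changes. A minimal sketch, assuming a hypothetical publication id and a same-origin Flask server:

const axios = require( "axios" ) ;

// Illustrative only: refresh one publication the way the front-end now does.
axios.get( "/publication/123?include_articles=1&deep=1" ).then( resp => {
    const pub = resp.data ;
    console.log( pub._type ) ;          // "publication"
    console.log( pub.articles ) ;       // present because include_articles was set
    console.log( pub._parent_publ ) ;   // parent publisher (or null), present because deep was set
    console.log( pub.nArticles ) ;      // number of associated articles
} ) ;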

@ -8,7 +8,7 @@ from flask import request, jsonify, abort
from asl_articles import app, db
from asl_articles.models import Publisher, PublisherImage, Publication, Article
from asl_articles.publications import do_get_publications
from asl_articles.publications import get_publication_vals, get_publication_sort_key
from asl_articles.articles import get_article_vals, get_article_sort_key
from asl_articles import search
from asl_articles.utils import get_request_args, clean_request_args, make_ok_response, apply_attrs
@ -22,14 +22,10 @@ _FIELD_NAMES = [ "*publ_name", "publ_description", "publ_url" ]
@app.route( "/publishers" )
def get_publishers():
"""Get all publishers."""
return jsonify( _do_get_publishers() )
def _do_get_publishers():
"""Get all publishers."""
# NOTE: The front-end maintains a cache of the publishers, so as a convenience,
# we return the current list as part of the response to a create/update/delete operation.
results = Publisher.query.all()
return { r.publ_id: get_publisher_vals(r,False) for r in results }
return jsonify( {
publ.publ_id: get_publisher_vals( publ, False, False )
for publ in Publisher.query.all()
} )
# ---------------------------------------------------------------------
@ -41,8 +37,10 @@ def get_publisher( publ_id ):
publ = Publisher.query.get( publ_id )
if not publ:
abort( 404 )
include_articles = request.args.get( "include_articles" )
vals = get_publisher_vals( publ, include_articles )
vals = get_publisher_vals( publ,
request.args.get( "include_pubs" ),
request.args.get( "include_articles" )
)
# include the number of associated publications
query = Publication.query.filter_by( publ_id = publ_id )
vals[ "nPublications" ] = query.count()
@ -56,20 +54,22 @@ def get_publisher( publ_id ):
_logger.debug( "- %s ; #publications=%d ; #articles=%d", publ, vals["nPublications"], vals["nArticles"] )
return jsonify( vals )
def get_publisher_vals( publ, include_articles, add_type=False ):
def get_publisher_vals( publ, include_pubs, include_articles ):
"""Extract public fields from a Publisher record."""
vals = {
"_type": "publisher",
"publ_id": publ.publ_id,
"publ_name": publ.publ_name,
"publ_description": publ.publ_description,
"publ_url": publ.publ_url,
"publ_image_id": publ.publ_id if publ.publ_image else None,
}
if include_pubs:
pubs = sorted( publ.publications, key=get_publication_sort_key )
vals[ "publications" ] = [ get_publication_vals( p, False, False ) for p in pubs ]
if include_articles:
articles = sorted( publ.articles, key=get_article_sort_key )
vals[ "articles" ] = [ get_article_vals( a, False ) for a in articles ]
if add_type:
vals[ "type" ] = "publisher"
return vals
# ---------------------------------------------------------------------
@ -83,24 +83,22 @@ def create_publisher():
log = ( _logger, "Create publisher:" )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# create the new publisher
vals[ "time_created" ] = datetime.datetime.now()
publ = Publisher( **vals )
db.session.add( publ )
_save_image( publ, updated )
_save_image( publ )
db.session.commit()
_logger.debug( "- New ID: %d", publ.publ_id )
search.add_or_update_publisher( None, publ, None )
# generate the response
extras = { "publ_id": publ.publ_id }
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publisher_vals( publ, True, True )
return make_ok_response( record=vals, warnings=warnings )
def _save_image( publ, updated ):
def _save_image( publ ):
"""Save the publisher's image."""
# check if a new image was provided
@ -112,7 +110,7 @@ def _save_image( publ, updated ):
PublisherImage.query.filter( PublisherImage.publ_id == publ.publ_id ).delete()
if image_data == "{remove}":
# NOTE: The front-end sends this if it wants the publisher to have no image.
updated[ "publ_image_id" ] = None
publ.publ_image_id = None
return
# add the new image to the database
@ -122,7 +120,6 @@ def _save_image( publ, updated ):
db.session.add( img )
db.session.flush()
_logger.debug( "Created new image: %s, #bytes=%d", fname, len(image_data) )
updated[ "publ_image_id" ] = publ.publ_id
# ---------------------------------------------------------------------
@ -136,23 +133,21 @@ def update_publisher():
log = ( _logger, "Update publisher: id={}".format( publ_id ) )
)
warnings = []
updated = clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
clean_request_args( vals, _FIELD_NAMES, warnings, _logger )
# update the publisher
publ = Publisher.query.get( publ_id )
if not publ:
abort( 404 )
_save_image( publ, updated )
_save_image( publ )
vals[ "time_updated" ] = datetime.datetime.now()
apply_attrs( publ, vals )
db.session.commit()
search.add_or_update_publisher( None, publ, None )
# generate the response
extras = {}
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
return make_ok_response( updated=updated, extras=extras, warnings=warnings )
vals = get_publisher_vals( publ, True, True )
return make_ok_response( record=vals, warnings=warnings )
# ---------------------------------------------------------------------
@ -186,7 +181,4 @@ def delete_publisher( publ_id ):
search.delete_articles( deleted_articles )
extras = { "deletedPublications": deleted_pubs, "deletedArticles": deleted_articles }
if request.args.get( "list" ):
extras[ "publishers" ] = _do_get_publishers()
extras[ "publications" ] = do_get_publications()
return make_ok_response( extras=extras )

@ -10,16 +10,12 @@ from asl_articles.models import Scenario
@app.route( "/scenarios" )
def get_scenarios():
"""Get all scenarios."""
return jsonify( do_get_scenarios() )
return jsonify( {
scenario.scenario_id: get_scenario_vals( scenario )
for scenario in Scenario.query.all()
} )
def do_get_scenarios():
"""Get all scenarios."""
return {
s.scenario_id: _get_scenario_vals( s )
for s in Scenario.query #pylint: disable=not-an-iterable
}
def _get_scenario_vals( scenario ):
def get_scenario_vals( scenario ):
"""Extract public fields from a scenario record."""
return {
"scenario_id": scenario.scenario_id,

@ -161,17 +161,13 @@ def search_article( article_id ):
article = Article.query.get( article_id )
if not article:
return jsonify( [] )
article = get_article_vals( article, True )
_create_aslrb_links( article )
results = [ article ]
if article["pub_id"]:
pub = Publication.query.get( article["pub_id"] )
if pub:
results.append( get_publication_vals( pub, True, True ) )
if article["publ_id"]:
publ = Publisher.query.get( article["publ_id"] )
if publ:
results.append( get_publisher_vals( publ, True, True ) )
vals = get_article_vals( article, True )
_create_aslrb_links( vals )
results = [ vals ]
if article.parent_pub:
results.append( get_publication_vals( article.parent_pub, True, True ) )
if article.parent_publ:
results.append( get_publisher_vals( article.parent_publ, True, True ) )
return jsonify( results )
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@ -305,7 +301,7 @@ def _do_fts_search( fts_query_string, col_names, results=None ): #pylint: disabl
# prepare the result for the front-end
result = globals()[ "_get_{}_vals".format( owner_type ) ]( obj )
result[ "type" ] = owner_type
result[ "_type" ] = owner_type
result[ "rank" ] = row[1]
# return highlighted versions of the content to the caller

@ -13,12 +13,7 @@ from asl_articles.utils import decode_tags
@app.route( "/tags" )
def get_tags():
"""Get all tags."""
return jsonify( do_get_tags() )
def do_get_tags():
"""Get all tags."""
# get all the tags
# NOTE: This is pretty inefficient, since an article/publication's tags are munged into one big string
# and stored in a single column, so we need to manually unpack everything, but we'll see how it goes...
tags = defaultdict( int )
@ -36,4 +31,4 @@ def do_get_tags():
key = lambda v: ( -v[1], v[0] ) # sort by # instances, then name
)
return tags
return jsonify( tags )

@ -399,7 +399,7 @@ def test_publication_lists( webdriver, flask_app, dbconn ):
publ_sr = find_search_result( publ_name, results )
pubs = find_child( ".collapsible", publ_sr )
if pub_name:
# check that the publisher appears in the publisher's search result
# check that the publication appears in the publisher's search result
assert find_child( ".caption", pubs ).text == "Publications:"
pubs = find_children( "li", pubs )
assert len(pubs) == 1

@ -39,19 +39,17 @@ def get_request_args( vals, arg_names, log=None ):
def clean_request_args( vals, fields, warnings, logger ):
"""Clean incoming data."""
cleaned = {}
for f in fields:
if f.endswith( "_url" ):
continue # nb: don't clean URL's
f = _parse_arg_name( f )[ 0 ]
if isinstance( vals[f], str ):
val2 = clean_html( vals[f] )
if val2 != vals[f]:
vals[f] = val2
cleaned[f] = val2
logger.debug( "Cleaned HTML: %s => %s", f, val2 )
warnings.append( "Some values had HTML cleaned up." )
return cleaned
if not isinstance( vals[f], str ):
continue
val2 = clean_html( vals[f] )
if val2 != vals[f]:
vals[f] = val2
logger.debug( "Cleaned HTML: %s => %s", f, val2 )
warnings.append( "Some values had HTML cleaned up." )
def _parse_arg_name( arg_name ):
"""Parse a request argument name."""
@ -59,15 +57,15 @@ def _parse_arg_name( arg_name ):
return ( arg_name[1:], True ) # required argument
return ( arg_name, False ) # optional argument
def make_ok_response( extras=None, updated=None, warnings=None ):
def make_ok_response( extras=None, record=None, warnings=None ):
"""Generate a Flask 'success' response."""
resp = { "status": "OK" }
if extras:
resp.update( extras )
if updated:
resp[ "updated" ] = updated
if record:
resp["record"] = record
if warnings:
resp[ "warnings" ] = list( set( warnings ) ) # nb: remove duplicate messages
resp["warnings"] = list( set( warnings ) ) # nb: remove duplicate messages
return jsonify( resp )
# ---------------------------------------------------------------------

@ -12,6 +12,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult" ;
import { ArticleSearchResult } from "./ArticleSearchResult" ;
import ModalForm from "./ModalForm";
import AskDialog from "./AskDialog" ;
import { DataCache } from "./DataCache" ;
import { PreviewableImage } from "./PreviewableImage" ;
import { makeSmartBulletList } from "./utils.js" ;
import { APP_NAME } from "./constants.js" ;
@ -36,11 +37,14 @@ export class App extends React.Component
searchSeqNo: 0,
modalForm: null,
askDialog: null,
startupTasks: [ "caches.publishers", "caches.publications", "caches.authors", "caches.scenarios", "caches.tags" ],
startupTasks: [ "dummy" ], // FUDGE! We need at least one startup task.
} ;
gAppRef = this ;
this.setWindowTitle( null ) ;
// initialize the data cache
this.dataCache = new DataCache() ;
// initialize
this.args = queryString.parse( window.location.search ) ;
this._storeMsgs = this.isTestMode() && this.args.store_msgs ;
@ -98,13 +102,13 @@ export class App extends React.Component
<img src="/images/icons/tips.png" alt="Show tip articles." /> Show tips
</MenuItem>
<div className="divider" />
<MenuItem id="menu-new-publisher" onSelect={ () => PublisherSearchResult.onNewPublisher( this._onNewPublisher.bind(this) ) } >
<MenuItem id="menu-new-publisher" onSelect={PublisherSearchResult.onNewPublisher} >
<img src="/images/icons/publisher.png" alt="New publisher." /> New publisher
</MenuItem>
<MenuItem id="menu-new-publication" onSelect={ () => PublicationSearchResult.onNewPublication( this._onNewPublication.bind(this) ) } >
<MenuItem id="menu-new-publication" onSelect={PublicationSearchResult.onNewPublication} >
<img src="/images/icons/publication.png" alt="New publication." /> New publication
</MenuItem>
<MenuItem id="menu-new-article" onSelect={ () => ArticleSearchResult.onNewArticle( this._onNewArticle.bind(this) ) } >
<MenuItem id="menu-new-article" onSelect={ArticleSearchResult.onNewArticle} >
<img src="/images/icons/article.png" alt="New article." /> New article
</MenuItem>
</MenuList>
@ -122,7 +126,6 @@ export class App extends React.Component
<SearchResults ref={this._searchResultsRef}
seqNo = {this.state.searchSeqNo}
searchResults = {this.state.searchResults}
type = {this.props.type}
/>
</div> ) ;
}
@ -157,30 +160,16 @@ export class App extends React.Component
// check if the server started up OK
let on_startup_ok = () => {
// the backend server started up OK, continue our startup process
// initialize the caches
// NOTE: We maintain caches of key objects, so that we can quickly populate droplists. The backend server returns
// updated lists after any operation that could change them (create/update/delete), which is simpler and less error-prone
// than trying to manually keep our caches in sync. It's less efficient, but it won't happen too often, there won't be
// too many entries, and the database server is local.
this.caches = {} ;
[ "publishers", "publications", "authors", "scenarios", "tags" ].forEach( type => {
axios.get( this.makeFlaskUrl( "/" + type ) )
.then( resp => {
this.caches[ type ] = resp.data ;
this._onStartupTask( "caches." + type ) ;
} )
.catch( err => {
this.showErrorToast( <div> Couldn't load the {type}: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
this._onStartupTask( "dummy" ) ;
}
let on_startup_failure = () => {
// the backend server had problems during startup; we hide the spinner
// and leave the error message(s) on-screen.
document.getElementById( "loading" ).style.display = "none" ;
}
axios.get( this.makeFlaskUrl( "/startup-messages" ) )
.then( resp => {
axios.get(
this.makeFlaskUrl( "/startup-messages" )
).then( resp => {
// show any messages logged by the backend server as it started up
[ "info", "warning", "error" ].forEach( msgType => {
if ( resp.data[ msgType ] ) {
@ -200,8 +189,7 @@ export class App extends React.Component
on_startup_failure() ;
else
on_startup_ok() ;
} )
.catch( err => {
} ).catch( err => {
let errorMsg = err.toString() ;
if ( errorMsg.indexOf( "502" ) !== -1 || errorMsg.indexOf( "504" ) !== -1 )
this.showErrorToast( <div> Couldn't connect to the backend Flask server. </div> ) ;
@ -252,14 +240,12 @@ export class App extends React.Component
args.no_hilite = this._disableSearchResultHighlighting ;
axios.post(
this.makeFlaskUrl( url ), args
)
.then( resp => {
).then( resp => {
ReactDOM.findDOMNode( this._searchResultsRef.current ).scrollTo( 0, 0 ) ;
this.setState( { searchResults: resp.data, searchSeqNo: this.state.searchSeqNo+1 } ) ;
if ( onDone )
onDone() ;
} )
.catch( err => {
} ).catch( err => {
this.showErrorResponse( "The search query failed", err ) ;
this.setState( { searchResults: null, searchSeqNo: this.state.searchSeqNo+1 } ) ;
} ) ;
@ -288,37 +274,47 @@ export class App extends React.Component
)
}
_onNewPublisher( publ_id, vals ) { this._addNewSearchResult( vals, "publisher", "publ_id", publ_id ) ; }
_onNewPublication( pub_id, vals ) { this._addNewSearchResult( vals, "publication", "pub_id", pub_id ) ; }
_onNewArticle( article_id, vals ) { this._addNewSearchResult( vals, "article", "article_id", article_id ) ; }
_addNewSearchResult( vals, srType, idName, idVal ) {
// add the new search result to the start of the search results
// NOTE: This isn't really the right thing to do, since the new object might not actually be
// a result for the current search, but it's nice to give the user some visual feedback.
vals.type = srType ;
vals[ idName ] = idVal ;
let newSearchResults = [ vals ] ;
prependSearchResult( sr ) {
// add a new entry to the start of the search results
// NOTE: We do this after creating a new object, and while it isn't really the right thing
// to do (since the new object might not actually be a result for the current search), it's nice
// to give the user some visual feedback.
let newSearchResults = [ sr ] ;
newSearchResults.push( ...this.state.searchResults ) ;
this.setState( { searchResults: newSearchResults } ) ;
}
updatePublications( pubs ) {
// update the cache
let pubs2 = {} ;
for ( let i=0 ; i < pubs.length ; ++i ) {
const pub = pubs[ i ] ;
this.caches.publications[ pub.pub_id ] = pub ;
pubs2[ pub.pub_id ] = pub ;
}
// update the UI
updatePublisher( publ_id ) {
// update the specified publisher in the UI
this._doUpdateSearchResult(
(sr) => ( sr._type === "publisher" && sr.publ_id === publ_id ),
this.makeFlaskUrl( "/publisher/" + publ_id, {include_pubs:1,include_articles:1} )
) ;
this.forceFlaskImageReload( "publisher", publ_id ) ;
}
updatePublication( pub_id ) {
// update the specified publication in the UI
this._doUpdateSearchResult(
(sr) => ( sr._type === "publication" && sr.pub_id === pub_id ),
this.makeFlaskUrl( "/publication/" + pub_id, {include_articles:1,deep:1} )
) ;
this.forceFlaskImageReload( "publication", pub_id ) ;
}
_doUpdateSearchResult( srCheck, url ) {
// find the target search result in the UI
let newSearchResults = this.state.searchResults ;
for ( let i=0 ; i < newSearchResults.length ; ++i ) {
if ( newSearchResults[i].type === "publication" && pubs2[ newSearchResults[i].pub_id ] ) {
newSearchResults[i] = pubs2[ newSearchResults[i].pub_id ] ;
newSearchResults[i].type = "publication" ;
if ( srCheck( newSearchResults[i] ) ) {
// found it - get the latest details from the backend
axios.get( url ).then( resp => {
newSearchResults[i] = resp.data ;
this.setState( { searchResults: newSearchResults } ) ;
} ).catch( err => {
this.showErrorResponse( "Can't get the updated search result details", err ) ;
} ) ;
break ; // nb: we assume there's only 1 instance
}
}
this.setState( { searchResults: newSearchResults } ) ;
}
showModalForm( formId, title, titleColor, content, buttons ) {
@ -451,18 +447,6 @@ export class App extends React.Component
console.log( " " + detail ) ;
}
makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
// NOTE: react-select uses the "value" field to determine which choices have already been selected
// and thus should not be shown in the droplist of available choices.
let tagList = [] ;
if ( tags )
tags.map( tag => tagList.push( { value: tag, label: tag } ) ) ;
// create another list for all known tags
let allTags = this.caches.tags.map( tag => { return { value: tag[0], label: tag[0] } } ) ;
return [ tagList, allTags ] ;
}
makeAppUrl( url ) {
// FUDGE! The test suite needs any URL parameters to passed on to the next page if a link is clicked.
if ( this.isTestMode() )
@ -532,11 +516,15 @@ export class App extends React.Component
this.showWarningToast( this.props.warning ) ;
if ( this.props.doSearch )
this.props.doSearch() ;
// NOTE: We could preload the DataCache here (i.e. where it won't affect startup time),
// but it will happen on every page load (e.g. /article/NNN or /publication/NNN),
// which would probably hurt more than it helps (since the data isn't needed if the user
// is only searching for stuff i.e. most of the time).
}
setWindowTitleFromSearchResults( srType, idField, idVal, nameField ) {
for ( let sr of Object.entries( this.state.searchResults ) ) {
if ( sr[1].type === srType && String(sr[1][idField]) === idVal ) {
if ( sr[1]._type === srType && String(sr[1][idField]) === idVal ) {
this.setWindowTitle( typeof nameField === "function" ? nameField(sr[1]) : sr[1][nameField] ) ;
return ;
}

@ -8,7 +8,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ;
import { RatingStars } from "./RatingStars.js" ;
import { gAppRef } from "./App.js" ;
import { makeScenarioDisplayName, applyUpdatedVals, removeSpecialFields, makeCommaList, isLink } from "./utils.js" ;
import { makeScenarioDisplayName, updateRecord, makeCommaList, isLink } from "./utils.js" ;
const axios = require( "axios" ) ;
@ -25,8 +25,8 @@ export class ArticleSearchResult extends React.Component
const display_snippet = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "article_snippet!" ] || this.props.data.article_snippet
) ;
const pub = gAppRef.caches.publications[ this.props.data.pub_id ] ;
const publ = gAppRef.caches.publishers[ this.props.data.publ_id ] ;
const parent_pub = this.props.data._parent_pub ;
const parent_publ = this.props.data._parent_publ ;
const image_url = gAppRef.makeFlaskImageUrl( "article", this.props.data.article_image_id ) ;
// prepare the article's URL
@ -34,50 +34,33 @@ export class ArticleSearchResult extends React.Component
if ( article_url ) {
if ( ! isLink( article_url ) )
article_url = gAppRef.makeExternalDocUrl( article_url ) ;
} else if ( pub && pub.pub_url ) {
article_url = gAppRef.makeExternalDocUrl( pub.pub_url ) ;
} else if ( parent_pub && parent_pub.pub_url ) {
article_url = gAppRef.makeExternalDocUrl( parent_pub.pub_url ) ;
if ( article_url.substr( article_url.length-4 ) === ".pdf" && this.props.data.article_pageno )
article_url += "#page=" + this.props.data.article_pageno ;
}
// prepare the authors
let authors = [] ;
if ( this.props.data[ "authors!" ] ) {
// the backend has provided us with a list of author names (possibly highlighted) - use them directly
for ( let i=0 ; i < this.props.data["authors!"].length ; ++i ) {
const author_id = this.props.data.article_authors[ i ] ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) }
dangerouslySetInnerHTML = {{ __html: this.props.data["authors!"][i] }}
/> ) ;
}
} else {
// we only have a list of author ID's (the normal case) - figure out what the corresponding names are
for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
const author_id = this.props.data.article_authors[ i ] ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author_id ) }
dangerouslySetInnerHTML = {{ __html: gAppRef.caches.authors[ author_id ].author_name }}
/> ) ;
}
const author_names_hilite = this.props.data[ "authors!" ] ;
for ( let i=0 ; i < this.props.data.article_authors.length ; ++i ) {
const author = this.props.data.article_authors[ i ] ;
const author_name = author_names_hilite ? author_names_hilite[i] : author.author_name ;
authors.push( <Link key={i} className="author" title="Show articles from this author."
to = { gAppRef.makeAppUrl( "/author/" + author.author_id ) }
dangerouslySetInnerHTML = {{ __html: author_name }}
/> ) ;
}
// prepare the scenarios
let scenarios = [] ;
if ( this.props.data[ "scenarios!" ] ) {
// the backend has provided us with a list of scenarios (possibly highlighted) - use them directly
this.props.data[ "scenarios!" ].forEach( (scenario,i) =>
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( scenario ) }}
/> )
) ;
} else {
// we only have a list of scenario ID's (the normal case) - figure out what the corresponding names are
this.props.data.article_scenarios.forEach( (scenario,i) =>
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: makeScenarioDisplayName( gAppRef.caches.scenarios[scenario] ) }}
/> )
) ;
const scenario_names_hilite = this.props.data[ "scenarios!" ] ;
for ( let i=0 ; i < this.props.data.article_scenarios.length ; ++i ) {
const scenario = this.props.data.article_scenarios[ i ] ;
const scenario_display_name = scenario_names_hilite ? scenario_names_hilite[i] : makeScenarioDisplayName(scenario) ;
scenarios.push( <span key={i} className="scenario"
dangerouslySetInnerHTML = {{ __html: scenario_display_name }}
/> ) ;
}
// prepare the tags
@ -119,8 +102,8 @@ export class ArticleSearchResult extends React.Component
// NOTE: The "title" field is also given the CSS class "name" so that the normal CSS will apply to it.
// Some tests also look for a generic ".name" class name when checking search results.
const pub_display_name = pub ? PublicationSearchResult.makeDisplayName( pub ) : null ;
const publ_display_name = publ ? PublisherSearchResult.makeDisplayName( publ ) : null ;
const pub_display_name = parent_pub ? PublicationSearchResult.makeDisplayName( parent_pub ) : null ;
const publ_display_name = parent_publ ? PublisherSearchResult.makeDisplayName( parent_publ ) : null ;
return ( <div className="search-result article"
ref = { r => gAppRef.setTestAttribute( r, "article_id", this.props.data.article_id ) }
>
@ -179,61 +162,71 @@ export class ArticleSearchResult extends React.Component
} ) ;
}
static onNewArticle( notify ) {
ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/article/create", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.scenarios = resp.data.scenarios ;
gAppRef.caches.tags = resp.data.tags ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.article_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ;
if ( resp.data._publication )
gAppRef.updatePublications( [ resp.data._publication ] ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewArticle() {
gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
ArticleSearchResult2._doEditArticle( {}, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/article/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
// update the UI
const newArticle = resp.data.record ;
gAppRef.prependSearchResult( newArticle ) ;
if ( newArticle._parent_pub )
gAppRef.updatePublication( newArticle._parent_pub.pub_id ) ;
else if ( newArticle._parent_publ )
gAppRef.updatePublisher( newArticle._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The new article was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new article was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditArticle() {
ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.article_id = this.props.data.article_id ;
axios.post( gAppRef.makeFlaskUrl( "/article/update", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.scenarios = resp.data.scenarios ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ;
if ( resp.data._publications )
gAppRef.updatePublications( resp.data._publications ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
gAppRef.dataCache.get( [ "publishers", "publications", "authors", "scenarios", "tags" ], () => {
ArticleSearchResult2._doEditArticle( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.article_id = this.props.data.article_id ;
axios.post(
gAppRef.makeFlaskUrl( "/article/update" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "scenarios", "tags" ] ) ;
// update the UI
const article = resp.data.record ;
const orig_parent_pub = this.props.data._parent_pub ;
const orig_parent_publ = this.props.data._parent_publ ;
updateRecord( this.props.data, article ) ;
if ( article._parent_pub )
gAppRef.updatePublication( article._parent_pub.pub_id ) ;
else if ( article._parent_publ )
gAppRef.updatePublisher( article._parent_publ.publ_id ) ;
if ( orig_parent_pub )
gAppRef.updatePublication( orig_parent_pub.pub_id ) ;
if ( orig_parent_publ )
gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
// update the UI
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "article", newVals.article_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
}
onDeleteArticle() {
@ -245,21 +238,22 @@ export class ArticleSearchResult extends React.Component
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the article on the server
axios.get( gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id, {list:1} ) )
.then( resp => {
// update the caches
gAppRef.caches.authors = resp.data.authors ;
gAppRef.caches.tags = resp.data.tags ;
axios.get(
gAppRef.makeFlaskUrl( "/article/delete/" + this.props.data.article_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "authors", "tags" ] ) ;
// update the UI
this.props.onDelete( "article_id", this.props.data.article_id ) ;
if ( this.props.data._parent_pub )
gAppRef.updatePublication( this.props.data._parent_pub.pub_id ) ;
else if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The article was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The article was deleted. </div> ) ;
if ( resp.data._publication )
gAppRef.updatePublications( [ resp.data._publication ] ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the article: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},

@ -5,7 +5,7 @@ import { NEW_ARTICLE_PUB_PRIORITY_CUTOFF } from "./constants.js" ;
import { PublicationSearchResult } from "./PublicationSearchResult.js" ;
import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ;
import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, isNumeric } from "./utils.js" ;
import { makeScenarioDisplayName, parseScenarioDisplayName, checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, isNumeric } from "./utils.js" ;
// --------------------------------------------------------------------
@ -73,7 +73,7 @@ export class ArticleSearchResult2
// initialize the publications
let publications = [ { value: null, label: <i>(none)</i> } ] ;
let mostRecentPub = null ;
for ( let p of Object.entries(gAppRef.caches.publications) ) {
for ( let p of Object.entries( gAppRef.dataCache.data.publications ) ) {
const pub_display_name = PublicationSearchResult.makeDisplayName( p[1] ) ;
const pub = {
value: p[1].pub_id,
@ -106,7 +106,7 @@ export class ArticleSearchResult2
// initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i> } ] ;
let currPubl = publishers[0] ;
for ( let p of Object.entries(gAppRef.caches.publishers) ) {
for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( {
value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />
@ -118,25 +118,25 @@ export class ArticleSearchResult2
// initialize the authors
let allAuthors = [] ;
for ( let a of Object.entries(gAppRef.caches.authors) )
for ( let a of Object.entries( gAppRef.dataCache.data.authors ) )
allAuthors.push( { value: a[1].author_id, label: a[1].author_name } );
allAuthors.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currAuthors = [] ;
if ( vals.article_authors ) {
currAuthors = vals.article_authors.map( a => {
return { value: a, label: gAppRef.caches.authors[a].author_name }
return { value: a.author_id, label: a.author_name }
} ) ;
}
// initialize the scenarios
let allScenarios = [] ;
for ( let s of Object.entries(gAppRef.caches.scenarios) )
for ( let s of Object.entries( gAppRef.dataCache.data.scenarios ) )
allScenarios.push( { value: s[1].scenario_id, label: makeScenarioDisplayName(s[1]) } ) ;
allScenarios.sort( (lhs,rhs) => { return lhs.label.localeCompare( rhs.label ) ; } ) ;
let currScenarios = [] ;
if ( vals.article_scenarios ) {
currScenarios = vals.article_scenarios.map( s => {
return { value: s, label: makeScenarioDisplayName(gAppRef.caches.scenarios[s]) }
return { value: s.scenario_id, label: makeScenarioDisplayName(s) }
} ) ;
}
function onScenarioCreated( val ) {
@ -149,7 +149,7 @@ export class ArticleSearchResult2
}
// initialize the tags
const tags = gAppRef.makeTagLists( vals.article_tags ) ;
const tags = makeTagLists( vals.article_tags ) ;
// prepare the form content
/* eslint-disable jsx-a11y/img-redundant-alt */
@ -260,7 +260,7 @@ export class ArticleSearchResult2
} ) ;
} else if ( r === "article_tags" ) {
let vals = unloadCreatableSelect( refs[r] ) ;
newVals[ r ] = vals.map( v => v.label ) ;
newVals[ r ] = vals.map( v => v.label ) ;
} else
newVals[ r ] = refs[r].value.trim() ;
}

@ -0,0 +1,59 @@
import React from "react" ;
import { gAppRef } from "./App.js" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
export class DataCache
{
constructor() {
// initialize
this.data = {} ;
}
get( keys, onOK ) {
// initialize
if ( onOK === undefined )
onOK = () => {} ;
let nOK = 0 ;
function onPartialOK() {
if ( ++nOK === keys.length ) {
onOK() ;
}
}
// refresh each key
for ( let key of keys ) {
// check if we already have the data in the cache
if ( this.data[ key ] !== undefined ) {
onPartialOK() ;
} else {
// nope - get the specified data from the backend
axios.get(
gAppRef.makeFlaskUrl( "/" + key )
).then( resp => {
// got it - update the cache
this.data[ key ] = resp.data ;
onPartialOK() ;
} ).catch( err => {
gAppRef.showErrorToast(
<div> Couldn't load the {key}: <div className="monospace"> {err.toString()} </div> </div>
) ;
} ) ;
}
}
}
refresh( keys, onOK ) {
// refresh the specified keys
for ( let key of keys )
delete this.data[ key ] ;
this.get( keys, onOK ) ;
}
}
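
A usage sketch for the new cache (the callback below is hypothetical): get() only hits the backend for keys that aren't already cached, and refresh() drops the keys first so the next get() reloads them.

// Illustrative usage only; openEditForm() is a made-up callback.
gAppRef.dataCache.get( [ "authors", "scenarios", "tags" ], () => {
    const authors = gAppRef.dataCache.data.authors ;   // keyed by author_id, as returned by /authors
    openEditForm( authors ) ;
} ) ;

// after a create/update/delete, force these keys to be re-fetched on next use
gAppRef.dataCache.refresh( [ "authors", "tags" ] ) ;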

@ -6,7 +6,7 @@ import { PublicationSearchResult2 } from "./PublicationSearchResult2.js" ;
import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLICATION_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields, isLink } from "./utils.js" ;
import { makeCollapsibleList, pluralString, updateRecord, isLink } from "./utils.js" ;
const axios = require( "axios" ) ;
@ -21,8 +21,8 @@ export class PublicationSearchResult extends React.Component
const display_description = PreviewableImage.adjustHtmlForPreviewableImages(
this.props.data[ "pub_description!" ] || this.props.data.pub_description
) ;
const publ = gAppRef.caches.publishers[ this.props.data.publ_id ] ;
const image_url = PublicationSearchResult.makeImageUrl( this.props.data ) ;
const parent_publ = this.props.data._parent_publ ;
const image_url = PublicationSearchResult._makeImageUrl( this.props.data ) ;
// prepare the publication's URL
let pub_url = this.props.data.pub_url ;
@ -94,10 +94,10 @@ export class PublicationSearchResult extends React.Component
>
<div className="header">
{menu}
{ publ &&
{ parent_publ &&
<Link className="publisher" title="Show this publisher."
to = { gAppRef.makeAppUrl( "/publisher/" + this.props.data.publ_id ) }
dangerouslySetInnerHTML={{ __html: publ.publ_name }}
dangerouslySetInnerHTML={{ __html: parent_publ.publ_name }}
/>
}
<Link className="name" title="Show this publication."
@ -126,61 +126,69 @@ export class PublicationSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ;
}
static onNewPublication( notify ) {
PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/publication/create", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.pub_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ;
gAppRef.closeModalForm() ;
// NOTE: The parent publisher will update itself in the UI to show this new publication,
// since we've just received an updated copy of the publications.
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewPublication() {
gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
PublicationSearchResult2._doEditPublication( {}, null, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/publication/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
// update the UI
const newPub = resp.data.record ;
gAppRef.prependSearchResult( newPub ) ;
if ( newPub._parent_publ )
gAppRef.updatePublisher( newPub._parent_publ.publ_id ) ;
// update the UI
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publication was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publication was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditPublication() {
// get the articles for this publication
let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list
PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => {
// send the updated details to the server
newVals.pub_id = this.props.data.pub_id ;
if ( articles )
newVals.article_order = articles.map( a => a.article_id ) ;
axios.post( gAppRef.makeFlaskUrl( "/publication/update", {list:1} ), newVals )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
// NOTE: The parent publisher will update itself in the UI to show this updated publication,
// since we've just received an updated copy of the publications.
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
gAppRef.dataCache.get( [ "publishers", "publications", "tags" ], () => {
// get the articles for this publication
let articles = this.props.data.articles ; // nb: _doEditPublication() might change the order of this list
PublicationSearchResult2._doEditPublication( this.props.data, articles, (newVals,refs) => {
// send the updated details to the server
newVals.pub_id = this.props.data.pub_id ;
if ( articles )
newVals.article_order = articles.map( a => a.article_id ) ;
axios.post(
gAppRef.makeFlaskUrl( "/publication/update" ), newVals
).then( resp => {
// refresh the caches, then update the UI
gAppRef.dataCache.refresh( [ "publications", "tags" ], () => {
// update the UI
const pub = resp.data.record ;
const orig_parent_publ = this.props.data._parent_publ ;
updateRecord( this.props.data, pub ) ;
if ( pub._parent_publ )
gAppRef.updatePublisher( pub._parent_publ.publ_id ) ;
if ( orig_parent_publ )
gAppRef.updatePublisher( orig_parent_publ.publ_id ) ;
// reload the image (if it changed) and re-render
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publication", newVals.pub_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// let the user know what happened
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
@ -208,22 +216,23 @@ export class PublicationSearchResult extends React.Component
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the publication on the server
axios.get( gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id, {list:1} ) )
.then( resp => {
// update the caches
gAppRef.caches.publications = resp.data.publications ;
gAppRef.caches.tags = resp.data.tags ;
axios.get(
gAppRef.makeFlaskUrl( "/publication/delete/" + this.props.data.pub_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "publications", "tags" ] ) ;
// update the UI
this.props.onDelete( "pub_id", this.props.data.pub_id ) ;
resp.data.deleteArticles.forEach( article_id => {
resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ;
} ) ;
if ( this.props.data._parent_publ )
gAppRef.updatePublisher( this.props.data._parent_publ.publ_id ) ;
// let the user know what happened
if ( resp.data.warnings )
gAppRef.showWarnings( "The publication was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publication was deleted. </div> ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publication: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},
@ -231,11 +240,11 @@ export class PublicationSearchResult extends React.Component
} ) ;
}
// get the publication details
axios.get( gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id ) )
.then( resp => {
axios.get(
gAppRef.makeFlaskUrl( "/publication/" + this.props.data.pub_id )
).then( resp => {
doDelete( resp.data.nArticles ) ;
} )
.catch( err => {
} ).catch( err => {
doDelete( err ) ;
} ) ;
}
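// NOTE: The delete flow above assumes response payloads along these lines (illustrative only):
//   GET /publication/<id>        => { ..., "nArticles": 2 }
//   GET /publication/delete/<id> => { "deletedArticles": [ 10, 11 ], "warnings": [ ... ] }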
@ -253,15 +262,13 @@ export class PublicationSearchResult extends React.Component
}
_makeDisplayName( allowAlternateContent ) { return PublicationSearchResult.makeDisplayName( this.props.data, allowAlternateContent ) ; }
static makeImageUrl( vals ) {
static _makeImageUrl( vals ) {
let image_url = gAppRef.makeFlaskImageUrl( "publication", vals.pub_image_id ) ;
if ( ! image_url ) {
// check if the parent publisher has an image
if ( vals.publ_id ) {
const publ = gAppRef.caches.publishers[ vals.publ_id ] ;
if ( publ )
image_url = gAppRef.makeFlaskImageUrl( "publisher", publ.publ_image_id ) ;
}
const parent_publ = vals._parent_publ ;
if ( parent_publ )
image_url = gAppRef.makeFlaskImageUrl( "publisher", parent_publ.publ_image_id ) ;
}
return image_url ;
}
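// NOTE: _parent_publ is assumed to carry the parent publisher's public fields, e.g. (illustrative values):
//   { publ_id: 42, publ_name: "Example Publisher", publ_image_id: 42 }
// which is all this component needs to render the publisher link and fall back to its image.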

@ -4,7 +4,7 @@ import CreatableSelect from "react-select/creatable" ;
import ReactDragListView from "react-drag-listview/lib/index.js" ;
import { gAppRef } from "./App.js" ;
import { ImageFileUploader } from "./FileUploader.js" ;
import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, ciCompare, isNumeric } from "./utils.js" ;
import { checkConstraints, confirmDiscardChanges, sortSelectableOptions, unloadCreatableSelect, makeTagLists, ciCompare, isNumeric } from "./utils.js" ;
// --------------------------------------------------------------------
@ -62,7 +62,7 @@ export class PublicationSearchResult2
// initialize the publishers
let publishers = [ { value: null, label: <i>(none)</i> } ] ;
let currPubl = publishers[0] ;
for ( let p of Object.entries(gAppRef.caches.publishers) ) {
for ( let p of Object.entries( gAppRef.dataCache.data.publishers ) ) {
publishers.push( {
value: p[1].publ_id,
label: <span dangerouslySetInnerHTML={{__html: p[1].publ_name}} />
@ -76,7 +76,7 @@ export class PublicationSearchResult2
// NOTE: As a convenience, we provide a droplist of known publication names (without edition #'s),
// to make it easier to add a new edition of an existing publication.
let publications = {} ;
for ( let p of Object.entries(gAppRef.caches.publications) )
for ( let p of Object.entries( gAppRef.dataCache.data.publications ) )
publications[ p[1].pub_name ] = p[1] ;
let publications2 = [] ;
for ( let pub_name in publications ) {
@ -93,7 +93,7 @@ export class PublicationSearchResult2
}
// initialize the tags
const tags = gAppRef.makeTagLists( vals.pub_tags ) ;
const tags = makeTagLists( vals.pub_tags ) ;
// initialize the articles
function make_article_display_name( article ) {
@ -206,7 +206,7 @@ export class PublicationSearchResult2
function checkForDupe( vals ) {
// check for an existing publication name/edition
for ( let pub of Object.entries(gAppRef.caches.publications) ) {
for ( let pub of Object.entries( gAppRef.dataCache.data.publications ) ) {
if ( ciCompare( pub[1].pub_name, vals.pub_name ) !== 0 )
continue ;
if ( ! pub[1].pub_edition && ! vals.pub_edition )

@ -7,7 +7,7 @@ import { PublicationSearchResult } from "./PublicationSearchResult.js"
import { PreviewableImage } from "./PreviewableImage.js" ;
import { PUBLISHER_EXCESS_PUBLICATION_THRESHOLD, PUBLISHER_EXCESS_ARTICLE_THRESHOLD } from "./constants.js" ;
import { gAppRef } from "./App.js" ;
import { makeCollapsibleList, pluralString, applyUpdatedVals, removeSpecialFields } from "./utils.js" ;
import { makeCollapsibleList, pluralString, updateRecord } from "./utils.js" ;
const axios = require( "axios" ) ;
@ -26,11 +26,7 @@ export class PublisherSearchResult extends React.Component
const image_url = gAppRef.makeFlaskImageUrl( "publisher", this.props.data.publ_image_id ) ;
// prepare the publications
let pubs = [] ;
for ( let pub of Object.entries(gAppRef.caches.publications) ) {
if ( pub[1].publ_id === this.props.data.publ_id )
pubs.push( pub[1] ) ;
}
let pubs = this.props.data.publications ;
pubs.sort( (lhs,rhs) => {
if ( lhs.pub_seqno && rhs.pub_seqno )
return rhs.pub_seqno - lhs.pub_seqno ;
@ -98,53 +94,60 @@ export class PublisherSearchResult extends React.Component
PreviewableImage.activatePreviewableImages( this ) ;
}
static onNewPublisher( notify ) {
PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => {
axios.post( gAppRef.makeFlaskUrl( "/publisher/create", {list:1} ), newVals )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
// unload any updated values
applyUpdatedVals( newVals, newVals, resp.data.updated, refs ) ;
// update the UI with the new details
notify( resp.data.publ_id, newVals ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
static onNewPublisher() {
gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
PublisherSearchResult2._doEditPublisher( {}, (newVals,refs) => {
axios.post(
gAppRef.makeFlaskUrl( "/publisher/create" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers" ] ) ;
// update the UI
const newPubl = resp.data.record ;
gAppRef.prependSearchResult( newPubl ) ;
// let the user know what happened
if ( resp.data.warnings )
gAppRef.showWarnings( "The new publisher was created OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The new publisher was created OK. </div> ) ;
gAppRef.closeModalForm() ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't create the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} ) ;
} ) ;
}
onEditPublisher() {
PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.publ_id = this.props.data.publ_id ;
axios.post( gAppRef.makeFlaskUrl( "/publisher/update", {list:1} ), newVals )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
// update the UI with the new details
applyUpdatedVals( this.props.data, newVals, resp.data.updated, refs ) ;
removeSpecialFields( this.props.data ) ;
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} )
.catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
gAppRef.dataCache.get( [ "publishers", "publications" ], () => {
PublisherSearchResult2._doEditPublisher( this.props.data, (newVals,refs) => {
// send the updated details to the server
newVals.publ_id = this.props.data.publ_id ;
axios.post(
gAppRef.makeFlaskUrl( "/publisher/update" ), newVals
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers" ], () => {
// update the UI
const publ = resp.data.record ;
updateRecord( this.props.data, publ ) ;
for ( let pub of publ.publications )
gAppRef.updatePublication( pub.pub_id ) ;
// reload the image (if it changed) and re-render
if ( newVals.imageData )
gAppRef.forceFlaskImageReload( "publisher", newVals.publ_id ) ;
this.forceUpdate() ;
PreviewableImage.activatePreviewableImages( this ) ;
// let the user know what happened
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was updated OK.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was updated OK. </div> ) ;
gAppRef.closeModalForm() ;
} ) ;
} ).catch( err => {
gAppRef.showErrorMsg( <div> Couldn't update the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
} );
} ) ;
}
onDeletePublisher() {
@ -176,11 +179,10 @@ export class PublisherSearchResult extends React.Component
gAppRef.ask( content, "ask", {
"OK": () => {
// delete the publisher on the server
axios.get( gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id, {list:1} ) )
.then( resp => {
// update the cached publishers
gAppRef.caches.publishers = resp.data.publishers ;
gAppRef.caches.publications = resp.data.publications ; // nb: because of cascading deletes
axios.get(
gAppRef.makeFlaskUrl( "/publisher/delete/" + this.props.data.publ_id )
).then( resp => {
gAppRef.dataCache.refresh( [ "publishers", "publications" ] ) ;
// update the UI
this.props.onDelete( "publ_id", this.props.data.publ_id ) ;
resp.data.deletedPublications.forEach( pub_id => {
@ -189,12 +191,12 @@ export class PublisherSearchResult extends React.Component
resp.data.deletedArticles.forEach( article_id => {
this.props.onDelete( "article_id", article_id ) ;
} ) ;
// let the user know what happened
if ( resp.data.warnings )
gAppRef.showWarnings( "The publisher was deleted.", resp.data.warnings ) ;
else
gAppRef.showInfoToast( <div> The publisher was deleted. </div> ) ;
} )
.catch( err => {
} ).catch( err => {
gAppRef.showErrorToast( <div> Couldn't delete the publisher: <div className="monospace"> {err.toString()} </div> </div> ) ;
} ) ;
},
@ -202,11 +204,11 @@ export class PublisherSearchResult extends React.Component
} ) ;
} ;
// get the publisher details
axios.get( gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id ) )
.then( resp => {
axios.get(
gAppRef.makeFlaskUrl( "/publisher/" + this.props.data.publ_id )
).then( resp => {
doDelete( resp.data.nPublications, resp.data.nArticles ) ;
} )
.catch( err => {
} ).catch( err => {
doDelete( err ) ;
} ) ;
}

@ -84,7 +84,7 @@ export class PublisherSearchResult2
function checkForDupe( publName ) {
// check for an existing publisher
for ( let publ of Object.entries(gAppRef.caches.publishers) ) {
for ( let publ of Object.entries( gAppRef.dataCache.data.publishers ) ) {
if ( ciCompare( publName, publ[1].publ_name ) === 0 )
return true ;
}

@ -43,22 +43,22 @@ export class SearchResults extends React.Component
// render the search results
results = [] ;
this.props.searchResults.forEach( sr => {
if ( sr.type === "publisher" ) {
if ( sr._type === "publisher" ) {
results.push( <PublisherSearchResult key={"publisher:"+sr.publ_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
/> ) ;
} else if ( sr.type === "publication" ) {
} else if ( sr._type === "publication" ) {
results.push( <PublicationSearchResult key={"publication:"+sr.pub_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
onArticleClick = { this.props.type === "publication" ? (a) => scrollToArticle(a) : null }
onArticleClick = { (a) => scrollToArticle(a) }
/> ) ;
} else if ( sr.type === "article" ) {
} else if ( sr._type === "article" ) {
results.push( <ArticleSearchResult key={"article:"+sr.article_id} data={sr}
onDelete = { (n,v) => this.onDeleteSearchResult( n, v ) }
ref = { r => articleRefs[sr.article_id] = r }
/> ) ;
} else {
gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr.type ) ;
gAppRef.logInternalError( "Unknown search result type.", "srType = "+sr._type ) ;
}
} ) ;
}
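// NOTE: Each search result is now expected to carry a "_type" discriminator plus that type's ID field,
// e.g. (illustrative): { _type: "publication", pub_id: 17, ... } or { _type: "publisher", publ_id: 3, ... }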

@ -5,6 +5,8 @@ import { App, gAppRef } from "./App" ;
import { PublicationSearchResult } from "./PublicationSearchResult" ;
import "./index.css" ;
const axios = require( "axios" ) ;
// --------------------------------------------------------------------
ReactDOM.render(
@ -38,8 +40,12 @@ ReactDOM.render(
<Route path="/author/:authorId" render={ (props) => <App {...props} type="author" key={"author:"+props.match.params.authorId}
doSearch = { () => gAppRef.runSpecialSearch( "/search/author/"+gAppRef.props.match.params.authorId, null,
() => {
const author = gAppRef.caches.authors[ gAppRef.props.match.params.authorId ] ;
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" )
axios.get(
gAppRef.makeFlaskUrl( "/author/" + gAppRef.props.match.params.authorId )
).then( resp => {
const author = resp.data ;
gAppRef.setWindowTitle( author ? author.author_name : "Unknown author" )
} ) ;
}
) }
/> } />
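{/* NOTE: "/author/<id>" is assumed to return the author's public fields,
    e.g. (illustrative): { author_id: 3, author_name: "A. Author" } */}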

@ -76,6 +76,8 @@ export function confirmDiscardChanges( oldVals, newVals, accept ) {
}
}
// --------------------------------------------------------------------
export function sortSelectableOptions( options ) {
options.sort( (lhs,rhs) => {
lhs = ReactDOMServer.renderToStaticMarkup( lhs.label ) ;
@ -100,32 +102,21 @@ export function unloadCreatableSelect( sel ) {
return vals2 ;
}
// --------------------------------------------------------------------
export function applyUpdatedVals( vals, newVals, updated, refs ) {
// NOTE: After the user has edited an object, we send the new values to the server to store in
// the database, but the server will sometimes return modified values back e.g. because unsafe HTML
// was removed, or the ID's of newly-created authors. This function applies these new values back
// into the original table of values.
for ( let r in refs )
vals[ r ] = (updated && updated[r] !== undefined) ? updated[r] : newVals[r] ;
// NOTE: We sometimes want to force an entry into the vals that doesn't have
// an associated ref (i.e. UI element) e.g. XXX_image_id.
for ( let key in updated )
vals[ key ] = updated[ key ] ;
}
export function removeSpecialFields( vals ) {
// NOTE: This removes special fields sent to us by the backend containing content that has search terms highlighted.
// We only really need to remove author names for articles, since the backend sends us these (possibly highlighted)
// as well as the ID's, but they could be incorrect after the user has edited an article. However, for consistency,
// we remove all these special fields for everything.
let keysToDelete = [] ;
for ( let key in vals ) {
if ( key[ key.length-1 ] === "!" )
keysToDelete.push( key ) ;
export function makeTagLists( tags ) {
// convert the tags into a list suitable for CreatableSelect
// NOTE: react-select uses the "value" field to determine which choices have already been selected
// and thus should not be shown in the droplist of available choices.
let tagList = [] ;
if ( tags ) {
tags.map(
(tag) => tagList.push( { value: tag, label: tag } )
) ;
}
keysToDelete.forEach( k => delete vals[k] ) ;
// create another list for all known tags
let allTags = gAppRef.dataCache.data.tags.map(
(tag) => { return { value: tag[0], label: tag[0] } }
) ;
return [ tagList, allTags ] ;
}
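// Typical usage (a sketch; the real edit dialogs wire in more props):
//   const [ currTags, allTags ] = makeTagLists( vals.pub_tags ) ;
//   <CreatableSelect isMulti options={allTags} defaultValue={currTags} />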
// --------------------------------------------------------------------
@ -170,6 +161,12 @@ export function parseScenarioDisplayName( displayName ) {
// --------------------------------------------------------------------
export function updateRecord( rec, newVals ) {
// update a record with new values
for ( let key in newVals )
rec[ key ] = newVals[ key ] ;
}
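// e.g. updateRecord( this.props.data, resp.data.record ) - a shallow merge that copies the server's
// updated fields into the record a search result is already holding, so it re-renders with the new values.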
export function makeCollapsibleList( caption, vals, maxItems, style ) {
if ( ! vals || vals.length === 0 )
return null ;
