From 01de1941295038a6ab93d327ba6b704e62d23a66 Mon Sep 17 00:00:00 2001
From: weko3-dev037
Date: Fri, 15 May 2020 14:54:44 +0700
Subject: [PATCH 01/11] Implement Export BibTex

---
 .../templates/weko_items_ui/export_list.html |   1 +
 .../translations/en/LC_MESSAGES/messages.po  |   3 +
 .../translations/ja/LC_MESSAGES/messages.po  |   2 +
 .../weko_items_ui/translations/messages.pot  |   3 +
 modules/weko-items-ui/weko_items_ui/utils.py | 100 +-
 modules/weko-items-ui/weko_items_ui/views.py |  13 +
 .../serializers/WekoBibTexSerializer.py      | 907 ++++++++++++------
 .../static/js/weko_search_ui/app.js          |  35 +
 8 files changed, 736 insertions(+), 328 deletions(-)

diff --git a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
index b030d62ad8..c3baacbb37 100644
--- a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
+++ b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
@@ -32,6 +32,7 @@

Contains restricted content

+

{{_("Required item is not inputted")}}

{{ (record.metadata.hasOwnProperty('file') && record.metadata.file.hasOwnProperty('URI')) ? record.metadata.file.URI.length : '0' }} diff --git a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po index 8ecdd27f7a..022643c1fc 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po @@ -307,3 +307,6 @@ msgstr "" msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "" + +msgid "Required item is not inputted" +msgstr "" diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po index 6d11b2af1b..52147318be 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po @@ -424,3 +424,5 @@ msgstr "以下のメールアドレスがシステムに登録されていませ msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "規定の数を超えています。選択できるのは2つまでです。" +msgid "Required item is not inputted" +msgstr "必須項目がありません。" diff --git a/modules/weko-items-ui/weko_items_ui/translations/messages.pot b/modules/weko-items-ui/weko_items_ui/translations/messages.pot index f134f2caf2..aabd7a015e 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/messages.pot +++ b/modules/weko-items-ui/weko_items_ui/translations/messages.pot @@ -358,3 +358,6 @@ msgstr "" msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "" + +msgid "Required item is not inputted" +msgstr "" diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index acb33b6392..d34ccb20c5 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -852,6 +852,50 @@ def get_list_file_by_record_id(recid): return list_file_name +def write_bibtex_files(item_types_data, export_path): + """Write Bitex data to files. + + @param item_types_data: + @param export_path: + @return: + """ + for item_type_id in item_types_data: + item_type_data = item_types_data[item_type_id] + output = make_bibtex_data(item_type_data['recids']) + # create file to write data in case has output of Bibtex + if output: + with open('{}/{}.bib'.format(export_path, + item_type_data.get('name')), + 'w') as file: + file.write(output) + + +def write_tsv_files(item_types_data, export_path, list_item_role): + """Write TSV data to files. + + @param item_types_data: + @param export_path: + @param list_item_role: + @return: + """ + for item_type_id in item_types_data: + keys, labels, records = make_stats_tsv( + item_type_id, + item_types_data[item_type_id]['recids'], + list_item_role) + item_types_data[item_type_id]['recids'].sort() + item_types_data[item_type_id]['keys'] = keys + item_types_data[item_type_id]['labels'] = labels + item_types_data[item_type_id]['data'] = records + item_type_data = item_types_data[item_type_id] + + with open('{}/{}.tsv'.format(export_path, + item_type_data.get('name')), + 'w') as file: + tsvs_output = package_export_file(item_type_data) + file.write(tsvs_output.getvalue()) + + def export_items(post_data): """Gather all the item data and export and return as a JSON or BIBTEX. 
@@ -915,22 +959,10 @@ def check_item_type_name(name): item_types_data[item_type_id]['recids'].append(record_id) # Create export info file - for item_type_id in item_types_data: - keys, labels, records = make_stats_tsv( - item_type_id, - item_types_data[item_type_id]['recids'], - list_item_role) - item_types_data[item_type_id]['recids'].sort() - item_types_data[item_type_id]['keys'] = keys - item_types_data[item_type_id]['labels'] = labels - item_types_data[item_type_id]['data'] = records - item_type_data = item_types_data[item_type_id] - - with open('{}/{}.tsv'.format(export_path, - item_type_data.get('name')), - 'w') as file: - tsvs_output = package_export_file(item_type_data) - file.write(tsvs_output.getvalue()) + if export_format == 'BIBTEX': + write_bibtex_files(item_types_data, export_path) + else: + write_tsv_files(item_types_data, export_path, list_item_role) # Create bag bagit.make_bag(export_path) @@ -1626,3 +1658,39 @@ def translate_validation_message(item_property, cur_lang): for _key, value in item_property.get(properties_attr).items(): set_validation_message(value, cur_lang) translate_validation_message(value, cur_lang) + + +def validate_bibtex(record_ids): + """Validate data of records for Bibtex exporting. + + @param record_ids: + @return: + """ + lst_invalid_ids = [] + err_msg = _('Please input all required item.') + from weko_schema_ui.serializers import WekoBibTexSerializer + for record_id in record_ids: + record = WekoRecord.get_record_by_pid(record_id) + pid = record.pid_recid + serializer = WekoBibTexSerializer() + result = serializer.serialize(pid, record, True) + if not result or result == err_msg: + lst_invalid_ids.append(record_id) + return lst_invalid_ids + + +def make_bibtex_data(record_ids): + """Serialize all Bibtex data by record ids + @param record_ids: + @return: + """ + result = '' + err_msg = _('Please input all required item.') + from weko_schema_ui.serializers import WekoBibTexSerializer + for record_id in record_ids: + record = WekoRecord.get_record_by_pid(record_id) + pid = record.pid_recid + serializer = WekoBibTexSerializer() + output = serializer.serialize(pid, record) + result += output if output != err_msg else '' + return result diff --git a/modules/weko-items-ui/weko_items_ui/views.py b/modules/weko-items-ui/weko_items_ui/views.py index 1feaabfc4a..64b7c6272b 100644 --- a/modules/weko-items-ui/weko_items_ui/views.py +++ b/modules/weko-items-ui/weko_items_ui/views.py @@ -1098,6 +1098,19 @@ def check_restricted_content(): return jsonify({'restricted_records': list(restricted_records)}) +@blueprint.route('/validate_bibtext_export', methods=['POST']) +def validate_bibtex_export(): + """Validate export Bibtex. + + @return: + """ + from .utils import validate_bibtex + post_data = request.get_json() + record_ids = post_data['record_ids'] + invalid_record_ids = validate_bibtex(record_ids) + return jsonify(invalid_record_ids=invalid_record_ids) + + @blueprint.route('/export', methods=['GET', 'POST']) def export(): """Item export view.""" diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index 1ca9cb80fc..68eb7fd08a 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -19,274 +19,451 @@ # MA 02111-1307, USA. 
"""WEKO BibTex Serializer.""" - import xml.etree.ElementTree as ET from datetime import datetime +from enum import Enum from bibtexparser.bibdatabase import BibDatabase from bibtexparser.bwriter import BibTexWriter -from flask import abort +from flask import current_app from ..schema import SchemaTree, cache_schema from .wekoxml import WekoXMLSerializer +class BibTexTypes(Enum): + """BibTex Types.""" + + ARTICLE = 'article' + BOOK = 'book' + BOOKLET = 'booklet' + CONFERENCE = 'conference' + INBOOK = 'inbook' + INCOLLECTION = 'incollection' + INPROCEEDINGS = 'inproceedings' + MANUAL = 'manual' + MASTERSTHESIS = 'mastersthesis' + MISC = 'misc' + PHDTHESIS = 'phdthesis' + PROCEEDINGS = 'proceedings' + TECHREPORT = 'techreport' + UNPUBLISHED = 'unpublished' + + +class BibTexFields(Enum): + """BibTex Fields.""" + + AUTHOR = 'author' + YOMI = 'yomi' + TITLE = 'title' + BOOK_TITLE = 'book' + JOURNAL = 'journal' + VOLUME = 'volume' + NUMBER = 'issue' + PAGES = 'pages' + PAGE_START = 'page_start' + PAGE_END = 'page_end' + NOTE = 'note' + PUBLISHER = 'publisher' + YEAR = 'year' + MONTH = 'month' + URL = 'url' + DOI = 'doi' + SCHOOL = 'school' + TYPE = 'type' + EDITOR = 'editor' + EDITION = 'edition' + CHAPTER = 'chapter' + SERIES = 'series' + ADDRESS = 'address' + ORGANIZATION = 'organization' + KEY = 'key' + CROSSREF = 'crossref' + ANNOTE = 'annote' + INSTITUTION = 'institution' + HOW_PUBLISHER = 'how publisher' + + class WekoBibTexSerializer(): """Weko bibtex serializer.""" + # Mapping type between Bibtex type and dc:type of jpcoar + type_mapping = { + BibTexTypes.ARTICLE: ['journal article', + 'departmental bulletin paper', + 'review article', 'data paper', 'periodical', + 'editorial', + 'article'], + BibTexTypes.BOOK: ['book'], + BibTexTypes.INBOOK: ['book part'], + BibTexTypes.INPROCEEDINGS: ['conference paper'], + BibTexTypes.MASTERSTHESIS: ['master thesis'], + BibTexTypes.MISC: ['research proposal', 'technical documentation', + 'thesis', + 'bachelor thesis', 'cartographic material', + 'map', + 'lecture', 'conference object', 'conference poster', + 'image', 'still image', 'moving image', 'video', + 'sound', + 'musical notation', 'interactive resource', + 'learning material', 'patent', 'dataset', 'software', + 'workflow', + 'other'], + BibTexTypes.PHDTHESIS: ['doctoral thesis'], + BibTexTypes.PROCEEDINGS: ['conference proceedings'], + BibTexTypes.TECHREPORT: ['report', + 'research report', + 'working paper', + 'technical report', + 'policy report', + 'internal report', + 'report part'], + BibTexTypes.INCOLLECTION: [], + BibTexTypes.BOOKLET: [], + BibTexTypes.CONFERENCE: [], + BibTexTypes.MANUAL: [], + BibTexTypes.UNPUBLISHED: []} + def __init__(self): """Init.""" # Load namespace self.ns = cache_schema('jpcoar_mapping').get('namespaces') + self.lst_identifier_type = ['doi', 'hdl', 'url'] + # JPCOAR elements + jp_jp = '{' + self.ns['jpcoar'] + '}' + jp_dc = '{' + self.ns['dc'] + '}' + jp_datacite = '{' + self.ns['datacite'] + '}' + self.find_pattern = './/{}' + + self.fields_mapping = { + BibTexFields.AUTHOR: jp_jp + 'creatorName', + BibTexFields.TITLE: jp_dc + 'title', + BibTexFields.JOURNAL: jp_jp + 'sourceTitle', + BibTexFields.BOOK_TITLE: jp_jp + 'sourceTitle', + BibTexFields.VOLUME: jp_jp + 'volume', + BibTexFields.NUMBER: jp_jp + 'issue', + BibTexFields.PAGE_START: jp_jp + 'pageStart', + BibTexFields.PAGE_END: jp_jp + 'pageEnd', + BibTexFields.PUBLISHER: jp_dc + 'publisher', + BibTexFields.HOW_PUBLISHER: jp_dc + 'mimeType', + BibTexFields.YEAR: jp_datacite + 'date', + BibTexFields.MONTH: 
jp_datacite + 'date', + BibTexFields.INSTITUTION: jp_jp + 'contributor' + '//' + jp_jp + 'contributorName', + BibTexFields.TYPE: 'none', + BibTexFields.EDITOR: 'none', + BibTexFields.EDITION: 'none', + BibTexFields.CHAPTER: 'none', + BibTexFields.SERIES: 'none', + BibTexFields.ADDRESS: 'none', + BibTexFields.NOTE: jp_datacite + 'description', + BibTexFields.SCHOOL: jp_jp + 'degreeGrantorName', + BibTexFields.ORGANIZATION: 'none', + BibTexFields.KEY: 'none', + BibTexFields.CROSSREF: 'none', + BibTexFields.ANNOTE: 'none', + BibTexFields.DOI: jp_jp + 'identifier', + BibTexFields.URL: jp_jp + 'identifier', + } - # JPCOAR types - self.article_types = ['conference paper', 'data paper', 'editorial', - 'journal article', 'periodical', - 'review article', 'article', - 'departmental bulletin paper'] - - self.book_types = ['book', 'book part'] - self.inproceedings_types = ['conference proceedings'] - self.techreport_types = [ - 'technical report', - 'report', - 'research report'] - self.unpublished_types = ['conference object', 'conference poster'] - - self.misc_types = ['thesis', 'bachelor thesis', 'master thesis', - 'doctoral thesis', 'learning material', - 'dataset', 'software', 'other', - 'cartographic material', 'map', 'image', - 'still image', 'moving image', 'video', - 'lecture', 'patent', 'internal report', - 'policy report', 'report part', 'working paper', - 'sound', 'interactive resource', - 'musical notation', 'research proposal', - 'technical documentation', 'workflow'] + @staticmethod + def get_bibtex_type_fields(self, bibtex_type): + """Get all fields of BibTex type. - # JPCOAR elements - creator_name = '{' + self.ns['jpcoar'] + '}' + 'creatorName' - title = '{' + self.ns['dc'] + '}' + 'title' - source_title = '{' + self.ns['jpcoar'] + '}' + 'sourceTitle' - volume = '{' + self.ns['jpcoar'] + '}' + 'volume' - issue = '{' + self.ns['jpcoar'] + '}' + 'issue' - page_start = '{' + self.ns['jpcoar'] + '}' + 'pageStart' - page_end = '{' + self.ns['jpcoar'] + '}' + 'pageEnd' - date = '{' + self.ns['datacite'] + '}' + 'date' - publisher = '{' + self.ns['dc'] + '}' + 'publisher' - type = '{' + self.ns['datacite'] + '}' + 'description' - mime_type = '{' + self.ns['jpcoar'] + '}' + 'mimeType' - contributor_name = '{' + self.ns['jpcoar'] + '}' + 'contributor' + \ - '//' + '{' + self.ns['jpcoar'] + \ - '}' + 'affiliationName' - - # [BibTex]Article columns - self.article_cols_required = {'author': creator_name, - 'title': title, - 'journal': source_title, - 'date': date} - - self.article_cols_all = {'author': creator_name, - 'title': title, - 'journal': source_title, - 'volume': volume, - 'number': issue, - 'page_start': page_start, - 'page_end': page_end, - 'date': date} - - # [BibTex]Book columns - self.book_cols_required = {'author': creator_name, - 'title': title, - 'publisher': publisher, - 'date': date} - - self.book_cols_all = {'author': creator_name, - 'title': title, - 'volume': volume, - 'number': issue, - 'publisher': publisher, - 'date': date} - - # [BibTex]Booklet columns - self.booklet_cols_required = {'title': title} - - self.booklet_cols_all = {'author': creator_name, - 'title': title, - 'howpublished': mime_type, - 'date': date} - - # [BibTex]Inbook columns - self.inbook_cols_required = {'author': creator_name, - 'title': title, - 'page_start': page_start, - 'page_end': page_end, - 'publisher': publisher, - 'date': date} - - self.inbook_cols_all = {'author': creator_name, - 'title': title, - 'volume': volume, - 'number': issue, - 'page_start': page_start, - 'page_end': page_end, - 
'publisher': publisher, - 'date': date, - 'type': type} - - # [BibTex]Incollection columns - self.incollection_cols_required = {'author': creator_name, - 'title': title, - 'booktitle': source_title, - 'publisher': publisher, - 'date': date} - - self.incollection_cols_all = {'author': creator_name, - 'title': title, - 'booktitle': source_title, - 'volume': volume, - 'number': issue, - 'page_start': page_start, - 'page_end': page_end, - 'publisher': publisher, - 'date': date, - 'type': type} - - # [BibTex]Inproceedings columns - self.inproceedings_cols_required = {'author': creator_name, - 'title': title, - 'booktitle': source_title, - 'date': date} - - self.inproceedings_cols_all = {'author': creator_name, - 'title': title, - 'booktitle': source_title, - 'volume': volume, - 'number': issue, - 'page_start': page_start, - 'page_end': page_end, - 'publisher': publisher, - 'date': date} - - # [BibTex]Techreport columns - self.techreport_cols_required = {'author': creator_name, - 'title': title, - 'date': date, - 'institution': contributor_name} - - self.techreport_cols_all = {'author': creator_name, - 'title': title, - 'number': issue, - 'date': date, - 'institution': contributor_name, - 'type': type} - - # [BibTex]Unpublished columns - self.unpublished_cols_required = {'author': creator_name, - 'title': title} - - self.unpublished_cols_all = {'author': creator_name, - 'title': title, - 'date': date} - - # [BibTex]Misc columns - self.misc_cols_all = {'author': creator_name, - 'title': title, - 'howpublished': mime_type, - 'date': date} - - def serialize(self, pid, record): + @param self: + @param bibtex_type: + @return: + """ + result = { + BibTexTypes.ARTICLE: self.get_article_fields(), + BibTexTypes.BOOK: self.get_book_fields(), + BibTexTypes.BOOKLET: self.get_booklet_fields(), + BibTexTypes.CONFERENCE: self.get_conference_fields(), + BibTexTypes.INBOOK: self.get_inbook_fields(), + BibTexTypes.INCOLLECTION: self.get_incollection_fields(), + BibTexTypes.INPROCEEDINGS: self.get_inproceedings_fields(), + BibTexTypes.MANUAL: self.get_manual_fields(), + BibTexTypes.MASTERSTHESIS: self.get_mastersthesis_fields(), + BibTexTypes.MISC: self.get_misc_fields(), + BibTexTypes.PHDTHESIS: self.get_phdthesis_fields(), + BibTexTypes.PROCEEDINGS: self.get_proceedings_fields(), + BibTexTypes.TECHREPORT: self.get_techreport_fields(), + BibTexTypes.UNPUBLISHED: self.get_unpublished_fields(), + } + return result.get(bibtex_type) + + @staticmethod + def get_article_fields(): + """Get article's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.JOURNAL, BibTexFields.YEAR] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.PAGE_START, + BibTexFields.PAGE_END, BibTexFields.MONTH, + BibTexFields.NOTE, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_book_fields(): + """Get book's fields. 
+ + @return: + """ + lst_required = [BibTexFields.TITLE, BibTexFields.PUBLISHER, + BibTexFields.YEAR] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.MONTH, + BibTexFields.EDITION, BibTexFields.SERIES, + BibTexFields.ADDRESS, + BibTexFields.NOTE, BibTexFields.KEY] + lst_required_partial = [[BibTexFields.EDITOR, + BibTexFields.AUTHOR]] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_booklet_fields(): + """Get booklet's fields. + + @return: + """ + lst_required = [BibTexFields.TITLE] + lst_optional = [BibTexFields.AUTHOR, BibTexFields.HOW_PUBLISHER, + BibTexFields.YEAR, BibTexFields.MONTH, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_conference_fields(): + """Get conference's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.BOOK_TITLE, BibTexFields.YEAR] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.PAGE_START, + BibTexFields.PAGE_END, BibTexFields.PUBLISHER, + BibTexFields.MONTH, + BibTexFields.EDITOR, BibTexFields.SERIES, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.ORGANIZATION, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_inbook_fields(): + """Get inbook's fields. + + @return: + """ + lst_required = [BibTexFields.TITLE, BibTexFields.YEAR, + BibTexFields.PUBLISHER] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.MONTH, + BibTexFields.TYPE, BibTexFields.EDITION, + BibTexFields.SERIES, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.KEY] + lst_required_partial = [[BibTexFields.AUTHOR, + BibTexFields.EDITOR], + [BibTexFields.PAGES, + BibTexFields.CHAPTER]] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_incollection_fields(): + """Get incollection's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.BOOK_TITLE, BibTexFields.YEAR, + BibTexFields.PUBLISHER] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.PAGE_START, + BibTexFields.PAGE_END, BibTexFields.MONTH, + BibTexFields.TYPE, BibTexFields.EDITOR, + BibTexFields.EDITION, BibTexFields.CHAPTER, + BibTexFields.SERIES, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.ORGANIZATION, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_inproceedings_fields(): + """Get inproceedings's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.BOOK_TITLE, BibTexFields.YEAR] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.PAGE_START, + BibTexFields.PAGE_END, BibTexFields.PUBLISHER, + BibTexFields.MONTH, BibTexFields.EDITOR, + BibTexFields.SERIES, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.ORGANIZATION, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_manual_fields(): + """Get manual's fields. 
+ + @return: + """ + lst_required = [BibTexFields.TITLE] + lst_optional = [BibTexFields.AUTHOR, BibTexFields.YEAR, + BibTexFields.MONTH, BibTexFields.EDITION, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.ORGANIZATION, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_mastersthesis_fields(): + """Get mastersthesis's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.YEAR, BibTexFields.SCHOOL] + lst_optional = [BibTexFields.MONTH, BibTexFields.TYPE, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_phdthesis_fields(): + """Get phdthesis's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.YEAR, BibTexFields.SCHOOL] + lst_optional = [BibTexFields.MONTH, BibTexFields.TYPE, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_proceedings_fields(): + """Get proceedings's fields. + + @return: + """ + lst_required = [BibTexFields.TITLE, BibTexFields.YEAR] + lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER, + BibTexFields.PUBLISHER, BibTexFields.MONTH, + BibTexFields.EDITOR, BibTexFields.SERIES, + BibTexFields.ADDRESS, BibTexFields.NOTE, + BibTexFields.ORGANIZATION, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_techreport_fields(): + """Get techreport's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.YEAR, BibTexFields.INSTITUTION] + lst_optional = [BibTexFields.NUMBER, BibTexFields.MONTH, + BibTexFields.TYPE, BibTexFields.ADDRESS, + BibTexFields.NOTE, BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_unpublished_fields(): + """Get unpublished's fields. + + @return: + """ + lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.NOTE] + lst_optional = [BibTexFields.YEAR, BibTexFields.MONTH, + BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + @staticmethod + def get_misc_fields(): + """Get mis's fields. + + @return: + """ + lst_required = [] + lst_optional = [BibTexFields.AUTHOR, BibTexFields.TITLE, + BibTexFields.HOW_PUBLISHER, BibTexFields.YEAR, + BibTexFields.MONTH, BibTexFields.NOTE, + BibTexFields.KEY] + lst_required_partial = [] + return {'required': lst_required, 'optional': lst_optional, + 'required_partial': lst_required_partial} + + def serialize(self, pid, record, validate_mode=False): """Serialize to bibtex from jpcoar record. :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier` instance. :param record: The :class:`invenio_records.api.Record` instance. + :param validate_mode: validate or not :returns: The object serialized. """ - # Get JPCOAR data(XML) and ElementTree root + err_msg = 'Please input all required item.' 
+ # Get JPCOAR datas(XML) and ElementTree root jpcoar_data = self.get_jpcoar_data(pid, record) root = ET.fromstring(jpcoar_data) - if self.is_empty(root): - return 'This item has no mapping info.' + return err_msg db = BibDatabase() - # Article - if self.is_bibtex_type(root, - self.article_types, - self.article_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.article_cols_all, - 'article')) - # Incollection - elif self.is_bibtex_type(root, - self.book_types, - self.incollection_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.incollection_cols_all, - 'incollection')) - # Inbook - elif self.is_bibtex_type(root, - self.book_types, - self.inbook_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.inbook_cols_all, - 'inbook')) - # Book - elif self.is_bibtex_type(root, - self.book_types, - self.book_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.book_cols_all, - 'book')) - # Booklet - elif self.is_bibtex_type(root, - self.book_types, - self.booklet_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.booklet_cols_all, - 'booklet')) - # Inproceedings - elif self.is_bibtex_type(root, - self.inproceedings_types, - self.inproceedings_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.inproceedings_cols_all, - 'inproceedings')) - # Techreport - elif self.is_bibtex_type(root, - self.techreport_types, - self.techreport_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.techreport_cols_all, - 'techreport')) - # Unpublished - elif self.is_bibtex_type(root, - self.unpublished_types, - self.unpublished_cols_required): - - db.entries.append(self.get_bibtex_data(root, - self.unpublished_cols_all, - 'unpublished')) - # Misc - elif self.is_misc_type(root): - db.entries.append(self.get_bibtex_data(root, - self.misc_cols_all, - 'misc')) - # Unknown type - else: - return 'This item has no mapping info.' - + bibtex_type = self.get_bibtex_type(root) + + if not bibtex_type: + current_app.logger.error( + "Can not find Bibtex type for record {}".format( + record.get('recid'))) + return err_msg + valid, lst_invalid_fields = self.validate_fields(root, bibtex_type) + + if validate_mode: + return valid + elif not validate_mode and not valid: + if len(lst_invalid_fields) > 0: + current_app.logger.error( + 'Missing required fields [{}] for record {}'.format( + ','.join(lst_invalid_fields), record.get('recid'))) + return err_msg + + db.entries.append(self.get_bibtex_data(root, bibtex_type)) writer = BibTexWriter() - - return writer.write(db) + result = writer.write(db) + return result @staticmethod def get_jpcoar_data(pid, record): @@ -318,115 +495,208 @@ def is_empty(self, root): return False - def is_bibtex_type(self, root, bibtex_types, bibtex_cols_required): + def get_bibtex_type(self, root): """ Determine jpcoar record types(except misc). :return: """ + type_result = None type_value = '' for element in root.findall('.//dc:type', self.ns): type_value = element.text - - if type_value.lower() not in bibtex_types: - return False - - if not self.contains_all(root, bibtex_cols_required.values()): - return False - - return True - - def is_misc_type(self, root): - """ - Determine jpcoar record type(misc). 
- - :param root: - :return: - - """ - type_value = '' - for element in root.findall('.//dc:type', self.ns): - type_value = element.text - - if type_value.lower() in self.misc_types or \ - type_value.lower() in self.article_types or \ - type_value.lower() in self.book_types or \ - type_value.lower() in self.inproceedings_types or \ - type_value.lower() in self.techreport_types or \ - type_value.lower() in self.unpublished_types: - - return True - - return False - - def contains_all(self, root, field_list): + # Determine which type of Bibtex type is it + for bib_type, item_types in self.type_mapping.items(): + if type_value.lower() in item_types: + type_result = bib_type + break + return type_result + + def validate_fields(self, root, bibtex_type): + """Validate required fields of bibtex type. + + @param root: + @param bibtex_type: + @return: """ - Determine whether all required items exist. - - :param root: - :param field_list: - :return: + def validate_by_att(attribute_name, expected_values): + valid_date = False + for element in elements: + if element.get(attribute_name) and element.get( + attribute_name).lower() in expected_values: + valid_date = True + return valid_date + + def validate_partial_req(): + result = True + for par_req in fields.get('required_partial'): + partial_valid = False + for field in par_req: + # check for pages because pages is represented for start + # and end page + if field == BibTexFields.PAGES: + start_page = root.findall(self.find_pattern.format( + self.fields_mapping[BibTexFields.PAGE_START]), + self.ns) + end_page = root.findall( + self.find_pattern.format(self.fields_mapping[BibTexFields.PAGE_END]), + self.ns) + if len(start_page) > 0 and len(end_page) > 0: + partial_valid = True + continue + else: + field_data = root.findall( + self.find_pattern.format(self.fields_mapping[field]), + self.ns) + if len(field_data) > 0: + partial_valid = True + continue + if not partial_valid: + result = False + lst_invalid_fields.append(par_req[0].value) + lst_invalid_fields.append(par_req[1].value) + return result + + lst_invalid_fields = [] + required_valid = True + fields = self.get_bibtex_type_fields(bibtex_type) + for item_required in fields.get('required'): + elements = root.findall( + self.find_pattern.format(self.fields_mapping[item_required]), + self.ns) + if len(elements) == 0: + required_valid = False + lst_invalid_fields.append(item_required.value) + elif item_required == BibTexFields.YEAR or \ + item_required == BibTexFields.MONTH: + date_valid = validate_by_att('dateType', ['issued']) + if not date_valid: + lst_invalid_fields.append(item_required.value) + required_valid = False + elif item_required == BibTexFields.DOI: + doi_valid = validate_by_att('identifierType', ['doi']) + if not doi_valid: + lst_invalid_fields.append(item_required.value) + required_valid = False + elif item_required == BibTexFields.URL: + url_valid = validate_by_att('identifierType', + ['doi', 'hdl', 'uri']) + if not url_valid: + lst_invalid_fields.append(item_required.value) + required_valid = False + partial_req_valid = validate_partial_req() + return required_valid and partial_req_valid, lst_invalid_fields + + def combine_all_fields(self, bibtex_type): + """Combine all fields of item type. 
+ + @param bibtex_type: + @return: """ - for field in field_list: - if len(root.findall('.//' + field, self.ns)) == 0: - return False - - return True - - def get_bibtex_data(self, root, bibtex_cols_all={}, entry_type='article'): + all_field_type = self.get_bibtex_type_fields(bibtex_type) + all_fields = all_field_type.get( + 'required') + all_field_type.get('optional') + partial_req = all_field_type.get('required_partial') + for item in partial_req: + if BibTexFields.PAGES in item: + item.remove(BibTexFields.PAGES) + item.extend(BibTexFields.PAGE_START, + BibTexFields.PAGE_END) + all_fields.extend(item) + return all_fields + + def get_bibtex_data(self, root, bibtex_type): + """Get Bibtex data base on Bibtex type. + + @param root: + @param bibtex_type: + @return: """ - Get bibtex data from jpcoar record. - :param root: - :param bibtex_cols_all: - :param entry_type: - :return: + def process_by_att(att, expected_val, existed_lst): + date_type = element.get(att) + if date_type and date_type.lower() == expected_val and \ + element.text not in existed_lst: + dates.append(element.text) + + def process_author(): + author_lang = element.get(xml_ns + 'lang') + if not author_lang or author_lang.lower() != 'ja-kana': + creator[BibTexFields.AUTHOR.value].append( + element.text) + else: + creator[BibTexFields.YOMI.value].append( + element.text) + + def process_url(): + identifier_type = element.get(xml_ns + 'identifierType') + if identifier_type and identifier_type.lower in self.lst_identifier_type: + lst_identifier_type_data[ + identifier_type.lower].append(element.text) - """ - # Initialization data = {} page_start = '' page_end = '' xml_ns = '{' + self.ns['xml'] + '}' - - # Create book record - for field in bibtex_cols_all.keys(): - elements = root.findall('.//' + bibtex_cols_all[field], self.ns) + creator = {BibTexFields.AUTHOR.value: [], + BibTexFields.YOMI.value: []} + lst_identifier_type_data = {} + dois = [] + all_fields = self.combine_all_fields(bibtex_type) + + for i in self.lst_identifier_type: + lst_identifier_type_data[i] = [] + + for field in all_fields: + elements = root.findall( + self.find_pattern.format(self.fields_mapping[field]), self.ns) if len(elements) != 0: - value = '' dates = [] for element in elements: - if field == 'date' and (element.get('dateType') is not None - and element.get('dateType').lower() == 'issued'): - dates.append(element.text) - continue - elif field == 'type' and (element.get('descriptionType') is None - or element.get('descriptionType').lower() != 'other'): - continue - elif field == 'author' and (element.get(xml_ns + 'lang') is None - or element.get(xml_ns + 'lang').lower() != 'en'): - continue - + if field == BibTexFields.YEAR or \ + field == BibTexFields.MONTH: + process_by_att('DateType', 'issued', dates) + elif field == BibTexFields.AUTHOR: + process_author() + elif field == BibTexFields.DOI: + process_by_att(xml_ns + 'identifierType', 'doi', dois) + elif field == BibTexFields.URL: + process_url() if value != '': - value += ' and ' if field == 'author' else ', ' + value += ' and ' if field == BibTexFields.AUTHOR else ', ' value += element.text - if field == 'page_start': + if field == BibTexFields.PAGE_START: page_start = value - elif field == 'page_end': + elif field == BibTexFields.PAGE_END: page_end = value - elif field == 'date' and len(dates) != 0: - data['year'], data['month'] = self.get_dates(dates) + elif field == BibTexFields.YEAR or \ + field == BibTexFields.MONTH and len(dates) != 0: + data[BibTexFields.YEAR.value], data[ + 
BibTexFields.MONTH.value] = self.get_dates(dates) + elif field == BibTexFields.AUTHOR: + if creator[BibTexFields.AUTHOR.value]: + data[field.value] = ' and '.join( + creator[BibTexFields.AUTHOR.value]) + if creator[BibTexFields.YOMI.value]: + data[BibTexFields.YOMI.value] = ' and '.join( + creator[BibTexFields.YOMI.value]) + elif field == BibTexFields.DOI and len(dois) > 0: + data[field.value] = ','.join(dois) + elif field == BibTexFields.URL and len(): + data[field.value] = self.get_identifier( + self.lst_identifier_type, + lst_identifier_type_data) elif value != '': - data[field] = value + data[field.value] = value if page_start != '' and page_end != '': data['pages'] = str(page_start) + '--' + str(page_end) - data['ENTRYTYPE'] = entry_type + data['ENTRYTYPE'] = bibtex_type.value data['ID'] = self.get_item_id(root) return data @@ -470,3 +740,16 @@ def get_dates(dates): month += date.strftime('%b') return year, month + + @staticmethod + def get_identifier(identifier_type, identifier_types_data): + """Get identifier data. + + @param identifier_type: + @param identifier_types_data: + @return: + """ + for type in identifier_type: + if identifier_types_data.get(type) and len( + identifier_types_data.get(type)) > 0: + return identifier_types_data.get(type)[0] diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js index cb8350c88d..6baadb8741 100644 --- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js +++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js @@ -334,12 +334,47 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { } }); }) + exportBibtex=document.getElementById("export_format_radio_bibtex").checked + if (exportBibtex) { + let invalidBibtexRecordIds = $scope.validateBibtexExport(Object.keys(export_metadata)); + if (invalidBibtexRecordIds.length > 0) { + $scope.showErrMsgBibtex(invalidBibtexRecordIds); + } + } $('#record_metadata').val(JSON.stringify(export_metadata)); $('#export_items_form').submit(); // Submit form and let controller handle file making } $('#item_export_button').attr("disabled", false); } + $scope.validateBibtexExport = function (record_ids) { + request_url = '/items/validate_bibtext_export'; + var data = { record_ids: record_ids } + invalidRecordIds = [] + $.ajax({ + method: 'POST', + url: request_url, + data: JSON.stringify(data), + async: false, + contentType: 'application/json', + success: function (data) { + if (data.invalid_record_ids.length) { + invalidRecordIds = data.invalid_record_ids; + } + }, + error: function (status, error) { + console.log(error); + } + }); + return invalidRecordIds; + } + + $scope.showErrMsgBibtex = function(invalidRecordIds) { + invalidRecordIds.forEach(function(recordId){ + $('#bibtex_err_' + recordId).removeClass('hide'); + }); + } + $scope.getExportItemsMetadata = function () { let cur_url = new URL(window.location.href); let q = cur_url.searchParams.get("q"); From e8f03b8594172014b8a49565e5965f8466d81c8e Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Mon, 18 May 2020 17:09:57 +0700 Subject: [PATCH 02/11] Commit missing code --- .../templates/weko_items_ui/export_list.html | 2 +- .../translations/en/LC_MESSAGES/messages.po | 3 -- .../translations/ja/LC_MESSAGES/messages.po | 3 -- .../weko_items_ui/translations/messages.pot | 3 -- .../serializers/WekoBibTexSerializer.py | 11 ++++---- .../static/js/weko_search_ui/app.js | 28 +++++++++++++++++-- 6 files changed, 31 
insertions(+), 19 deletions(-)

diff --git a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
index c3baacbb37..3e6385cc85 100644
--- a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
+++ b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
@@ -32,7 +32,7 @@

Contains restricted content

-

{{_("Required item is not inputted")}}

+

{{ (record.metadata.hasOwnProperty('file') && record.metadata.file.hasOwnProperty('URI')) ? record.metadata.file.URI.length : '0' }} diff --git a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po index 022643c1fc..8ecdd27f7a 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po @@ -307,6 +307,3 @@ msgstr "" msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "" - -msgid "Required item is not inputted" -msgstr "" diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po index 52147318be..7a87890d3e 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po @@ -423,6 +423,3 @@ msgstr "以下のメールアドレスがシステムに登録されていませ msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "規定の数を超えています。選択できるのは2つまでです。" - -msgid "Required item is not inputted" -msgstr "必須項目がありません。" diff --git a/modules/weko-items-ui/weko_items_ui/translations/messages.pot b/modules/weko-items-ui/weko_items_ui/translations/messages.pot index aabd7a015e..f134f2caf2 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/messages.pot +++ b/modules/weko-items-ui/weko_items_ui/translations/messages.pot @@ -358,6 +358,3 @@ msgstr "" msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "" - -msgid "Required item is not inputted" -msgstr "" diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index 68eb7fd08a..f6e20a88aa 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -108,7 +108,7 @@ class WekoBibTexSerializer(): 'musical notation', 'interactive resource', 'learning material', 'patent', 'dataset', 'software', 'workflow', - 'other'], + 'other(その他)','other(プレプリント)'], BibTexTypes.PHDTHESIS: ['doctoral thesis'], BibTexTypes.PROCEEDINGS: ['conference proceedings'], BibTexTypes.TECHREPORT: ['report', @@ -148,7 +148,7 @@ def __init__(self): BibTexFields.HOW_PUBLISHER: jp_dc + 'mimeType', BibTexFields.YEAR: jp_datacite + 'date', BibTexFields.MONTH: jp_datacite + 'date', - BibTexFields.INSTITUTION: jp_jp + 'contributor' + '//' + jp_jp + 'contributorName', + BibTexFields.INSTITUTION: 'none', BibTexFields.TYPE: 'none', BibTexFields.EDITOR: 'none', BibTexFields.EDITION: 'none', @@ -165,7 +165,6 @@ def __init__(self): BibTexFields.URL: jp_jp + 'identifier', } - @staticmethod def get_bibtex_type_fields(self, bibtex_type): """Get all fields of BibTex type. 
@@ -602,8 +601,8 @@ def combine_all_fields(self, bibtex_type): for item in partial_req: if BibTexFields.PAGES in item: item.remove(BibTexFields.PAGES) - item.extend(BibTexFields.PAGE_START, - BibTexFields.PAGE_END) + item.extend([BibTexFields.PAGE_START, + BibTexFields.PAGE_END]) all_fields.extend(item) return all_fields @@ -658,7 +657,7 @@ def process_url(): for element in elements: if field == BibTexFields.YEAR or \ field == BibTexFields.MONTH: - process_by_att('DateType', 'issued', dates) + process_by_att('dateType', 'issued', dates) elif field == BibTexFields.AUTHOR: process_author() elif field == BibTexFields.DOI: diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js index 6baadb8741..a36ca3bc43 100644 --- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js +++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js @@ -1,3 +1,9 @@ +const MESSAGE = { + bibtex_err: { + en: "Required item is not inputted.", + ja: "必須項目がありません。", + } +} require([ "jquery", "bootstrap", @@ -116,6 +122,21 @@ function searchResCtrl($scope, $rootScope, $http, $location) { $rootScope.commInfoIndex = ""; } + $rootScope.getMessage = function(messageCode) { + const defaultLanguage = "en"; + let currentLanguage = document.getElementById("current_language").value; + let message = MESSAGE[messageCode]; + if (message) { + if (message[currentLanguage]) { + return message[currentLanguage]; + } else { + return message[defaultLanguage]; + } + } else { + return ""; + } +} + $rootScope.disable_flg = true; $rootScope.display_flg = true; $rootScope.index_id_q = $location.search().q != undefined ? $location.search().q : ''; @@ -369,9 +390,10 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { return invalidRecordIds; } - $scope.showErrMsgBibtex = function(invalidRecordIds) { - invalidRecordIds.forEach(function(recordId){ - $('#bibtex_err_' + recordId).removeClass('hide'); + $scope.showErrMsgBibtex = function (invalidRecordIds) { + errMsg = $scope.getMessage('bibtex_err'); + invalidRecordIds.forEach(function (recordId) { + document.getElementById('bibtex_err_' + recordId).textContent=errMsg; }); } From 9c8fa1c426b2c481fbcdfd74d80e3a7eee40c545 Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Tue, 19 May 2020 13:28:03 +0700 Subject: [PATCH 03/11] Commit missing code --- .../weko_schema_ui/serializers/WekoBibTexSerializer.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index f6e20a88aa..ec0ddba065 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -108,7 +108,8 @@ class WekoBibTexSerializer(): 'musical notation', 'interactive resource', 'learning material', 'patent', 'dataset', 'software', 'workflow', - 'other(その他)','other(プレプリント)'], + 'other(その他)', + 'other(プレプリント)'], BibTexTypes.PHDTHESIS: ['doctoral thesis'], BibTexTypes.PROCEEDINGS: ['conference proceedings'], BibTexTypes.TECHREPORT: ['report', @@ -638,6 +639,7 @@ def process_url(): data = {} page_start = '' page_end = '' + title = '' xml_ns = '{' + self.ns['xml'] + '}' creator = {BibTexFields.AUTHOR.value: [], BibTexFields.YOMI.value: []} @@ -664,6 +666,9 @@ def process_url(): process_by_att(xml_ns + 'identifierType', 'doi', dois) 
elif field == BibTexFields.URL: process_url() + elif field == BibTexFields.TITLE and title == '': + # Get only one title at all + title = element.text if value != '': value += ' and ' if field == BibTexFields.AUTHOR else ', ' value += element.text @@ -689,6 +694,8 @@ def process_url(): data[field.value] = self.get_identifier( self.lst_identifier_type, lst_identifier_type_data) + elif field == BibTexFields.TITLE and title != '': + data[field.value] = title elif value != '': data[field.value] = value From 9c0aa5e5fada63f24de9cfb0065fa78fc5cbb73e Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Tue, 19 May 2020 15:29:02 +0700 Subject: [PATCH 04/11] Refactor code --- modules/weko-items-ui/weko_items_ui/utils.py | 26 +--- .../serializers/WekoBibTexSerializer.py | 142 +++++++++--------- .../static/js/weko_search_ui/app.js | 36 ++--- 3 files changed, 95 insertions(+), 109 deletions(-) diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index d34ccb20c5..6d3ed69c19 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -853,7 +853,7 @@ def get_list_file_by_record_id(recid): def write_bibtex_files(item_types_data, export_path): - """Write Bitex data to files. + """Write Bibtex data to files. @param item_types_data: @param export_path: @@ -892,8 +892,8 @@ def write_tsv_files(item_types_data, export_path, list_item_role): with open('{}/{}.tsv'.format(export_path, item_type_data.get('name')), 'w') as file: - tsvs_output = package_export_file(item_type_data) - file.write(tsvs_output.getvalue()) + tsv_output = package_export_file(item_type_data) + file.write(tsv_output.getvalue()) def export_items(post_data): @@ -1025,22 +1025,7 @@ def export_item_custorm(post_data): item_types_data[item_type_id]['recids'].append(record_id) # Create export info file - for item_type_id in item_types_data: - keys, labels, records = make_stats_tsv( - item_type_id, - item_types_data[item_type_id]['recids'], - list_item_role) - item_types_data[item_type_id]['recids'].sort() - item_types_data[item_type_id]['keys'] = keys - item_types_data[item_type_id]['labels'] = labels - item_types_data[item_type_id]['data'] = records - item_type_data = item_types_data[item_type_id] - - with open('{}/{}.tsv'.format(export_path, - item_type_data.get('name')), - 'w') as file: - tsvs_output = package_export_file(item_type_data) - file.write(tsvs_output.getvalue()) + write_tsv_files(item_types_data, export_path, list_item_role) # Create bag bagit.make_bag(export_path) @@ -1680,7 +1665,8 @@ def validate_bibtex(record_ids): def make_bibtex_data(record_ids): - """Serialize all Bibtex data by record ids + """Serialize all Bibtex data by record ids. 
+ @param record_ids: @return: """ diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index ec0ddba065..cdb1c101e3 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -128,15 +128,15 @@ class WekoBibTexSerializer(): def __init__(self): """Init.""" # Load namespace - self.ns = cache_schema('jpcoar_mapping').get('namespaces') - self.lst_identifier_type = ['doi', 'hdl', 'url'] + self.__ns = cache_schema('jpcoar_mapping').get('namespaces') + self.__lst_identifier_type = ['doi', 'hdl', 'url'] # JPCOAR elements - jp_jp = '{' + self.ns['jpcoar'] + '}' - jp_dc = '{' + self.ns['dc'] + '}' - jp_datacite = '{' + self.ns['datacite'] + '}' - self.find_pattern = './/{}' + jp_jp = '{' + self.__ns['jpcoar'] + '}' + jp_dc = '{' + self.__ns['dc'] + '}' + jp_datacite = '{' + self.__ns['datacite'] + '}' + self.__find_pattern = './/{}' - self.fields_mapping = { + self.__fields_mapping = { BibTexFields.AUTHOR: jp_jp + 'creatorName', BibTexFields.TITLE: jp_dc + 'title', BibTexFields.JOURNAL: jp_jp + 'sourceTitle', @@ -166,7 +166,7 @@ def __init__(self): BibTexFields.URL: jp_jp + 'identifier', } - def get_bibtex_type_fields(self, bibtex_type): + def ____get_bibtex_type_fields(self, bibtex_type): """Get all fields of BibTex type. @param self: @@ -174,25 +174,25 @@ def get_bibtex_type_fields(self, bibtex_type): @return: """ result = { - BibTexTypes.ARTICLE: self.get_article_fields(), - BibTexTypes.BOOK: self.get_book_fields(), - BibTexTypes.BOOKLET: self.get_booklet_fields(), - BibTexTypes.CONFERENCE: self.get_conference_fields(), - BibTexTypes.INBOOK: self.get_inbook_fields(), - BibTexTypes.INCOLLECTION: self.get_incollection_fields(), - BibTexTypes.INPROCEEDINGS: self.get_inproceedings_fields(), - BibTexTypes.MANUAL: self.get_manual_fields(), - BibTexTypes.MASTERSTHESIS: self.get_mastersthesis_fields(), - BibTexTypes.MISC: self.get_misc_fields(), - BibTexTypes.PHDTHESIS: self.get_phdthesis_fields(), - BibTexTypes.PROCEEDINGS: self.get_proceedings_fields(), - BibTexTypes.TECHREPORT: self.get_techreport_fields(), - BibTexTypes.UNPUBLISHED: self.get_unpublished_fields(), + BibTexTypes.ARTICLE: self.__get_article_fields(), + BibTexTypes.BOOK: self.__get_book_fields(), + BibTexTypes.BOOKLET: self.__get_booklet_fields(), + BibTexTypes.CONFERENCE: self.__get_conference_fields(), + BibTexTypes.INBOOK: self.__get_inbook_fields(), + BibTexTypes.INCOLLECTION: self.__get_incollection_fields(), + BibTexTypes.INPROCEEDINGS: self.__get_inproceedings_fields(), + BibTexTypes.MANUAL: self.__get_manual_fields(), + BibTexTypes.MASTERSTHESIS: self.__get_mastersthesis_fields(), + BibTexTypes.MISC: self.__get_misc_fields(), + BibTexTypes.PHDTHESIS: self.__get_phdthesis_fields(), + BibTexTypes.PROCEEDINGS: self.__get_proceedings_fields(), + BibTexTypes.TECHREPORT: self.__get_techreport_fields(), + BibTexTypes.UNPUBLISHED: self.__get_unpublished_fields(), } return result.get(bibtex_type) @staticmethod - def get_article_fields(): + def __get_article_fields(): """Get article's fields. @return: @@ -208,7 +208,7 @@ def get_article_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_book_fields(): + def __get_book_fields(): """Get book's fields. 
@return: @@ -226,7 +226,7 @@ def get_book_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_booklet_fields(): + def __get_booklet_fields(): """Get booklet's fields. @return: @@ -241,7 +241,7 @@ def get_booklet_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_conference_fields(): + def __get_conference_fields(): """Get conference's fields. @return: @@ -260,7 +260,7 @@ def get_conference_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_inbook_fields(): + def __get_inbook_fields(): """Get inbook's fields. @return: @@ -281,7 +281,7 @@ def get_inbook_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_incollection_fields(): + def __get_incollection_fields(): """Get incollection's fields. @return: @@ -302,7 +302,7 @@ def get_incollection_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_inproceedings_fields(): + def __get_inproceedings_fields(): """Get inproceedings's fields. @return: @@ -321,7 +321,7 @@ def get_inproceedings_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_manual_fields(): + def __get_manual_fields(): """Get manual's fields. @return: @@ -336,7 +336,7 @@ def get_manual_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_mastersthesis_fields(): + def __get_mastersthesis_fields(): """Get mastersthesis's fields. @return: @@ -351,7 +351,7 @@ def get_mastersthesis_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_phdthesis_fields(): + def __get_phdthesis_fields(): """Get phdthesis's fields. @return: @@ -366,7 +366,7 @@ def get_phdthesis_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_proceedings_fields(): + def __get_proceedings_fields(): """Get proceedings's fields. @return: @@ -382,7 +382,7 @@ def get_proceedings_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_techreport_fields(): + def __get_techreport_fields(): """Get techreport's fields. @return: @@ -397,7 +397,7 @@ def get_techreport_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_unpublished_fields(): + def __get_unpublished_fields(): """Get unpublished's fields. @return: @@ -411,7 +411,7 @@ def get_unpublished_fields(): 'required_partial': lst_required_partial} @staticmethod - def get_misc_fields(): + def __get_misc_fields(): """Get mis's fields. @return: @@ -436,20 +436,20 @@ def serialize(self, pid, record, validate_mode=False): """ err_msg = 'Please input all required item.' 
# Get JPCOAR datas(XML) and ElementTree root - jpcoar_data = self.get_jpcoar_data(pid, record) + jpcoar_data = self.__get_jpcoar_data(pid, record) root = ET.fromstring(jpcoar_data) if self.is_empty(root): return err_msg db = BibDatabase() - bibtex_type = self.get_bibtex_type(root) + bibtex_type = self.__get_bibtex_type(root) if not bibtex_type: current_app.logger.error( "Can not find Bibtex type for record {}".format( record.get('recid'))) return err_msg - valid, lst_invalid_fields = self.validate_fields(root, bibtex_type) + valid, lst_invalid_fields = self.__validate_fields(root, bibtex_type) if validate_mode: return valid @@ -460,13 +460,13 @@ def serialize(self, pid, record, validate_mode=False): ','.join(lst_invalid_fields), record.get('recid'))) return err_msg - db.entries.append(self.get_bibtex_data(root, bibtex_type)) + db.entries.append(self.__get_bibtex_data(root, bibtex_type)) writer = BibTexWriter() result = writer.write(db) return result @staticmethod - def get_jpcoar_data(pid, record): + def __get_jpcoar_data(pid, record): """Get jpcoar record. :param pid: The :class:`invenio_pidstore.models.PersistentIdentifier` @@ -481,7 +481,7 @@ def get_jpcoar_data(pid, record): return data - def is_empty(self, root): + def __is_empty(self, root): """ Determine whether the jpcoar record is empty. @@ -489,13 +489,13 @@ def is_empty(self, root): :return: """ - elements = root.findall('.//jpcoar:jpcoar', self.ns) + elements = root.findall('.//jpcoar:jpcoar', self.__ns) if len(elements) == 0 or len(list(elements[0])) == 0: return True return False - def get_bibtex_type(self, root): + def __get_bibtex_type(self, root): """ Determine jpcoar record types(except misc). @@ -504,7 +504,7 @@ def get_bibtex_type(self, root): """ type_result = None type_value = '' - for element in root.findall('.//dc:type', self.ns): + for element in root.findall('.//dc:type', self.__ns): type_value = element.text # Determine which type of Bibtex type is it for bib_type, item_types in self.type_mapping.items(): @@ -513,7 +513,7 @@ def get_bibtex_type(self, root): break return type_result - def validate_fields(self, root, bibtex_type): + def __validate_fields(self, root, bibtex_type): """Validate required fields of bibtex type. 
@param root: @@ -537,19 +537,19 @@ def validate_partial_req(): # check for pages because pages is represented for start # and end page if field == BibTexFields.PAGES: - start_page = root.findall(self.find_pattern.format( - self.fields_mapping[BibTexFields.PAGE_START]), - self.ns) + start_page = root.findall(self.__find_pattern.format( + self.__fields_mapping[BibTexFields.PAGE_START]), + self.__ns) end_page = root.findall( - self.find_pattern.format(self.fields_mapping[BibTexFields.PAGE_END]), - self.ns) + self.__find_pattern.format(self.__fields_mapping[BibTexFields.PAGE_END]), + self.__ns) if len(start_page) > 0 and len(end_page) > 0: partial_valid = True continue else: field_data = root.findall( - self.find_pattern.format(self.fields_mapping[field]), - self.ns) + self.__find_pattern.format(self.__fields_mapping[field]), + self.__ns) if len(field_data) > 0: partial_valid = True continue @@ -561,11 +561,11 @@ def validate_partial_req(): lst_invalid_fields = [] required_valid = True - fields = self.get_bibtex_type_fields(bibtex_type) + fields = self.____get_bibtex_type_fields(bibtex_type) for item_required in fields.get('required'): elements = root.findall( - self.find_pattern.format(self.fields_mapping[item_required]), - self.ns) + self.__find_pattern.format(self.__fields_mapping[item_required]), + self.__ns) if len(elements) == 0: required_valid = False lst_invalid_fields.append(item_required.value) @@ -589,13 +589,13 @@ def validate_partial_req(): partial_req_valid = validate_partial_req() return required_valid and partial_req_valid, lst_invalid_fields - def combine_all_fields(self, bibtex_type): + def __combine_all_fields(self, bibtex_type): """Combine all fields of item type. @param bibtex_type: @return: """ - all_field_type = self.get_bibtex_type_fields(bibtex_type) + all_field_type = self.____get_bibtex_type_fields(bibtex_type) all_fields = all_field_type.get( 'required') + all_field_type.get('optional') partial_req = all_field_type.get('required_partial') @@ -607,7 +607,7 @@ def combine_all_fields(self, bibtex_type): all_fields.extend(item) return all_fields - def get_bibtex_data(self, root, bibtex_type): + def __get_bibtex_data(self, root, bibtex_type): """Get Bibtex data base on Bibtex type. 
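# --- Illustrative sketch (not part of the patch) -------------------------
# __get_bibtex_data() in the hunks that follow assembles the entry dict
# field by field. The small helpers below imitate three of those
# conventions with placeholder values: authors joined with ' and ', a date
# split into year/month, and start/end pages joined with '--'.
def join_authors(names):
    return ' and '.join(names)

def split_date(date_text):
    # Assumption for illustration only: dates arrive as ISO-like 'YYYY-MM[-DD]'.
    parts = date_text.split('-')
    year = parts[0]
    month = parts[1] if len(parts) > 1 else ''
    return year, month

def join_pages(page_start, page_end):
    return str(page_start) + '--' + str(page_end)

print(join_authors(['Alice', 'Bob']))   # Alice and Bob
print(split_date('2020-05-20'))         # ('2020', '05')
print(join_pages(1, 10))                # 1--10
# --------------------------------------------------------------------------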
@param root: @@ -632,7 +632,7 @@ def process_author(): def process_url(): identifier_type = element.get(xml_ns + 'identifierType') - if identifier_type and identifier_type.lower in self.lst_identifier_type: + if identifier_type and identifier_type.lower in self.__lst_identifier_type: lst_identifier_type_data[ identifier_type.lower].append(element.text) @@ -640,19 +640,19 @@ def process_url(): page_start = '' page_end = '' title = '' - xml_ns = '{' + self.ns['xml'] + '}' + xml_ns = '{' + self.__ns['xml'] + '}' creator = {BibTexFields.AUTHOR.value: [], BibTexFields.YOMI.value: []} lst_identifier_type_data = {} dois = [] - all_fields = self.combine_all_fields(bibtex_type) + all_fields = self.__combine_all_fields(bibtex_type) - for i in self.lst_identifier_type: + for i in self.__lst_identifier_type: lst_identifier_type_data[i] = [] for field in all_fields: elements = root.findall( - self.find_pattern.format(self.fields_mapping[field]), self.ns) + self.__find_pattern.format(self.__fields_mapping[field]), self.__ns) if len(elements) != 0: value = '' dates = [] @@ -680,7 +680,7 @@ def process_url(): elif field == BibTexFields.YEAR or \ field == BibTexFields.MONTH and len(dates) != 0: data[BibTexFields.YEAR.value], data[ - BibTexFields.MONTH.value] = self.get_dates(dates) + BibTexFields.MONTH.value] = self.__get_dates(dates) elif field == BibTexFields.AUTHOR: if creator[BibTexFields.AUTHOR.value]: data[field.value] = ' and '.join( @@ -691,8 +691,8 @@ def process_url(): elif field == BibTexFields.DOI and len(dois) > 0: data[field.value] = ','.join(dois) elif field == BibTexFields.URL and len(): - data[field.value] = self.get_identifier( - self.lst_identifier_type, + data[field.value] = self.__get_identifier( + self.__lst_identifier_type, lst_identifier_type_data) elif field == BibTexFields.TITLE and title != '': data[field.value] = title @@ -703,12 +703,12 @@ def process_url(): data['pages'] = str(page_start) + '--' + str(page_end) data['ENTRYTYPE'] = bibtex_type.value - data['ID'] = self.get_item_id(root) + data['ID'] = self.__get_item_id(root) return data @staticmethod - def get_item_id(root): + def __get_item_id(root): """ Get item id from jpcoar record. @@ -727,7 +727,7 @@ def get_item_id(root): return item_id @staticmethod - def get_dates(dates): + def __get_dates(dates): """ Get year and month from date. @@ -748,7 +748,7 @@ def get_dates(dates): return year, month @staticmethod - def get_identifier(identifier_type, identifier_types_data): + def __get_identifier(identifier_type, identifier_types_data): """Get identifier data. @param identifier_type: diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js index a36ca3bc43..573f9f4854 100644 --- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js +++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js @@ -108,6 +108,20 @@ require([ }); }); + function getMessage(messageCode) { + const defaultLanguage = "en"; + let currentLanguage = document.getElementById("current_language").value; + let message = MESSAGE[messageCode]; + if (message) { + if (message[currentLanguage]) { + return message[currentLanguage]; + } else { + return message[defaultLanguage]; + } + } else { + return ""; + } +} //add controller to invenioSearch // add by ryuu. 
at 20181129 start @@ -122,20 +136,6 @@ function searchResCtrl($scope, $rootScope, $http, $location) { $rootScope.commInfoIndex = ""; } - $rootScope.getMessage = function(messageCode) { - const defaultLanguage = "en"; - let currentLanguage = document.getElementById("current_language").value; - let message = MESSAGE[messageCode]; - if (message) { - if (message[currentLanguage]) { - return message[currentLanguage]; - } else { - return message[defaultLanguage]; - } - } else { - return ""; - } -} $rootScope.disable_flg = true; $rootScope.display_flg = true; @@ -355,7 +355,7 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { } }); }) - exportBibtex=document.getElementById("export_format_radio_bibtex").checked + let exportBibtex = document.getElementById("export_format_radio_bibtex").checked if (exportBibtex) { let invalidBibtexRecordIds = $scope.validateBibtexExport(Object.keys(export_metadata)); if (invalidBibtexRecordIds.length > 0) { @@ -369,9 +369,9 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { } $scope.validateBibtexExport = function (record_ids) { - request_url = '/items/validate_bibtext_export'; + var request_url = '/items/validate_bibtext_export'; var data = { record_ids: record_ids } - invalidRecordIds = [] + var invalidRecordIds = [] $.ajax({ method: 'POST', url: request_url, @@ -391,7 +391,7 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { } $scope.showErrMsgBibtex = function (invalidRecordIds) { - errMsg = $scope.getMessage('bibtex_err'); + var errMsg = getMessage('bibtex_err'); invalidRecordIds.forEach(function (recordId) { document.getElementById('bibtex_err_' + recordId).textContent=errMsg; }); From a2a52a34017952e90642a9b19d3845f4dff114fd Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Tue, 19 May 2020 15:32:40 +0700 Subject: [PATCH 05/11] Commut missing code --- .../weko_schema_ui/serializers/WekoBibTexSerializer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index cdb1c101e3..a0f31f43e3 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -438,7 +438,7 @@ def serialize(self, pid, record, validate_mode=False): # Get JPCOAR datas(XML) and ElementTree root jpcoar_data = self.__get_jpcoar_data(pid, record) root = ET.fromstring(jpcoar_data) - if self.is_empty(root): + if self.__is_empty(root): return err_msg db = BibDatabase() From 419ad7d78b6076ad8ea60fa97cbf5e9f07f0f3ee Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Tue, 19 May 2020 17:58:26 +0700 Subject: [PATCH 06/11] PEP8 isort and add multi language --- .../translations/en/LC_MESSAGES/messages.po | 3 +++ .../translations/ja/LC_MESSAGES/messages.po | 3 +++ .../weko_items_ui/translations/messages.pot | 3 +++ .../serializers/WekoBibTexSerializer.py | 16 +++++++++------- .../static/js/weko_search_ui/app.js | 2 +- 5 files changed, 19 insertions(+), 8 deletions(-) diff --git a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po index 8ecdd27f7a..022643c1fc 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po @@ -307,3 +307,6 @@ msgstr "" msgid "The specified number has been 
exceeded. You can only select up to two fields." msgstr "" + +msgid "Required item is not inputted" +msgstr "" diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po index 7a87890d3e..52147318be 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po +++ b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po @@ -423,3 +423,6 @@ msgstr "以下のメールアドレスがシステムに登録されていませ msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "規定の数を超えています。選択できるのは2つまでです。" + +msgid "Required item is not inputted" +msgstr "必須項目がありません。" diff --git a/modules/weko-items-ui/weko_items_ui/translations/messages.pot b/modules/weko-items-ui/weko_items_ui/translations/messages.pot index f134f2caf2..aabd7a015e 100644 --- a/modules/weko-items-ui/weko_items_ui/translations/messages.pot +++ b/modules/weko-items-ui/weko_items_ui/translations/messages.pot @@ -358,3 +358,6 @@ msgstr "" msgid "The specified number has been exceeded. You can only select up to two fields." msgstr "" + +msgid "Required item is not inputted" +msgstr "" diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py index a0f31f43e3..07ebf7c85f 100644 --- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py +++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py @@ -545,14 +545,14 @@ def validate_partial_req(): self.__ns) if len(start_page) > 0 and len(end_page) > 0: partial_valid = True - continue + break else: field_data = root.findall( self.__find_pattern.format(self.__fields_mapping[field]), self.__ns) if len(field_data) > 0: partial_valid = True - continue + break if not partial_valid: result = False lst_invalid_fields.append(par_req[0].value) @@ -560,6 +560,7 @@ def validate_partial_req(): return result lst_invalid_fields = [] + identifierType_str = 'identifierType' required_valid = True fields = self.____get_bibtex_type_fields(bibtex_type) for item_required in fields.get('required'): @@ -576,12 +577,12 @@ def validate_partial_req(): lst_invalid_fields.append(item_required.value) required_valid = False elif item_required == BibTexFields.DOI: - doi_valid = validate_by_att('identifierType', ['doi']) + doi_valid = validate_by_att(identifierType_str, ['doi']) if not doi_valid: lst_invalid_fields.append(item_required.value) required_valid = False elif item_required == BibTexFields.URL: - url_valid = validate_by_att('identifierType', + url_valid = validate_by_att(identifierType_str, ['doi', 'hdl', 'uri']) if not url_valid: lst_invalid_fields.append(item_required.value) @@ -641,6 +642,7 @@ def process_url(): page_end = '' title = '' xml_ns = '{' + self.__ns['xml'] + '}' + and_str = ' and ' creator = {BibTexFields.AUTHOR.value: [], BibTexFields.YOMI.value: []} lst_identifier_type_data = {} @@ -670,7 +672,7 @@ def process_url(): # Get only one title at all title = element.text if value != '': - value += ' and ' if field == BibTexFields.AUTHOR else ', ' + value += and_str if field == BibTexFields.AUTHOR else ', ' value += element.text if field == BibTexFields.PAGE_START: @@ -683,10 +685,10 @@ def process_url(): BibTexFields.MONTH.value] = self.__get_dates(dates) elif field == BibTexFields.AUTHOR: if creator[BibTexFields.AUTHOR.value]: - data[field.value] = ' and '.join( + data[field.value] = and_str.join( 
creator[BibTexFields.AUTHOR.value]) if creator[BibTexFields.YOMI.value]: - data[BibTexFields.YOMI.value] = ' and '.join( + data[BibTexFields.YOMI.value] = and_str.join( creator[BibTexFields.YOMI.value]) elif field == BibTexFields.DOI and len(dois) > 0: data[field.value] = ','.join(dois) diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js index 573f9f4854..174762892c 100644 --- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js +++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js @@ -108,7 +108,7 @@ require([ }); }); - function getMessage(messageCode) { +function getMessage(messageCode) { const defaultLanguage = "en"; let currentLanguage = document.getElementById("current_language").value; let message = MESSAGE[messageCode]; From 29aff1ddc993d97465b64fb3996f344309b7ff8e Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Tue, 19 May 2020 18:42:57 +0700 Subject: [PATCH 07/11] Commit compiled .mo file --- .../translations/ja/LC_MESSAGES/messages.mo | Bin 5882 -> 5962 bytes 1 file changed, 0 insertions(+), 0 deletions(-) diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo index 07887591931b0b92fa284c63ab37ca5e4d7f2c7b..d231d8dfcbb485cfa5de4d47409b2f4ab8ec43e7 100644 GIT binary patch delta 1423 zcmYM!OGs2v9LMpKGp3p5BOj$@PM8^{SQ(XA7G;U3(84T)2-IN8(o#zznPy1HQjkKT zBymxMP$mr}w5TXDh{Tn;vcQ|B)WSuJ-uC^$frgp;Iq(1ZpL_58%zl^S`w-{uHhd29 zTgC5gh-&{o8$*p*Of?(BFb~7Az}AZ}j(Qm;;xSx-=dE4nrrwX5H-OPNgnWJGEmti( zVjZ(iq5@B2GJds&(WnL1Vk~Y(y39U|z!F@E6{vvqs13B>GHgW`cHk1?n=T5GG~7mQ zkGL$wr>KQrVFHez7IaYKXE6%rZTnx;I2WBdk(Iavb5Zm1Q72T0A;dR_DR{67 z*I+Ad!#kLYW4Ii@p%xA&tx8NqWq6K@7WUyDyo{@H7}<;YWc$CM5}n5q{DVGCEM|MU z_hqPnCr}gHQT<(*gxz=uA7VbbIhHakM9u3&W%d9WV%{KoHlx-FRDvHd0~aDVe`TJ* zImciz>fRnfJyc%Qy}gL{78w(;{h6G%#%)KPNHHq2qt=tC4Ygnj-n0f$iH)M>k4JI- z3N%B5cKidikc%fp17lF_Nw)66I_jC2i`}RI!>G)sP~(500)%o|j|r&yPA=*%!+5Op zQBa^d+tG;HP%H8^UDiHSpg~l|@39!CP#ezT_=st?qcT5$3UnN^u>sSu7q$LtR9xRU zg)I~&Q4?Z$r*bg`HL(;6@q(>CLj{;cJ!E05R*UOU3tqEc$5QIIP`7x-wukd1Yg{za z=H-QYfOK8h2nn>Rvjig??K-yaM{;*q;Yug(zNIQY0*oDh5ZS6%j?W?GH87#(I$l;g= zoE11{9kY(35>H|nKUil`0fO8t!IemtS&x2f#4=2x5_Y2&a2SiQ2VHm?edIU2Tr8yH zB5EN6$YHK?D#V+p!1r(o=1>7&p~k<%0DiLl)2MMXs2%Y!$cI&^c{QjVs>1^En-(rs zU>ioT2kWs9D{%}%_!Sj+4tHS@<*C9$oD}#tZpBkbZgb!E=THSc!B%`}`@^it(KC;5 zp+rrniF;51Q@9kqb>{2DxPhkTse?>upqFcQJ|+sDkJG?EfMz z!tAr2=^E6_vCTN9`Swc>I-Vm*&4Y!Een2$g3v z!2WB+FX&L9NjvZ}s{f~L&)_lId5mM4H%SSupei3hjUPuPc#Es>hiwgzL~5 z=0X#`U>tv;CRXy>+Jfz<_9aw;hggAAco1h%0rwJJ9>8`yYTKjMN62BGaoT}XNM6U( z@oGouXuv+)hZJR|Q5F70y)1?7+gXg^ejLUQ46!Y$FojKc0X6?AYC*421-?fW@(q(1 v; Date: Wed, 20 May 2020 15:02:07 +0700 Subject: [PATCH 08/11] Fix bug Js and not to export invalid file/folders json when exporting Bibtex --- .../weko_items_ui/macros/export_controls.html | 3 ++- modules/weko-items-ui/weko_items_ui/utils.py | 6 +++++- .../weko_search_ui/static/js/weko_search_ui/app.js | 14 ++++++++------ 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html b/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html index 38fc8694ea..9bce5fa85d 100644 --- a/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html +++ b/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html @@ -47,6 
+47,7 @@
{{ _('File contents cannot be exported.') }}
+
@@ -55,7 +56,7 @@
{{ _('Max number of items able to export') }}:{{ max_export_num }}
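The utils.py hunk that follows drops every record the browser flagged as invalid before any export file is written; a minimal standalone illustration of that filtering step, with made-up ids:

# Ids selected for export, and the ids rejected by the BibTeX validation
# (posted back from the page as strings). Values here are placeholders.
record_ids = [1, 2, 3, 4]
invalid_record_ids = ['2', '4']
invalid_record_ids = [int(i) for i in invalid_record_ids]
# Remove all invalid records before exporting.
record_ids = set(record_ids) - set(invalid_record_ids)
print(sorted(record_ids))   # [1, 3]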
diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index 6d3ed69c19..509c0b92af 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -866,7 +866,7 @@ def write_bibtex_files(item_types_data, export_path): if output: with open('{}/{}.bib'.format(export_path, item_type_data.get('name')), - 'w') as file: + 'w', encoding="utf-8") as file: file.write(output) @@ -910,6 +910,10 @@ def check_item_type_name(name): post_data['export_file_contents_radio'] == 'True' else False export_format = post_data['export_format_radio'] record_ids = json.loads(post_data['record_ids']) + invalid_record_ids = json.loads(post_data['invalid_record_ids']) + invalid_record_ids = [int(i) for i in invalid_record_ids] + # Remove all invalid records + record_ids = set(record_ids) - set(invalid_record_ids) record_metadata = json.loads(post_data['record_metadata']) if len(record_ids) > _get_max_export_items(): return abort(400) diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js index 174762892c..b0e304a6fe 100644 --- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js +++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js @@ -298,15 +298,15 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { // Check if current hits in selected array - $scope.checkIfAllInArray = function () { - angular.forEach($scope.vm.invenioSearchResults.hits.hits, function (record) { + $scope.checkIfAllInArray = function() { + all_in_array = true; + angular.forEach($scope.vm.invenioSearchResults.hits.hits, function(record) { item_index = $rootScope.item_export_checkboxes.indexOf(record.id); - if (checkAll && item_index == -1) { - $rootScope.item_export_checkboxes.push(record.id); - } else if (!checkAll && item_index >= 0) { - $rootScope.item_export_checkboxes.splice(item_index, 1); + if(item_index == -1) { + all_in_array = false; } }); + return all_in_array; } $scope.checkAll = function (checkAll) { @@ -347,6 +347,7 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { if ($rootScope.item_export_checkboxes.length <= $rootScope.max_export_num) { records_metadata = $scope.getExportItemsMetadata(); $('#record_ids').val(JSON.stringify($rootScope.item_export_checkboxes)); + $('#invalid_record_ids').val(JSON.stringify([])); let export_metadata = {} $rootScope.item_export_checkboxes.map(function(recid) { $.each(records_metadata, function (index, value) { @@ -359,6 +360,7 @@ function itemExportCtrl($scope, $rootScope, $http, $location) { if (exportBibtex) { let invalidBibtexRecordIds = $scope.validateBibtexExport(Object.keys(export_metadata)); if (invalidBibtexRecordIds.length > 0) { + $('#invalid_record_ids').val(JSON.stringify(invalidBibtexRecordIds)); $scope.showErrMsgBibtex(invalidBibtexRecordIds); } } From a255dee6f04e6a11efa7a43589aaed2418054564 Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Wed, 20 May 2020 17:51:52 +0700 Subject: [PATCH 09/11] return an empty response in case (export list) == 0 in oder to keep the showing error message --- modules/weko-items-ui/weko_items_ui/utils.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index 509c0b92af..2e64778a1b 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ 
b/modules/weko-items-ui/weko_items_ui/utils.py @@ -866,7 +866,7 @@ def write_bibtex_files(item_types_data, export_path): if output: with open('{}/{}.bib'.format(export_path, item_type_data.get('name')), - 'w', encoding="utf-8") as file: + 'w', encoding='utf8') as file: file.write(output) @@ -918,8 +918,7 @@ def check_item_type_name(name): if len(record_ids) > _get_max_export_items(): return abort(400) elif len(record_ids) == 0: - flash(_('Please select Items to export.'), 'error') - return redirect(url_for('weko_items_ui.export')) + return ('', 204) result = {'items': []} temp_path = tempfile.TemporaryDirectory() @@ -1679,7 +1678,7 @@ def make_bibtex_data(record_ids): from weko_schema_ui.serializers import WekoBibTexSerializer for record_id in record_ids: record = WekoRecord.get_record_by_pid(record_id) - pid = record.pid_recid + pid = record.pid_re serializer = WekoBibTexSerializer() output = serializer.serialize(pid, record) result += output if output != err_msg else '' From c42179dfba2895f3233b080425ee0a260e8d013b Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Wed, 20 May 2020 17:54:11 +0700 Subject: [PATCH 10/11] PEP8 --- modules/weko-items-ui/weko_items_ui/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index 2e64778a1b..e26c703827 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -918,7 +918,7 @@ def check_item_type_name(name): if len(record_ids) > _get_max_export_items(): return abort(400) elif len(record_ids) == 0: - return ('', 204) + return '', 204 result = {'items': []} temp_path = tempfile.TemporaryDirectory() From 37c4853a22ca2c26033c80bd9bb175487da10c18 Mon Sep 17 00:00:00 2001 From: weko3-dev037 Date: Wed, 20 May 2020 19:30:38 +0700 Subject: [PATCH 11/11] Commt missing code --- modules/weko-items-ui/weko_items_ui/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py index e26c703827..72c9f07965 100644 --- a/modules/weko-items-ui/weko_items_ui/utils.py +++ b/modules/weko-items-ui/weko_items_ui/utils.py @@ -1678,7 +1678,7 @@ def make_bibtex_data(record_ids): from weko_schema_ui.serializers import WekoBibTexSerializer for record_id in record_ids: record = WekoRecord.get_record_by_pid(record_id) - pid = record.pid_re + pid = record.pid_recid serializer = WekoBibTexSerializer() output = serializer.serialize(pid, record) result += output if output != err_msg else ''
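A closing, self-contained sketch of the exporter behaviour the last patches converge on (the path and BibTeX content below are placeholders, not WEKO output): a .bib file is only written when the serializer produced something, and it is opened with UTF-8 so Japanese metadata survives the export.

import tempfile

def write_bib(export_path, item_type_name, output):
    # Skip item types whose records produced no BibTeX output at all.
    if not output:
        return None
    path = '{}/{}.bib'.format(export_path, item_type_name)
    with open(path, 'w', encoding='utf8') as f:
        f.write(output)
    return path

with tempfile.TemporaryDirectory() as tmp:
    bib = '@article{1,\n title = {タイトル}\n}\n'
    print(write_bib(tmp, 'journal article', bib))   # .../journal article.bib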