diff --git a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
index b030d62ad8..3e6385cc85 100644
--- a/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
+++ b/modules/weko-items-ui/weko_items_ui/static/templates/weko_items_ui/export_list.html
@@ -32,6 +32,7 @@
|
Contains restricted content
+
|
{{ (record.metadata.hasOwnProperty('file') && record.metadata.file.hasOwnProperty('URI')) ? record.metadata.file.URI.length : '0' }}
diff --git a/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html b/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html
index 38fc8694ea..9bce5fa85d 100644
--- a/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html
+++ b/modules/weko-items-ui/weko_items_ui/templates/weko_items_ui/macros/export_controls.html
@@ -47,6 +47,7 @@ {{ _('File contents cannot be exported.') }}
+
@@ -55,7 +56,7 @@ {{ _('Max number of items able to export') }}:{{ max_expor
diff --git a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po
index 8ecdd27f7a..022643c1fc 100644
--- a/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po
+++ b/modules/weko-items-ui/weko_items_ui/translations/en/LC_MESSAGES/messages.po
@@ -307,3 +307,6 @@ msgstr ""
msgid "The specified number has been exceeded. You can only select up to two fields."
msgstr ""
+
+msgid "Required item is not inputted"
+msgstr ""
diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo
index 0788759193..d231d8dfcb 100644
Binary files a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo and b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.mo differ
diff --git a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po
index 6d11b2af1b..52147318be 100644
--- a/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po
+++ b/modules/weko-items-ui/weko_items_ui/translations/ja/LC_MESSAGES/messages.po
@@ -424,3 +424,5 @@ msgstr "以下のメールアドレスがシステムに登録されていませ
msgid "The specified number has been exceeded. You can only select up to two fields."
msgstr "規定の数を超えています。選択できるのは2つまでです。"
+msgid "Required item is not inputted"
+msgstr "必須項目がありません。"
diff --git a/modules/weko-items-ui/weko_items_ui/translations/messages.pot b/modules/weko-items-ui/weko_items_ui/translations/messages.pot
index f134f2caf2..aabd7a015e 100644
--- a/modules/weko-items-ui/weko_items_ui/translations/messages.pot
+++ b/modules/weko-items-ui/weko_items_ui/translations/messages.pot
@@ -358,3 +358,6 @@ msgstr ""
msgid "The specified number has been exceeded. You can only select up to two fields."
msgstr ""
+
+msgid "Required item is not inputted"
+msgstr ""
diff --git a/modules/weko-items-ui/weko_items_ui/utils.py b/modules/weko-items-ui/weko_items_ui/utils.py
index acb33b6392..72c9f07965 100644
--- a/modules/weko-items-ui/weko_items_ui/utils.py
+++ b/modules/weko-items-ui/weko_items_ui/utils.py
@@ -852,6 +852,50 @@ def get_list_file_by_record_id(recid):
return list_file_name
+def write_bibtex_files(item_types_data, export_path):
+ """Write Bibtex data to files.
+
+    @param item_types_data: item type data dict keyed by item type id; each
+        entry holds the 'name' and 'recids' used to build the .bib file
+    @param export_path: directory the .bib files are written into
+    @return: None
+ """
+ for item_type_id in item_types_data:
+ item_type_data = item_types_data[item_type_id]
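+        # make_bibtex_data concatenates the BibTex entries of every
+        # record id that belongs to this item type.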
+ output = make_bibtex_data(item_type_data['recids'])
+        # Only create a file when the BibTex serializer produced output.
+ if output:
+ with open('{}/{}.bib'.format(export_path,
+ item_type_data.get('name')),
+ 'w', encoding='utf8') as file:
+ file.write(output)
+
+
+def write_tsv_files(item_types_data, export_path, list_item_role):
+ """Write TSV data to files.
+
+    @param item_types_data: item type data dict keyed by item type id
+    @param export_path: directory the .tsv files are written into
+    @param list_item_role: role settings per item type, passed to
+        make_stats_tsv
+    @return: None
+ """
+ for item_type_id in item_types_data:
+ keys, labels, records = make_stats_tsv(
+ item_type_id,
+ item_types_data[item_type_id]['recids'],
+ list_item_role)
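+        # Keep the generated keys, labels and rows on the item type entry;
+        # package_export_file builds the TSV content from them below.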
+ item_types_data[item_type_id]['recids'].sort()
+ item_types_data[item_type_id]['keys'] = keys
+ item_types_data[item_type_id]['labels'] = labels
+ item_types_data[item_type_id]['data'] = records
+ item_type_data = item_types_data[item_type_id]
+
+ with open('{}/{}.tsv'.format(export_path,
+ item_type_data.get('name')),
+ 'w') as file:
+ tsv_output = package_export_file(item_type_data)
+ file.write(tsv_output.getvalue())
+
+
def export_items(post_data):
"""Gather all the item data and export and return as a JSON or BIBTEX.
@@ -866,12 +910,15 @@ def check_item_type_name(name):
post_data['export_file_contents_radio'] == 'True' else False
export_format = post_data['export_format_radio']
record_ids = json.loads(post_data['record_ids'])
+ invalid_record_ids = json.loads(post_data['invalid_record_ids'])
+ invalid_record_ids = [int(i) for i in invalid_record_ids]
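+    # invalid_record_ids holds the ids flagged by the BibTex export validation.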
+ # Remove all invalid records
+ record_ids = set(record_ids) - set(invalid_record_ids)
record_metadata = json.loads(post_data['record_metadata'])
if len(record_ids) > _get_max_export_items():
return abort(400)
elif len(record_ids) == 0:
- flash(_('Please select Items to export.'), 'error')
- return redirect(url_for('weko_items_ui.export'))
+ return '', 204
result = {'items': []}
temp_path = tempfile.TemporaryDirectory()
@@ -915,22 +962,10 @@ def check_item_type_name(name):
item_types_data[item_type_id]['recids'].append(record_id)
# Create export info file
- for item_type_id in item_types_data:
- keys, labels, records = make_stats_tsv(
- item_type_id,
- item_types_data[item_type_id]['recids'],
- list_item_role)
- item_types_data[item_type_id]['recids'].sort()
- item_types_data[item_type_id]['keys'] = keys
- item_types_data[item_type_id]['labels'] = labels
- item_types_data[item_type_id]['data'] = records
- item_type_data = item_types_data[item_type_id]
-
- with open('{}/{}.tsv'.format(export_path,
- item_type_data.get('name')),
- 'w') as file:
- tsvs_output = package_export_file(item_type_data)
- file.write(tsvs_output.getvalue())
+ if export_format == 'BIBTEX':
+ write_bibtex_files(item_types_data, export_path)
+ else:
+ write_tsv_files(item_types_data, export_path, list_item_role)
# Create bag
bagit.make_bag(export_path)
@@ -993,22 +1028,7 @@ def export_item_custorm(post_data):
item_types_data[item_type_id]['recids'].append(record_id)
# Create export info file
- for item_type_id in item_types_data:
- keys, labels, records = make_stats_tsv(
- item_type_id,
- item_types_data[item_type_id]['recids'],
- list_item_role)
- item_types_data[item_type_id]['recids'].sort()
- item_types_data[item_type_id]['keys'] = keys
- item_types_data[item_type_id]['labels'] = labels
- item_types_data[item_type_id]['data'] = records
- item_type_data = item_types_data[item_type_id]
-
- with open('{}/{}.tsv'.format(export_path,
- item_type_data.get('name')),
- 'w') as file:
- tsvs_output = package_export_file(item_type_data)
- file.write(tsvs_output.getvalue())
+ write_tsv_files(item_types_data, export_path, list_item_role)
# Create bag
bagit.make_bag(export_path)
@@ -1626,3 +1646,40 @@ def translate_validation_message(item_property, cur_lang):
for _key, value in item_property.get(properties_attr).items():
set_validation_message(value, cur_lang)
translate_validation_message(value, cur_lang)
+
+
+def validate_bibtex(record_ids):
+ """Validate data of records for Bibtex exporting.
+
+    @param record_ids: record ids selected for BibTex export
+    @return: list of record ids that fail the required-field check
+ """
+ lst_invalid_ids = []
+ err_msg = _('Please input all required item.')
+ from weko_schema_ui.serializers import WekoBibTexSerializer
+ for record_id in record_ids:
+ record = WekoRecord.get_record_by_pid(record_id)
+ pid = record.pid_recid
+ serializer = WekoBibTexSerializer()
+ result = serializer.serialize(pid, record, True)
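+        # In validate mode the serializer returns a boolean; the error
+        # message is returned when the record has no mapping, so both
+        # falsy results and the message mark the record as invalid.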
+ if not result or result == err_msg:
+ lst_invalid_ids.append(record_id)
+ return lst_invalid_ids
+
+
+def make_bibtex_data(record_ids):
+ """Serialize all Bibtex data by record ids.
+
+    @param record_ids: record ids to serialize
+    @return: concatenated BibTex entries of every record that serializes
+        successfully
+ """
+ result = ''
+ err_msg = _('Please input all required item.')
+ from weko_schema_ui.serializers import WekoBibTexSerializer
+ for record_id in record_ids:
+ record = WekoRecord.get_record_by_pid(record_id)
+ pid = record.pid_recid
+ serializer = WekoBibTexSerializer()
+ output = serializer.serialize(pid, record)
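+        # Records that cannot be serialized return the error message;
+        # skip them so the output only contains valid entries.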
+ result += output if output != err_msg else ''
+ return result
diff --git a/modules/weko-items-ui/weko_items_ui/views.py b/modules/weko-items-ui/weko_items_ui/views.py
index 1feaabfc4a..64b7c6272b 100644
--- a/modules/weko-items-ui/weko_items_ui/views.py
+++ b/modules/weko-items-ui/weko_items_ui/views.py
@@ -1098,6 +1098,19 @@ def check_restricted_content():
return jsonify({'restricted_records': list(restricted_records)})
+@blueprint.route('/validate_bibtex_export', methods=['POST'])
+def validate_bibtex_export():
+ """Validate export Bibtex.
+
+ @return:
+ """
+ from .utils import validate_bibtex
+ post_data = request.get_json()
+ record_ids = post_data['record_ids']
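+    # Run the BibTex serializer in validate mode for every id and
+    # collect the ids that fail.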
+ invalid_record_ids = validate_bibtex(record_ids)
+ return jsonify(invalid_record_ids=invalid_record_ids)
+
+
@blueprint.route('/export', methods=['GET', 'POST'])
def export():
"""Item export view."""
diff --git a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py
index 1ca9cb80fc..07ebf7c85f 100644
--- a/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py
+++ b/modules/weko-schema-ui/weko_schema_ui/serializers/WekoBibTexSerializer.py
@@ -19,277 +19,454 @@
# MA 02111-1307, USA.
"""WEKO BibTex Serializer."""
-
import xml.etree.ElementTree as ET
from datetime import datetime
+from enum import Enum
from bibtexparser.bibdatabase import BibDatabase
from bibtexparser.bwriter import BibTexWriter
-from flask import abort
+from flask import current_app
from ..schema import SchemaTree, cache_schema
from .wekoxml import WekoXMLSerializer
+class BibTexTypes(Enum):
+ """BibTex Types."""
+
+ ARTICLE = 'article'
+ BOOK = 'book'
+ BOOKLET = 'booklet'
+ CONFERENCE = 'conference'
+ INBOOK = 'inbook'
+ INCOLLECTION = 'incollection'
+ INPROCEEDINGS = 'inproceedings'
+ MANUAL = 'manual'
+ MASTERSTHESIS = 'mastersthesis'
+ MISC = 'misc'
+ PHDTHESIS = 'phdthesis'
+ PROCEEDINGS = 'proceedings'
+ TECHREPORT = 'techreport'
+ UNPUBLISHED = 'unpublished'
+
+
+class BibTexFields(Enum):
+ """BibTex Fields."""
+
+ AUTHOR = 'author'
+ YOMI = 'yomi'
+ TITLE = 'title'
+    BOOK_TITLE = 'booktitle'
+ JOURNAL = 'journal'
+ VOLUME = 'volume'
+    NUMBER = 'number'
+ PAGES = 'pages'
+ PAGE_START = 'page_start'
+ PAGE_END = 'page_end'
+ NOTE = 'note'
+ PUBLISHER = 'publisher'
+ YEAR = 'year'
+ MONTH = 'month'
+ URL = 'url'
+ DOI = 'doi'
+ SCHOOL = 'school'
+ TYPE = 'type'
+ EDITOR = 'editor'
+ EDITION = 'edition'
+ CHAPTER = 'chapter'
+ SERIES = 'series'
+ ADDRESS = 'address'
+ ORGANIZATION = 'organization'
+ KEY = 'key'
+ CROSSREF = 'crossref'
+ ANNOTE = 'annote'
+ INSTITUTION = 'institution'
+    HOW_PUBLISHER = 'howpublished'
+
+
class WekoBibTexSerializer():
"""Weko bibtex serializer."""
+    # Mapping between BibTex entry types and jpcoar dc:type values
+ type_mapping = {
+ BibTexTypes.ARTICLE: ['journal article',
+ 'departmental bulletin paper',
+ 'review article', 'data paper', 'periodical',
+ 'editorial',
+ 'article'],
+ BibTexTypes.BOOK: ['book'],
+ BibTexTypes.INBOOK: ['book part'],
+ BibTexTypes.INPROCEEDINGS: ['conference paper'],
+ BibTexTypes.MASTERSTHESIS: ['master thesis'],
+ BibTexTypes.MISC: ['research proposal', 'technical documentation',
+ 'thesis',
+ 'bachelor thesis', 'cartographic material',
+ 'map',
+ 'lecture', 'conference object', 'conference poster',
+ 'image', 'still image', 'moving image', 'video',
+ 'sound',
+ 'musical notation', 'interactive resource',
+ 'learning material', 'patent', 'dataset', 'software',
+ 'workflow',
+ 'other(その他)',
+ 'other(プレプリント)'],
+ BibTexTypes.PHDTHESIS: ['doctoral thesis'],
+ BibTexTypes.PROCEEDINGS: ['conference proceedings'],
+ BibTexTypes.TECHREPORT: ['report',
+ 'research report',
+ 'working paper',
+ 'technical report',
+ 'policy report',
+ 'internal report',
+ 'report part'],
+ BibTexTypes.INCOLLECTION: [],
+ BibTexTypes.BOOKLET: [],
+ BibTexTypes.CONFERENCE: [],
+ BibTexTypes.MANUAL: [],
+ BibTexTypes.UNPUBLISHED: []}
+
def __init__(self):
"""Init."""
# Load namespace
- self.ns = cache_schema('jpcoar_mapping').get('namespaces')
-
- # JPCOAR types
- self.article_types = ['conference paper', 'data paper', 'editorial',
- 'journal article', 'periodical',
- 'review article', 'article',
- 'departmental bulletin paper']
-
- self.book_types = ['book', 'book part']
- self.inproceedings_types = ['conference proceedings']
- self.techreport_types = [
- 'technical report',
- 'report',
- 'research report']
- self.unpublished_types = ['conference object', 'conference poster']
-
- self.misc_types = ['thesis', 'bachelor thesis', 'master thesis',
- 'doctoral thesis', 'learning material',
- 'dataset', 'software', 'other',
- 'cartographic material', 'map', 'image',
- 'still image', 'moving image', 'video',
- 'lecture', 'patent', 'internal report',
- 'policy report', 'report part', 'working paper',
- 'sound', 'interactive resource',
- 'musical notation', 'research proposal',
- 'technical documentation', 'workflow']
-
+ self.__ns = cache_schema('jpcoar_mapping').get('namespaces')
+        self.__lst_identifier_type = ['doi', 'hdl', 'uri']
# JPCOAR elements
- creator_name = '{' + self.ns['jpcoar'] + '}' + 'creatorName'
- title = '{' + self.ns['dc'] + '}' + 'title'
- source_title = '{' + self.ns['jpcoar'] + '}' + 'sourceTitle'
- volume = '{' + self.ns['jpcoar'] + '}' + 'volume'
- issue = '{' + self.ns['jpcoar'] + '}' + 'issue'
- page_start = '{' + self.ns['jpcoar'] + '}' + 'pageStart'
- page_end = '{' + self.ns['jpcoar'] + '}' + 'pageEnd'
- date = '{' + self.ns['datacite'] + '}' + 'date'
- publisher = '{' + self.ns['dc'] + '}' + 'publisher'
- type = '{' + self.ns['datacite'] + '}' + 'description'
- mime_type = '{' + self.ns['jpcoar'] + '}' + 'mimeType'
- contributor_name = '{' + self.ns['jpcoar'] + '}' + 'contributor' + \
- '//' + '{' + self.ns['jpcoar'] + \
- '}' + 'affiliationName'
-
- # [BibTex]Article columns
- self.article_cols_required = {'author': creator_name,
- 'title': title,
- 'journal': source_title,
- 'date': date}
-
- self.article_cols_all = {'author': creator_name,
- 'title': title,
- 'journal': source_title,
- 'volume': volume,
- 'number': issue,
- 'page_start': page_start,
- 'page_end': page_end,
- 'date': date}
-
- # [BibTex]Book columns
- self.book_cols_required = {'author': creator_name,
- 'title': title,
- 'publisher': publisher,
- 'date': date}
-
- self.book_cols_all = {'author': creator_name,
- 'title': title,
- 'volume': volume,
- 'number': issue,
- 'publisher': publisher,
- 'date': date}
-
- # [BibTex]Booklet columns
- self.booklet_cols_required = {'title': title}
-
- self.booklet_cols_all = {'author': creator_name,
- 'title': title,
- 'howpublished': mime_type,
- 'date': date}
-
- # [BibTex]Inbook columns
- self.inbook_cols_required = {'author': creator_name,
- 'title': title,
- 'page_start': page_start,
- 'page_end': page_end,
- 'publisher': publisher,
- 'date': date}
-
- self.inbook_cols_all = {'author': creator_name,
- 'title': title,
- 'volume': volume,
- 'number': issue,
- 'page_start': page_start,
- 'page_end': page_end,
- 'publisher': publisher,
- 'date': date,
- 'type': type}
-
- # [BibTex]Incollection columns
- self.incollection_cols_required = {'author': creator_name,
- 'title': title,
- 'booktitle': source_title,
- 'publisher': publisher,
- 'date': date}
-
- self.incollection_cols_all = {'author': creator_name,
- 'title': title,
- 'booktitle': source_title,
- 'volume': volume,
- 'number': issue,
- 'page_start': page_start,
- 'page_end': page_end,
- 'publisher': publisher,
- 'date': date,
- 'type': type}
-
- # [BibTex]Inproceedings columns
- self.inproceedings_cols_required = {'author': creator_name,
- 'title': title,
- 'booktitle': source_title,
- 'date': date}
-
- self.inproceedings_cols_all = {'author': creator_name,
- 'title': title,
- 'booktitle': source_title,
- 'volume': volume,
- 'number': issue,
- 'page_start': page_start,
- 'page_end': page_end,
- 'publisher': publisher,
- 'date': date}
-
- # [BibTex]Techreport columns
- self.techreport_cols_required = {'author': creator_name,
- 'title': title,
- 'date': date,
- 'institution': contributor_name}
-
- self.techreport_cols_all = {'author': creator_name,
- 'title': title,
- 'number': issue,
- 'date': date,
- 'institution': contributor_name,
- 'type': type}
-
- # [BibTex]Unpublished columns
- self.unpublished_cols_required = {'author': creator_name,
- 'title': title}
-
- self.unpublished_cols_all = {'author': creator_name,
- 'title': title,
- 'date': date}
-
- # [BibTex]Misc columns
- self.misc_cols_all = {'author': creator_name,
- 'title': title,
- 'howpublished': mime_type,
- 'date': date}
-
- def serialize(self, pid, record):
+ jp_jp = '{' + self.__ns['jpcoar'] + '}'
+ jp_dc = '{' + self.__ns['dc'] + '}'
+ jp_datacite = '{' + self.__ns['datacite'] + '}'
+ self.__find_pattern = './/{}'
+
+ self.__fields_mapping = {
+ BibTexFields.AUTHOR: jp_jp + 'creatorName',
+ BibTexFields.TITLE: jp_dc + 'title',
+ BibTexFields.JOURNAL: jp_jp + 'sourceTitle',
+ BibTexFields.BOOK_TITLE: jp_jp + 'sourceTitle',
+ BibTexFields.VOLUME: jp_jp + 'volume',
+ BibTexFields.NUMBER: jp_jp + 'issue',
+ BibTexFields.PAGE_START: jp_jp + 'pageStart',
+ BibTexFields.PAGE_END: jp_jp + 'pageEnd',
+ BibTexFields.PUBLISHER: jp_dc + 'publisher',
+            BibTexFields.HOW_PUBLISHER: jp_jp + 'mimeType',
+ BibTexFields.YEAR: jp_datacite + 'date',
+ BibTexFields.MONTH: jp_datacite + 'date',
+ BibTexFields.INSTITUTION: 'none',
+ BibTexFields.TYPE: 'none',
+ BibTexFields.EDITOR: 'none',
+ BibTexFields.EDITION: 'none',
+ BibTexFields.CHAPTER: 'none',
+ BibTexFields.SERIES: 'none',
+ BibTexFields.ADDRESS: 'none',
+ BibTexFields.NOTE: jp_datacite + 'description',
+ BibTexFields.SCHOOL: jp_jp + 'degreeGrantorName',
+ BibTexFields.ORGANIZATION: 'none',
+ BibTexFields.KEY: 'none',
+ BibTexFields.CROSSREF: 'none',
+ BibTexFields.ANNOTE: 'none',
+ BibTexFields.DOI: jp_jp + 'identifier',
+ BibTexFields.URL: jp_jp + 'identifier',
+ }
+
+    def __get_bibtex_type_fields(self, bibtex_type):
+        """Get all fields of a BibTex type.
+
+        @param bibtex_type: BibTexTypes member
+        @return: dict with 'required', 'optional' and 'required_partial'
+            field lists
+        """
+ result = {
+ BibTexTypes.ARTICLE: self.__get_article_fields(),
+ BibTexTypes.BOOK: self.__get_book_fields(),
+ BibTexTypes.BOOKLET: self.__get_booklet_fields(),
+ BibTexTypes.CONFERENCE: self.__get_conference_fields(),
+ BibTexTypes.INBOOK: self.__get_inbook_fields(),
+ BibTexTypes.INCOLLECTION: self.__get_incollection_fields(),
+ BibTexTypes.INPROCEEDINGS: self.__get_inproceedings_fields(),
+ BibTexTypes.MANUAL: self.__get_manual_fields(),
+ BibTexTypes.MASTERSTHESIS: self.__get_mastersthesis_fields(),
+ BibTexTypes.MISC: self.__get_misc_fields(),
+ BibTexTypes.PHDTHESIS: self.__get_phdthesis_fields(),
+ BibTexTypes.PROCEEDINGS: self.__get_proceedings_fields(),
+ BibTexTypes.TECHREPORT: self.__get_techreport_fields(),
+ BibTexTypes.UNPUBLISHED: self.__get_unpublished_fields(),
+ }
+ return result.get(bibtex_type)
+
+ @staticmethod
+ def __get_article_fields():
+ """Get article's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.JOURNAL, BibTexFields.YEAR]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.PAGE_START,
+ BibTexFields.PAGE_END, BibTexFields.MONTH,
+ BibTexFields.NOTE, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_book_fields():
+ """Get book's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.TITLE, BibTexFields.PUBLISHER,
+ BibTexFields.YEAR]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.MONTH,
+ BibTexFields.EDITION, BibTexFields.SERIES,
+ BibTexFields.ADDRESS,
+ BibTexFields.NOTE, BibTexFields.KEY]
+ lst_required_partial = [[BibTexFields.EDITOR,
+ BibTexFields.AUTHOR]]
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_booklet_fields():
+ """Get booklet's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.TITLE]
+ lst_optional = [BibTexFields.AUTHOR, BibTexFields.HOW_PUBLISHER,
+ BibTexFields.YEAR, BibTexFields.MONTH,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_conference_fields():
+ """Get conference's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.BOOK_TITLE, BibTexFields.YEAR]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.PAGE_START,
+ BibTexFields.PAGE_END, BibTexFields.PUBLISHER,
+ BibTexFields.MONTH,
+ BibTexFields.EDITOR, BibTexFields.SERIES,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.ORGANIZATION, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_inbook_fields():
+ """Get inbook's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.TITLE, BibTexFields.YEAR,
+ BibTexFields.PUBLISHER]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.MONTH,
+ BibTexFields.TYPE, BibTexFields.EDITION,
+ BibTexFields.SERIES,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.KEY]
+ lst_required_partial = [[BibTexFields.AUTHOR,
+ BibTexFields.EDITOR],
+ [BibTexFields.PAGES,
+ BibTexFields.CHAPTER]]
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_incollection_fields():
+ """Get incollection's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.BOOK_TITLE, BibTexFields.YEAR,
+ BibTexFields.PUBLISHER]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.PAGE_START,
+ BibTexFields.PAGE_END, BibTexFields.MONTH,
+ BibTexFields.TYPE, BibTexFields.EDITOR,
+ BibTexFields.EDITION, BibTexFields.CHAPTER,
+ BibTexFields.SERIES,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.ORGANIZATION, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_inproceedings_fields():
+ """Get inproceedings's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.BOOK_TITLE, BibTexFields.YEAR]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.PAGE_START,
+ BibTexFields.PAGE_END, BibTexFields.PUBLISHER,
+ BibTexFields.MONTH, BibTexFields.EDITOR,
+ BibTexFields.SERIES,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.ORGANIZATION, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_manual_fields():
+ """Get manual's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.TITLE]
+ lst_optional = [BibTexFields.AUTHOR, BibTexFields.YEAR,
+ BibTexFields.MONTH, BibTexFields.EDITION,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.ORGANIZATION, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_mastersthesis_fields():
+ """Get mastersthesis's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.YEAR, BibTexFields.SCHOOL]
+ lst_optional = [BibTexFields.MONTH, BibTexFields.TYPE,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_phdthesis_fields():
+ """Get phdthesis's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.YEAR, BibTexFields.SCHOOL]
+ lst_optional = [BibTexFields.MONTH, BibTexFields.TYPE,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_proceedings_fields():
+ """Get proceedings's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.TITLE, BibTexFields.YEAR]
+ lst_optional = [BibTexFields.VOLUME, BibTexFields.NUMBER,
+ BibTexFields.PUBLISHER, BibTexFields.MONTH,
+ BibTexFields.EDITOR, BibTexFields.SERIES,
+ BibTexFields.ADDRESS, BibTexFields.NOTE,
+ BibTexFields.ORGANIZATION, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_techreport_fields():
+ """Get techreport's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.YEAR, BibTexFields.INSTITUTION]
+ lst_optional = [BibTexFields.NUMBER, BibTexFields.MONTH,
+ BibTexFields.TYPE, BibTexFields.ADDRESS,
+ BibTexFields.NOTE, BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_unpublished_fields():
+ """Get unpublished's fields.
+
+ @return:
+ """
+ lst_required = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.NOTE]
+ lst_optional = [BibTexFields.YEAR, BibTexFields.MONTH,
+ BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ @staticmethod
+ def __get_misc_fields():
+ """Get mis's fields.
+
+ @return:
+ """
+ lst_required = []
+ lst_optional = [BibTexFields.AUTHOR, BibTexFields.TITLE,
+ BibTexFields.HOW_PUBLISHER, BibTexFields.YEAR,
+ BibTexFields.MONTH, BibTexFields.NOTE,
+ BibTexFields.KEY]
+ lst_required_partial = []
+ return {'required': lst_required, 'optional': lst_optional,
+ 'required_partial': lst_required_partial}
+
+ def serialize(self, pid, record, validate_mode=False):
"""Serialize to bibtex from jpcoar record.
:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
instance.
:param record: The :class:`invenio_records.api.Record` instance.
+ :param validate_mode: validate or not
:returns: The object serialized.
"""
- # Get JPCOAR data(XML) and ElementTree root
- jpcoar_data = self.get_jpcoar_data(pid, record)
+ err_msg = 'Please input all required item.'
+        # Get JPCOAR data (XML) and ElementTree root
+ jpcoar_data = self.__get_jpcoar_data(pid, record)
root = ET.fromstring(jpcoar_data)
-
- if self.is_empty(root):
- return 'This item has no mapping info.'
+ if self.__is_empty(root):
+ return err_msg
db = BibDatabase()
- # Article
- if self.is_bibtex_type(root,
- self.article_types,
- self.article_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.article_cols_all,
- 'article'))
- # Incollection
- elif self.is_bibtex_type(root,
- self.book_types,
- self.incollection_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.incollection_cols_all,
- 'incollection'))
- # Inbook
- elif self.is_bibtex_type(root,
- self.book_types,
- self.inbook_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.inbook_cols_all,
- 'inbook'))
- # Book
- elif self.is_bibtex_type(root,
- self.book_types,
- self.book_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.book_cols_all,
- 'book'))
- # Booklet
- elif self.is_bibtex_type(root,
- self.book_types,
- self.booklet_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.booklet_cols_all,
- 'booklet'))
- # Inproceedings
- elif self.is_bibtex_type(root,
- self.inproceedings_types,
- self.inproceedings_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.inproceedings_cols_all,
- 'inproceedings'))
- # Techreport
- elif self.is_bibtex_type(root,
- self.techreport_types,
- self.techreport_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.techreport_cols_all,
- 'techreport'))
- # Unpublished
- elif self.is_bibtex_type(root,
- self.unpublished_types,
- self.unpublished_cols_required):
-
- db.entries.append(self.get_bibtex_data(root,
- self.unpublished_cols_all,
- 'unpublished'))
- # Misc
- elif self.is_misc_type(root):
- db.entries.append(self.get_bibtex_data(root,
- self.misc_cols_all,
- 'misc'))
- # Unknown type
- else:
- return 'This item has no mapping info.'
-
+ bibtex_type = self.__get_bibtex_type(root)
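+        # A None result means the record's dc:type has no entry in
+        # type_mapping.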
+
+ if not bibtex_type:
+ current_app.logger.error(
+ "Can not find Bibtex type for record {}".format(
+ record.get('recid')))
+ return err_msg
+ valid, lst_invalid_fields = self.__validate_fields(root, bibtex_type)
+
+ if validate_mode:
+ return valid
+        elif not valid:
+            if lst_invalid_fields:
+ current_app.logger.error(
+ 'Missing required fields [{}] for record {}'.format(
+ ','.join(lst_invalid_fields), record.get('recid')))
+ return err_msg
+
+ db.entries.append(self.__get_bibtex_data(root, bibtex_type))
writer = BibTexWriter()
-
- return writer.write(db)
+ result = writer.write(db)
+ return result
@staticmethod
- def get_jpcoar_data(pid, record):
+ def __get_jpcoar_data(pid, record):
"""Get jpcoar record.
:param pid: The :class:`invenio_pidstore.models.PersistentIdentifier`
@@ -304,7 +481,7 @@ def get_jpcoar_data(pid, record):
return data
- def is_empty(self, root):
+ def __is_empty(self, root):
"""
Determine whether the jpcoar record is empty.
@@ -312,127 +489,228 @@ def is_empty(self, root):
:return:
"""
- elements = root.findall('.//jpcoar:jpcoar', self.ns)
+ elements = root.findall('.//jpcoar:jpcoar', self.__ns)
if len(elements) == 0 or len(list(elements[0])) == 0:
return True
return False
- def is_bibtex_type(self, root, bibtex_types, bibtex_cols_required):
+ def __get_bibtex_type(self, root):
"""
Determine jpcoar record types(except misc).
:return:
"""
+ type_result = None
type_value = ''
- for element in root.findall('.//dc:type', self.ns):
+ for element in root.findall('.//dc:type', self.__ns):
type_value = element.text
-
- if type_value.lower() not in bibtex_types:
- return False
-
- if not self.contains_all(root, bibtex_cols_required.values()):
- return False
-
- return True
-
- def is_misc_type(self, root):
+        # Determine which BibTex type the dc:type value maps to
+ for bib_type, item_types in self.type_mapping.items():
+ if type_value.lower() in item_types:
+ type_result = bib_type
+ break
+ return type_result
+
+ def __validate_fields(self, root, bibtex_type):
+ """Validate required fields of bibtex type.
+
+        @param root: ElementTree root of the jpcoar record
+        @param bibtex_type: BibTexTypes member resolved from dc:type
+        @return: tuple of (valid, list of missing field names)
"""
- Determine jpcoar record type(misc).
-
- :param root:
- :return:
+ def validate_by_att(attribute_name, expected_values):
+ valid_date = False
+ for element in elements:
+ if element.get(attribute_name) and element.get(
+ attribute_name).lower() in expected_values:
+ valid_date = True
+ return valid_date
+
+ def validate_partial_req():
+ result = True
+ for par_req in fields.get('required_partial'):
+ partial_valid = False
+ for field in par_req:
+                    # PAGES is a virtual field represented by the
+                    # pageStart and pageEnd elements
+ if field == BibTexFields.PAGES:
+ start_page = root.findall(self.__find_pattern.format(
+ self.__fields_mapping[BibTexFields.PAGE_START]),
+ self.__ns)
+ end_page = root.findall(
+ self.__find_pattern.format(self.__fields_mapping[BibTexFields.PAGE_END]),
+ self.__ns)
+ if len(start_page) > 0 and len(end_page) > 0:
+ partial_valid = True
+ break
+ else:
+ field_data = root.findall(
+ self.__find_pattern.format(self.__fields_mapping[field]),
+ self.__ns)
+ if len(field_data) > 0:
+ partial_valid = True
+ break
+ if not partial_valid:
+ result = False
+ lst_invalid_fields.append(par_req[0].value)
+ lst_invalid_fields.append(par_req[1].value)
+ return result
+
+ lst_invalid_fields = []
+ identifierType_str = 'identifierType'
+ required_valid = True
+        fields = self.__get_bibtex_type_fields(bibtex_type)
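+        # Every 'required' field must be present; each group in
+        # 'required_partial' needs at least one of its members
+        # (e.g. author or editor for a book).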
+ for item_required in fields.get('required'):
+ elements = root.findall(
+ self.__find_pattern.format(self.__fields_mapping[item_required]),
+ self.__ns)
+ if len(elements) == 0:
+ required_valid = False
+ lst_invalid_fields.append(item_required.value)
+ elif item_required == BibTexFields.YEAR or \
+ item_required == BibTexFields.MONTH:
+ date_valid = validate_by_att('dateType', ['issued'])
+ if not date_valid:
+ lst_invalid_fields.append(item_required.value)
+ required_valid = False
+ elif item_required == BibTexFields.DOI:
+ doi_valid = validate_by_att(identifierType_str, ['doi'])
+ if not doi_valid:
+ lst_invalid_fields.append(item_required.value)
+ required_valid = False
+ elif item_required == BibTexFields.URL:
+ url_valid = validate_by_att(identifierType_str,
+ ['doi', 'hdl', 'uri'])
+ if not url_valid:
+ lst_invalid_fields.append(item_required.value)
+ required_valid = False
+ partial_req_valid = validate_partial_req()
+ return required_valid and partial_req_valid, lst_invalid_fields
+
+ def __combine_all_fields(self, bibtex_type):
+ """Combine all fields of item type.
+
+ @param bibtex_type:
+ @return:
"""
- type_value = ''
- for element in root.findall('.//dc:type', self.ns):
- type_value = element.text
-
- if type_value.lower() in self.misc_types or \
- type_value.lower() in self.article_types or \
- type_value.lower() in self.book_types or \
- type_value.lower() in self.inproceedings_types or \
- type_value.lower() in self.techreport_types or \
- type_value.lower() in self.unpublished_types:
-
- return True
-
- return False
-
- def contains_all(self, root, field_list):
- """
- Determine whether all required items exist.
-
- :param root:
- :param field_list:
- :return:
-
- """
- for field in field_list:
- if len(root.findall('.//' + field, self.ns)) == 0:
- return False
-
- return True
-
- def get_bibtex_data(self, root, bibtex_cols_all={}, entry_type='article'):
+        all_field_type = self.__get_bibtex_type_fields(bibtex_type)
+ all_fields = all_field_type.get(
+ 'required') + all_field_type.get('optional')
+ partial_req = all_field_type.get('required_partial')
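+        # PAGES is a virtual field; replace it with the concrete
+        # page start/end fields before the data is collected.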
+ for item in partial_req:
+ if BibTexFields.PAGES in item:
+ item.remove(BibTexFields.PAGES)
+ item.extend([BibTexFields.PAGE_START,
+ BibTexFields.PAGE_END])
+ all_fields.extend(item)
+ return all_fields
+
+ def __get_bibtex_data(self, root, bibtex_type):
+ """Get Bibtex data base on Bibtex type.
+
+        @param root: ElementTree root of the jpcoar record
+        @param bibtex_type: BibTexTypes member of the record
+        @return: dict of BibTex fields for a BibDatabase entry
"""
- Get bibtex data from jpcoar record.
- :param root:
- :param bibtex_cols_all:
- :param entry_type:
- :return:
+ def process_by_att(att, expected_val, existed_lst):
+ date_type = element.get(att)
+ if date_type and date_type.lower() == expected_val and \
+ element.text not in existed_lst:
+ dates.append(element.text)
+
+ def process_author():
+ author_lang = element.get(xml_ns + 'lang')
+ if not author_lang or author_lang.lower() != 'ja-kana':
+ creator[BibTexFields.AUTHOR.value].append(
+ element.text)
+ else:
+ creator[BibTexFields.YOMI.value].append(
+ element.text)
+
+        def process_url():
+            identifier_type = element.get('identifierType')
+            if identifier_type and \
+                    identifier_type.lower() in self.__lst_identifier_type:
+                lst_identifier_type_data[
+                    identifier_type.lower()].append(element.text)
- """
- # Initialization
data = {}
page_start = ''
page_end = ''
- xml_ns = '{' + self.ns['xml'] + '}'
-
- # Create book record
- for field in bibtex_cols_all.keys():
- elements = root.findall('.//' + bibtex_cols_all[field], self.ns)
+ title = ''
+ xml_ns = '{' + self.__ns['xml'] + '}'
+ and_str = ' and '
+ creator = {BibTexFields.AUTHOR.value: [],
+ BibTexFields.YOMI.value: []}
+ lst_identifier_type_data = {}
+ dois = []
+ all_fields = self.__combine_all_fields(bibtex_type)
+
+ for i in self.__lst_identifier_type:
+ lst_identifier_type_data[i] = []
+
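+        # Walk every mapped field of the entry type and collect the
+        # matching jpcoar element values.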
+ for field in all_fields:
+ elements = root.findall(
+ self.__find_pattern.format(self.__fields_mapping[field]), self.__ns)
if len(elements) != 0:
-
value = ''
dates = []
for element in elements:
- if field == 'date' and (element.get('dateType') is not None
- and element.get('dateType').lower() == 'issued'):
- dates.append(element.text)
- continue
- elif field == 'type' and (element.get('descriptionType') is None
- or element.get('descriptionType').lower() != 'other'):
- continue
- elif field == 'author' and (element.get(xml_ns + 'lang') is None
- or element.get(xml_ns + 'lang').lower() != 'en'):
- continue
-
+ if field == BibTexFields.YEAR or \
+ field == BibTexFields.MONTH:
+ process_by_att('dateType', 'issued', dates)
+ elif field == BibTexFields.AUTHOR:
+ process_author()
+ elif field == BibTexFields.DOI:
+                        process_by_att('identifierType', 'doi', dois)
+ elif field == BibTexFields.URL:
+ process_url()
+ elif field == BibTexFields.TITLE and title == '':
+                        # Keep only the first title found
+ title = element.text
if value != '':
- value += ' and ' if field == 'author' else ', '
+ value += and_str if field == BibTexFields.AUTHOR else ', '
value += element.text
- if field == 'page_start':
+ if field == BibTexFields.PAGE_START:
page_start = value
- elif field == 'page_end':
+ elif field == BibTexFields.PAGE_END:
page_end = value
- elif field == 'date' and len(dates) != 0:
- data['year'], data['month'] = self.get_dates(dates)
+                elif (field == BibTexFields.YEAR
+                        or field == BibTexFields.MONTH) and len(dates) != 0:
+ data[BibTexFields.YEAR.value], data[
+ BibTexFields.MONTH.value] = self.__get_dates(dates)
+ elif field == BibTexFields.AUTHOR:
+ if creator[BibTexFields.AUTHOR.value]:
+ data[field.value] = and_str.join(
+ creator[BibTexFields.AUTHOR.value])
+ if creator[BibTexFields.YOMI.value]:
+ data[BibTexFields.YOMI.value] = and_str.join(
+ creator[BibTexFields.YOMI.value])
+ elif field == BibTexFields.DOI and len(dois) > 0:
+ data[field.value] = ','.join(dois)
+                elif field == BibTexFields.URL \
+                        and any(lst_identifier_type_data.values()):
+ data[field.value] = self.__get_identifier(
+ self.__lst_identifier_type,
+ lst_identifier_type_data)
+ elif field == BibTexFields.TITLE and title != '':
+ data[field.value] = title
elif value != '':
- data[field] = value
+ data[field.value] = value
if page_start != '' and page_end != '':
data['pages'] = str(page_start) + '--' + str(page_end)
- data['ENTRYTYPE'] = entry_type
- data['ID'] = self.get_item_id(root)
+ data['ENTRYTYPE'] = bibtex_type.value
+ data['ID'] = self.__get_item_id(root)
return data
@staticmethod
- def get_item_id(root):
+ def __get_item_id(root):
"""
Get item id from jpcoar record.
@@ -451,7 +729,7 @@ def get_item_id(root):
return item_id
@staticmethod
- def get_dates(dates):
+ def __get_dates(dates):
"""
Get year and month from date.
@@ -470,3 +748,16 @@ def get_dates(dates):
month += date.strftime('%b')
return year, month
+
+ @staticmethod
+ def __get_identifier(identifier_type, identifier_types_data):
+ """Get identifier data.
+
+        @param identifier_type: identifier types in order of preference
+        @param identifier_types_data: identifier values grouped by type
+        @return: the first available identifier value, or None
+        """
+        for id_type in identifier_type:
+            if identifier_types_data.get(id_type):
+                return identifier_types_data.get(id_type)[0]
+        return None
diff --git a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js
index cb8350c88d..b0e304a6fe 100644
--- a/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js
+++ b/modules/weko-search-ui/weko_search_ui/static/js/weko_search_ui/app.js
@@ -1,3 +1,9 @@
+const MESSAGE = {
+ bibtex_err: {
+ en: "Required item is not inputted.",
+ ja: "必須項目がありません。",
+ }
+}
require([
"jquery",
"bootstrap",
@@ -102,6 +108,20 @@ require([
});
});
+function getMessage(messageCode) {
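+  // Resolve the message for the current UI language, falling back to English.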
+ const defaultLanguage = "en";
+ let currentLanguage = document.getElementById("current_language").value;
+ let message = MESSAGE[messageCode];
+ if (message) {
+ if (message[currentLanguage]) {
+ return message[currentLanguage];
+ } else {
+ return message[defaultLanguage];
+ }
+ } else {
+ return "";
+ }
+}
//add controller to invenioSearch
// add by ryuu. at 20181129 start
@@ -116,6 +136,7 @@ function searchResCtrl($scope, $rootScope, $http, $location) {
$rootScope.commInfoIndex = "";
}
+
$rootScope.disable_flg = true;
$rootScope.display_flg = true;
$rootScope.index_id_q = $location.search().q != undefined ? $location.search().q : '';
@@ -277,15 +298,15 @@ function itemExportCtrl($scope, $rootScope, $http, $location) {
// Check if current hits in selected array
- $scope.checkIfAllInArray = function () {
- angular.forEach($scope.vm.invenioSearchResults.hits.hits, function (record) {
+ $scope.checkIfAllInArray = function() {
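+      // True only when every hit on the current page is already selected.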
+      let all_in_array = true;
+ angular.forEach($scope.vm.invenioSearchResults.hits.hits, function(record) {
item_index = $rootScope.item_export_checkboxes.indexOf(record.id);
- if (checkAll && item_index == -1) {
- $rootScope.item_export_checkboxes.push(record.id);
- } else if (!checkAll && item_index >= 0) {
- $rootScope.item_export_checkboxes.splice(item_index, 1);
+ if(item_index == -1) {
+ all_in_array = false;
}
});
+ return all_in_array;
}
$scope.checkAll = function (checkAll) {
@@ -326,6 +347,7 @@ function itemExportCtrl($scope, $rootScope, $http, $location) {
if ($rootScope.item_export_checkboxes.length <= $rootScope.max_export_num) {
records_metadata = $scope.getExportItemsMetadata();
$('#record_ids').val(JSON.stringify($rootScope.item_export_checkboxes));
+ $('#invalid_record_ids').val(JSON.stringify([]));
let export_metadata = {}
$rootScope.item_export_checkboxes.map(function(recid) {
$.each(records_metadata, function (index, value) {
@@ -334,12 +356,49 @@ function itemExportCtrl($scope, $rootScope, $http, $location) {
}
});
})
+          let exportBibtex = document.getElementById("export_format_radio_bibtex").checked;
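+          // For BibTex exports, ask the server which selected records
+          // lack required fields before the form is submitted.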
+ if (exportBibtex) {
+ let invalidBibtexRecordIds = $scope.validateBibtexExport(Object.keys(export_metadata));
+ if (invalidBibtexRecordIds.length > 0) {
+ $('#invalid_record_ids').val(JSON.stringify(invalidBibtexRecordIds));
+ $scope.showErrMsgBibtex(invalidBibtexRecordIds);
+ }
+ }
$('#record_metadata').val(JSON.stringify(export_metadata));
$('#export_items_form').submit(); // Submit form and let controller handle file making
}
$('#item_export_button').attr("disabled", false);
}
+ $scope.validateBibtexExport = function (record_ids) {
+      var request_url = '/items/validate_bibtex_export';
+      var data = { record_ids: record_ids };
+      var invalidRecordIds = [];
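+      // Synchronous request (async: false) so the invalid ids are known
+      // before the export form is submitted.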
+ $.ajax({
+ method: 'POST',
+ url: request_url,
+ data: JSON.stringify(data),
+ async: false,
+ contentType: 'application/json',
+ success: function (data) {
+ if (data.invalid_record_ids.length) {
+ invalidRecordIds = data.invalid_record_ids;
+ }
+ },
+        error: function (jqXHR, textStatus, errorThrown) {
+          console.log(errorThrown);
+ }
+ });
+ return invalidRecordIds;
+ }
+
+ $scope.showErrMsgBibtex = function (invalidRecordIds) {
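+      // The export list template is expected to render an element with
+      // id "bibtex_err_<recid>" next to each record.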
+ var errMsg = getMessage('bibtex_err');
+ invalidRecordIds.forEach(function (recordId) {
+        document.getElementById('bibtex_err_' + recordId).textContent = errMsg;
+ });
+ }
+
$scope.getExportItemsMetadata = function () {
let cur_url = new URL(window.location.href);
let q = cur_url.searchParams.get("q");
|