Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
3b44dc2
file descriptions were not searchable #3745
pdurbin May 8, 2014
8ba43d1
text_en and copyField for variable name and label #3945
pdurbin May 8, 2014
f9d8cdc
Update "Add Data" Button permissions for user page
sekmiller May 8, 2014
3b0cc33
Update appendix.rst
May 8, 2014
41a0a4d
Simplify the logic for Add Data button
sekmiller May 8, 2014
9b8cefb
Modified confirmDialog message by removing "Unfortunately" from both …
xyang02 May 8, 2014
b09a52b
Modified confirmDialog message by removing "Unfortunately" from both …
xyang02 May 8, 2014
9ff2421
Merge origin/master
May 8, 2014
17a16cb
A fix for CSV files not being recognized as tabular data when uploade…
landreev May 8, 2014
ff4cd45
added variable name and label to advanced search #3945
pdurbin May 8, 2014
3981e65
added filetype_en to catchall #3745
pdurbin May 8, 2014
9fb572b
temporary: allow users to create in dataverse with aliases that end i…
scolapasta May 8, 2014
d218518
Change to the thumbnail converter - will enforce the size of the thum…
landreev May 8, 2014
3caa9c1
highlight on the file type #3745
pdurbin May 8, 2014
187e7e9
Applied fixed width and height to popovers to fix layout issue in Chr…
May 8, 2014
d682a4e
Merge origin/master
May 8, 2014
b8f9d28
typo: s/variable_lable_en/variable_label_en/
pdurbin May 8, 2014
a240dce
Add javascript rebinding for file upload and then cancel. Bug #3942
raprasad May 8, 2014
ebc5163
don't limit deletion to 10 file solr docs #3795
pdurbin May 8, 2014
321f05a
Changed the async page refresh to use "displayVersion", instead of th…
landreev May 8, 2014
e0cb254
Applied the Dataverse brand color to the logo icon, and Beta label. A…
May 8, 2014
3f9beaa
Merge origin/master
May 8, 2014
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion conf/solr/4.6.0/schema.xml
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,10 @@
<field name="description" type="text_en" multiValued="false" stored="true" indexed="true"/>
<copyField source="description" dest="text" maxChars="3000"/>

<!-- Added for Dataverse 4.0 Beta: make variable names and labels searchable in basic search https://redmine.hmdc.harvard.edu/issues/3945 -->
<copyField source="variable_name_en" dest="text" maxChars="3000"/>
<copyField source="variable_label_en" dest="text" maxChars="3000"/>

<!-- Added for Dataverse 4.0 alpha 1 from output of http://localhost:8080/api/config/solr/schema -->
<field name="accessToSources" type="text_en" multiValued="false" stored="true" indexed="true"/>
<field name="actionsToMinimizeLoss" type="text_en" multiValued="false" stored="true" indexed="true"/>
Expand Down Expand Up @@ -412,7 +416,8 @@
<!-- <copyField source="cat" dest="text"/> -->
<!-- Dataverse 4.0: we want the "name" field in the "catchall" -->
<copyField source="name" dest="text"/>
<!-- Dataverse 4.0: we want the "filename_without_extension_en" field in the "catchall" per https://redmine.hmdc.harvard.edu/issues/3848 -->
<!-- Dataverse 4.0: we want the "filetype_en" and "filename_without_extension_en" fields in the "catchall" per https://redmine.hmdc.harvard.edu/issues/3848 -->
<copyField source="filetype_en" dest="text"/>
<copyField source="filename_without_extension_en" dest="text"/>
<!-- <copyField source="manu" dest="text"/> -->
<!-- <copyField source="features" dest="text"/> -->
Expand Down
2 changes: 1 addition & 1 deletion doc/Sphinx/source/User/appendix.rst
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ currently proposed metadata fields for 4.0 (per metadata block):
- `Citation Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodDBaYTFPakhGaEpoa3hqZUJTOWZtclE&usp=sharing>`__ (compliant with `DDI 2.5 <http://www.ddialliance.org/>`__ and `DataCite 3.0 <http://schema.datacite.org/meta/kernel-3/index.html>`__)
- `Social Science & Humanities Metadata (DDI 2.5 compliant) <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodEppcTFHT1NnajNLV0tacE10NEdmUnc&usp=sharing>`__
- `Astronomy and Astrophysics Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodEp4Qmp0QURkUWo1S0t4X3hia0FnZUE&usp=sharing>`__
(based on `Virtual Observatory (VO) Discovery and Provenance Metadata <http://www.wf4ever-project.org/wiki/download/attachments/1179927/DPmetadata.pdf?version=1&modificationDate=1337186963000>`__)
: These metadata elements can be mapped/exported to the International Virtual Observatory Alliance’s (IVOA) Resource Metadata for the Virtual Observatory (VOResource Schema format) and based on `Virtual Observatory (VO) Discovery and Provenance Metadata <http://www.wf4ever-project.org/wiki/download/attachments/1179927/DPmetadata.pdf?version=1&modificationDate=1337186963000>`__
- `Biomedical Metadata <https://docs.google.com/spreadsheet/ccc?key=0AjeLxEN77UZodExsRTB2SEpVWWd1Qmx6M09HSkExd3c&usp=sharing>`__
(based on `ISA-Tab <http://isatab.sourceforge.net/format.html>`__ and `Stem Cell Commons <http://stemcellcommons.org/>`__)

Expand Down
Binary file added scripts/search/data/tabular/50by1000.dta
Binary file not shown.
26 changes: 26 additions & 0 deletions src/main/java/edu/harvard/iq/dataverse/AdvancedSearchPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,8 @@ public class AdvancedSearchPage {
private String fileFieldName;
private String fileFieldDescription;
private String fileFieldFiletype;
private String fileFieldVariableName;
private String fileFieldVariableLabel;

public void init() {
/**
Expand Down Expand Up @@ -115,6 +117,14 @@ private String constructFileQuery() {
queryStrings.add(constructQuery(SearchFields.FILE_TYPE_SEARCHABLE, fileFieldFiletype));
}

if (!fileFieldVariableName.isEmpty()) {
queryStrings.add(constructQuery(SearchFields.VARIABLE_NAME, fileFieldVariableName));
}

if (!fileFieldVariableLabel.isEmpty()) {
queryStrings.add(constructQuery(SearchFields.VARIABLE_LABEL, fileFieldVariableLabel));
}

return constructQuery(queryStrings, true);
}

Expand Down Expand Up @@ -256,4 +266,20 @@ public void setFileFieldFiletype(String fileFieldFiletype) {
this.fileFieldFiletype = fileFieldFiletype;
}

public String getFileFieldVariableName() {
return fileFieldVariableName;
}

public void setFileFieldVariableName(String fileFieldVariableName) {
this.fileFieldVariableName = fileFieldVariableName;
}

public String getFileFieldVariableLabel() {
return fileFieldVariableLabel;
}

public void setFileFieldVariableLabel(String fileFieldVariableLabel) {
this.fileFieldVariableLabel = fileFieldVariableLabel;
}

}
2 changes: 1 addition & 1 deletion src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
Original file line number Diff line number Diff line change
Expand Up @@ -322,7 +322,7 @@ public void refresh(ActionEvent e) {
// .getLatestVersion().getFileMetadatas() - because that's how the page is
// accessing them. -- L.A.)
//for (DataFile dataFile : dataset.getFiles()) {
for (FileMetadata fileMetadata : dataset.getLatestVersion().getFileMetadatas()) {
for (FileMetadata fileMetadata : getDisplayVersion().getFileMetadatas()) {
DataFile dataFile = fileMetadata.getDataFile();
// and see if any are marked as "ingest-in-progress":
if (dataFile.isIngestInProgress()) {
Expand Down
29 changes: 7 additions & 22 deletions src/main/java/edu/harvard/iq/dataverse/IndexServiceBean.java
Original file line number Diff line number Diff line change
Expand Up @@ -714,6 +714,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) {
datafileSolrInputDocument.addField(SearchFields.FILE_TYPE, FileUtil.getFacetFileType(fileMetadata.getDataFile()));
datafileSolrInputDocument.addField(SearchFields.FILE_TYPE_SEARCHABLE, FileUtil.getFacetFileType(fileMetadata.getDataFile()));
datafileSolrInputDocument.addField(SearchFields.DESCRIPTION, fileMetadata.getDescription());
datafileSolrInputDocument.addField(SearchFields.FILE_DESCRIPTION, fileMetadata.getDescription());
datafileSolrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
// datafileSolrInputDocument.addField(SearchFields.HOST_DATAVERSE, dataFile.getOwner().getOwner().getName());
// datafileSolrInputDocument.addField(SearchFields.PARENT_NAME, dataFile.getDataset().getTitle());
Expand All @@ -727,11 +728,8 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) {

if (fileMetadata.getDataFile().isTabularData()) {
List<DataVariable> variables = fileMetadata.getDataFile().getDataTable().getDataVariables();
String variableNamesToIndex = null;
String variableLabelsToIndex = null;
for (DataVariable var : variables) {
// Hard-coded search fields, for now:
// TODO: immediately: define these as constants in SearchFields;
// TODO: eventually: review, decide how datavariables should
// be handled for indexing purposes. (should it be a fixed
// setup, defined in the code? should it be flexible? unlikely
Expand All @@ -741,28 +739,12 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset) {
// anyway -- needs to be reviewed. -- L.A. 4.0alpha1

if (var.getName() != null && !var.getName().equals("")) {
if (variableNamesToIndex == null) {
variableNamesToIndex = var.getName();
} else {
variableNamesToIndex = variableNamesToIndex + " " + var.getName();
}
datafileSolrInputDocument.addField(SearchFields.VARIABLE_NAME, var.getName());
}
if (var.getLabel() != null && !var.getLabel().equals("")) {
if (variableLabelsToIndex == null) {
variableLabelsToIndex = var.getLabel();
} else {
variableLabelsToIndex = variableLabelsToIndex + " " + var.getLabel();
}
datafileSolrInputDocument.addField(SearchFields.VARIABLE_LABEL, var.getLabel());
}
}
if (variableNamesToIndex != null) {
logger.info("indexing " + variableNamesToIndex.length() + " bytes");
datafileSolrInputDocument.addField("varname_s", variableNamesToIndex);
}
if (variableLabelsToIndex != null) {
logger.info("indexing " + variableLabelsToIndex.length() + " bytes");
datafileSolrInputDocument.addField("varlabel_s", variableLabelsToIndex);
}
}

docs.add(datafileSolrInputDocument);
Expand Down Expand Up @@ -991,7 +973,8 @@ private List<String> findSolrDocIdsForDraftFilesToDelete(Dataset datasetWithDraf
Long datasetId = datasetWithDraftFilesToDelete.getId();
SolrServer solrServer = new HttpSolrServer("http://localhost:8983/solr");
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery("parentid:" + datasetId);
solrQuery.setRows(Integer.MAX_VALUE);
solrQuery.setQuery(SearchFields.PARENT_ID + ":" + datasetId);
/**
* @todo rather than hard coding "_draft" here, tie to
* IndexableDataset(new DatasetVersion()).getDatasetState().getSuffix()
Expand All @@ -1000,6 +983,8 @@ private List<String> findSolrDocIdsForDraftFilesToDelete(Dataset datasetWithDraf
solrQuery.addFilterQuery(SearchFields.ID + ":" + "*_draft");
List<String> solrIdsOfFilesToDelete = new ArrayList<>();
try {
// i.e. rows=2147483647&q=parentid%3A16&fq=id%3A*_draft
logger.info("passing this Solr query to find draft files to delete: " + solrQuery);
QueryResponse queryResponse = solrServer.query(solrQuery);
SolrDocumentList results = queryResponse.getResults();
for (SolrDocument solrDocument : results) {
Expand Down
Loading