Skip to content

Commit

Permalink
Merge branch '#343_suggest_change_and_log' into #341_grscicoll_permissions
Browse files Browse the repository at this point in the history
  • Loading branch information
marcos-lg committed May 27, 2021
2 parents d7bd774 + 59a6290 commit 62dfcf2
Show file tree
Hide file tree
Showing 9 changed files with 186 additions and 13 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,8 +84,6 @@
import java.net.URI;
import java.util.UUID;

import org.apache.ibatis.type.LocalDateTimeTypeHandler;
import org.apache.ibatis.type.LocalDateTypeHandler;
import org.mybatis.spring.boot.autoconfigure.ConfigurationCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ public abstract class BaseChangeSuggestionService<
"modifiedBy",
"created",
"modified",
"deleted",
"key",
"convertedToCollection"));

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
package org.gbif.registry.service.collections.suggestions;

import org.gbif.api.model.collections.Collection;
import org.gbif.api.model.collections.CollectionEntityType;
import org.gbif.api.model.collections.suggestions.CollectionChangeSuggestion;
import org.gbif.api.service.collections.CollectionService;
import org.gbif.registry.events.EventManager;
Expand Down Expand Up @@ -48,7 +49,13 @@ public CollectionChangeSuggestionService(

@Override
public CollectionChangeSuggestion getChangeSuggestion(int key) {
  // Fetch the raw suggestion row; the mapper returns null when the key is unknown.
  ChangeSuggestionDto dto = changeSuggestionMapper.get(key);

  // Guard against a missing row (avoids an NPE on getEntityType) and against
  // suggestions that belong to another entity type — this service only handles
  // COLLECTION suggestions, mirroring the INSTITUTION check in its sibling service.
  if (dto == null || dto.getEntityType() != CollectionEntityType.COLLECTION) {
    return null;
  }

  return dtoToChangeSuggestion(dto);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.gbif.registry.service.collections.suggestions;

import org.gbif.api.model.collections.CollectionEntityType;
import org.gbif.api.model.collections.Institution;
import org.gbif.api.model.collections.suggestions.InstitutionChangeSuggestion;
import org.gbif.api.model.collections.suggestions.Type;
Expand Down Expand Up @@ -80,6 +81,10 @@ protected ChangeSuggestionDto createConvertToCollectionSuggestionDto(
public InstitutionChangeSuggestion getChangeSuggestion(int key) {
ChangeSuggestionDto dto = changeSuggestionMapper.get(key);

if (dto.getEntityType() != CollectionEntityType.INSTITUTION) {
return null;
}

InstitutionChangeSuggestion suggestion = dtoToChangeSuggestion(dto);
suggestion.setInstitutionForConvertedCollection(dto.getInstitutionConvertedCollection());
suggestion.setNameForNewInstitutionForConvertedCollection(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,25 @@
/*
* Copyright 2020 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.registry.ws.export;

import org.gbif.api.model.common.DOI;
import org.gbif.api.model.common.export.ExportFormat;
import org.gbif.api.model.occurrence.Download;
import org.gbif.api.model.occurrence.DownloadStatistics;
import org.gbif.api.model.registry.DatasetOccurrenceDownloadUsage;
import org.gbif.api.model.registry.search.DatasetSearchResult;
import org.gbif.api.vocabulary.Country;
import org.gbif.api.vocabulary.DatasetSubtype;
Expand All @@ -14,9 +32,6 @@
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import lombok.Builder;
import lombok.Data;
import lombok.SneakyThrows;
import org.supercsv.cellprocessor.Optional;
import org.supercsv.cellprocessor.ParseEnum;
import org.supercsv.cellprocessor.ParseInt;
Expand All @@ -27,6 +42,10 @@
import org.supercsv.prefs.CsvPreference;
import org.supercsv.util.CsvContext;

import lombok.Builder;
import lombok.Data;
import lombok.SneakyThrows;

@Data
@Builder
public class CsvWriter<T> {
Expand Down Expand Up @@ -88,7 +107,7 @@ public static CsvWriter<DownloadStatistics> downloadStatisticsCsvWriter(Iterable
* Creates an CsvWriter/exporter of DatasetSearchResult.
*/
public static CsvWriter<DatasetSearchResult> datasetSearchResultCsvWriter(Iterable<DatasetSearchResult> pager,
ExportFormat preference) {
ExportFormat preference) {
return CsvWriter.<DatasetSearchResult>builder()
.fields(new String[]{"key", "title", "doi", "license", "type", "subType", "hostingOrganizationKey", "hostingOrganizationTitle", "hostingCountry", "publishingOrganizationKey", "publishingOrganizationTitle", "publishingCountry","endorsingNodeKey", "networkKeys", "projectIdentifier", "recordCount", "nameUsagesCount"})
.header(new String[]{"dataset_key", "title", "doi", "license", "type", "sub_type", "hosting_organization_Key", "hosting_organization_title", "hosting_country","publishing_organization_key", "publishing_organization_title", "publishing_country", "endorsing_node_key", "network_keys", "project_identifier", "occurrence_records_count", "name_usages_count"})
Expand Down Expand Up @@ -116,6 +135,28 @@ public static CsvWriter<DatasetSearchResult> datasetSearchResultCsvWriter(Iterab
.build();
}

/**
 * Creates a CsvWriter/exporter of DatasetOccurrenceDownloadUsage.
 *
 * @param pager source of usages to export, typically a paging iterable
 * @param preference output format (CSV/TSV) applied by the writer
 * @return a configured CsvWriter for download-usage rows
 */
public static CsvWriter<DatasetOccurrenceDownloadUsage> datasetOccurrenceDownloadUsageCsvWriter(Iterable<DatasetOccurrenceDownloadUsage> pager,
                                                                                                ExportFormat preference) {
  // Exported bean properties and their CSV column names, index-aligned.
  String[] fields = {"download", "downloadKey", "datasetDOI", "datasetKey", "datasetTitle", "datasetCitation", "numberRecords"};
  String[] header = {"download_doi", "download_key", "dataset_doi", "dataset_key", "dataset_title", "dataset_citation", "number_records"};
  // One cell processor per column; a null entry writes the value unprocessed.
  CellProcessor[] processors = {
      new DownloadDOIProcessor(),   // download.doi
      null,                         // downloadKey
      new DOIProcessor(),           // datasetDOI
      new UUIDProcessor(),          // datasetKey
      new CleanStringProcessor(),   // datasetTitle
      new CleanStringProcessor(),   // datasetCitation
      new Optional(new ParseLong()) // numberRecords
  };
  return CsvWriter.<DatasetOccurrenceDownloadUsage>builder()
      .fields(fields)
      .header(header)
      .processors(processors)
      .preference(preference)
      .pager(pager)
      .build();
}

/**
* Null aware UUID processor.
*/
Expand Down Expand Up @@ -147,6 +188,21 @@ public String execute(Object value, CsvContext csvContext) {
}
}

/**
 * Null-aware processor that renders the DOI of a {@code Download} cell value.
 * Emits an empty string when the cell value or the download's DOI is null.
 * (The previous Javadoc, "Null aware UUID processor", was a copy-paste error.)
 */
private static class DownloadDOIProcessor implements CellProcessor {

  // Maps a Download to its DOI string, tolerating a null DOI.
  private static String getDoiValue(Download download) {
    return java.util.Optional.ofNullable(download.getDoi()).map(DOI::toString).orElse("");
  }

  @Override
  public String execute(Object value, CsvContext csvContext) {
    // value is the "download" bean property; null cells become empty strings.
    return value != null ? getDoiValue((Download)value) : "";
  }
}


/**
* Null aware Country processor.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,22 +15,33 @@
*/
package org.gbif.registry.ws.resources;

import org.gbif.api.model.common.export.ExportFormat;
import org.gbif.api.model.common.paging.Pageable;
import org.gbif.api.model.common.paging.PagingResponse;
import org.gbif.api.model.registry.DatasetOccurrenceDownloadUsage;
import org.gbif.api.service.registry.DatasetOccurrenceDownloadUsageService;
import org.gbif.api.util.iterables.Iterables;
import org.gbif.registry.persistence.mapper.DatasetOccurrenceDownloadMapper;
import org.gbif.registry.ws.export.CsvWriter;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.List;
import java.util.UUID;

import javax.servlet.http.HttpServletResponse;

import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import static org.gbif.registry.security.util.DownloadSecurityUtils.clearSensitiveData;
Expand All @@ -42,6 +53,13 @@
public class DatasetOccurrenceDownloadUsageResource
implements DatasetOccurrenceDownloadUsageService {

//Page size to iterate over download stats export service
private static final int EXPORT_LIMIT = 5_000;

//Export header prefix
private static final String FILE_HEADER_PRE = "attachment; filename=datasets_download_usage.";


private final DatasetOccurrenceDownloadMapper datasetOccurrenceDownloadMapper;

public DatasetOccurrenceDownloadUsageResource(
Expand All @@ -60,4 +78,21 @@ public PagingResponse<DatasetOccurrenceDownloadUsage> listByDataset(
return new PagingResponse<>(
page, (long) datasetOccurrenceDownloadMapper.countByDataset(datasetKey), usages);
}

/**
 * Streams every occurrence download usage of the given dataset to the HTTP
 * response as a downloadable CSV/TSV attachment.
 *
 * @param response servlet response the export is written to
 * @param datasetKey key of the dataset whose download usages are exported
 * @param format export format; defaults to TSV when not supplied
 * @throws IOException if writing to the response output stream fails
 */
@GetMapping("{datasetKey}/export")
public void exportListByDataset(
HttpServletResponse response,
@PathVariable UUID datasetKey,
@RequestParam(value = "format", defaultValue = "TSV") ExportFormat format) throws IOException {

// Advertise the payload as an attachment named after the chosen format.
// NOTE(review): toLowerCase() is locale-sensitive; consider toLowerCase(Locale.ROOT).
response.setHeader(HttpHeaders.CONTENT_DISPOSITION, FILE_HEADER_PRE + format.name().toLowerCase());

// Iterables.datasetOccurrenceDownloadUsages pages through this service's own
// listByDataset in EXPORT_LIMIT-sized chunks; try-with-resources flushes/closes.
try (Writer writer = new BufferedWriter(new OutputStreamWriter(response.getOutputStream()))) {
CsvWriter.datasetOccurrenceDownloadUsageCsvWriter(Iterables.datasetOccurrenceDownloadUsages(this,
datasetKey,
EXPORT_LIMIT),
format)
.export(writer);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,9 @@ public class DatasetResource extends BaseNetworkEntityResource<Dataset>
//Page size to iterate over search export service
private static final int SEARCH_EXPORT_LIMIT = 300;

//Search export file header
private static final String EXPORT_FILE_PRE = "attachment; filename=gbif_datasets.";

private final RegistryDatasetService registryDatasetService;
private final DatasetSearchService searchService;
private final MetadataMapper metadataMapper;
Expand Down Expand Up @@ -203,13 +206,13 @@ public void search(HttpServletResponse response,
@RequestParam(value = "format", defaultValue = "TSV") ExportFormat format,
DatasetSearchRequest searchRequest) throws IOException {

String headerValue = "attachment; filename=gbif_datasets." + format.name().toLowerCase();
response.setHeader(HttpHeaders.CONTENT_DISPOSITION, headerValue);
response.setHeader(HttpHeaders.CONTENT_DISPOSITION, EXPORT_FILE_PRE + format.name().toLowerCase());

try (Writer writer = new BufferedWriter(new OutputStreamWriter(response.getOutputStream()))) {
CsvWriter.datasetSearchResultCsvWriter(Iterables.datasetSearchResults(searchRequest,
searchService,
SEARCH_EXPORT_LIMIT), format)
searchService,
SEARCH_EXPORT_LIMIT),
format)
.export(writer);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,8 @@ public class OccurrenceDownloadResource implements OccurrenceDownloadService {

//Page size to iterate over download stats export service
private static final int STATS_EXPORT_LIMIT = 7_500;
//Download stats file header
private static final String EXPORT_FILE_HEADER_PRE = "attachment; filename=download_statistics.";

private static final Logger LOG = LoggerFactory.getLogger(OccurrenceDownloadResource.class);

Expand Down Expand Up @@ -392,8 +394,7 @@ public void getDownloadStatistics(
@RequestParam(value = "publishingOrgKey", required = false) UUID publishingOrgKey) throws
IOException {

String headerValue = "attachment; filename=download_statistics." + format.name().toLowerCase();
response.setHeader(HttpHeaders.CONTENT_DISPOSITION, headerValue);
response.setHeader(HttpHeaders.CONTENT_DISPOSITION, EXPORT_FILE_HEADER_PRE + format.name().toLowerCase());

try (Writer writer = new BufferedWriter(new OutputStreamWriter(response.getOutputStream()))) {
CsvWriter.downloadStatisticsCsvWriter(Iterables.downloadStatistics(this,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,25 @@
/*
* Copyright 2020 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.registry.ws.export;

import org.gbif.api.model.common.DOI;
import org.gbif.api.model.common.export.ExportFormat;
import org.gbif.api.model.occurrence.Download;
import org.gbif.api.model.occurrence.DownloadStatistics;
import org.gbif.api.model.registry.DatasetOccurrenceDownloadUsage;
import org.gbif.api.model.registry.search.DatasetSearchResult;
import org.gbif.api.vocabulary.Country;
import org.gbif.api.vocabulary.DatasetSubtype;
Expand Down Expand Up @@ -150,4 +167,54 @@ public void datasetSearchTest() {

assertExport(datasets, writer, csvWriter, this::assertDatasetSearchResult);
}

/**
 * Asserts that one DatasetOccurrenceDownloadUsage matches its exported CSV line,
 * column by column, in the writer's declared order.
 *
 * @param downloadUsage expected source bean
 * @param line parsed columns of the exported row
 */
private void assertDatasetOccurrenceDownloadUsage(DatasetOccurrenceDownloadUsage downloadUsage, String[] line) {
//"download", "downloadKey", "datasetDOI", "datasetKey", "datasetTitle", "datasetCitation", "numberRecords"
assertEquals(downloadUsage.getDownload().getDoi().toString(), line[0]);
assertEquals(downloadUsage.getDownloadKey(), line[1]);
assertEquals(downloadUsage.getDatasetDOI().toString(), line[2]);
assertEquals(downloadUsage.getDatasetKey().toString(), line[3]);
assertEquals(downloadUsage.getDatasetTitle(), line[4]);
assertEquals(downloadUsage.getDatasetCitation(), line[5]);
// The final column may carry a trailing '\r' when the writer emits CRLF
// line endings, so strip it before parsing the record count.
assertEquals(downloadUsage.getNumberRecords(), Long.parseLong(line[6].replace("\r","")));
}

/**
 * Builds a test DatasetOccurrenceDownloadUsage whose DOI, title, citation and
 * record count all embed the given sequence number, linked to the given download.
 */
private static DatasetOccurrenceDownloadUsage newDatasetOccurrenceDownloadUsageTest(int consecutive, Download download) {
  DatasetOccurrenceDownloadUsage usage = new DatasetOccurrenceDownloadUsage();

  // Link the usage to its parent download first.
  usage.setDownload(download);
  usage.setDownloadKey(download.getKey());

  // Dataset-side values derived from the sequence number.
  usage.setDatasetKey(UUID.randomUUID());
  usage.setDatasetDOI(new DOI("10.21373/6m9yw" + consecutive));
  usage.setDatasetTitle("UsageTitle" + consecutive);
  usage.setDatasetCitation("Citation" + consecutive);
  usage.setNumberRecords(consecutive);

  return usage;
}

@Test
public void datasetOccurrenceDownloadUsageTest() {

  // Shared download referenced by every usage fixture.
  Download download = new Download();
  download.setKey("0220580-200613084148143");
  download.setDoi(new DOI("10.21373/6m9yw0"));

  List<DatasetOccurrenceDownloadUsage> usages =
      Arrays.asList(
          newDatasetOccurrenceDownloadUsageTest(1, download),
          newDatasetOccurrenceDownloadUsageTest(2, download));

  // Export to an in-memory buffer, then verify each line against its source bean.
  StringWriter out = new StringWriter();
  CsvWriter<DatasetOccurrenceDownloadUsage> exporter =
      CsvWriter.datasetOccurrenceDownloadUsageCsvWriter(usages, ExportFormat.CSV);
  exporter.export(out);

  assertExport(usages, out, exporter, this::assertDatasetOccurrenceDownloadUsage);
}
}

0 comments on commit 62dfcf2

Please sign in to comment.