Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,29 @@ public HttpResponse<Response<List<DatasetMetadata>>> getDatasets(

}

/**
 * Returns observation level names recommended for use as new sub-entity dataset
 * names in the given experiment (i.e. level names not already used as dataset
 * names within it).
 *
 * @param programId    id of the program that owns the experiment
 * @param experimentId id of the experiment (trial) to get recommendations for
 * @return 200 with the list of names; 404 if the program or experiment does not
 *         exist; 500 on any unexpected failure
 */
@Get("/${micronaut.bi.api.version}/programs/{programId}/experiments/{experimentId}/recommended-sub-entity-dataset-names")
@ProgramSecured(roleGroups = {ProgramSecuredRoleGroup.PROGRAM_SCOPED_ROLES})
@Produces(MediaType.APPLICATION_JSON)
public HttpResponse<Response<List<String>>> getRecommendedSubEntityDatasetNames(
        @PathVariable("programId") UUID programId,
        @PathVariable("experimentId") UUID experimentId) {
    try {
        Optional<Program> programOptional = programService.getById(programId);
        if (programOptional.isEmpty()) {
            return HttpResponse.status(HttpStatus.NOT_FOUND, "Program does not exist");
        }

        Response<List<String>> response = new Response<>(experimentService.getRecommendedSubEntityDatasetNames(programOptional.get(), experimentId));
        return HttpResponse.ok(response);
    } catch (DoesNotExistException e) {
        // Expected when the experiment id is unknown; log at info, with context
        // so the bare exception message is attributable in the logs.
        log.info("Recommended sub-entity dataset names not found: {}", e.getMessage());
        return HttpResponse.status(HttpStatus.NOT_FOUND, e.getMessage());
    } catch (Exception e) {
        log.error("Error finding recommended sub-entity dataset names", e);
        return HttpResponse.status(HttpStatus.INTERNAL_SERVER_ERROR, "Error finding recommended sub-entity dataset names");
    }
}

/**
* Adds a record to the experiment_program_user_role table
* @param programId The UUID of the program
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,25 +18,28 @@
package org.breedinginsight.brapi.v2.dao;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import io.micronaut.http.HttpResponse;
import io.micronaut.http.HttpStatus;
import lombok.extern.slf4j.Slf4j;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.Request;
import okhttp3.RequestBody;
import org.apache.commons.lang3.StringUtils;
import org.brapi.client.v2.JSON;
import org.brapi.client.v2.model.exceptions.ApiException;
import org.breedinginsight.model.DatasetLevel;
import org.breedinginsight.model.Program;
import org.breedinginsight.utilities.BrAPIDAOUtil;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.List;

@Slf4j
@Singleton
Expand Down Expand Up @@ -96,4 +99,55 @@ public void deleteObservationLevelName(Program program, String levelDbId) {
}
}

/**
 * Fetches all observation level names from the program's BrAPI service, following
 * server-side pagination until every page has been read.
 *
 * @param program     program whose BrAPI base URL is used for the request
 * @param programDbId optional BrAPI programDbId filter; ignored when blank
 * @return all level names found (possibly empty, never null)
 * @throws ApiException if the underlying BrAPI call fails
 */
public List<String> getObservationLevelNames(Program program, String programDbId) throws ApiException {
    List<String> levelNames = new ArrayList<>();
    String baseUrl = brAPIDAOUtil.getProgramBrAPIBaseUrl(program.getId());
    HttpUrl base = HttpUrl.parse(baseUrl);
    if (base == null) {
        // HttpUrl.parse returns null for malformed URLs; fail fast with context
        // instead of an uninformative NPE at newBuilder().
        throw new IllegalStateException("Invalid BrAPI base URL for program " + program.getId() + ": " + baseUrl);
    }
    int currentPage = 0;
    int totalPages = 1;

    do {
        HttpUrl.Builder urlBuilder = base.newBuilder()
                .addPathSegment("observationlevelnames")
                .addQueryParameter("page", Integer.toString(currentPage))
                .addQueryParameter("pageSize", "1000");
        if (StringUtils.isNotBlank(programDbId)) {
            urlBuilder.addQueryParameter("programDbId", programDbId);
        }

        Request request = new Request.Builder()
                .url(urlBuilder.build())
                .get()
                .addHeader("Content-Type", "application/json")
                .build();

        String responseBody = brAPIDAOUtil.makeCallWithResponse(request);
        if (StringUtils.isBlank(responseBody)) {
            // Nothing (more) to read; return whatever has been collected so far.
            return levelNames;
        }

        JsonObject responseJson = JsonParser.parseString(responseBody).getAsJsonObject();
        collectLevelNames(responseJson, levelNames);
        totalPages = extractTotalPages(responseJson, currentPage);
        currentPage++;
    } while (currentPage < totalPages);

    return levelNames;
}

/** Adds every non-null {@code levelName} found under result.data of the response to {@code levelNames}. */
private static void collectLevelNames(JsonObject responseJson, List<String> levelNames) {
    JsonObject resultJson = responseJson.getAsJsonObject("result");
    if (resultJson == null) {
        return;
    }
    JsonArray data = resultJson.getAsJsonArray("data");
    if (data == null) {
        return;
    }
    for (JsonElement level : data) {
        if (level.isJsonObject()) {
            JsonElement levelName = level.getAsJsonObject().get("levelName");
            if (levelName != null && !levelName.isJsonNull()) {
                levelNames.add(levelName.getAsString());
            }
        }
    }
}

/**
 * Reads metadata.pagination.totalPages from the response. When the server omits it,
 * assumes the current page is the last one ({@code currentPage + 1} makes the caller's
 * do-while loop stop after this iteration).
 */
private static int extractTotalPages(JsonObject responseJson, int currentPage) {
    JsonObject metadata = responseJson.getAsJsonObject("metadata");
    JsonObject pagination = metadata != null ? metadata.getAsJsonObject("pagination") : null;
    if (pagination != null && pagination.has("totalPages")) {
        return pagination.get("totalPages").getAsInt();
    }
    return currentPage + 1;
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,8 @@
import org.breedinginsight.model.DownloadFile;
import org.breedinginsight.model.Program;
import org.breedinginsight.model.*;
import org.breedinginsight.model.delta.DeltaEntityFactory;
import org.breedinginsight.model.delta.Experiment;
import org.breedinginsight.services.TraitService;
import org.breedinginsight.services.exceptions.AlreadyExistsException;
import org.breedinginsight.services.exceptions.DoesNotExistException;
Expand Down Expand Up @@ -81,6 +83,7 @@ public class BrAPITrialService {
private final DistributedLockService lockService;
private static final String SHEET_NAME = "Data";
private final DatasetService datasetService;
private final DeltaEntityFactory deltaEntityFactory;

@Inject
public BrAPITrialService(@Property(name = "brapi.server.reference-source") String referenceSource,
Expand All @@ -96,7 +99,8 @@ public BrAPITrialService(@Property(name = "brapi.server.reference-source") Strin
BrAPIGermplasmDAO germplasmDAO,
FileMappingUtil fileMappingUtil,
DistributedLockService lockService,
DatasetService datasetService) {
DatasetService datasetService,
DeltaEntityFactory deltaEntityFactory) {

this.referenceSource = referenceSource;
this.trialDAO = trialDAO;
Expand All @@ -112,6 +116,7 @@ public BrAPITrialService(@Property(name = "brapi.server.reference-source") Strin
this.fileMappingUtil = fileMappingUtil;
this.lockService = lockService;
this.datasetService = datasetService;
this.deltaEntityFactory = deltaEntityFactory;
}

public List<BrAPITrial> getExperiments(UUID programId) throws ApiException, DoesNotExistException {
Expand Down Expand Up @@ -429,6 +434,34 @@ public List<DatasetMetadata> getDatasetsMetadata(Program program, UUID experimen
return datasets;
}

/**
 * Recommends names for new sub-entity datasets of an experiment: every observation
 * level name known to the program's BrAPI service that is not already the name of a
 * dataset in the experiment. Blank names are dropped; the result is de-duplicated
 * and sorted.
 *
 * @param program      program that owns the experiment
 * @param experimentId id of the experiment (trial) to recommend names for
 * @return sorted, distinct level names not yet used as dataset names in the experiment
 * @throws DoesNotExistException if no trial with {@code experimentId} exists in the program
 * @throws ApiException          if the BrAPI observation level lookup fails
 */
public List<String> getRecommendedSubEntityDatasetNames(Program program, UUID experimentId) throws DoesNotExistException, ApiException {
    BrAPITrial experiment = trialDAO.getTrialById(program.getId(), experimentId).orElseThrow(() -> new DoesNotExistException("Trial does not exist"));
    Experiment deltaExperiment = deltaEntityFactory.makeExperimentBean(experiment);
    // Set to eliminate possible duplicates, e.g. "plant" used for both exp unit and sub unit.
    Set<String> currentExperimentDatasetNames = deltaExperiment.getDatasetsMetadata()
            .stream()
            .map(DatasetMetadata::getName)
            .collect(Collectors.toSet());

    // NOTE(review): not excluding the Replicate/Block level names here — confirm
    // whether BrAPIConstants.REPLICATE/BLOCK should also be filtered out.
    return getProgramObservationLevelNames(program).stream()
            .filter(name -> name != null && !name.trim().isEmpty()) // blank names are never valid dataset names
            .filter(name -> !currentExperimentDatasetNames.contains(name))
            .distinct()
            .sorted()
            .collect(Collectors.toList());
}

/**
* Creates sub-entity dataset
* TODO: Handle compensating transactions in event of failure. Currently brapi server does not support
Expand Down Expand Up @@ -801,6 +834,11 @@ public int deleteExperiment(Program program, UUID experimentId, boolean hard) th
return existingObservations.size();
}

/** Looks up all observation level names for the program, scoped by its BrAPI programDbId when one exists. */
private List<String> getProgramObservationLevelNames(Program program) throws ApiException {
    String programDbId = null;
    if (program.getBrapiProgram() != null) {
        programDbId = program.getBrapiProgram().getProgramDbId();
    }
    return observationLevelDAO.getObservationLevelNames(program, programDbId);
}

private Map<String, Object> createExportRow(
BrAPITrial experiment,
Program program,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -213,12 +213,17 @@ void setup() throws Exception {

// Creates an experiment with no observations using the default title "Without Obs"
// and experiment unit "Plot"; returns the created experiment's id (callers pass the
// result as the experimentId path segment in endpoint requests).
private String uploadExperimentWithoutObs() throws Exception {
    return uploadExperimentWithoutObs("Without Obs", "Plot");
}

private String uploadExperimentWithoutObs(String title, String expUnit) throws Exception {
ImportTestUtils importTestUtils = new ImportTestUtils();
List<Map<String, Object>> expRows = new ArrayList<>();

// Make test experiment import.
Map<String, Object> row1 = makeExpImportRow("Without Obs", "NewEnv1");
Map<String, Object> row2 = makeExpImportRow("Without Obs", "NewEnv2");
String envBase = title.replaceAll("\\s+", "");
Map<String, Object> row1 = makeExpImportRow(title, envBase + "1", expUnit);
Map<String, Object> row2 = makeExpImportRow(title, envBase + "2", expUnit);

expRows.add(row1);
expRows.add(row2);
Expand Down Expand Up @@ -396,6 +401,75 @@ void downloadSubEntityDataset(String extension) {
parseAndCheck(plantBodyStream, extension, false, plantRows, false, 23);
}

@Test
@Order(1)
public void createSubEntityDatasetRejectsExpUnitNameAlreadyUsedInSameExperiment() throws Exception {
    // "Plant" is already this experiment's exp unit, so creating a "Plant" dataset must fail.
    String plantExperimentId = uploadExperimentWithoutObs("Plant Same Experiment", "Plant");

    String url = String.format("/programs/%s/experiments/%s/dataset", program.getId(), plantExperimentId);
    Flowable<HttpResponse<String>> createDatasetCall = client.exchange(
            POST(url, "{\"name\":\"Plant\",\"repeatedMeasures\":2}")
                    .contentType(MediaType.APPLICATION_JSON)
                    .cookie(new NettyCookie("phylo-token", "test-registered-user")),
            String.class);

    HttpClientResponseException conflict =
            assertThrows(HttpClientResponseException.class, createDatasetCall::blockingFirst);
    assertEquals(HttpStatus.CONFLICT, conflict.getStatus());
}

@Test
@Order(2)
public void createSubEntityDatasetAllowsExpUnitNameUsedInOtherExperiment() throws Exception {
    // "Plant" is the exp unit of a DIFFERENT experiment, so the recipient may use it.
    uploadExperimentWithoutObs("Plant Source Experiment", "Plant");
    String recipientExperimentId = uploadExperimentWithoutObs("Plot Recipient Experiment", "Plot");

    String url = String.format("/programs/%s/experiments/%s/dataset", program.getId(), recipientExperimentId);
    Flowable<HttpResponse<String>> createDatasetCall = client.exchange(
            POST(url, "{\"name\":\"Plant\",\"repeatedMeasures\":2}")
                    .contentType(MediaType.APPLICATION_JSON)
                    .cookie(new NettyCookie("phylo-token", "test-registered-user")),
            String.class);

    HttpResponse<String> createDatasetResponse = createDatasetCall.blockingFirst();
    assertEquals(HttpStatus.OK, createDatasetResponse.getStatus());
}

@Test
@Order(3)
public void recommendedSubEntityDatasetNamesIncludeExpUnitNamesFromOtherExperiments() throws Exception {
    // Another experiment uses "Plant"; the recipient (exp unit "Plot") should see it recommended.
    uploadExperimentWithoutObs("Plant Autocomplete Source", "Plant");
    String recipientExperimentId = uploadExperimentWithoutObs("Autocomplete Recipient", "Plot");

    List<String> recommendations = getRecommendedSubEntityDatasetNames(recipientExperimentId);

    // "Plant" comes from the other experiment; "Plot" is the recipient's own exp unit.
    assertTrue(recommendations.stream().anyMatch("plant"::equalsIgnoreCase));
    assertFalse(recommendations.stream().anyMatch("plot"::equalsIgnoreCase));
}

@Test
@Order(4)
public void recommendedSubEntityDatasetNamesDeDuplicateExpUnitAndSubUnitNamesAcrossExperiments() throws Exception {
    // "Plant" appears twice across other experiments: once as an exp unit and once
    // as a sub-entity dataset; the recipient should see it recommended only once.
    uploadExperimentWithoutObs("Plant Exp Unit Source", "Plant");
    String subEntitySourceExperimentId = uploadExperimentWithoutObs("Plant Sub Unit Source", "Plot");
    String recipientExperimentId = uploadExperimentWithoutObs("Plant Unique Recipient", "Plot");

    // Give the sub-entity source experiment a "Plant" sub-entity dataset.
    String createDatasetUrl = String.format("/programs/%s/experiments/%s/dataset", program.getId(), subEntitySourceExperimentId);
    Flowable<HttpResponse<String>> createDatasetCall = client.exchange(
            POST(createDatasetUrl, "{\"name\":\"Plant\",\"repeatedMeasures\":2}")
                    .contentType(MediaType.APPLICATION_JSON)
                    .cookie(new NettyCookie("phylo-token", "test-registered-user")),
            String.class);
    assertEquals(HttpStatus.OK, createDatasetCall.blockingFirst().getStatus());

    List<String> recommendations = getRecommendedSubEntityDatasetNames(recipientExperimentId);

    long plantCount = recommendations.stream().filter("plant"::equalsIgnoreCase).count();
    assertEquals(1L, plantCount);
}

/**
* Tests for Experimental Collaborator endpoints
*/
Expand Down Expand Up @@ -845,12 +919,36 @@ private File writeDataToFile(List<Map<String, Object>> data, List<Trait> traits)
return file;
}

/**
 * Calls the recommended-sub-entity-dataset-names endpoint for the given experiment,
 * asserts a 200 response, and returns the parsed "result" array as a list of names.
 */
private List<String> getRecommendedSubEntityDatasetNames(String targetExperimentId) {
    String url = String.format("/programs/%s/experiments/%s/recommended-sub-entity-dataset-names",
            program.getId(), targetExperimentId);
    Flowable<HttpResponse<String>> recommendationCall = client.exchange(
            GET(url)
                    .contentType(MediaType.APPLICATION_JSON)
                    .cookie(new NettyCookie("phylo-token", "test-registered-user")),
            String.class);
    HttpResponse<String> recommendationResponse = recommendationCall.blockingFirst();
    assertEquals(HttpStatus.OK, recommendationResponse.getStatus());

    String body = Objects.requireNonNull(recommendationResponse.body());
    JsonArray result = JsonParser.parseString(body)
            .getAsJsonObject()
            .getAsJsonArray("result");

    List<String> names = new ArrayList<>();
    result.forEach(element -> names.add(element.getAsString()));
    return names;
}

// Builds an experiment import row with the default "Plot" experiment unit;
// delegates to the three-arg overload.
private Map<String, Object> makeExpImportRow(String title, String environment) {
    return makeExpImportRow(title, environment, "Plot");
}

private Map<String, Object> makeExpImportRow(String title, String environment, String expUnit) {
Map<String, Object> row = new HashMap<>();
row.put(ExperimentObservation.Columns.GERMPLASM_GID, "1");
row.put(ExperimentObservation.Columns.TEST_CHECK, "T");
row.put(ExperimentObservation.Columns.EXP_TITLE, title);
row.put(ExperimentObservation.Columns.EXP_UNIT, "Plot");
row.put(ExperimentObservation.Columns.EXP_UNIT, expUnit);
//row.put(ExperimentObservation.Columns.SUB_OBS_UNIT, "");
row.put(ExperimentObservation.Columns.EXP_TYPE, "Phenotyping");
row.put(ExperimentObservation.Columns.ENV, environment);
Expand Down
Loading