Laurentvallet issue#1980 (qc in ShUp) #2420

Open. Wants to merge 42 commits into base: develop from laurentvallet_issue#1980.

Changes from all commits (42 commits)

12722c1
Adding ms datasets dependency
LaurentPV Aug 30, 2024
0fafa74
Prepare call of getQualityCards endpoint and apply locally quality cards
LaurentPV Sep 2, 2024
8c30f01
implements mapstruct mappers
LaurentPV Sep 10, 2024
decdf6b
apply qualityCard from ShUp
LaurentPV Sep 10, 2024
d996c6b
apply qualityCard from ShUp
LaurentPV Sep 10, 2024
755f418
Correction of errors in mapping importJob and error pop up
LaurentPV Sep 11, 2024
6f2761f
Externalize qualityService
LaurentPV Sep 17, 2024
941ed3a
delete unused simpletons
LaurentPV Sep 18, 2024
3f28603
preparing dataset creation from instances
LaurentPV Sep 25, 2024
44d8e8a
create datasets from instances
LaurentPV Sep 25, 2024
42f1b76
get dicom metadata from absolute image filepath
LaurentPV Sep 25, 2024
0c907db
avoid execution of quality control at import on server side if done b…
LaurentPV Sep 26, 2024
c512d56
improve quality report message presentation
LaurentPV Sep 27, 2024
b54e1de
retrieve quality tag from qualityCardResult to importJob
LaurentPV Sep 30, 2024
6ed0a48
Set uploadState to Error if quality control failed
LaurentPV Oct 1, 2024
0e44532
setting new quality tag on server side and allowing failedValid as wa…
LaurentPV Oct 1, 2024
e82d922
correcting log
LaurentPV Oct 2, 2024
5a7bd71
Allow the use of datasets as docker executable and shup dependency
LaurentPV Oct 2, 2024
cecf6d4
correction of importerService unit test
LaurentPV Oct 9, 2024
80ab0c4
Merge branch 'fli-iam:develop' into laurentvallet_issue#1980
LaurentPV Oct 9, 2024
851b5db
rollback unit test modification
LaurentPV Oct 9, 2024
66d6c4e
correcting nullPointerException
LaurentPV Oct 9, 2024
e392519
attempt at correcting UT
LaurentPV Oct 9, 2024
a57e2a5
debugging unit test
LaurentPV Oct 9, 2024
78f9c75
debugging getDicomAttributes mock
LaurentPV Oct 9, 2024
827462c
run build maven with full stacktrace
LaurentPV Oct 9, 2024
18619a1
correcting unit test
LaurentPV Oct 9, 2024
c82f291
testing
LaurentPV Oct 9, 2024
c3814ea
test
LaurentPV Oct 9, 2024
a059351
update mockito version to manage static methods
LaurentPV Oct 10, 2024
d05109c
Mock static method getDicomAttributes
LaurentPV Oct 10, 2024
6b87e38
testing
LaurentPV Oct 10, 2024
0494b8f
refacto unit test
LaurentPV Oct 10, 2024
365b883
Adding condition if QualityCardResult is empty because no quality car…
LaurentPV Oct 10, 2024
10468cc
refactoring unit test
LaurentPV Oct 10, 2024
2be7964
testing again
LaurentPV Oct 10, 2024
b6921b4
adding null condition in ImporterService
LaurentPV Oct 10, 2024
a444e60
Avoid nullPointers on QualityCardResult
LaurentPV Oct 10, 2024
8f70954
Merge branch 'develop' into laurentvallet_issue#1980
LaurentPV Oct 10, 2024
ce07011
Cover case where quality card rule has no condition
LaurentPV Oct 23, 2024
5e8266a
catch errors during checkQuality process
LaurentPV Oct 23, 2024
b11a1c3
Merge branch 'develop' into laurentvallet_issue#1980
julien-louis Nov 12, 2024

Files changed

shanoir-ng-datasets/pom.xml (3 changes: 2 additions & 1 deletion)
@@ -149,6 +149,7 @@
<profiles>
<profile>${spring.active.profile}</profile>
</profiles>
<classifier>exec</classifier>
</configuration>
<executions>
<execution>
@@ -168,7 +169,7 @@
<phase>package</phase>
<configuration>
<target>
<copy file="target/${project.build.finalName}.jar"
<copy file="target/${project.build.finalName}-exec.jar"
tofile="${basedir}/../docker-compose/datasets/${project.artifactId}.jar" />
</target>
</configuration>
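For context on this build change: with spring-boot-maven-plugin, configuring a <classifier> on the repackage goal makes the plugin write the executable fat jar as ${project.build.finalName}-exec.jar while leaving the plain jar in place, which is why the copy task above now picks up the -exec artifact for the docker-compose image. The plain jar is what allows ShanoirUploader to depend on shanoir-ng-datasets as an ordinary Maven artifact (see the "Adding ms datasets dependency" and "Allow the use of datasets as docker executable and shup dependency" commits).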
DicomProcessing.java
@@ -35,12 +35,12 @@
@Service
public class DicomProcessing {

UIDGeneration uidGenerator = new UIDGeneration();
private static UIDGeneration uidGenerator = new UIDGeneration();

@Autowired
private WADOURLHandler wadoURLHandler;
private static WADOURLHandler wadoURLHandler;

public Attributes getDicomObjectAttributes(DatasetFile image, Boolean isEnhancedMR) throws IOException {
public static Attributes getDicomObjectAttributes(DatasetFile image, Boolean isEnhancedMR) throws IOException {
File dicomFile = new File(image.getPath());
try (DicomInputStream dIS = new DicomInputStream(dicomFile)) {
Attributes datasetAttributes;
@@ -56,7 +56,7 @@ public Attributes getDicomObjectAttributes(DatasetFile image, Boolean isEnhanced
}

public ExaminationAttributes<String> getDicomExaminationAttributes(Study study, Boolean isEnhanced) throws ShanoirException {
ExaminationAttributes<String> attributes = new ExaminationAttributes<String>(wadoURLHandler);
ExaminationAttributes<String> attributes = new ExaminationAttributes<>(wadoURLHandler);
if (study != null) {
for (Serie serie : study.getSeries()) {
attributes.addAcquisitionAttributes(serie.getSeriesInstanceUID(), getDicomAcquisitionAttributes(serie, isEnhanced));
@@ -65,8 +65,8 @@ public ExaminationAttributes<String> getDicomExaminationAttributes(Study study,
return attributes;
}

public ExaminationAttributes<String> getDicomExaminationAttributes(Study study) throws ShanoirException {
ExaminationAttributes<String> attributes = new ExaminationAttributes<String>(wadoURLHandler);
public static ExaminationAttributes<String> getDicomExaminationAttributes(Study study) throws ShanoirException {
ExaminationAttributes<String> attributes = new ExaminationAttributes<>(wadoURLHandler);
if (study != null) {
for (Serie serie : study.getSeries()) {
attributes.addAcquisitionAttributes(serie.getSeriesInstanceUID(), getDicomAcquisitionAttributes(serie));
@@ -75,26 +75,29 @@ public ExaminationAttributes<String> getDicomExaminationAttributes(Study study)
return attributes;
}

public AcquisitionAttributes<String> getDicomAcquisitionAttributes(Serie serie, Boolean isEnhanced) throws ShanoirException {
AcquisitionAttributes<String> attributes = new AcquisitionAttributes<String>();
public static AcquisitionAttributes<String> getDicomAcquisitionAttributes(Serie serie, Boolean isEnhanced) throws ShanoirException {
AcquisitionAttributes<String> attributes = new AcquisitionAttributes<>();
String sopUID = null;
if (!CollectionUtils.isEmpty(serie.getImages())) {
sopUID = serie.getImages().get(0).getSOPInstanceUID();
} else {
sopUID = uidGenerator.getNewUID();
}
for (Dataset dataset : serie.getDatasets()) {
try {
dataset.setFirstImageSOPInstanceUID(sopUID);
attributes.addDatasetAttributes(dataset.getFirstImageSOPInstanceUID(), getDicomObjectAttributes(serie.getFirstDatasetFileForCurrentSerie(), isEnhanced));
} catch (IOException e) {
throw new ShanoirException("Could not read dicom metadata from file for serie " + serie.getSopClassUID(), e);
// In case of Quality Check during Import from ShUp, Serie does not have any Dataset and conditions are applied on DICOM metadata only.
if (!CollectionUtils.isEmpty(serie.getDatasets())) {
for (Dataset dataset : serie.getDatasets()) {
try {
dataset.setFirstImageSOPInstanceUID(sopUID);
attributes.addDatasetAttributes(dataset.getFirstImageSOPInstanceUID(), getDicomObjectAttributes(serie.getFirstDatasetFileForCurrentSerie(), isEnhanced));
} catch (IOException e) {
throw new ShanoirException("Could not read dicom metadata from file for serie " + serie.getSopClassUID(), e);
}
}
}
return attributes;
}

public AcquisitionAttributes<String> getDicomAcquisitionAttributes(Serie serie) throws ShanoirException {
public static AcquisitionAttributes<String> getDicomAcquisitionAttributes(Serie serie) throws ShanoirException {
return getDicomAcquisitionAttributes(serie, serie.getIsEnhanced());
}

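Making these DicomProcessing helpers static has a knock-on effect on testing that several later commits address ("update mockito version to manage static methods", "Mock static method getDicomAttributes"): the class can no longer be replaced through @Autowired injection in unit tests, so the static calls have to be stubbed with Mockito's static mocking. A minimal sketch of that pattern, assuming JUnit 5 and a Mockito version with the inline mock maker (3.4 or later) on the test classpath; only DicomProcessing and AcquisitionAttributes are taken from this diff, the surrounding test skeleton is illustrative:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mockStatic;

import org.junit.jupiter.api.Test;
import org.mockito.MockedStatic;
import org.shanoir.ng.dicom.DicomProcessing;
import org.shanoir.ng.download.AcquisitionAttributes;

class DicomProcessingStaticMockSketch {

    @Test
    void staticGetDicomAcquisitionAttributesCanBeStubbed() throws Exception {
        try (MockedStatic<DicomProcessing> dicom = mockStatic(DicomProcessing.class)) {
            // Every call to the static method inside this block returns the stubbed value.
            dicom.when(() -> DicomProcessing.getDicomAcquisitionAttributes(any(), any()))
                 .thenReturn(new AcquisitionAttributes<String>());
            // ... exercise the class under test (for example ImporterService) here ...
        } // the static stub is released when the try-with-resources block closes
    }
}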
ImportJob.java
@@ -20,6 +20,7 @@
import java.util.Map;

import org.shanoir.ng.shared.event.ShanoirEvent;
import org.shanoir.ng.shared.quality.QualityTag;

/**
* @author atouboul
@@ -72,6 +73,8 @@ public class ImportJob implements Serializable {
private Long userId;

private String username;

private QualityTag qualityTag;

public long getTimestamp() {
return timestamp;
@@ -234,6 +237,14 @@ public void setProperties(Map<String, String> properties) {
this.properties = properties;
}

public QualityTag getQualityTag() {
return qualityTag;
}

public void setQualityTag(QualityTag qualityTag) {
this.qualityTag = qualityTag;
}

public Serie getFirstSerie() {
if ( getPatients() == null || getPatients().size() == 0
|| getPatients().get(0) == null
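The new qualityTag field is what carries the ShanoirUploader-side quality result over to the server. A minimal sketch of that round trip, under stated assumptions: the snippet is placed next to ImportJob so no import is needed for it, setFromShanoirUploader(...) is assumed as the counterpart of the isFromShanoirUploader() getter used in ImporterService below, and VALID is assumed to be one of the QualityTag constants; only getQualityTag()/setQualityTag() and the QualityTag import come from this diff:

import org.shanoir.ng.shared.quality.QualityTag;

class QualityTagRoundTripSketch {

    // ShanoirUploader side: quality control has already run locally,
    // so the resulting tag is attached to the import job before upload.
    static void tagJobOnShanoirUploaderSide(ImportJob importJob) {
        importJob.setFromShanoirUploader(true);    // assumed setter
        importJob.setQualityTag(QualityTag.VALID); // assumed enum constant
    }

    // Server side: when the job comes from ShUp and carries a tag,
    // the import can reuse it instead of re-running the quality cards.
    static boolean canReuseShanoirUploaderTag(ImportJob importJob) {
        return importJob.isFromShanoirUploader() && importJob.getQualityTag() != null;
    }
}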
ImporterService.java
@@ -23,10 +23,27 @@
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;

import org.joda.time.DateTime;
import org.shanoir.ng.dataset.modality.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.shanoir.ng.dataset.modality.CalibrationDataset;
import org.shanoir.ng.dataset.modality.CtDataset;
import org.shanoir.ng.dataset.modality.EegDataset;
import org.shanoir.ng.dataset.modality.GenericDataset;
import org.shanoir.ng.dataset.modality.MegDataset;
import org.shanoir.ng.dataset.modality.MeshDataset;
import org.shanoir.ng.dataset.modality.MrDataset;
import org.shanoir.ng.dataset.modality.ParameterQuantificationDataset;
import org.shanoir.ng.dataset.modality.PetDataset;
import org.shanoir.ng.dataset.modality.RegistrationDataset;
import org.shanoir.ng.dataset.modality.SegmentationDataset;
import org.shanoir.ng.dataset.modality.SpectDataset;
import org.shanoir.ng.dataset.modality.StatisticalDataset;
import org.shanoir.ng.dataset.modality.TemplateDataset;
import org.shanoir.ng.dataset.modality.XaDataset;
import org.shanoir.ng.dataset.model.Dataset;
import org.shanoir.ng.dataset.model.DatasetExpression;
import org.shanoir.ng.dataset.model.DatasetExpressionFormat;
@@ -38,8 +55,6 @@
import org.shanoir.ng.datasetfile.DatasetFile;
import org.shanoir.ng.dicom.DicomProcessing;
import org.shanoir.ng.download.AcquisitionAttributes;
import org.shanoir.ng.download.ExaminationAttributes;
import org.shanoir.ng.download.WADODownloaderService;
import org.shanoir.ng.examination.model.Examination;
import org.shanoir.ng.examination.repository.ExaminationRepository;
import org.shanoir.ng.examination.service.ExaminationService;
@@ -60,11 +75,9 @@
import org.shanoir.ng.solr.service.SolrService;
import org.shanoir.ng.studycard.dto.QualityCardResult;
import org.shanoir.ng.studycard.model.ExaminationData;
import org.shanoir.ng.studycard.model.QualityCard;
import org.shanoir.ng.studycard.model.QualityException;
import org.shanoir.ng.studycard.model.StudyCard;
import org.shanoir.ng.studycard.repository.StudyCardRepository;
import org.shanoir.ng.studycard.service.QualityCardService;
import org.shanoir.ng.utils.KeycloakUtil;
import org.shanoir.ng.utils.SecurityContextUtil;
import org.shanoir.ng.utils.Utils;
@@ -111,9 +124,6 @@ public class ImporterService {

@Autowired
private StudyCardRepository studyCardRepository;

@Autowired
private QualityCardService qualityCardService;

@Autowired
private DatasetAcquisitionService datasetAcquisitionService;
@@ -122,13 +132,10 @@
private SubjectStudyService subjectStudyService;

@Autowired
private DicomProcessing dicomProcessing;

@Autowired
private WADODownloaderService downloader;
private SolrService solrService;

@Autowired
private SolrService solrService;
private QualityService qualityService;

private DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmssSSS");

@@ -167,18 +174,37 @@ public void createAllDatasetAcquisition(ImportJob importJob, Long userId) throws
QualityTag tagSave = subjectStudy != null ? subjectStudy.getQualityTag() : null;
ExaminationData examData = new ExaminationData(examination);
examData.setDatasetAcquisitions(Utils.toList(generatedAcquisitions));
QualityCardResult qualityResult = checkQuality(examData, importJob);
QualityCardResult qualityResult;

// If import comes from ShanoirUploader, the check quality at import has already been done
if (!importJob.isFromShanoirUploader()) {
qualityResult = qualityService.checkQuality(examData, importJob, null);
} else {
LOG.info("Importing Data from ShanoirUploader.");
// We retrieve quality card result from ShUp import job
qualityResult = qualityService.retrieveQualityCardResult(importJob);
if (!qualityResult.isEmpty()) {
LOG.info("Retrieving Quality Control result from ShanoirUploader.");
if(subjectStudy != null) {
subjectStudy.setQualityTag(qualityResult.get(0).getTagSet());
qualityResult.addUpdatedSubjectStudy(subjectStudy);
}
}
}

// Has quality check passed ?
if (qualityResult.hasError()) {
if (qualityResult != null && !qualityResult.isEmpty() && qualityResult.hasError()) {
// TODO : Delete newly created Examination ?
[Collaborator review comment on the TODO above] I think deleting the exam would not fit to the frontend import workflow as the exam is created "independently"
throw new QualityException(examination, qualityResult);
} else { // Then do the import
if (qualityResult.hasWarning() || qualityResult.hasFailedValid()) {
event.setReport(qualityResult.toString());
if (qualityResult != null && !qualityResult.isEmpty()) {
if (qualityResult.hasWarning() || qualityResult.hasFailedValid()) {
event.setReport(qualityResult.toString());
}
// add tag to subject-study
subjectStudyService.update(qualityResult.getUpdatedSubjectStudies());
}
// add tag to subject-study
subjectStudyService.update(qualityResult.getUpdatedSubjectStudies());

generatedAcquisitions = new HashSet<DatasetAcquisition>(datasetAcquisitionService.createAll(generatedAcquisitions));
generatedAcquisitions = new HashSet<>(datasetAcquisitionService.createAll(generatedAcquisitions));
try {
persistPatientInPacs(importJob.getPatients(), event);
} catch (Exception e) { // if error in pacs
Expand All @@ -187,8 +213,10 @@ public void createAllDatasetAcquisition(ImportJob importJob, Long userId) throws
datasetAcquisitionService.deleteById(acquisition.getId(), null);
}
// revert quality tag
subjectStudy.setQualityTag(tagSave);
subjectStudyService.update(qualityResult.getUpdatedSubjectStudies());
if(subjectStudy != null) {
subjectStudy.setQualityTag(tagSave);
subjectStudyService.update(qualityResult.getUpdatedSubjectStudies());
}
throw new ShanoirException("Error while saving data in pacs, the import is canceled and acquisitions were not saved", e);
}
}
@@ -264,7 +292,7 @@ private Set<DatasetAcquisition> generateAcquisitions(Examination examination, Im
// get dicomAttributes
AcquisitionAttributes<String> dicomAttributes = null;
try {
dicomAttributes = dicomProcessing.getDicomAcquisitionAttributes(serie, serie.getIsEnhanced());
dicomAttributes = DicomProcessing.getDicomAcquisitionAttributes(serie, serie.getIsEnhanced());
} catch (PacsException e) {
throw new ShanoirException("Unable to retrieve dicom attributes in file " + serie.getFirstDatasetFileForCurrentSerie().getPath(), e);
}
Expand Down Expand Up @@ -293,38 +321,6 @@ private Set<DatasetAcquisition> generateAcquisitions(Examination examination, Im
return generatedAcquisitions;
}

private QualityCardResult checkQuality(ExaminationData examination, ImportJob importJob) throws ShanoirException {
List<QualityCard> qualityCards = qualityCardService.findByStudy(examination.getStudyId());
if (!hasQualityChecksAtImport(qualityCards)) {
return new QualityCardResult();
}
Study firstStudy = importJob.getFirstStudy();
if (firstStudy == null) {
throw new ShanoirException("The given import job does not provide any serie. Examination : " + importJob.getExaminationId());
}
ExaminationAttributes<String> dicomAttributes = dicomProcessing.getDicomExaminationAttributes(firstStudy);
QualityCardResult qualityResult = new QualityCardResult();
for (QualityCard qualityCard : qualityCards) {
if (qualityCard.isToCheckAtImport()) {
qualityResult.merge(qualityCard.apply(examination, dicomAttributes, downloader));
}
}
return qualityResult;
}

private boolean hasQualityChecksAtImport(List<QualityCard> qualityCards) {
if (qualityCards == null || qualityCards.isEmpty()) {
LOG.warn("No qualitycard given for this import.");
return false;
}
for (QualityCard qualityCard : qualityCards) {
if (qualityCard.isToCheckAtImport()) {
return true;
}
}
return false;
}

StudyCard getStudyCard(ImportJob importJob) {
if (importJob.getStudyCardId() != null) { // makes sense: imports without studycard exist
StudyCard studyCard = getStudyCard(importJob.getStudyCardId());
@@ -402,7 +398,7 @@ private StudyCard getStudyCard(Long studyCardId) {
* @param serie
* @return
*/
private boolean checkSerieForDicomImages(Serie serie) {
private static boolean checkSerieForDicomImages(Serie serie) {
return serie.getModality() != null
&& serie.getDatasets() != null
&& !serie.getDatasets().isEmpty()
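The private checkQuality and hasQualityChecksAtImport helpers deleted above are not dropped functionality: per the "Externalize qualityService" commit, the same quality-card evaluation now lives in the injected QualityService, which createAllDatasetAcquisition calls through qualityService.checkQuality(examData, importJob, null) on the regular import path and through qualityService.retrieveQualityCardResult(importJob) on the ShanoirUploader path.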