From 0e257475925aa1ebf3eff73150dae604f85d97ea Mon Sep 17 00:00:00 2001 From: Rick Herrick <jrherrick@wustl.edu> Date: Wed, 27 Jul 2016 15:04:53 -0500 Subject: [PATCH] XNAT-4156 Added checks for status when data is received. This previously just reset the session to RECEIVING. On session get or create, checks status and if the session is in interruptable state, sets to RECEIVING_INTERRUPT. Otherwise a new session is created that will need to be merged later. --- .../xnat/archive/GradualDicomImporter.java | 482 +++++++------ .../helpers/prearchive/PrearcDatabase.java | 119 ++-- .../prearchive/PrearcTableBuilder.java | 652 ++++++++---------- .../xnat/helpers/prearchive/PrearcUtils.java | 44 +- .../helpers/prearchive/SessionDataTriple.java | 200 +++--- .../handlers/PrearchiveRebuildHandler.java | 70 +- .../restlet/actions/PrearcBlankSession.java | 109 --- .../prearchive/PrearcSessionResource.java | 2 +- .../nrg/xnat/restlet/services/Importer.java | 23 +- .../xnat/spawner/site-admin-elements.yaml | 42 +- 10 files changed, 855 insertions(+), 888 deletions(-) delete mode 100644 src/main/java/org/nrg/xnat/restlet/actions/PrearcBlankSession.java diff --git a/src/main/java/org/nrg/xnat/archive/GradualDicomImporter.java b/src/main/java/org/nrg/xnat/archive/GradualDicomImporter.java index 624e22c1..27c700db 100644 --- a/src/main/java/org/nrg/xnat/archive/GradualDicomImporter.java +++ b/src/main/java/org/nrg/xnat/archive/GradualDicomImporter.java @@ -70,56 +70,20 @@ import java.util.concurrent.Callable; @ImporterHandler(handler = ImporterHandlerA.GRADUAL_DICOM_IMPORTER) public class GradualDicomImporter extends ImporterHandlerA { public static final String SENDER_AE_TITLE_PARAM = "Sender-AE-Title"; - public static final String SENDER_ID_PARAM = "Sender-ID"; - public static final String TSUID_PARAM = "Transfer-Syntax-UID"; - public static final int LAST_TAG = Tag.SeriesDescription; - - private static final Logger logger = LoggerFactory.getLogger(GradualDicomImporter.class); - private static final Object NOT_A_WRITABLE_PROJECT = new Object(); - private static final String DEFAULT_TRANSFER_SYNTAX = TransferSyntax.ImplicitVRLittleEndian.uid(); - private static final String RENAME_PARAM = "rename"; - private static final DicomFileNamer DEFAULT_NAMER = new SOPHashDicomFileNamer(); - private static final long PROJECT_CACHE_EXPIRY_SECONDS = 120; - private static final boolean canDecompress = initializeCanDecompress(); - private static final CacheManager cacheManager = CacheManager.getInstance(); - private DicomFilterService _filterService; - - private static boolean initializeCanDecompress() { - try { - return Decompress.isSupported(); - } catch (NoClassDefFoundError error) { - return false; - } - } - - private final FileWriterWrapperI fw; - private final UserI user; - private final Map<String, Object> params; - private DicomObjectIdentifier<XnatProjectdata> dicomObjectIdentifier; - private DicomFileNamer namer = DEFAULT_NAMER; - private TransferSyntax ts = null; - private Cache projectCache = null; + public static final String SENDER_ID_PARAM = "Sender-ID"; + public static final String TSUID_PARAM = "Transfer-Syntax-UID"; public GradualDicomImporter(final Object listenerControl, - final UserI u, - final FileWriterWrapperI fw, - final Map<String, Object> params) + final UserI user, + final FileWriterWrapperI fileWriter, + final Map<String, Object> parameters) throws IOException, ClientException { - super(listenerControl, u, fw, params); - this.user = u; - this.fw = fw; - this.params = params; - if 
(params.containsKey(TSUID_PARAM)) { - ts = TransferSyntax.valueOf((String) params.get(TSUID_PARAM)); - } - } - - private boolean canCreateIn(final XnatProjectdata p) { - try { - return PrearcUtils.canModify(user, p.getId()); - } catch (Throwable t) { - logger.error("Unable to check permissions for " + user + " in " + p, t); - return false; + super(listenerControl, user, fileWriter, parameters); + _user = user; + _fileWriter = fileWriter; + _parameters = parameters; + if (_parameters.containsKey(TSUID_PARAM)) { + _transferSyntax = TransferSyntax.valueOf((String) _parameters.get(TSUID_PARAM)); } } @@ -128,6 +92,7 @@ public class GradualDicomImporter extends ImporterHandlerA { * does not map to a project where user has create perms? * * @param e cache element (not null) + * * @return true if this element indicates no writable project */ public static boolean isCachedNotWriteableProject(final Element e) { @@ -146,114 +111,52 @@ public class GradualDicomImporter extends ImporterHandlerA { cache.put(new Element(name, NOT_A_WRITABLE_PROJECT)); } - private XnatProjectdata getProject(final Object alias, final Callable<XnatProjectdata> lookupProject) { - if (null == projectCache) { - projectCache = getUserProjectCache(user); - } - if (null != alias) { - logger.debug("looking for project matching alias {} from query parameters", alias); - final Element pe = projectCache.get(alias); - if (null != pe) { - if (isCachedNotWriteableProject(pe)) { - // this alias is cached as a non-writable project name, but user is specifying it. - // maybe they know something we don't; clear cache entry so we can try again. - projectCache.remove(alias); - return getProject(alias, lookupProject); - } else { - return (XnatProjectdata) pe.getObjectValue(); - } + /** + * Adds a cache of project objects on a per-user basis. This is currently used by GradualDicomImporter and DbBackedProjectIdentifier + * + * @param user The user for whom to retrieve the cache. + * + * @return The user's cache. + */ + public static Cache getUserProjectCache(final UserI user) { + final String cacheName = user.getLogin() + "-projects"; + synchronized (cacheManager) { + if (!cacheManager.cacheExists(cacheName)) { + final CacheConfiguration config = new CacheConfiguration(cacheName, 0) + .copyOnRead(false).copyOnWrite(false) + .eternal(false) + .persistence(new PersistenceConfiguration().strategy(PersistenceConfiguration.Strategy.NONE)) + .timeToLiveSeconds(PROJECT_CACHE_EXPIRY_SECONDS); + final Cache cache = new Cache(config); + cacheManager.addCache(cache); + return cache; } else { - logger.trace("cache miss for project alias {}, trying database", alias); - final XnatProjectdata p = XnatProjectdata.getXnatProjectdatasById(alias, user, false); - if (null != p && canCreateIn(p)) { - projectCache.put(new Element(alias, p)); - return p; - } else { - for (final XnatProjectdata pa : - XnatProjectdata.getXnatProjectdatasByField("xnat:projectData/aliases/alias/alias", - alias, user, false)) { - if (canCreateIn(pa)) { - projectCache.put(new Element(alias, pa)); - return pa; - } - } - } - } - logger.info("storage request specified project {}, which does not exist or user does not have create perms", alias); - } else { - logger.trace("no project alias found in query parameters"); - } - // No alias, or we couldn't match it to a project. Run the identifier to see if that can get a project name/alias. - // (Don't cache alias->identifier-derived-project because we didn't use the alias to derive the project.) - try { - return null == lookupProject ? 
null : lookupProject.call(); - } catch (Throwable t) { - logger.error("error in project lookup", t); - return null; - } - } - - private File getSafeFile(File sessionDir, String scan, String name, DicomObject o, boolean forceRename) { - String fileName = namer.makeFileName(o); - while (fileName.charAt(0) == '.') { - fileName = fileName.substring(1); - } - final File safeFile = Files.getImageFile(sessionDir, scan, fileName); - if (forceRename) { - return safeFile; - } - final String valname = Files.toFileNameChars(name); - if (!Files.isValidFilename(valname)) { - return safeFile; - } - final File reqFile = Files.getImageFile(sessionDir, scan, valname); - if (reqFile.exists()) { - try (final FileInputStream fin = new FileInputStream(reqFile)) { - final DicomObject o1 = read(fin, name); - if (Objects.equal(o.get(Tag.SOPInstanceUID), o1.get(Tag.SOPInstanceUID)) && - Objects.equal(o.get(Tag.SOPClassUID), o1.get(Tag.SOPClassUID))) { - return reqFile; // object are equivalent; ok to overwrite - } else { - return safeFile; - } - } catch (Throwable t) { - return safeFile; + return cacheManager.getCache(cacheName); } - } else { - return reqFile; - } - } - - private static <K, V> String getString(final Map<K, V> m, final K k, final V defaultValue) { - final V v = m.get(k); - if (null == v) { - return null == defaultValue ? null : defaultValue.toString(); - } else { - return v.toString(); } } @Override public List<String> call() throws ClientException, ServerException { - final String name = fw.getName(); + final String name = _fileWriter.getName(); final DicomObject dicom; final XnatProjectdata project; - try (final BufferedInputStream bis = new BufferedInputStream(fw.getInputStream()); - final DicomInputStream dis = null == ts ? new DicomInputStream(bis) : new DicomInputStream(bis, ts)) { - final int lastTag = Math.max(dicomObjectIdentifier.getTags().last(), LAST_TAG) + 1; + try (final BufferedInputStream bis = new BufferedInputStream(_fileWriter.getInputStream()); + final DicomInputStream dis = null == _transferSyntax ? new DicomInputStream(bis) : new DicomInputStream(bis, _transferSyntax)) { + final int lastTag = Math.max(_dicomObjectIdentifier.getTags().last(), Tag.SeriesDescription) + 1; logger.trace("reading object into memory up to {}", TagUtils.toString(lastTag)); dis.setHandler(new StopTagInputHandler(lastTag)); dicom = dis.readDicomObject(); - logger.trace("handling file with query parameters {}", params); + logger.trace("handling file with query parameters {}", _parameters); try { // project identifier is expensive, so avoid if possible - project = getProject(PrearcUtils.identifyProject(params), - new Callable<XnatProjectdata>() { - public XnatProjectdata call() { - return dicomObjectIdentifier.getProject(dicom); - } - }); + project = getProject(PrearcUtils.identifyProject(_parameters), + new Callable<XnatProjectdata>() { + public XnatProjectdata call() { + return _dicomObjectIdentifier.getProject(dicom); + } + }); } catch (MalformedURLException e1) { logger.error("unable to parse supplied destination flag", e1); throw new ClientException(Status.CLIENT_ERROR_BAD_REQUEST, e1); @@ -276,11 +179,11 @@ public class GradualDicomImporter extends ImporterHandlerA { } if (!(shouldIncludeDicomObject(siteFilter, dicom) && shouldIncludeDicomObject(projectFilter, dicom))) { return new ArrayList<>(); - /** TODO: Return information to user on rejected files. Unfortunately throwing an - * exception causes DicomBrowser to display a panicked error message. 
Some way of - * returning the information that a particular file type was not accepted would be - * nice, though. Possibly record the information and display on an admin page. - * Work to be done for 1.7 + /* TODO: Return information to user on rejected files. Unfortunately throwing an + * exception causes DicomBrowser to display a panicked error message. Some way of + * returning the information that a particular file type was not accepted would be + * nice, though. Possibly record the information and display on an admin page. + * Work to be done for 1.7 */ } try { @@ -307,22 +210,19 @@ public class GradualDicomImporter extends ImporterHandlerA { //root = new File(project.getPrearchivePath()); root = new File(ArcSpecManager.GetInstance().getGlobalPrearchivePath() + "/" + project.getId()); } - final File tsdir, sessdir; - - tsdir = new File(root, PrearcUtils.makeTimestamp()); String sessionLabel; - if (params.containsKey(URIManager.EXPT_LABEL)) { - sessionLabel = (String) params.get(URIManager.EXPT_LABEL); - logger.trace("using provided experiment label {}", params.get(URIManager.EXPT_LABEL)); + if (_parameters.containsKey(URIManager.EXPT_LABEL)) { + sessionLabel = (String) _parameters.get(URIManager.EXPT_LABEL); + logger.trace("using provided experiment label {}", _parameters.get(URIManager.EXPT_LABEL)); } else { - sessionLabel = dicomObjectIdentifier.getSessionLabel(dicom); + sessionLabel = _dicomObjectIdentifier.getSessionLabel(dicom); } String visit; - if (params.containsKey(URIManager.VISIT_LABEL)) { - visit = (String) params.get(URIManager.VISIT_LABEL); - logger.trace("using provided visit label {}", params.get(URIManager.VISIT_LABEL)); + if (_parameters.containsKey(URIManager.VISIT_LABEL)) { + visit = (String) _parameters.get(URIManager.VISIT_LABEL); + logger.trace("using provided visit label {}", _parameters.get(URIManager.VISIT_LABEL)); } else { visit = null; } @@ -332,13 +232,17 @@ public class GradualDicomImporter extends ImporterHandlerA { } final String subject; - if (params.containsKey(URIManager.SUBJECT_ID)) { - subject = (String) params.get(URIManager.SUBJECT_ID); + if (_parameters.containsKey(URIManager.SUBJECT_ID)) { + subject = (String) _parameters.get(URIManager.SUBJECT_ID); } else { - subject = dicomObjectIdentifier.getSubjectLabel(dicom); + subject = _dicomObjectIdentifier.getSubjectLabel(dicom); } + + + final File timestamp = new File(root, PrearcUtils.makeTimestamp()); + if (null == subject) { - logger.trace("subject is null for session {}/{}", tsdir, sessionLabel); + logger.trace("subject is null for session {}/{}", timestamp, sessionLabel); } session = new SessionData(); @@ -348,14 +252,14 @@ public class GradualDicomImporter extends ImporterHandlerA { session.setVisit(visit); session.setScan_date(dicom.getDate(Tag.StudyDate)); session.setTag(studyInstanceUID); - session.setTimestamp(tsdir.getName()); + session.setTimestamp(timestamp.getName()); session.setStatus(PrearcUtils.PrearcStatus.RECEIVING); session.setLastBuiltDate(Calendar.getInstance().getTime()); session.setSubject(subject); - session.setUrl((new File(tsdir, sessionLabel)).getAbsolutePath()); - session.setSource(params.get(URIManager.SOURCE)); - session.setPreventAnon(Boolean.valueOf((String) params.get(URIManager.PREVENT_ANON))); - session.setPreventAutoCommit(Boolean.valueOf((String) params.get(URIManager.PREVENT_AUTO_COMMIT))); + session.setUrl((new File(timestamp, sessionLabel)).getAbsolutePath()); + session.setSource(_parameters.get(URIManager.SOURCE)); + 
session.setPreventAnon(Boolean.valueOf((String) _parameters.get(URIManager.PREVENT_ANON))); + session.setPreventAutoCommit(Boolean.valueOf((String) _parameters.get(URIManager.PREVENT_AUTO_COMMIT))); // Query the cache for an existing session that has this Study Instance UID, project name, and optional modality. // If found the SessionData object we just created is over-ridden with the values from the cache. @@ -364,10 +268,9 @@ public class GradualDicomImporter extends ImporterHandlerA { // // This record is necessary so that, if this row was created by this call, it can be deleted if anonymization // goes wrong. In case of any other error the file is left on the filesystem. - // TODO: This is where things are going awry with Jenny's prearchive bug. Either<SessionData, SessionData> getOrCreate; try { - getOrCreate = PrearcDatabase.eitherGetOrCreateSession(session, tsdir, shouldAutoArchive(project, dicom)); + getOrCreate = PrearcDatabase.eitherGetOrCreateSession(session, timestamp, shouldAutoArchive(project, dicom)); if (getOrCreate.isLeft()) { session = getOrCreate.getLeft(); } else { @@ -378,26 +281,17 @@ public class GradualDicomImporter extends ImporterHandlerA { } try { - //if the status isn't RECEIVING, fix it - //else if the last mod time is more then 15 seconds ago, update it. - //this code builds and executes the sql directly, because the APIs for doing so generate multiple SELECT statements (to confirm the row is there) - //we've confirmed the row is there in line 338, so that shouldn't be necessary here. + // else if the last mod time is more then 15 seconds ago, update it. + // this code builds and executes the sql directly, because the APIs for doing so generate multiple SELECT statements (to confirm the row is there) + // we've confirmed the row is there in line 338, so that shouldn't be necessary here. // this code executes for every file received, so any unnecessary sql should be eliminated. - if (!PrearcUtils.PrearcStatus.RECEIVING.equals(session.getStatus())) { - //update the last modified time and set the status - PoolDBUtils.ExecuteNonSelectQuery(DatabaseSession.updateSessionStatusSQL(session.getName(), session.getTimestamp(), session.getProject(), PrearcUtils.PrearcStatus.RECEIVING), null, null); - } else if (Calendar.getInstance().getTime().after(DateUtils.addSeconds(session.getLastBuiltDate(), 15))) { + if (Calendar.getInstance().getTime().after(DateUtils.addSeconds(session.getLastBuiltDate(), 15))) { PoolDBUtils.ExecuteNonSelectQuery(DatabaseSession.updateSessionLastModSQL(session.getName(), session.getTimestamp(), session.getProject()), null, null); } } catch (Exception e) { - logger.error("An error occurred while trying to set the session status to RECEIVING", e); - //not exactly sure what we should do here. should we throw an exception, and the received file won't be stored locally? Or should we let it go and let the file be saved but unreferenced. - //the old code threw an exception, so we'll keep that logic. 
- throw new ServerException("An error occurred while trying to set the session status to RECEIVING", e); + logger.error("An error occurred trying to update the session update timestamp.", e); } - sessdir = new File(new File(root, session.getTimestamp()), session.getFolderName()); - // Build the scan label final String seriesNum = dicom.getString(Tag.SeriesNumber); final String seriesUID = dicom.getString(Tag.SeriesInstanceUID); @@ -410,7 +304,7 @@ public class GradualDicomImporter extends ImporterHandlerA { scan = null; } - final String source = getString(params, SENDER_ID_PARAM, user.getLogin()); + final String source = getString(_parameters, SENDER_ID_PARAM, _user.getLogin()); final DicomObject fmi; if (dicom.contains(Tag.TransferSyntaxUID)) { @@ -419,26 +313,26 @@ public class GradualDicomImporter extends ImporterHandlerA { final String sopClassUID = dicom.getString(Tag.SOPClassUID); final String sopInstanceUID = dicom.getString(Tag.SOPInstanceUID); final String transferSyntaxUID; - if (null == ts) { + if (null == _transferSyntax) { transferSyntaxUID = dicom.getString(Tag.TransferSyntaxUID, DEFAULT_TRANSFER_SYNTAX); } else { - transferSyntaxUID = ts.uid(); + transferSyntaxUID = _transferSyntax.uid(); } fmi = new BasicDicomObject(); fmi.initFileMetaInformation(sopClassUID, sopInstanceUID, transferSyntaxUID); - if (params.containsKey(SENDER_AE_TITLE_PARAM)) { - fmi.putString(Tag.SourceApplicationEntityTitle, VR.AE, (String) params.get(SENDER_AE_TITLE_PARAM)); + if (_parameters.containsKey(SENDER_AE_TITLE_PARAM)) { + fmi.putString(Tag.SourceApplicationEntityTitle, VR.AE, (String) _parameters.get(SENDER_AE_TITLE_PARAM)); } } - final File f = getSafeFile(sessdir, scan, name, dicom, Boolean.valueOf((String) params.get(RENAME_PARAM))); - f.getParentFile().mkdirs(); + final File sessionFolder = new File(new File(root, session.getTimestamp()), session.getFolderName()); + final File outputFile = getSafeFile(sessionFolder, scan, name, dicom, Boolean.valueOf((String) _parameters.get(RENAME_PARAM))); + outputFile.getParentFile().mkdirs(); - PrearcUtils.PrearcFileLock lock; + final PrearcUtils.PrearcFileLock lock; try { - lock = PrearcUtils.lockFile(session.getSessionDataTriple(), f.getName()); - write(fmi, dicom, bis, f, source); - + lock = PrearcUtils.lockFile(session.getSessionDataTriple(), outputFile.getName()); + write(fmi, dicom, bis, outputFile, source); } catch (IOException e) { throw new ServerException(Status.SERVER_ERROR_INSUFFICIENT_STORAGE, e); } catch (SessionFileLockException e) { @@ -446,35 +340,35 @@ public class GradualDicomImporter extends ImporterHandlerA { } try { // check to see of this session came in through the upload applet - if (!session.getPreventAnon() && AnonUtils.getService().isEnabled(DicomEdit.buildScriptPath(DicomEdit.ResourceScope.SITE_WIDE, null),null)) { + if (!session.getPreventAnon() && AnonUtils.getService().isEnabled(DicomEdit.buildScriptPath(DicomEdit.ResourceScope.SITE_WIDE, null), null)) { Configuration c = AnonUtils.getCachedSitewideAnon(); if (c != null && c.getStatus().equals(Configuration.ENABLED_STRING)) { //noinspection deprecation - Anonymize.anonymize(f, - session.getProject(), - session.getSubject(), - session.getFolderName(), - true, - c.getId(), - c.getContents()); + Anonymize.anonymize(outputFile, + session.getProject(), + session.getSubject(), + session.getFolderName(), + true, + c.getId(), + c.getContents()); } else { logger.debug("Anonymization is not enabled, allowing session {} {} {} to proceed without anonymization.", session.getProject(), 
session.getSubject(), session.getName()); } - } else if(session.getPreventAnon()){ + } else if (session.getPreventAnon()) { logger.debug("The session {} {} {} has already been anonymized by the uploader, proceeding without further anonymization.", session.getProject(), session.getSubject(), session.getName()); } } catch (Throwable e) { - logger.debug("Dicom anonymization failed: " + f, e); + logger.debug("Dicom anonymization failed: " + outputFile, e); try { // if we created a row in the database table for this session // delete it. if (getOrCreate.isRight()) { PrearcDatabase.deleteSession(session.getFolderName(), session.getTimestamp(), session.getProject()); } else { - f.delete(); + outputFile.delete(); } } catch (Throwable t) { - logger.debug("Unable to delete relevant file :" + f, e); + logger.debug("Unable to delete relevant file :" + outputFile, e); throw new ServerException(Status.SERVER_ERROR_INTERNAL, t); } throw new ServerException(Status.SERVER_ERROR_INTERNAL, e); @@ -496,6 +390,103 @@ public class GradualDicomImporter extends ImporterHandlerA { } + public GradualDicomImporter setIdentifier(final DicomObjectIdentifier<XnatProjectdata> dicomObjectIdentifier) { + _dicomObjectIdentifier = dicomObjectIdentifier; + return this; + } + + public GradualDicomImporter setNamer(final DicomFileNamer namer) { + _fileNamer = namer; + return this; + } + + private boolean canCreateIn(final XnatProjectdata p) { + try { + return PrearcUtils.canModify(_user, p.getId()); + } catch (Throwable t) { + logger.error("Unable to check permissions for " + _user + " in " + p, t); + return false; + } + } + + private XnatProjectdata getProject(final Object alias, final Callable<XnatProjectdata> lookupProject) { + if (null == _projectCache) { + _projectCache = getUserProjectCache(_user); + } + if (null != alias) { + logger.debug("looking for project matching alias {} from query parameters", alias); + final Element pe = _projectCache.get(alias); + if (null != pe) { + if (isCachedNotWriteableProject(pe)) { + // this alias is cached as a non-writable project name, but user is specifying it. + // maybe they know something we don't; clear cache entry so we can try again. + _projectCache.remove(alias); + return getProject(alias, lookupProject); + } else { + return (XnatProjectdata) pe.getObjectValue(); + } + } else { + logger.trace("cache miss for project alias {}, trying database", alias); + final XnatProjectdata p = XnatProjectdata.getXnatProjectdatasById(alias, _user, false); + if (null != p && canCreateIn(p)) { + _projectCache.put(new Element(alias, p)); + return p; + } else { + for (final XnatProjectdata pa : + XnatProjectdata.getXnatProjectdatasByField("xnat:projectData/aliases/alias/alias", + alias, _user, false)) { + if (canCreateIn(pa)) { + _projectCache.put(new Element(alias, pa)); + return pa; + } + } + } + } + logger.info("storage request specified project {}, which does not exist or user does not have create perms", alias); + } else { + logger.trace("no project alias found in query parameters"); + } + // No alias, or we couldn't match it to a project. Run the identifier to see if that can get a project name/alias. + // (Don't cache alias->identifier-derived-project because we didn't use the alias to derive the project.) + try { + return null == lookupProject ? 
null : lookupProject.call(); + } catch (Throwable t) { + logger.error("error in project lookup", t); + return null; + } + } + + private File getSafeFile(File sessionDir, String scan, String name, DicomObject o, boolean forceRename) { + String fileName = _fileNamer.makeFileName(o); + while (fileName.charAt(0) == '.') { + fileName = fileName.substring(1); + } + final File safeFile = Files.getImageFile(sessionDir, scan, fileName); + if (forceRename) { + return safeFile; + } + final String valname = Files.toFileNameChars(name); + if (!Files.isValidFilename(valname)) { + return safeFile; + } + final File reqFile = Files.getImageFile(sessionDir, scan, valname); + if (reqFile.exists()) { + try (final FileInputStream fin = new FileInputStream(reqFile)) { + final DicomObject o1 = read(fin, name); + if (Objects.equal(o.get(Tag.SOPInstanceUID), o1.get(Tag.SOPInstanceUID)) && + Objects.equal(o.get(Tag.SOPClassUID), o1.get(Tag.SOPClassUID))) { + return reqFile; // object are equivalent; ok to overwrite + } else { + return safeFile; + } + } catch (Throwable t) { + return safeFile; + } + } else { + return reqFile; + } + } + private boolean shouldIncludeDicomObject(final SeriesImportFilter filter, final DicomObject dicom) { // If we don't have a filter or the filter is turned off, then we include the DICOM object by default (no filtering) if (filter == null || !filter.isEnabled()) { @@ -522,48 +513,30 @@ public class GradualDicomImporter extends ImporterHandlerA { if (null == project) { return null; } - Boolean fromDicomObject = dicomObjectIdentifier.requestsAutoarchive(o); + Boolean fromDicomObject = _dicomObjectIdentifier.requestsAutoarchive(o); if (fromDicomObject != null) { return fromDicomObject ? PrearchiveCode.AutoArchive : PrearchiveCode.Manual; } return PrearchiveCode.code(project.getArcSpecification().getPrearchiveCode()); } - /** - * Adds a cache of project objects on a per-user basis. This is currently used by GradualDicomImporter and DbBackedProjectIdentifier - * - * @param user The user for whom to retrieve the cache. - * @return The user's cache. - */ - public static Cache getUserProjectCache(final UserI user) { - final String cacheName = user.getLogin() + "-projects"; - synchronized (cacheManager) { - if (!cacheManager.cacheExists(cacheName)) { - final CacheConfiguration config = new CacheConfiguration(cacheName, 0) - .copyOnRead(false).copyOnWrite(false) - .eternal(false) - .persistence(new PersistenceConfiguration().strategy(PersistenceConfiguration.Strategy.NONE)) - .timeToLiveSeconds(PROJECT_CACHE_EXPIRY_SECONDS); - final Cache cache = new Cache(config); - cacheManager.addCache(cache); - return cache; - } else { - return cacheManager.getCache(cacheName); - } + private static boolean initializeCanDecompress() { + try { + return Decompress.isSupported(); + } catch (NoClassDefFoundError error) { + return false; } } - public GradualDicomImporter setIdentifier(final DicomObjectIdentifier<XnatProjectdata> identifier) { - this.dicomObjectIdentifier = identifier; - return this; - } - - public GradualDicomImporter setNamer(final DicomFileNamer namer) { - this.namer = namer; - return this; + private static <K, V> String getString(final Map<K, V> m, final K k, final V defaultValue) { + final V v = m.get(k); + if (null == v) { + return null == defaultValue ? 
null : defaultValue.toString(); + } else { + return v.toString(); + } } - private static DicomObject read(final InputStream in, final String name) throws ClientException { try (final BufferedInputStream bis = new BufferedInputStream(in); final DicomInputStream dis = new DicomInputStream(bis)) { @@ -580,12 +553,7 @@ public class GradualDicomImporter extends ImporterHandlerA { } } - private static Logger slog() { - return LoggerFactory.getLogger(GradualDicomImporter.class); - } - - private static void write(final DicomObject fmi, final DicomObject dataset, - BufferedInputStream remainder, final File f, final String source) + private static void write(final DicomObject fmi, final DicomObject dataset, final BufferedInputStream remainder, final File f, final String source) throws ClientException, IOException { IOException ioexception = null; final FileOutputStream fos = new FileOutputStream(f); @@ -604,7 +572,7 @@ public class GradualDicomImporter extends ImporterHandlerA { } catch (IOException e) { ioexception = e; throw new ClientException(Status.CLIENT_ERROR_BAD_REQUEST, - "error parsing DICOM object", e); + "error parsing DICOM object", e); } final ByteArrayInputStream bis = new ByteArrayInputStream(Decompress.dicomObject2Bytes(dataset, tsuid)); final DicomObject d = Decompress.decompress_image(bis, tsuid); @@ -617,7 +585,7 @@ public class GradualDicomImporter extends ImporterHandlerA { if (t instanceof IOException) { ioexception = (IOException) t; } else { - slog().error("Unable to write decompressed dataset", t); + logger.error("Unable to write decompressed dataset", t); } try { dos.close(); @@ -628,12 +596,12 @@ public class GradualDicomImporter extends ImporterHandlerA { } catch (ClientException e) { throw e; } catch (Throwable t) { - slog().error("Decompression failed; storing in original format " + tsuid, t); + logger.error("Decompression failed; storing in original format " + tsuid, t); dos.writeFileMetaInformation(fmi); dos.writeDataset(dataset, tsuid); if (null != remainder) { final long copied = ByteStreams.copy(remainder, bos); - slog().trace("copied {} additional bytes to {}", copied, f); + logger.trace("copied {} additional bytes to {}", copied, f); } } } else { @@ -641,16 +609,16 @@ public class GradualDicomImporter extends ImporterHandlerA { dos.writeDataset(dataset, tsuid); if (null != remainder) { final long copied = ByteStreams.copy(remainder, bos); - slog().trace("copied {} additional bytes to {}", copied, f); + logger.trace("copied {} additional bytes to {}", copied, f); } } } catch (NoClassDefFoundError t) { - slog().error("Unable to check compression status; storing in original format " + tsuid, t); + logger.error("Unable to check compression status; storing in original format " + tsuid, t); dos.writeFileMetaInformation(fmi); dos.writeDataset(dataset, tsuid); if (null != remainder) { final long copied = ByteStreams.copy(remainder, bos); - slog().trace("copied {} additional bytes to {}", copied, f); + logger.trace("copied {} additional bytes to {}", copied, f); } } } catch (IOException e) { @@ -673,4 +641,24 @@ public class GradualDicomImporter extends ImporterHandlerA { } } } + + private static final Logger logger = LoggerFactory.getLogger(GradualDicomImporter.class); + private static final Object NOT_A_WRITABLE_PROJECT = new Object(); + private static final String DEFAULT_TRANSFER_SYNTAX = TransferSyntax.ImplicitVRLittleEndian.uid(); + private static final String RENAME_PARAM = "rename"; + private static final DicomFileNamer DEFAULT_NAMER = new 
SOPHashDicomFileNamer(); + private static final long PROJECT_CACHE_EXPIRY_SECONDS = 120; + private static final boolean canDecompress = initializeCanDecompress(); + private static final CacheManager cacheManager = CacheManager.getInstance(); + + private final FileWriterWrapperI _fileWriter; + private final UserI _user; + private final Map<String, Object> _parameters; + + private DicomFileNamer _fileNamer = DEFAULT_NAMER; + + private TransferSyntax _transferSyntax; + private Cache _projectCache; + private DicomFilterService _filterService; + private DicomObjectIdentifier<XnatProjectdata> _dicomObjectIdentifier; } diff --git a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcDatabase.java b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcDatabase.java index 78ce9de3..96d26ffc 100644 --- a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcDatabase.java +++ b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcDatabase.java @@ -832,7 +832,6 @@ public final class PrearcDatabase { } } - private static void pruneDatabase() throws Exception { // construct list of timestamps with extant folders Set<String> timestamps = PrearcDatabase.getPrearchiveFolderTimestamps(); @@ -1792,6 +1791,19 @@ public final class PrearcDatabase { }); } + /** + * Gets the session from a given triple. + * + * @param triple The triple containing the session name, timestamp, and project. + * + * @return The corresponding session data. + * + * @throws Exception When something goes wrong. + */ + public static SessionData getSession(final SessionDataTriple triple) throws Exception { + return getSession(triple.getFolderName(), triple.getTimestamp(), triple.getProject()); + } + /** * Set the prearchive row that corresponds to the given session, timestamp, project triple to the given autoArchive setting. * @@ -1935,37 +1947,6 @@ public final class PrearcDatabase { }.run(); } - /** - * Either retrieve and existing session or create a new one and return it. - * <p/> - * This function is useful if the caller does not care which operation was performed. - * - * @param project - * @param suid - * @param s - * @param tsFile - * @param autoArchive - * - * @return - * - * @throws SQLException - * @throws SessionException - * @throws Exception - */ - public static SessionData getOrCreateSession(final String project, - final String suid, - final SessionData s, - final File tsFile, - final PrearchiveCode autoArchive) - throws SQLException, SessionException, Exception { - Either<SessionData, SessionData> result = PrearcDatabase.eitherGetOrCreateSession(s, tsFile, autoArchive); - if (result.isLeft()) { - return result.getLeft(); - } else { - return result.getRight(); - } - } - /** * Either retrieve and existing session or create a new one. If a session is created an Either object with the "Right" branch set is returned. If we just retrieve one that is already in the prearchive table an Either object with the "Left" branch set is returned. 
* <p/> @@ -1982,7 +1963,7 @@ public final class PrearcDatabase { */ public static synchronized Either<SessionData, SessionData> eitherGetOrCreateSession(final SessionData sessionData, final File tsFile, final PrearchiveCode autoArchive) throws SQLException, SessionException, Exception { return new PredicatedOp<SessionData, SessionData>() { - SessionData ss; + SessionData _sessionData; /** * Return the found session @@ -1991,7 +1972,7 @@ public final class PrearcDatabase { */ Either<SessionData, SessionData> trueOp() throws SQLException, SessionException, Exception { return new Either<SessionData, SessionData>() { - }.setRight(ss); + }.setRight(_sessionData); } /** @@ -2033,7 +2014,7 @@ public final class PrearcDatabase { } /** - * Test whether session exists. If it find the session the instance variable "SessionData ss" + * Test whether session exists. If it find the session the instance variable "SessionData _sessionData" * is initialized here. * * Originally this function initialized a "ResultSet r" instance variable and the "trueOp()" above @@ -2049,12 +2030,54 @@ public final class PrearcDatabase { constraints.add(DatabaseSession.TAG.searchSql(sessionData.getTag())); constraints.add(DatabaseSession.NAME.searchSql(sessionData.getName())); - ResultSet rs = pdb.executeQuery(null, DatabaseSession.findSessionSql(constraints.toArray(new String[constraints.size()])), null); - boolean found = rs.next(); - if (found) { - ss = DatabaseSession.fillSession(rs); + final ResultSet rs = pdb.executeQuery(null, DatabaseSession.findSessionSql(constraints.toArray(new String[constraints.size()])), null); + if (!rs.next()) { + if(logger.isDebugEnabled()) { + logger.debug("Found no existing session for " + sessionData.getSessionDataTriple().toString() + ". A new session data object will be created for data reception."); + } + return false; + } + + final SessionData sessionData = DatabaseSession.fillSession(rs); + + final PrearcStatus status = sessionData.getStatus(); + if (PrearcStatus.RECEIVING.equals(status)|| PrearcStatus.RECEIVING_INTERRUPT.equals(status)) { + // Obviously if we're receiving we're fine. + if(logger.isDebugEnabled()) { + logger.debug("Receiving incoming data for session " + sessionData.getSessionDataTriple().toString() + ", which is currently in " + status + " state, which is totally fine."); + } + _sessionData = sessionData; + return true; + } + if (status == PrearcStatus.BUILDING) { + // If the session is currently building, then set this session to RECEIVING_INTERRUPT, + // which will allow it to continue receiving but prevent autoarchiving or session + // splitting afterwards. + if(logger.isWarnEnabled()) { + logger.warn("Receiving incoming data for session " + sessionData.getSessionDataTriple().toString() + " in BUILDING state, setting status to RECEIVING_INTERRUPT to block autoarchive and other operations and allow continuation of data reception."); + } + PoolDBUtils.ExecuteNonSelectQuery(DatabaseSession.updateSessionStatusSQL(sessionData.getName(), sessionData.getTimestamp(), sessionData.getProject(), PrearcStatus.RECEIVING_INTERRUPT), null, null); + _sessionData = sessionData; + return true; + } + if (status.isInterruptable()) { + // If the session is interruptable, which means it's not receiving but it's OK to set it to + // receiving (ready, in error, or in conflict), that's OK. Set to RECEIVING and return the + // session. Any other issues will be worked out (or re-occur) later. 
+ if (logger.isInfoEnabled()) { + logger.info("Receiving incoming data for session " + sessionData.getSessionDataTriple().toString() + ", which is currently in the interruptable " + status + " state. Setting status to RECEIVING to allow continuation of data reception."); + } + PoolDBUtils.ExecuteNonSelectQuery(DatabaseSession.updateSessionStatusSQL(sessionData.getName(), sessionData.getTimestamp(), sessionData.getProject(), PrearcStatus.RECEIVING), null, null); + _sessionData = sessionData; + return true; } - return found; + // If the status isn't interruptable, e.g. we're archiving or moving or deleting or whatever, + // then return false: we'll create a new session to receive the incoming data. This may require + // a merge later, but should prevent data loss. + if (logger.isWarnEnabled()) { + logger.warn("Receiving incoming data for session " + sessionData.getSessionDataTriple().toString() + ", which is currently in the non-interruptable " + status + " state. Creating a new RECEIVING session to allow continuation of data reception."); + } + return false; } }.run(); } @@ -2290,6 +2313,19 @@ public final class PrearcDatabase { return StringUtils.join(as.toArray(new String[as.size()]), ","); } + /** + * Update the last modified time of the session to the current time. + * + * @param triple The triple containing the session name, timestamp, and project. + * + * @throws SQLException + * @throws SessionException + * @throws Exception + */ + public static void updateTimestamp(final SessionDataTriple triple) throws SQLException, SessionException, Exception { + updateTimestamp(triple.getFolderName(), triple.getTimestamp(), triple.getProject()); + } + /** * Update the last modified time of the session to the current time. * @@ -2301,8 +2337,7 @@ public final class PrearcDatabase { * @throws SessionException * @throws Exception */ - - public static void setLastModifiedTime(String sess, String timestamp, String proj) throws SQLException, SessionException, Exception { + public static void updateTimestamp(String sess, String timestamp, String proj) throws SQLException, SessionException, Exception { modifySession(sess, timestamp, proj, new SessionOp<java.lang.Void>() { public Void op() throws SQLException, Exception { return null; diff --git a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcTableBuilder.java b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcTableBuilder.java index 816eb750..cf8da7e1 100644 --- a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcTableBuilder.java +++ b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcTableBuilder.java @@ -8,10 +8,6 @@ * * Last modified 7/19/13 4:17 PM */ - -/** - * - */ package org.nrg.xnat.helpers.prearchive; import org.apache.commons.lang3.StringUtils; @@ -19,7 +15,6 @@ import org.nrg.framework.constants.PrearchiveCode; import org.nrg.xdat.bean.XnatImagesessiondataBean; import org.nrg.xdat.bean.reader.XDATXMLReader; import org.nrg.xdat.model.XnatImagesessiondataI; -import org.nrg.xft.XFTTable; import org.nrg.xnat.helpers.prearchive.PrearcUtils.PrearcStatus; import org.nrg.xnat.turbine.utils.ArcSpecManager; import org.slf4j.Logger; @@ -33,304 +28,253 @@ import java.util.*; /** * @author timo - * */ public class PrearcTableBuilder implements PrearcTableBuilderI { - static Logger logger = LoggerFactory.getLogger(PrearcTableBuilder.class); - - public final static String[] PREARC_HEADERS = 
{"project".intern(),"last_mod".intern(),"uploaded".intern(),"scan_date".intern(),"scan_time".intern(),"subject".intern(),"session".intern(),"status".intern(),"url".intern(),"visit".intern(),"protocol".intern(),"TIMEZONE".intern(),"SOURCE".intern()}; - - public static Object[] buildRow(final Session s,final String urlBase){ - Object[] row = new Object[PREARC_HEADERS.length]; - row[0]=s.getProject(); - row[1]=s.getLastBuiltDate(); - row[2]=s.getUploadDate(); - row[3]=s.getDate(); - row[4]=s.getTime(); - row[5]=PrearcTableBuilder.Session.pickSubjectName(s); - row[6]=PrearcTableBuilder.Session.pickSessionName(s); - row[7]=s.getStatus(); - row[8]=StringUtils.join(new String[]{urlBase,"/".intern(),s.getTimestamp(),"/".intern(),s.getFolderName()}); - row[9]=s.getVisit(); - row[10]=s.getProtocol(); - row[11]=s.getTimeZone(); - row[12]=s.getSource(); - - return row; - } - - - public static XnatImagesessiondataBean parseSession(final File s) throws IOException, SAXException{ - XDATXMLReader parser = new XDATXMLReader(); - return (XnatImagesessiondataBean) parser.parse(s); - } - - public class ProjectPrearchive implements ProjectPrearchiveI { - private Date lastMod; - private XFTTable content; - - public ProjectPrearchive(final Date l, final XFTTable c){ - lastMod=l; - content=c; - } - - /* (non-Javadoc) - * @see org.nrg.xnat.helpers.prearchive.ProjectPrearchiveI#getLastMod() - */ - @Override - public Date getLastMod() { - return lastMod; - } - - /* (non-Javadoc) - * @see org.nrg.xnat.helpers.prearchive.ProjectPrearchiveI#getContent() - */ - @Override - public XFTTable getContent() { - return content; - } - - - } - - @Override - public String[] getColumns() { - return PREARC_HEADERS; - } - - - public static String printSession (Session s) { - ArrayList<String> as = new ArrayList<String>(); - as.add("--Session--"); - as.add("Name : " + s.getFolderName()); - as.add("Status : " + s.getStatus()); - as.add("SubjectId: " + s.getSubjectId()); - as.add("Scan Time : " + s.getTimestamp()); - as.add("Uploaded : " + s.getUploadDate().toString()); - return StringUtils.join(as.toArray(new String[as.size()]), "\n"); - } - - public static class Session implements Comparable<Session> { - private final File sessionXML; - - private XnatImagesessiondataI session=null; - - private SessionData data = new SessionData(); - - //passed in project should override what is in the session xml, if it exists - Session(final File sessdir, final String project) { - data.setFolderName(sessdir.getName()); - - sessionXML = new File(sessdir.getPath() + ".xml"); - if (sessionXML.exists()) { - data.setLastBuiltDate(new Date(sessionXML.lastModified())); - } else { - data.setLastBuiltDate(new Date()); - } - - Date t_uploadDate; - try { - t_uploadDate = PrearcUtils.parseTimestampDirectory(sessdir.getParentFile().getName()); - } catch (final ParseException e) { - logger.error("Unable to parse upload date from session parent " + sessdir.getParentFile(), e); - t_uploadDate = null; - } - data.setUploadDate(t_uploadDate); - - - data.setStatus(PrearcUtils.checkSessionStatus(sessionXML)); - - if(!sessionXML.exists() || sessionXML.length() == 0){ - if(project!=null){ - session=new XnatImagesessiondataBean(); - session.setProject(project); - } - if(PrearcStatus.potentiallyReady(data.getStatus()))data.setStatus(PrearcStatus.RECEIVING); - }else{ - try { - session=parseSession(sessionXML); - - session.setProject(project); - - data.setTag(session.getUid()); - - final String sessionID = session.getId(); - if (null == sessionID || "".equals(sessionID) || 
"NULL".equals(sessionID)) { - data.setStatus(PrearcStatus.READY); - } else { - data.setStatus(PrearcStatus.ARCHIVING); - } - } catch (Exception e) { - if(PrearcStatus.potentiallyReady(data.getStatus())){ - // The following accounts for the case where a project was passed in but the session XML is unparseable for some reason (eg. it is empty). - // In that case use the project name passed in. - if (project != null && (data.getProject() == null || !data.getProject().equals(project))) { - data.setProject(project); - } - data.setStatus(PrearcStatus.ERROR); - - PrearcUtils.log(sessdir, e); - } - } - } - } - - public SessionData getSessionData (String urlBase) { - //populate the rest of the session data object. - data.setProject(this.getProject()); - data.setScan_date(this.getDate()); - data.setScan_time(this.getTime()); - data.setSubject(PrearcTableBuilder.Session.pickSubjectName(this)); - data.setName(PrearcTableBuilder.Session.pickSessionName(this)); - data.setFolderName(this.getFolderName()); - data.setTag(this.getTag()); - data.setAutoArchive(this.getPrearchiveCode()); - data.setUrl(PrearcUtils.makeUri(urlBase, data.getTimestamp(), data.getFolderName())); - data.setVisit(this.getVisit()); - data.setProtocol(this.getProtocol()); - data.setTimeZone(this.getTimeZone()); - data.setSource(this.getSource()); - return this.data; - } - - public static String pickSubjectName(final PrearcTableBuilder.Session s) { - String ret = ""; - if (StringUtils.isNotEmpty(s.getSubjectId())) { - ret = s.getSubjectId(); - } - if (StringUtils.isEmpty(ret) && StringUtils.isNotEmpty(s.getPatientName())) { - ret = s.getPatientName(); - } - - return ret; - } - - public static String pickSessionName(final PrearcTableBuilder.Session s) { - String ret = ""; - if (StringUtils.isNotEmpty(s.getLabel())) { - ret = s.getLabel(); - } - if (StringUtils.isEmpty(ret) && StringUtils.isNotEmpty(s.getPatientId())) { - ret = s.getPatientId(); - } - - if (StringUtils.isEmpty(ret)){ - return s.getFolderName(); - } - return ret; - } - - public void setFolderName(String name) { - this.data.setFolderName(name); - } - - public String getFolderName() { - return data.getFolderName(); - } - - public void setTag(String name) { - this.data.setTag(name); - } - - public String getTag() { - return data.getTag(); - } - - public void setSessionName(String name) { - this.data.setName(name); - } - - public String getSessionName() { - return data.getName(); - } - - public Date getLastBuiltDate() { - return data.getLastBuiltDate(); - } - public void setLastBuiltDate(Date lastBuiltDate) { - data.setLastBuiltDate(lastBuiltDate); - } - - public Date getUploadDate() { - return data.getUploadDate(); - } - - public String getTimestamp() { - return data.getTimestamp(); - } - - public void setTimestamp(String timestamp) { - this.data.setTimestamp(timestamp); - } - - public String getProject(){ - // Get the project specified in the session.xml. - // If the session.xml file couldn't be parsed return the project field in the the local SessionData object - // which holds the optional project name passed to the constructor in case of an unparseable session.xml. 
- if (session != null) { - return session.getProject(); - } - else { - if (data != null && data.getProject() != null) { - return data.getProject(); - } - else { - return null; - } - } - } - - public Object getDate(){ - return (session!=null)?session.getDate():null; - } - - public Object getTime(){ - return (session!=null)?session.getTime():null; - } - - public String getSubjectId(){ - return (session!=null)?session.getSubjectId():null; - } - - public String getLabel(){ - return (session!=null)?session.getLabel():null; - - } - public String getVisit(){ - return (session!=null)?session.getVisit():null; - - } - public String getProtocol(){ - return (session!=null)?session.getProtocol():null; - - } - public String getTimeZone(){ - //no need to keep timezone in the image session. - //return (session!=null)?session.getTimeZone():null; - return null; - } - public String getSource(){ - return null; - - } - public String getPatientId() { - return (session!=null)?session.getDcmpatientid():null; - } - - public String getPatientName() { - return (session!=null)?session.getDcmpatientname():null; - } - - public PrearcStatus getStatus(){ - return data.getStatus(); - } - - public File getSessionXML(){ - return this.sessionXML; - } - - public PrearchiveCode getPrearchiveCode() { - final String project = this.getProject(); + static Logger logger = LoggerFactory.getLogger(PrearcTableBuilder.class); + + public final static String[] PREARC_HEADERS = {"project", "last_mod", "uploaded", "scan_date", "scan_time", "subject", "session", "status", "url", "visit", "protocol", "TIMEZONE", "SOURCE"}; + + public static XnatImagesessiondataBean parseSession(final File s) throws IOException, SAXException { + XDATXMLReader parser = new XDATXMLReader(); + return (XnatImagesessiondataBean) parser.parse(s); + } + + @Override + public String[] getColumns() { + return PREARC_HEADERS; + } + + public static class Session implements Comparable<Session> { + private final File sessionXML; + + private XnatImagesessiondataI session = null; + + private SessionData data = new SessionData(); + + //passed in project should override what is in the session xml, if it exists + Session(final File folder, final String project) { + data.setFolderName(folder.getName()); + + sessionXML = new File(folder.getPath() + ".xml"); + if (sessionXML.exists()) { + data.setLastBuiltDate(new Date(sessionXML.lastModified())); + } else { + data.setLastBuiltDate(new Date()); + } + + Date t_uploadDate; + try { + t_uploadDate = PrearcUtils.parseTimestampDirectory(folder.getParentFile().getName()); + } catch (final ParseException e) { + logger.error("Unable to parse upload date from session parent " + folder.getParentFile(), e); + t_uploadDate = null; + } + data.setUploadDate(t_uploadDate); + + data.setStatus(PrearcUtils.checkSessionStatus(sessionXML)); + + if (!sessionXML.exists() || sessionXML.length() == 0) { + if (project != null) { + session = new XnatImagesessiondataBean(); + session.setProject(project); + } + if (PrearcStatus.potentiallyReady(data.getStatus())) { + data.setStatus(PrearcStatus.RECEIVING); + } + } else { + try { + session = parseSession(sessionXML); + + session.setProject(project); + + data.setTag(session.getUid()); + + final String sessionID = session.getId(); + if (null == sessionID || "".equals(sessionID) || "NULL".equals(sessionID)) { + data.setStatus(PrearcStatus.READY); + } else { + data.setStatus(PrearcStatus.ARCHIVING); + } + } catch (Exception e) { + if (PrearcStatus.potentiallyReady(data.getStatus())) { + // The following accounts 
for the case where a project was passed in but the session XML is unparseable for some reason (eg. it is empty). + // In that case use the project name passed in. + if (project != null && (data.getProject() == null || !data.getProject().equals(project))) { + data.setProject(project); + } + data.setStatus(PrearcStatus.ERROR); + + PrearcUtils.log(folder, e); + } + } + } + } + + public SessionData getSessionData(String urlBase) { + //populate the rest of the session data object. + data.setProject(getProject()); + data.setScan_date(getDate()); + data.setScan_time(getTime()); + data.setSubject(PrearcTableBuilder.Session.pickSubjectName(this)); + data.setName(PrearcTableBuilder.Session.pickSessionName(this)); + data.setFolderName(getFolderName()); + data.setTag(getTag()); + data.setAutoArchive(getPrearchiveCode()); + data.setUrl(PrearcUtils.makeUri(urlBase, data.getTimestamp(), data.getFolderName())); + data.setVisit(getVisit()); + data.setProtocol(getProtocol()); + data.setTimeZone(getTimeZone()); + data.setSource(getSource()); + return data; + } + + public static String pickSubjectName(final PrearcTableBuilder.Session s) { + String ret = ""; + if (StringUtils.isNotEmpty(s.getSubjectId())) { + ret = s.getSubjectId(); + } + if (StringUtils.isEmpty(ret) && StringUtils.isNotEmpty(s.getPatientName())) { + ret = s.getPatientName(); + } + + return ret; + } + + public static String pickSessionName(final PrearcTableBuilder.Session s) { + String ret = ""; + if (StringUtils.isNotEmpty(s.getLabel())) { + ret = s.getLabel(); + } + if (StringUtils.isEmpty(ret) && StringUtils.isNotEmpty(s.getPatientId())) { + ret = s.getPatientId(); + } + + if (StringUtils.isEmpty(ret)) { + return s.getFolderName(); + } + return ret; + } + + @SuppressWarnings("unused") + public void setFolderName(String name) { + data.setFolderName(name); + } + + public String getFolderName() { + return data.getFolderName(); + } + + public void setTag(String name) { + data.setTag(name); + } + + public String getTag() { + return data.getTag(); + } + + @SuppressWarnings("unused") + public void setSessionName(String name) { + data.setName(name); + } + + @SuppressWarnings("unused") + public String getSessionName() { + return data.getName(); + } + + public Date getLastBuiltDate() { + return data.getLastBuiltDate(); + } + + @SuppressWarnings("unused") + public void setLastBuiltDate(Date lastBuiltDate) { + data.setLastBuiltDate(lastBuiltDate); + } + + @SuppressWarnings("unused") + public Date getUploadDate() { + return data.getUploadDate(); + } + + public String getTimestamp() { + return data.getTimestamp(); + } + + public void setTimestamp(String timestamp) { + data.setTimestamp(timestamp); + } + + public String getProject() { + // Get the project specified in the session.xml. + // If the session.xml file couldn't be parsed return the project field in the the local SessionData object + // which holds the optional project name passed to the constructor in case of an unparseable session.xml. + if (session != null) { + return session.getProject(); + } else { + if (data != null && data.getProject() != null) { + return data.getProject(); + } else { + return null; + } + } + } + + public Object getDate() { + return (session != null) ? session.getDate() : null; + } + + public Object getTime() { + return (session != null) ? session.getTime() : null; + } + + public String getSubjectId() { + return (session != null) ? session.getSubjectId() : null; + } + + public String getLabel() { + return (session != null) ? 
session.getLabel() : null; + + } + + public String getVisit() { + return (session != null) ? session.getVisit() : null; + + } + + public String getProtocol() { + return (session != null) ? session.getProtocol() : null; + + } + + public String getTimeZone() { + //no need to keep timezone in the image session. + //return (session!=null)?session.getTimeZone():null; + return null; + } + + public String getSource() { + return null; + + } + + public String getPatientId() { + return (session != null) ? session.getDcmpatientid() : null; + } + + public String getPatientName() { + return (session != null) ? session.getDcmpatientname() : null; + } + + public PrearcStatus getStatus() { + return data.getStatus(); + } + + public File getSessionXML() { + return sessionXML; + } + + public PrearchiveCode getPrearchiveCode() { + final String project = getProject(); if (project == null || project.equals(PrearcUtils.COMMON)) { logger.info("Found null or unassigned project, returning prearchive code of Manual"); return PrearchiveCode.Manual; // Unassigned projects will not have a known prearchive code @@ -338,56 +282,68 @@ public class PrearcTableBuilder implements PrearcTableBuilderI { final Integer prearchiveCode = ArcSpecManager.GetInstance().getPrearchiveCodeForProject(project); if (prearchiveCode == null) { if (logger.isWarnEnabled()) { - logger.warn("Found a prearchive entry " + this.getFolderName() + " with a project that didn't return an archive code: " + project); + logger.warn("Found a prearchive entry " + getFolderName() + " with a project that didn't return an archive code: " + project); } return PrearchiveCode.Manual; } return PrearchiveCode.code(prearchiveCode); } - - /* - * (non-Javadoc) - * - * @see java.lang.Comparable#compareTo(java.lang.Object) - */ - @Override - public int compareTo(final Session other) { - return getLastBuiltDate().compareTo(other.getLastBuiltDate()); - } - } - - public SortedMap<Date, Collection<Session>> getPrearcSessions(final File prearcDir) throws IOException, SAXException { - final SortedMap<Date, Collection<Session>> sessions = new TreeMap<Date, Collection<Session>>(); - if(PrearcUtils.isTimestampDirectory.accept(prearcDir)){ - for (final File sessdir : prearcDir.listFiles(PrearcUtils.isDirectory)) { - final Session session = buildSessionObject(sessdir,prearcDir.getName(),null); - - final Date builtDate = session.getLastBuiltDate(); - - if (!sessions.containsKey(builtDate)) { - sessions.put(builtDate, new ArrayList<Session>(1)); - } - sessions.get(builtDate).add(session); - } - }else{ - for (final File tsdir : prearcDir.listFiles(PrearcUtils.isTimestampDirectory)) { - for (final File sessdir : tsdir.listFiles(PrearcUtils.isDirectory)) { - final Session session = buildSessionObject(sessdir,tsdir.getName(),prearcDir.getName()); - - final Date builtDate = session.getLastBuiltDate(); - if (!sessions.containsKey(builtDate)) { - sessions.put(builtDate, new ArrayList<Session>(1)); - } - sessions.get(builtDate).add(session); - } - } - } - return sessions; - } - - public static Session buildSessionObject(final File sessdir,final String timestamp, final String project){ - final Session session = new Session(sessdir,project); - session.setTimestamp(timestamp); - return session; - } + + /* + * (non-Javadoc) + * + * @see java.lang.Comparable#compareTo(java.lang.Object) + */ + @Override + public int compareTo(final Session other) { + return getLastBuiltDate().compareTo(other.getLastBuiltDate()); + } + } + + public SortedMap<Date, Collection<Session>> getPrearcSessions(final File 
prearcDir) throws IOException, SAXException { + if (prearcDir == null) { + return null; + } + final SortedMap<Date, Collection<Session>> sessions = new TreeMap<>(); + if (PrearcUtils.isTimestampDirectory.accept(prearcDir)) { + final File[] folders = prearcDir.listFiles(PrearcUtils.isDirectory); + if (folders != null) { + for (final File folder : folders) { + final Session session = buildSessionObject(folder, prearcDir.getName(), null); + + final Date builtDate = session.getLastBuiltDate(); + + if (!sessions.containsKey(builtDate)) { + sessions.put(builtDate, new ArrayList<Session>(1)); + } + sessions.get(builtDate).add(session); + } + } + } else { + final File[] timestampFolders = prearcDir.listFiles(PrearcUtils.isTimestampDirectory); + if (timestampFolders != null) { + for (final File timestampFolder : timestampFolders) { + final File[] folders = timestampFolder.listFiles(PrearcUtils.isDirectory); + if (folders != null) { + for (final File folder : folders) { + final Session session = buildSessionObject(folder, timestampFolder.getName(), prearcDir.getName()); + + final Date builtDate = session.getLastBuiltDate(); + if (!sessions.containsKey(builtDate)) { + sessions.put(builtDate, new ArrayList<Session>(1)); + } + sessions.get(builtDate).add(session); + } + } + } + } + } + return sessions; + } + + public static Session buildSessionObject(final File folder, final String timestamp, final String project) { + final Session session = new Session(folder, project); + session.setTimestamp(timestamp); + return session; + } } diff --git a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcUtils.java b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcUtils.java index b9bf650b..3a1d0f36 100644 --- a/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcUtils.java +++ b/src/main/java/org/nrg/xnat/helpers/prearchive/PrearcUtils.java @@ -92,38 +92,48 @@ public class PrearcUtils { public enum PrearcStatus { ARCHIVING, - BUILDING, - CONFLICT, + BUILDING(true), + CONFLICT(true), DELETING, - ERROR, + ERROR(true), MOVING, - READY, - RECEIVING, + READY(true), + RECEIVING(true), + RECEIVING_INTERRUPT(true), SEPARATING, QUEUED_ARCHIVING, - QUEUED_BUILDING, + QUEUED_BUILDING(true), QUEUED_DELETING, QUEUED_MOVING, QUEUED_SEPARATING, _ARCHIVING, - _BUILDING, - _CONFLICT, + _BUILDING(true), + _CONFLICT(true), _DELETING, _MOVING, - _RECEIVING, - _SEPARATING, - - _QUEUED_ARCHIVING, - _QUEUED_BUILDING, - _QUEUED_DELETING, - _QUEUED_MOVING, - _QUEUED_SEPARATING; + _RECEIVING(true), + _RECEIVING_INTERRUPT(true), + _SEPARATING; public static boolean potentiallyReady(PrearcStatus status) { return (status == null || status.equals(READY)); } + + public boolean isInterruptable() { + return _interruptable; + } + + PrearcStatus() { + this(false); + } + + PrearcStatus(final boolean interruptable) { + _interruptable = interruptable; + } + + private final boolean _interruptable; } private static Logger logger() { @@ -135,7 +145,7 @@ public class PrearcUtils { public static Map<PrearcStatus, PrearcStatus> createInProcessMap() { Map<PrearcStatus, PrearcStatus> map = new HashMap<>(); for (PrearcStatus s : PrearcStatus.values()) { - if (s != PrearcStatus.READY && s != PrearcStatus.ERROR && s.toString().charAt(0) != '_') { + if (s != PrearcStatus.READY && s != PrearcStatus.ERROR && s.toString().charAt(0) != '_' && !s.toString().startsWith("QUEUED_")) { map.put(s, PrearcStatus.valueOf("_" + s.name())); } } diff --git a/src/main/java/org/nrg/xnat/helpers/prearchive/SessionDataTriple.java
b/src/main/java/org/nrg/xnat/helpers/prearchive/SessionDataTriple.java index ba0c0e31..284210df 100644 --- a/src/main/java/org/nrg/xnat/helpers/prearchive/SessionDataTriple.java +++ b/src/main/java/org/nrg/xnat/helpers/prearchive/SessionDataTriple.java @@ -10,98 +10,130 @@ */ package org.nrg.xnat.helpers.prearchive; +import org.apache.commons.lang3.StringUtils; import org.nrg.xnat.restlet.XNATApplication; import org.nrg.xnat.restlet.actions.PrearcImporterA.PrearcSession; -import java.io.File; import java.io.Serializable; import java.net.MalformedURLException; -import java.util.HashMap; import java.util.Map; public class SessionDataTriple implements Serializable { + public SessionDataTriple() { + // Default constructor + } + + public SessionDataTriple(final String folderName, final String timestamp, final String project) { + setFolderName(folderName); + setTimestamp(timestamp); + setProject(project); + } + + public static SessionDataTriple fromMap(Map<String, String> m) { + return new SessionDataTriple().setFolderName(m.get("SESSION_LABEL")) + .setProject(m.get("PROJECT_ID")) + .setTimestamp(m.get("SESSION_TIMESTAMP")); + } + + public static SessionDataTriple fromURI(final String uri) throws MalformedURLException { + final PrearcUriParserUtils.SessionParser parser = new PrearcUriParserUtils.SessionParser(new PrearcUriParserUtils.UriParser(XNATApplication.PREARC_SESSION_URI)); + return SessionDataTriple.fromMap(parser.readUri(uri)); + } + + public static SessionDataTriple fromPrearcSession(final PrearcSession session) { + return new SessionDataTriple().setFolderName(session.getFolderName()) + .setProject(session.getProject()) + .setTimestamp(session.getTimestamp()); + } + + public String getFolderName() { + return _folderName; + } + + public SessionDataTriple setFolderName(final String name) { + _folderName = name; + return this; + } + + public SessionDataTriple setFolderName(final Object object) { + if (object != null) { + setFolderName(object.toString()); + } + return this; + } + + public String getTimestamp() { + return _timestamp; + } + + public SessionDataTriple setTimestamp(final String timestamp) { + _timestamp = timestamp; + return this; + } + + public SessionDataTriple setTimestamp(final Object object) { + if (object != null) { + setTimestamp(object.toString()); + } + return this; + } + + public String getProject() { + return _project; + } + + public SessionDataTriple setProject(final String project) { + if (StringUtils.isNotBlank(project)) { + _project = project; + } else { + _project = PrearcUtils.COMMON; + } + return this; + } + + public SessionDataTriple setProject(final Object object) { + setProject(object.toString()); + return this; + } + + @Override + public String toString() { + return _folderName + ':' + _timestamp + ':' + _project; + } + + @Override + public boolean equals(final Object other) { + if (this == other) { + return true; + } + if (other == null || getClass() != other.getClass()) { + return false; + } + + final SessionDataTriple that = (SessionDataTriple) other; + + return _folderName != null + ? _folderName.equals(that._folderName) + : that._folderName == null + && (_timestamp != null + ? _timestamp.equals(that._timestamp) + : that._timestamp == null + && (_project != null + ? _project.equals(that._project) + : that._project == null)); + } + + @Override + public int hashCode() { + int result = _folderName != null ? _folderName.hashCode() : 0; + result = 31 * result + (_timestamp != null ? _timestamp.hashCode() : 0); + result = 31 * result + (_project != null ? 
_project.hashCode() : 0); + return result; + } + private static final long serialVersionUID = 7764386535994779313L; - private String folderName; - private String timestamp; - private String project; - - public SessionDataTriple() { - } - public String getFolderName() { - return this.folderName; - } - public SessionDataTriple setFolderName(String name) { - this.folderName = name; - return this; - } - public SessionDataTriple setFolderName(Object o) { - if (o != null) { - this.setFolderName((String)o); - } - return this; - } - - public String getTimestamp() { - return this.timestamp; - } - public SessionDataTriple setTimestamp(String timestamp) { - this.timestamp = timestamp; - return this; - } - public SessionDataTriple setTimestamp(Object o) { - if (o != null) { - this.setTimestamp((String)o); - } - return this; - } - - public String getProject() { - return this.project; - } - public SessionDataTriple setProject(String project) { - if (project != null) { - this.project = project; - } - else { - this.project = PrearcUtils.COMMON; - } - return this; - } - public SessionDataTriple setProject(Object o) { - this.setProject((String)o); - return this; - } - - public static SessionDataTriple makeTriple (String sess, String timestamp, String proj) { - return new SessionDataTriple().setFolderName(sess).setProject(proj).setTimestamp(timestamp); - } - - public Map<String,String> toMap () { - Map<String,String> ret = new HashMap<String,String>(); - ret.put("PROJECT_ID", this.getProject()); - ret.put("SESSION_TIMESTAMP", this.getTimestamp()); - ret.put("SESSION_LABEL", this.getFolderName()); - return ret; - } - - public static SessionDataTriple fromMap (Map<String,String> m) { - return new SessionDataTriple().setFolderName(m.get("SESSION_LABEL")) - .setProject(m.get("PROJECT_ID")) - .setTimestamp(m.get("SESSION_TIMESTAMP")); - } - public static SessionDataTriple fromFile (final String project, final File f) { - return new SessionDataTriple().setFolderName(f.getName()) - .setProject(project) - .setTimestamp(f.getParentFile().getName()); - } - public static SessionDataTriple fromURI (final String uri) throws MalformedURLException { - final PrearcUriParserUtils.SessionParser parser = new PrearcUriParserUtils.SessionParser(new PrearcUriParserUtils.UriParser(XNATApplication.PREARC_SESSION_URI)); - return SessionDataTriple.fromMap(parser.readUri(uri)); - } - public static SessionDataTriple fromPrearcSession (final PrearcSession session) { - return new SessionDataTriple().setFolderName(session.getFolderName()) - .setProject(session.getProject()) - .setTimestamp(session.getTimestamp()); - } + private String _folderName; + private String _timestamp; + private String _project; } \ No newline at end of file diff --git a/src/main/java/org/nrg/xnat/helpers/prearchive/handlers/PrearchiveRebuildHandler.java b/src/main/java/org/nrg/xnat/helpers/prearchive/handlers/PrearchiveRebuildHandler.java index 937a10c0..95a5c666 100644 --- a/src/main/java/org/nrg/xnat/helpers/prearchive/handlers/PrearchiveRebuildHandler.java +++ b/src/main/java/org/nrg/xnat/helpers/prearchive/handlers/PrearchiveRebuildHandler.java @@ -7,6 +7,7 @@ import org.nrg.xdat.bean.reader.XDATXMLReader; import org.nrg.xnat.archive.FinishImageUpload; import org.nrg.xnat.helpers.prearchive.PrearcDatabase; import org.nrg.xnat.helpers.prearchive.PrearcUtils; +import org.nrg.xnat.helpers.prearchive.SessionData; import org.nrg.xnat.restlet.actions.PrearcImporterA; import org.nrg.xnat.services.messaging.prearchive.PrearchiveOperationRequest; import 
org.slf4j.Logger; @@ -38,42 +39,55 @@ public class PrearchiveRebuildHandler extends AbstractPrearchiveOperationHandler } } else if (PrearcDatabase.setStatus(getSessionData().getFolderName(), getSessionData().getTimestamp(), getSessionData().getProject(), PrearcUtils.PrearcStatus.BUILDING)) { PrearcDatabase.buildSession(getSessionDir(), getSessionData().getFolderName(), getSessionData().getTimestamp(), getSessionData().getProject(), getSessionData().getVisit(), getSessionData().getProtocol(), getSessionData().getTimeZone(), getSessionData().getSource()); - final boolean separatePetMr = PrearcUtils.isUnassigned(getSessionData()) ? PrearcUtils.shouldSeparatePetMr() : PrearcUtils.shouldSeparatePetMr(getSessionData().getProject()); - if (separatePetMr) { - if (_log.isDebugEnabled()) { - _log.debug("Found create separate PET and MR sessions setting for project {}, now working to separate that.", getSessionData().getProject()); - } - final File sessionXml = new File(getSessionDir() + ".xml"); - if (sessionXml.exists()) { + + // We need to check whether the session was updated to RECEIVING_INTERRUPT while the rebuild operation + // was happening. If that happened, that means more data started to arrive during the rebuild. If not, + // we'll proceed down the path where we check for session splits and autoarchive. If so, we'll just + // reset the status to RECEIVING and update the session timestamp. + final SessionData current = PrearcDatabase.getSession(getSessionData().getSessionDataTriple()); + if (current.getStatus() != PrearcUtils.PrearcStatus.RECEIVING_INTERRUPT) { + final boolean separatePetMr = PrearcUtils.isUnassigned(getSessionData()) ? PrearcUtils.shouldSeparatePetMr() : PrearcUtils.shouldSeparatePetMr(getSessionData().getProject()); + if (separatePetMr) { if (_log.isDebugEnabled()) { - _log.debug("Found the session XML in the file {}, processing.", sessionXml.getAbsolutePath()); + _log.debug("Found create separate PET and MR sessions setting for project {}, now working to separate that.", getSessionData().getProject()); } - final XnatImagesessiondataBean bean = (XnatImagesessiondataBean) new XDATXMLReader().parse(sessionXml); - if (bean instanceof XnatPetmrsessiondataBean) { + final File sessionXml = new File(getSessionDir() + ".xml"); + if (sessionXml.exists()) { if (_log.isDebugEnabled()) { - _log.debug("Found a PET/MR session XML in the file {} with the separate PET/MR flag set to true for the site or project, creating a new request to separate the session.", sessionXml.getAbsolutePath()); + _log.debug("Found the session XML in the file {}, processing.", sessionXml.getAbsolutePath()); } - PrearcUtils.resetStatus(getUser(), getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), true); - final PrearchiveOperationRequest request = new PrearchiveOperationRequest(getUser(), getSessionData(), getSessionDir(), "Separate"); - XDAT.sendJmsRequest(request); - return; - } else if (_log.isDebugEnabled()) { - _log.debug("Found a session XML for a {} session in the file {}. 
Not PET/MR so not separating.", bean.getFullSchemaElementName(), sessionXml.getAbsolutePath()); + final XnatImagesessiondataBean bean = (XnatImagesessiondataBean) new XDATXMLReader().parse(sessionXml); + if (bean instanceof XnatPetmrsessiondataBean) { + if (_log.isDebugEnabled()) { + _log.debug("Found a PET/MR session XML in the file {} with the separate PET/MR flag set to true for the site or project, creating a new request to separate the session.", sessionXml.getAbsolutePath()); + } + PrearcUtils.resetStatus(getUser(), getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), true); + final PrearchiveOperationRequest request = new PrearchiveOperationRequest(getUser(), getSessionData(), getSessionDir(), "Separate"); + XDAT.sendJmsRequest(request); + return; + } else if (_log.isDebugEnabled()) { + _log.debug("Found a session XML for a {} session in the file {}. Not PET/MR so not separating.", bean.getFullSchemaElementName(), sessionXml.getAbsolutePath()); + } + } else { + _log.warn("Tried to rebuild a session from the path {}, but that session XML doesn't exist.", sessionXml.getAbsolutePath()); } - } else { - _log.warn("Tried to rebuild a session from the path {}, but that session XML doesn't exist.", sessionXml.getAbsolutePath()); } - } - PrearcUtils.resetStatus(getUser(), getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), true); + PrearcUtils.resetStatus(getUser(), getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), true); - // we don't want to autoarchive a session that's just being rebuilt - // but we still want to autoarchive sessions that just came from RECEIVING STATE - final PrearcImporterA.PrearcSession session = new PrearcImporterA.PrearcSession(getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), null, getUser()); - final FinishImageUpload uploader = new FinishImageUpload(null, getUser(), session, null, false, true, false); - if (receiving || !uploader.isAutoArchive()) { - _log.debug("Processing queue entry for {} in project {} to archive {}", getUser().getUsername(), getSessionData().getProject(), getSessionData().getExternalUrl()); - uploader.call(); + // we don't want to autoarchive a session that's just being rebuilt + // but we still want to autoarchive sessions that just came from RECEIVING STATE + final PrearcImporterA.PrearcSession session = new PrearcImporterA.PrearcSession(getSessionData().getProject(), getSessionData().getTimestamp(), getSessionData().getFolderName(), null, getUser()); + final FinishImageUpload uploader = new FinishImageUpload(null, getUser(), session, null, false, true, false); + if (receiving || !uploader.isAutoArchive()) { + _log.debug("Processing queue entry for {} in project {} to archive {}", getUser().getUsername(), getSessionData().getProject(), getSessionData().getExternalUrl()); + uploader.call(); + } + } else { + if (_log.isInfoEnabled()) { + _log.info("Found session " + getSessionData().getSessionDataTriple() + " in RECEIVING_INTERRUPT state, meaning that data began arriving while session was in an interruptable non-receiving state. 
No session split or autoarchive checks will be performed and session will be restored to RECEIVING state."); + } + PrearcDatabase.setStatus(getSessionData().getFolderName(), getSessionData().getTimestamp(), getSessionData().getProject(), PrearcUtils.PrearcStatus.RECEIVING); } } } catch (Exception e) { diff --git a/src/main/java/org/nrg/xnat/restlet/actions/PrearcBlankSession.java b/src/main/java/org/nrg/xnat/restlet/actions/PrearcBlankSession.java deleted file mode 100644 index 9f5cc72e..00000000 --- a/src/main/java/org/nrg/xnat/restlet/actions/PrearcBlankSession.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * org.nrg.xnat.restlet.actions.PrearcBlankSession - * XNAT http://www.xnat.org - * Copyright (c) 2014, Washington University School of Medicine - * All Rights Reserved - * - * Released under the Simplified BSD. - * - * Last modified 7/10/13 9:04 PM - */ -package org.nrg.xnat.restlet.actions; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.log4j.Logger; -import org.nrg.action.ClientException; -import org.nrg.action.ServerException; -import org.nrg.xft.security.UserI; -import org.nrg.xnat.helpers.prearchive.PrearcDatabase; -import org.nrg.xnat.helpers.prearchive.PrearcUtils; -import org.nrg.xnat.helpers.prearchive.PrearcUtils.PrearcStatus; -import org.nrg.xnat.helpers.prearchive.SessionData; -import org.nrg.xnat.restlet.actions.importer.ImporterHandlerA; -import org.nrg.xnat.restlet.util.FileWriterWrapperI; -import org.restlet.data.Status; - -public class PrearcBlankSession extends ImporterHandlerA { - static Logger logger = Logger.getLogger(PrearcBlankSession.class); - - private static final String PROJECT = "project"; - private static final String SUID= "suid"; - private static final String SESSION = "session"; - private static final String SUBJECT = "subject"; // optional, added to the prearchive table if it is present - - private final UserI user; - private final FileWriterWrapperI fi; // never used, we only check if it is null - private final Map<String,Object> params; - - /** - * Check that \"project\", \"session\" and \"suid\" are present. 
- * - * @param params - * @param fi - * @throws ClientException - */ - private void validate(Map<String,Object> params, FileWriterWrapperI fi) throws ClientException { - if (!params.containsKey(PROJECT) || - !params.containsKey(SUID) || - !params.containsKey(SESSION)) { - throw new ClientException(Status.CLIENT_ERROR_BAD_REQUEST,"Cannot build a blank row for this session without the \"project\", \"session\" and \"suid\" parameters", new IllegalArgumentException()); - } - if (fi != null) { - throw new ClientException(Status.CLIENT_ERROR_BAD_REQUEST, "Cannot upload binary data while creating a blank row for this session", new IllegalArgumentException()); - } - } - - /** - * Helper class to create a blank row in the prearchive table - * @param uID2 - * @param u - * @param fi - * @param project_id - * @param additionalValues - */ - public PrearcBlankSession(final Object uID2 , // ignored - final UserI u, // ignored - final FileWriterWrapperI fi, // should be null, we are not expecting a file when creating a blank row - Map<String,Object> params - ){ - super((uID2==null)?u:uID2,u,fi,params); - this.user=u; - this.fi = fi; - this.params = params; - } - - @Override - public List<String> call() throws ClientException,ServerException{ - this.validate(this.params, this.fi); - String project = (String) this.params.get(PROJECT); - String session = (String) this.params.get(SESSION); - String suid = (String) this.params.get(SUID); - - try { - SessionData blankSession = PrearcUtils.blankSession(project, session, suid); - Collection<SessionData> dupes = PrearcDatabase.getSessionByUID(suid); - if (dupes.size() != 0) { - throw new ClientException(Status.CLIENT_ERROR_BAD_REQUEST, "A session with Study Instance UID " + suid + " exists.", new IllegalArgumentException()); - } - blankSession.setStatus(PrearcStatus.RECEIVING); - if (this.params.containsKey(SUBJECT)) { - blankSession.setSubject((String)this.params.get(SUBJECT)); - } - PrearcDatabase.addSession(blankSession); - Map<String,Object> additionalValues = new HashMap<String,Object>(); - additionalValues.put(SUID, blankSession.getTag()); - List<String> ret = new ArrayList<String>(); - ret.add(PrearcUtils.buildURI(blankSession.getProject(), blankSession.getTimestamp(), blankSession.getFolderName())); - return ret; - } - catch (Exception e) { - logger.error("Unable to create blank session", e); - throw new ClientException(Status.SERVER_ERROR_INTERNAL,e.getMessage(), new IllegalArgumentException()); - } - } -} diff --git a/src/main/java/org/nrg/xnat/restlet/resources/prearchive/PrearcSessionResource.java b/src/main/java/org/nrg/xnat/restlet/resources/prearchive/PrearcSessionResource.java index 28761e00..98bcad9b 100644 --- a/src/main/java/org/nrg/xnat/restlet/resources/prearchive/PrearcSessionResource.java +++ b/src/main/java/org/nrg/xnat/restlet/resources/prearchive/PrearcSessionResource.java @@ -404,7 +404,7 @@ public final class PrearcSessionResource extends SecureResource { return new FileRepresentation(sessionXML, variant.getMediaType(), 0); } else if (MediaType.APPLICATION_JSON.equals(mt)) { List<SessionDataTriple> l = new ArrayList<SessionDataTriple>(); - l.add(new SessionDataTriple().setFolderName(sessionDir.getName()).setProject(project).setTimestamp(timestamp)); + l.add(new SessionDataTriple(sessionDir.getName(), timestamp, project)); XFTTable table = null; try { table = PrearcUtils.convertArrayLtoTable(PrearcDatabase.buildRows(l)); diff --git a/src/main/java/org/nrg/xnat/restlet/services/Importer.java
b/src/main/java/org/nrg/xnat/restlet/services/Importer.java index e73c3a72..db5d926b 100644 --- a/src/main/java/org/nrg/xnat/restlet/services/Importer.java +++ b/src/main/java/org/nrg/xnat/restlet/services/Importer.java @@ -301,9 +301,28 @@ public class Importer extends SecureResource { } protected void respondToException(Exception e, Status status) { - logger.error("",e); + final Throwable cause = e.getCause(); + if (cause != null && cause instanceof ExceptionInInitializerError && ((ExceptionInInitializerError) cause).getException() != null) { + final ExceptionInInitializerError error = (ExceptionInInitializerError) cause; + final StringBuilder buffer = new StringBuilder("An error occurred initializing an object during the import operation: "); + buffer.append(error.getException().getMessage()); + final StackTraceElement[] stackTrace = error.getException().getStackTrace(); + if (stackTrace != null) { + int lines = 0; + for (final StackTraceElement element : stackTrace) { + buffer.append(System.lineSeparator()).append(" ").append(element.toString()); + lines++; + if (lines > 5) { + break; + } + } + } + logger.error(buffer.toString()); + } else { + logger.error("", e); + } if (this.requested_format!=null && this.requested_format.equalsIgnoreCase("HTML")) { - response = new ArrayList<String>(); + response = new ArrayList<>(); response.add(e.getMessage()); returnDefaultRepresentation(); } else { diff --git a/src/main/resources/META-INF/xnat/spawner/site-admin-elements.yaml b/src/main/resources/META-INF/xnat/spawner/site-admin-elements.yaml index d0830b1b..f8b4927b 100644 --- a/src/main/resources/META-INF/xnat/spawner/site-admin-elements.yaml +++ b/src/main/resources/META-INF/xnat/spawner/site-admin-elements.yaml @@ -984,6 +984,37 @@ manageDataTypes: name: displayNameForGenericImageSession.plural label: "Plural Display Name For Generic Image Session Singular" +sessionBuilder: + kind: panel.form + name: sessionBuilder + label: "Session Builder" + method: POST + contentType: json + action: /xapi/siteConfig/batch + load: XNAT.data.siteConfig + refresh: /xapi/siteConfig + contents: + sessionXmlRebuilderRepeat: + kind: panel.input.number + id: sessionXmlRebuilderRepeat + name: sessionXmlRebuilderRepeat + label: Session Idle Check Interval + placeholder: Interval in milliseconds + description: > + This controls how often the system checks to see if any incoming DICOM sessions in the prearchive have + been idle for longer than the configured session idle time. This value should be specified in + milliseconds and defaults to 60,000 ms or one minute. + sessionXmlRebuilderInterval: + kind: panel.input.number + id: sessionXmlRebuilderInterval + name: sessionXmlRebuilderInterval + label: Session Idle Time + placeholder: Time in minutes + description: > + This tells the system how long a DICOM session should sit idle—that is, with no new data added to the + session—before attempting to build a session document from the DICOM data. This value is specified in + minutes and defaults to 5 minutes. + anonymization: kind: panel.form name: Anonymization @@ -1137,16 +1168,6 @@ sessionUploadMethod: description: > Details on how to configure an Upload Applet script may be found <a href="https://wiki.xnat.org/display/XKB/Adding+parameters+and+launch+requirements+for+the+upload+applet" target="_blank">here</a>. 
- sessionXmlRebuilderRepeat: - kind: panel.input.number - id: sessionXmlRebuilderRepeat - name: sessionXmlRebuilderRepeat - label: Session Xml Rebuilder Repeat - sessionXmlRebuilderInterval: - kind: panel.input.number - id: sessionXmlRebuilderInterval - name: sessionXmlRebuilderInterval - label: Session Xml Rebuilder Interval dicomScpReceivers: kind: panel @@ -1415,6 +1436,7 @@ adminPage: label: "Session Upload, Import & Anonymization" group: manageData contents: + ${sessionBuilder} ${anonymization} ${seriesImportFilter} ${petTracers} -- GitLab
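
The interruptable-status handling in this patch hinges on one convention in PrearcUtils.PrearcStatus: statuses that may safely be pushed back into a receiving state carry an explicit true flag, and everything else defaults to non-interruptable. The following is a minimal, standalone sketch of that pattern; the demo class, the onDataReceived helper, and the trimmed-down constant list are illustrative assumptions, not part of the XNAT code.

// Minimal sketch of the interruptable-status flag pattern used by PrearcUtils.PrearcStatus.
// The class name, helper method, and reduced constant list are illustrative only.
public class InterruptableStatusDemo {

    enum Status {
        ARCHIVING,                  // no flag: defaults to non-interruptable
        BUILDING(true),
        READY(true),
        RECEIVING(true),
        RECEIVING_INTERRUPT(true),
        DELETING;

        private final boolean interruptable;

        Status() {
            this(false);            // unflagged statuses must not be interrupted by new data
        }

        Status(final boolean interruptable) {
            this.interruptable = interruptable;
        }

        boolean isInterruptable() {
            return interruptable;
        }
    }

    // A caller might use the flag like this when new data arrives for an existing session:
    // interruptable statuses are bumped to RECEIVING_INTERRUPT, everything else is left alone.
    static Status onDataReceived(final Status current) {
        return current.isInterruptable() ? Status.RECEIVING_INTERRUPT : current;
    }

    public static void main(final String[] args) {
        System.out.println(onDataReceived(Status.BUILDING));   // RECEIVING_INTERRUPT
        System.out.println(onDataReceived(Status.ARCHIVING));  // ARCHIVING
    }
}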
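
The PrearchiveRebuildHandler change is easier to follow stripped of the XNAT plumbing: after building, the handler re-reads the session status, and if data started arriving during the build (RECEIVING_INTERRUPT) it skips the split and autoarchive checks and drops the session back to RECEIVING. The sketch below shows only that control flow; StatusStore and RebuildFlowSketch are hypothetical stand-ins, not PrearcDatabase's actual API.

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

// Control-flow sketch of the post-build RECEIVING_INTERRUPT check in PrearchiveRebuildHandler.
// StatusStore is a hypothetical stand-in for PrearcDatabase; only the flow mirrors the patch.
public class RebuildFlowSketch {

    enum Status { RECEIVING, RECEIVING_INTERRUPT, BUILDING, READY }

    interface StatusStore {
        Status get(String sessionKey);

        void set(String sessionKey, Status status);
    }

    static void rebuild(final StatusStore store, final String sessionKey) {
        store.set(sessionKey, Status.BUILDING);
        // ... build the session document from the received DICOM files here ...

        // Did more data start arriving while the build was running?
        if (store.get(sessionKey) == Status.RECEIVING_INTERRUPT) {
            // Yes: skip the split/autoarchive checks and go back to receiving.
            store.set(sessionKey, Status.RECEIVING);
            return;
        }
        // No: safe to run the PET/MR separation and autoarchive checks.
        store.set(sessionKey, Status.READY);
    }

    public static void main(final String[] args) {
        final ConcurrentMap<String, Status> backing = new ConcurrentHashMap<>();
        final StatusStore store = new StatusStore() {
            public Status get(final String key) {
                return backing.get(key);
            }

            public void set(final String key, final Status status) {
                backing.put(key, status);
            }
        };
        store.set("session-1", Status.RECEIVING);
        rebuild(store, "session-1");
        System.out.println(store.get("session-1")); // READY, since no interrupt occurred
    }
}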