diff --git a/src/main/config/logback.xml.example b/src/main/config/logback.xml.example index 21bb62a0..94d6f323 100644 --- a/src/main/config/logback.xml.example +++ b/src/main/config/logback.xml.example @@ -23,6 +23,4 @@ - - diff --git a/src/main/config/run.properties.example b/src/main/config/run.properties.example index 2dadc9bd..3bb3b59b 100644 --- a/src/main/config/run.properties.example +++ b/src/main/config/run.properties.example @@ -30,12 +30,6 @@ storageUnit = dataset tidyBlockSize = 500 !enableWrite = true -# File checking properties. Deprecated -!filesCheck.parallelCount = 5 -!filesCheck.gapSeconds = 5 -!filesCheck.lastIdFile = ${HOME}/ids/lastIdFile -!filesCheck.errorLog = ${HOME}/ids/errorLog - # Link properties. Deprecated !linkLifetimeSeconds = 3600 diff --git a/src/main/java/org/icatproject/ids/FileChecker.java b/src/main/java/org/icatproject/ids/FileChecker.java deleted file mode 100644 index 5ac373fa..00000000 --- a/src/main/java/org/icatproject/ids/FileChecker.java +++ /dev/null @@ -1,351 +0,0 @@ -package org.icatproject.ids; - -import java.io.IOException; -import java.io.InputStream; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardOpenOption; -import java.text.DateFormat; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Timer; -import java.util.TimerTask; -import java.util.zip.CRC32; -import java.util.zip.ZipEntry; -import java.util.zip.ZipInputStream; - -import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; -import jakarta.ejb.EJB; -import jakarta.ejb.Singleton; -import jakarta.ejb.Startup; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import org.icatproject.Datafile; -import org.icatproject.Dataset; -import org.icatproject.EntityBaseBean; -import org.icatproject.IcatException_Exception; -import org.icatproject.ids.exceptions.InsufficientPrivilegesException; -import org.icatproject.ids.exceptions.InternalException; -import org.icatproject.ids.plugin.ArchiveStorageInterface; -import org.icatproject.ids.plugin.DsInfo; -import org.icatproject.ids.plugin.MainStorageInterface; -import org.icatproject.ids.plugin.ZipMapperInterface; - -@Singleton -@Startup -public class FileChecker { - - @EJB - IcatReader reader; - - private Long maxId; - - private final static String ls = System.getProperty("line.separator"); - - private class CrcAndLength { - - private Long fileSize; - - private String checksum; - - private CrcAndLength(Datafile df) { - this.checksum = df.getChecksum(); - this.fileSize = df.getFileSize(); - } - - } - - private class CheckOneFile implements Runnable { - - private EntityBaseBean eb; - - private CheckOneFile(EntityBaseBean eb) { - this.eb = eb; - } - - @Override - public void run() { - - if (twoLevel) { - Dataset ds = (Dataset) eb; - logger.info("Checking Dataset " + ds.getId() + " (" + ds.getName() + ")"); - Map crcAndLength = new HashMap<>(); - for (Datafile df : ds.getDatafiles()) { - if (df.getLocation() != null) { - crcAndLength.put(df.getName(), new CrcAndLength(df)); - } - } - if (!crcAndLength.isEmpty()) { - String dfName = null; - - DsInfo dsInfo; - try { - dsInfo = new DsInfoImpl(ds); - } catch (InsufficientPrivilegesException e) { - report(ds, dfName, "Reports: " + e.getClass().getSimpleName() + " " + e.getMessage()); - return; - } - Path tPath = null; - try { - tPath = Files.createTempFile(null, null); - archiveStorage.get(dsInfo, 
tPath); - try (ZipInputStream zis = new ZipInputStream(Files.newInputStream(tPath))) { - ZipEntry ze = zis.getNextEntry(); - while (ze != null) { - dfName = zipMapper.getFileName(ze.getName()); - CRC32 crc = new CRC32(); - byte[] bytes = new byte[1024]; - int length; - long n = 0; - while ((length = zis.read(bytes)) >= 0) { - crc.update(bytes, 0, length); - n += length; - } - - CrcAndLength cl = crcAndLength.get(dfName); - if (cl == null) { - report(ds, dfName, "not found in map"); - } else if (cl.fileSize == null) { - report(ds, dfName, "file size null"); - } else if (cl.fileSize != n) { - report(ds, dfName, "file size wrong"); - } else if (cl.checksum == null) { - report(ds, dfName, "checksum null"); - } else if (!cl.checksum.equals(Long.toHexString(crc.getValue()))) { - report(ds, dfName, "checksum wrong"); - } - - crcAndLength.remove(dfName); - ze = zis.getNextEntry(); - } - if (!crcAndLength.isEmpty()) { - report(ds, null, "zip file incomplete"); - } - } - } catch (IOException e) { - report(ds, dfName, - e.getClass().getName() + (e.getMessage() != null ? " " + e.getMessage() : "")); - } catch (Throwable e) { - e.printStackTrace(); - logger.error("Throwable " + e.getClass() + " " + e.getMessage()); - } finally { - if (tPath != null) { - try { - Files.deleteIfExists(tPath); - } catch (IOException e) { - // Ignore - } - } - } - } - } else { - Datafile df = (Datafile) eb; - logger.debug("Checking Datafile " + df.getId() + " (" + df.getName() + ")"); - String location; - try { - location = IdsBean.getLocation(df.getId(), df.getLocation()); - } catch (InsufficientPrivilegesException | InternalException e) { - report(df, e.getClass() + " " + e.getMessage()); - return; - } - - try (InputStream is = mainStorage.get(location, df.getCreateId(), df.getModId())) { - CRC32 crc = new CRC32(); - byte[] bytes = new byte[1024]; - int length; - long n = 0; - while ((length = is.read(bytes)) >= 0) { - crc.update(bytes, 0, length); - n += length; - } - is.close(); - if (df.getFileSize() == null) { - report(df, "file size null"); - } else if (df.getFileSize() != n) { - report(df, "file size wrong"); - } else if (df.getChecksum() == null) { - report(df, "checksum null"); - } else if (!df.getChecksum().equals(Long.toHexString(crc.getValue()))) { - report(df, "checksum wrong"); - } - } catch (IOException e) { - report(df, e.getClass() + " " + e.getMessage()); - } catch (Throwable e) { - logger.error("Throwable " + e.getClass() + " " + e.getMessage()); - } - - } - - } - - private void report(Datafile df, String emsg) { - String msg = "Datafile " + df.getId() + " (" + df.getName() + ") "; - msg += emsg; - logger.info(msg); - DateFormat dft = DateFormat.getDateTimeInstance(); - try { - Files.write(filesCheckErrorLog, (dft.format(new Date()) + ": " + msg + ls).getBytes(), - StandardOpenOption.APPEND, StandardOpenOption.CREATE); - } catch (IOException e) { - logger.error("Unable to write FileChecker log file " + e.getClass() + " " + e.getMessage()); - } - } - - private void report(Dataset ds, String dfName, String emsg) { - String msg = "Dataset " + ds.getId() + " (" + ds.getName() + ") "; - if (dfName != null) { - msg = msg + "datafile " + dfName + " "; - } - msg += emsg; - logger.info(msg); - DateFormat dft = DateFormat.getDateTimeInstance(); - try { - Files.write(filesCheckErrorLog, (dft.format(new Date()) + ": " + msg + ls).getBytes(), - StandardOpenOption.APPEND, StandardOpenOption.CREATE); - } catch (IOException e) { - logger.error("Unable to write FileChecker log file " + e.getClass() + " " + 
e.getMessage()); - } - } - } - - public class Action extends TimerTask { - - @Override - public void run() { - - try { - String query; - if (twoLevel) { - if (maxId != null) { - query = "SELECT ds FROM Dataset ds WHERE ds.id > " + maxId - + " ORDER BY ds.id INCLUDE ds.datafiles, ds.investigation.facility LIMIT 0, " - + filesCheckParallelCount; - } else { - query = "SELECT ds FROM Dataset ds ORDER BY ds.id INCLUDE ds.datafiles, ds.investigation.facility LIMIT 0, " - + filesCheckParallelCount; - } - } else { - if (maxId != null) { - query = "SELECT df FROM Datafile df WHERE df.id > " + maxId + " AND df.location IS NOT NULL ORDER BY df.id LIMIT 0, " - + filesCheckParallelCount; - } else { - query = "SELECT df FROM Datafile df WHERE df.location IS NOT NULL ORDER BY df.id LIMIT 0, " + filesCheckParallelCount; - } - } - List os = reader.search(query); - - logger.debug(query + " returned " + os.size() + " results"); - if (os.isEmpty()) { - maxId = null; - } else { - - List threads = new ArrayList(os.size()); - for (Object o : os) { - EntityBaseBean eb = (EntityBaseBean) o; - Thread worker = new Thread(new CheckOneFile(eb)); - worker.start(); - threads.add(worker); - maxId = eb.getId(); - } - for (Thread thread : threads) { - try { - thread.join(); - } catch (InterruptedException e) { - logger.info("Thread interrupted"); - return; - } - } - } - - try { - if (maxId != null) { - Files.write(filesCheckLastIdFile, Long.toString(maxId).getBytes()); - } else { - Files.deleteIfExists(filesCheckLastIdFile); - } - } catch (IOException e) { - logger.error("Unable to write FileChecker last id file " + e.getClass() + " " + e.getMessage()); - } - - } catch (IcatException_Exception e) { - logger.error(e.getFaultInfo().getType() + " " + e.getMessage()); - } catch (Throwable e) { - logger.error("Throwable " + e.getClass() + " " + e.getMessage()); - } finally { - timer.schedule(new Action(), filesCheckGapMillis); - } - } - - } - - private final static Logger logger = LoggerFactory.getLogger(FileChecker.class); - - private Timer timer = new Timer(); - - private int filesCheckParallelCount; - private long filesCheckGapMillis; - private Path filesCheckLastIdFile; - private Path filesCheckErrorLog; - - private boolean twoLevel; - - private MainStorageInterface mainStorage; - - private ArchiveStorageInterface archiveStorage; - - private ZipMapperInterface zipMapper; - - private PropertyHandler propertyHandler; - - @PostConstruct - public void init() { - - propertyHandler = PropertyHandler.getInstance(); - StorageUnit storageUnit = propertyHandler.getStorageUnit(); - filesCheckParallelCount = propertyHandler.getFilesCheckParallelCount(); - if (filesCheckParallelCount > 0) { - logger.warn("The FileChecker is deprecated and slated for removal in ids.server 3.0"); - if (storageUnit == null || storageUnit == StorageUnit.DATASET) { - filesCheckGapMillis = propertyHandler.getFilesCheckGapMillis(); - filesCheckLastIdFile = propertyHandler.getFilesCheckLastIdFile(); - filesCheckErrorLog = propertyHandler.getFilesCheckErrorLog(); - mainStorage = propertyHandler.getMainStorage(); - archiveStorage = propertyHandler.getArchiveStorage(); - twoLevel = archiveStorage != null; - zipMapper = propertyHandler.getZipMapper(); - - maxId = null; - if (Files.exists(filesCheckLastIdFile)) { - try { - maxId = Long.parseLong(Files.readAllLines(filesCheckLastIdFile, StandardCharsets.UTF_8).get(0)); - } catch (Exception e) { - logger.error("Unable to read FileChecker last id file " + e.getClass() + " " + e.getMessage()); - } - } - - 
timer.schedule(new Action(), filesCheckGapMillis); - - logger.info("FileChecker started with maxId: " + maxId); - } else { - logger.info("FileChecker not supported for storageUnit " + storageUnit); - } - } else { - logger.info("FileChecker startup not requested"); - } - - } - - @PreDestroy - public void exit() { - timer.cancel(); - logger.info("FileChecker stopped"); - } - -} diff --git a/src/main/java/org/icatproject/ids/PropertyHandler.java b/src/main/java/org/icatproject/ids/PropertyHandler.java index 72cc617f..8893a084 100644 --- a/src/main/java/org/icatproject/ids/PropertyHandler.java +++ b/src/main/java/org/icatproject/ids/PropertyHandler.java @@ -51,10 +51,6 @@ public static Logger getLogger() { private ArchiveStorageInterface archiveStorage; private Path cacheDir; private boolean enableWrite; - private Path filesCheckErrorLog; - private int filesCheckGapMillis; - private Path filesCheckLastIdFile; - private int filesCheckParallelCount; private ICAT icatService; private long linkLifetimeMillis; @@ -198,23 +194,6 @@ private PropertyHandler() { abort(cacheDir + " must be an existing directory"); } - if (props.has("filesCheck.parallelCount")) { - filesCheckParallelCount = props.getNonNegativeInt("filesCheck.parallelCount"); - } else { - filesCheckParallelCount = 0; - } - if (filesCheckParallelCount > 0) { - filesCheckGapMillis = props.getPositiveInt("filesCheck.gapSeconds") * 1000; - filesCheckLastIdFile = props.getFile("filesCheck.lastIdFile").toPath(); - if (!Files.exists(filesCheckLastIdFile.getParent())) { - abort("Directory for " + filesCheckLastIdFile + " does not exist"); - } - filesCheckErrorLog = props.getFile("filesCheck.errorLog").toPath(); - if (!Files.exists(filesCheckErrorLog.getParent())) { - abort("Directory for " + filesCheckErrorLog + " does not exist"); - } - } - if (props.has("linkLifetimeSeconds")) { linkLifetimeMillis = props.getNonNegativeLong("linkLifetimeSeconds") * 1000L; } else { @@ -266,22 +245,6 @@ public boolean getEnableWrite() { return enableWrite; } - public Path getFilesCheckErrorLog() { - return filesCheckErrorLog; - } - - public long getFilesCheckGapMillis() { - return filesCheckGapMillis; - } - - public Path getFilesCheckLastIdFile() { - return filesCheckLastIdFile; - } - - public int getFilesCheckParallelCount() { - return filesCheckParallelCount; - } - public synchronized ICAT getIcatService() { // Keep trying every 10 seconds to connect to ICAT. Each failure // will produce an error message. 
diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml index c6cd0ad2..ba06d172 100644 --- a/src/main/resources/logback.xml +++ b/src/main/resources/logback.xml @@ -23,6 +23,4 @@ - - diff --git a/src/main/scripts/setup b/src/main/scripts/setup index e0594813..6ba0a6e4 100755 --- a/src/main/scripts/setup +++ b/src/main/scripts/setup @@ -40,19 +40,6 @@ if arg == "INSTALL": if not (idsProperties.get("delayDatafileOperationsSeconds")): abort("delayDatafileOperationsSeconds is not set in run.properties") - if int(idsProperties.get("filesCheck.parallelCount", 0)) > 0: - warnings.warn("The FileChecker is deprecated and slated for removal in ids.server 3.0") - if not idsProperties.get("filesCheck.gapSeconds"): abort("filesCheck.gapSeconds is not set in run.properties") - if not idsProperties.get("filesCheck.lastIdFile"): abort("filesCheck.lastIdFile is not set in run.properties") - parent = os.path.dirname(os.path.expandvars(idsProperties["filesCheck.lastIdFile"])) - if not os.path.exists(parent): - abort("Please create directory " + parent + " for filesCheck.lastIdFile specified in run.properties") - if not idsProperties.get("filesCheck.errorLog"): abort("filesCheck.errorLog is not set in run.properties") - parent = os.path.dirname(os.path.expandvars(idsProperties["filesCheck.errorLog"])) - if not os.path.exists(parent): - abort("Please create directory " + parent + " for filesCheck.errorLog specified in run.properties") - if not idsProperties.get("reader"): abort("reader is not set in run.properties") - if int(idsProperties.get("linkLifetimeSeconds", 0)) > 0: warnings.warn("The getLink API call is deprecated and slated for removal in ids.server 3.0") diff --git a/src/site/xhtml/installation.xhtml.vm b/src/site/xhtml/installation.xhtml.vm index 2f4ec985..4fcaaeae 100644 --- a/src/site/xhtml/installation.xhtml.vm +++ b/src/site/xhtml/installation.xhtml.vm @@ -251,58 +251,6 @@ please consult the documentation for your plugin(s). - -

Properties for file checking

-

When a datafile is added to the IDS, its length and checksum are - computed and stored in ICAT. File checking, if enabled, cycles through - all the stored data making sure that they can be read and that files - have the expected size and checksum.

-
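Distilled, that check amounts to the sketch below. It condenses the logic of the FileChecker.java deleted earlier in this diff; the class and method names are invented for illustration, though the report strings match those used by the deleted code.

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.CRC32;

    // Sketch of the removed per-file check: stream the stored bytes,
    // accumulate a CRC32 and a byte count, then compare both against the
    // fileSize and checksum recorded in ICAT for the Datafile.
    class FileCheckSketch {
        static String verify(InputStream is, Long fileSize, String checksum) throws IOException {
            CRC32 crc = new CRC32();
            byte[] bytes = new byte[1024];
            int length;
            long n = 0;
            while ((length = is.read(bytes)) >= 0) {
                crc.update(bytes, 0, length);
                n += length;
            }
            if (fileSize == null) return "file size null";
            if (fileSize != n) return "file size wrong";
            if (checksum == null) return "checksum null";
            if (!checksum.equals(Long.toHexString(crc.getValue()))) return "checksum wrong";
            return null; // consistent with the ICAT metadata
        }
    }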

Deprecated: the FileChecker is slated for removal - along with the properties in this section in ids.server 3.0.

-
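For reference, enabling the checker used to be a matter of uncommenting these entries, reproduced here exactly as they appear in the run.properties.example hunk removed earlier in this diff:

    !filesCheck.parallelCount = 5
    !filesCheck.gapSeconds = 5
    !filesCheck.lastIdFile = ${HOME}/ids/lastIdFile
    !filesCheck.errorLog = ${HOME}/ids/errorLog

(The leading "!" is the java.util.Properties comment character, so the entries ship disabled.)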
-
filesCheck.parallelCount
-
- Optional, default zero. If non-zero then the readability of the - data will be checked. The behaviour is dependent upon whether or not - archive storage has been requested. In the case of single level - storage this is done in groups of files, where the group size is - defined by this parameter. If archive storage has been requested then - only the archive is checked. Each file in the archive holds a - complete dataset and this filesCheck.parallelCount parameter then - defines how many dataset files will be checked in parallel. -

In the case of checking datasets in the archive storage, these - are unzipped on the fly to compute the checksum of each file inside - the zip file as well as its length (see the sketch after this list).

-

If the archive storage has a long latency then it is useful to - have a "large" value; however, a thread is started for each stored - file, so the value of this parameter should not be too large.

-

If this is set to zero then the FileChecker is disabled and all other - properties in this section will be ignored.

-
-
filesCheck.gapSeconds
-
the number of seconds to wait before launching a check of the - next batch of datafiles or datasets. -
-
filesCheck.lastIdFile
-
the location of a file which is used to store the id value of - the last datafile or dataset to be checked. This is so that if the - IDS is restarted it will continue checking where it left off. If this - file is deleted the IDS will restart checking from the beginning. The - parameters filesCheck.parallelCount and filesCheck.gapSeconds should - be set so that the data are all checked with the desired frequency - but without excessive I/O. A Nagios plugin might check that this file - is being written periodically and that its contents change.
-
filesCheck.errorLog
-
the file with a list of errors found. The file is not kept - open but instead is opened in append mode each time a problem is - spotted and then closed. A Nagios plugin might be set up to watch - this file. Entries in the file are date stamped and new entries are - simply appended without regard for the existence of an entry for the - same file.
-
-
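The on-the-fly zip check referenced under filesCheck.parallelCount worked along these lines; this is a condensed, illustrative rearrangement of the deleted CheckOneFile.run(), with the class and method names invented for the sketch:

    import java.io.IOException;
    import java.io.InputStream;
    import java.util.zip.CRC32;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipInputStream;

    // Each archived dataset is a single zip file, so the checker streamed it
    // entry by entry, computing each member's CRC32 and length without ever
    // extracting the archive to disk.
    class DatasetZipCheckSketch {
        static void check(InputStream archivedDataset) throws IOException {
            try (ZipInputStream zis = new ZipInputStream(archivedDataset)) {
                ZipEntry ze;
                while ((ze = zis.getNextEntry()) != null) {
                    CRC32 crc = new CRC32();
                    byte[] bytes = new byte[1024];
                    int length;
                    long n = 0;
                    // read() returns -1 at the end of the current entry, not the stream
                    while ((length = zis.read(bytes)) >= 0) {
                        crc.update(bytes, 0, length);
                        n += length;
                    }
                    // The real code mapped ze.getName() back to a datafile name via the
                    // ZipMapperInterface plugin, then compared n and
                    // Long.toHexString(crc.getValue()) with the values stored in ICAT.
                    System.out.println(ze.getName() + ": " + n + " bytes, crc32 "
                            + Long.toHexString(crc.getValue()));
                }
            }
        }
    }

Reading each entry to exhaustion is what let the checker detect damaged archives: a truncated zip surfaces either as an EOFException while inflating or as entries missing from the archive, both of which the deleted integration tests exercised ("Unexpected end of ZLIB input stream" and "zip file incomplete").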

Check that the ids server works

Enter a url of the form diff --git a/src/site/xhtml/release-notes.xhtml b/src/site/xhtml/release-notes.xhtml index 1e3bff32..654949a3 100644 --- a/src/site/xhtml/release-notes.xhtml +++ b/src/site/xhtml/release-notes.xhtml @@ -6,6 +6,11 @@

IDS Server Release Notes

+

3.0.0 (not yet released)

+
    +
  • #147: Drop FileChecker, deprecated in 2.0.0.
  • +
+

2.0.0

Make the transition to Payara 6

    diff --git a/src/test/java/org/icatproject/ids/integration/one/FileCheckerTest.java b/src/test/java/org/icatproject/ids/integration/one/FileCheckerTest.java deleted file mode 100644 index 7f23dc8b..00000000 --- a/src/test/java/org/icatproject/ids/integration/one/FileCheckerTest.java +++ /dev/null @@ -1,94 +0,0 @@ -package org.icatproject.ids.integration.one; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; -import org.junit.BeforeClass; -import org.junit.Test; - -import org.icatproject.Datafile; -import org.icatproject.EntityBaseBean; -import org.icatproject.ids.integration.BaseTest; -import org.icatproject.ids.integration.util.Setup; - -public class FileCheckerTest extends BaseTest { - - private static Path errorLog; - - @BeforeClass - public static void setup() throws Exception { - setup = new Setup("one.properties"); - icatsetup(); - errorLog = setup.getErrorLog(); - } - - @Test - public void everythingTest() throws Exception { - - List os = icatWS.search(sessionId, "Datafile"); - for (Object o : os) { - icatWS.delete(sessionId, (EntityBaseBean) o); - } - - Files.deleteIfExists(errorLog); - - Long dfid = 0L; - for (int i = 0; i < 3; i++) { - dfid = testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_" + i, - datasetIds.get(0), supportedDatafileFormat.getId(), "A rather splendid datafile", 201); - } - - Datafile df = (Datafile) icatWS.get(sessionId, "Datafile INCLUDE 1", dfid); - - assertFalse(Files.exists(errorLog)); - - Long fileSize = df.getFileSize(); - String checksum = df.getChecksum(); - - df.setFileSize(fileSize + 1); - icatWS.update(sessionId, df); - checkHas("Datafile", dfid, "file size wrong"); - - df.setFileSize(fileSize); - df.setChecksum("Aardvark"); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Datafile", dfid, "checksum wrong"); - - df.setChecksum(null); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Datafile", dfid, "checksum null"); - - df.setChecksum(checksum); - df.setLocation("Zoo"); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Datafile", dfid, "Zoo\" does not contain hash."); - - } - - private void checkHas(String type, Long id, String message) throws IOException, InterruptedException { - Set lines = new HashSet(); - while (!Files.exists(errorLog)) { - Thread.sleep(10); - } - for (String line : Files.readAllLines(errorLog, Charset.defaultCharset())) { - int n = line.indexOf(": ") + 2; - lines.add(line.substring(n)); - } - assertEquals(1, lines.size()); - String msg = new ArrayList(lines).get(0); - assertTrue(msg + ":" + type + " " + id, msg.startsWith(type + " " + id)); - assertTrue(msg + ":" + message, msg.endsWith(message)); - } -} diff --git a/src/test/java/org/icatproject/ids/integration/two/FileCheckerTest.java b/src/test/java/org/icatproject/ids/integration/two/FileCheckerTest.java deleted file mode 100644 index c69214ab..00000000 --- a/src/test/java/org/icatproject/ids/integration/two/FileCheckerTest.java +++ /dev/null @@ -1,202 +0,0 @@ -package org.icatproject.ids.integration.two; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.Charset; -import 
java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import org.junit.BeforeClass; -import org.junit.Test; - -import org.icatproject.Datafile; -import org.icatproject.DatafileFormat; -import org.icatproject.Dataset; -import org.icatproject.DatasetType; -import org.icatproject.Facility; -import org.icatproject.Investigation; -import org.icatproject.InvestigationType; -import org.icatproject.ids.integration.BaseTest; -import org.icatproject.ids.integration.util.Setup; -import org.icatproject.ids.integration.util.client.DataSelection; - -public class FileCheckerTest extends BaseTest { - - private static Path errorLog; - - @BeforeClass - public static void setup() throws Exception { - setup = new Setup("two.properties"); - icatsetup(); - errorLog = setup.getErrorLog(); - } - - @Test - public void everythingTest() throws Exception { - - Path fileOnArchiveStorage = getFileOnArchiveStorage(datasetIds.get(0)); - - testingClient.restore(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - waitForIds(); - - testingClient.delete(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - Long dfid = 0L; - for (int i = 0; i < 3; i++) { - dfid = testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_" + i, - datasetIds.get(0), supportedDatafileFormat.getId(), "A rather splendid datafile", 201); - } - - Datafile df = (Datafile) icatWS.get(sessionId, "Datafile INCLUDE 1", dfid); - - waitForIds(); - - Files.deleteIfExists(errorLog); - - Long fileSize = df.getFileSize(); - String checksum = df.getChecksum(); - - df.setFileSize(fileSize + 1); - icatWS.update(sessionId, df); - checkHas("Dataset", datasetIds.get(0), "file size wrong"); - - df.setFileSize(fileSize); - df.setChecksum("Aardvark"); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "checksum wrong"); - - df.setChecksum(null); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "checksum null"); - - df.setChecksum(checksum); - icatWS.update(sessionId, df); - Files.delete(fileOnArchiveStorage); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "/" + datasetIds.get(0)); - } - - @Test - public void badZip() throws Exception { - - Path fileOnArchiveStorage = getFileOnArchiveStorage(datasetIds.get(0)); - - testingClient.restore(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - waitForIds(); - - testingClient.delete(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - for (int i = 0; i < 3; i++) { - testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_" + i, datasetIds.get(0), - supportedDatafileFormat.getId(), "A rather splendid datafile", 201); - } - - long timestamp = System.currentTimeMillis(); - - Facility fac = new Facility(); - fac.setName("Facility_" + timestamp); - fac.setId(icatWS.create(sessionId, fac)); - - DatasetType dsType = new DatasetType(); - dsType.setFacility(fac); - dsType.setName("DatasetType_" + timestamp); - dsType.setId(icatWS.create(sessionId, dsType)); - - supportedDatafileFormat = new DatafileFormat(); - supportedDatafileFormat.setFacility(fac); - supportedDatafileFormat.setName("test_format"); - supportedDatafileFormat.setVersion("42.0.0"); - 
supportedDatafileFormat.setId(icatWS.create(sessionId, supportedDatafileFormat)); - - InvestigationType invType = new InvestigationType(); - invType.setName("Not null"); - invType.setFacility(fac); - invType.setId(icatWS.create(sessionId, invType)); - - Investigation inv = new Investigation(); - inv.setName("Investigation_" + timestamp); - inv.setType(invType); - inv.setTitle("Not null"); - inv.setFacility(fac); - inv.setVisitId("N/A"); - inv.setId(icatWS.create(sessionId, inv)); - investigationId = inv.getId(); - String invLoc = inv.getId() + "/"; - - for (int i = 0; i < 10; i++) { - Dataset ds1 = new Dataset(); - ds1.setName("ds1_" + i); - ds1.setLocation(invLoc + ds1.getId()); - ds1.setType(dsType); - ds1.setInvestigation(inv); - ds1.setId(icatWS.create(sessionId, ds1)); - - testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_1" + timestamp, - ds1.getId(), supportedDatafileFormat.getId(), null, 201); - - testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_2" + timestamp, - ds1.getId(), supportedDatafileFormat.getId(), null, 201); - } - - System.out.println("About to wait"); - waitForIds(); - - truncate(fileOnArchiveStorage, 300); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "java.io.EOFException Unexpected end of ZLIB input stream"); - - truncate(fileOnArchiveStorage, 0); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "zip file incomplete"); - - } - - private void truncate(Path fileOnArchiveStorage, int size) throws IOException { - System.out.println(fileOnArchiveStorage + " will be truncated to " + size); - InputStream is = Files.newInputStream(fileOnArchiveStorage); - Path t = Files.createTempFile(null, null); - OutputStream os = Files.newOutputStream(t); - - byte[] buf = new byte[10000]; - int off = 0; - int len = buf.length; - int n; - - while ((n = is.read(buf, off, len)) > 0) { - off += n; - len -= n; - } - os.write(buf, 0, size); - is.close(); - os.close(); - Files.move(t, fileOnArchiveStorage, StandardCopyOption.REPLACE_EXISTING); - } - - private void checkHas(String type, Long id, String message) throws IOException, InterruptedException { - Set lines = new HashSet(); - System.out.println("Watching " + errorLog); - while (!Files.exists(errorLog)) { - Thread.sleep(10); - } - for (String line : Files.readAllLines(errorLog, Charset.defaultCharset())) { - int n = line.indexOf(": ") + 2; - lines.add(line.substring(n)); - } - assertEquals(1, lines.size()); - String msg = new ArrayList(lines).get(0); - assertTrue(msg + ":" + type + " " + id, msg.startsWith(type + " " + id)); - assertTrue(msg + ":" + message, msg.endsWith(message)); - } - -} diff --git a/src/test/java/org/icatproject/ids/integration/twodf/FileCheckerTest.java b/src/test/java/org/icatproject/ids/integration/twodf/FileCheckerTest.java deleted file mode 100644 index 4eeec619..00000000 --- a/src/test/java/org/icatproject/ids/integration/twodf/FileCheckerTest.java +++ /dev/null @@ -1,100 +0,0 @@ -package org.icatproject.ids.integration.twodf; - -import java.io.IOException; -import java.nio.charset.Charset; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; - -import org.icatproject.Datafile; -import org.icatproject.ids.integration.BaseTest; -import 
org.icatproject.ids.integration.util.Setup; -import org.icatproject.ids.integration.util.client.DataSelection; - -public class FileCheckerTest extends BaseTest { - - private static Path errorLog; - - @BeforeClass - public static void setup() throws Exception { - setup = new Setup("twodf.properties"); - icatsetup(); - errorLog = setup.getErrorLog(); - } - - @Ignore("File checker not currently supported for storageUnit DATAFILE") - @Test - public void everythingTest() throws Exception { - - Path fileOnArchiveStorage = getFileOnArchiveStorage(datasetIds.get(0)); - testingClient.restore(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - waitForIds(); - - testingClient.delete(sessionId, new DataSelection().addDataset(datasetIds.get(0)), 204); - - Long dfid = 0L; - for (int i = 0; i < 3; i++) { - dfid = testingClient.put(sessionId, Files.newInputStream(newFileLocation), "uploaded_file_" + i, - datasetIds.get(0), supportedDatafileFormat.getId(), "A rather splendid datafile", 201); - } - Datafile df = (Datafile) icatWS.get(sessionId, "Datafile INCLUDE 1", dfid); - - waitForIds(); - Files.deleteIfExists(errorLog); - - Long fileSize = df.getFileSize(); - String checksum = df.getChecksum(); - - df.setFileSize(fileSize + 1); - icatWS.update(sessionId, df); - - checkHas("Dataset", datasetIds.get(0), "file size wrong"); - - df.setFileSize(null); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "file size null"); - - df.setFileSize(fileSize); - df.setChecksum("Aardvark"); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "checksum wrong"); - - df.setChecksum(null); - icatWS.update(sessionId, df); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "checksum null"); - - df.setChecksum(checksum); - icatWS.update(sessionId, df); - Files.delete(fileOnArchiveStorage); - Files.deleteIfExists(errorLog); - checkHas("Dataset", datasetIds.get(0), "/" + datasetIds.get(0)); - } - - private void checkHas(String type, Long id, String message) throws IOException, InterruptedException { - Set lines = new HashSet(); - while (!Files.exists(errorLog)) { - Thread.sleep(10); - } - for (String line : Files.readAllLines(errorLog, Charset.defaultCharset())) { - int n = line.indexOf(": ") + 2; - lines.add(line.substring(n)); - } - assertEquals(1, lines.size()); - String msg = new ArrayList(lines).get(0); - assertTrue(msg + ":" + type + " " + id, msg.startsWith(type + " " + id)); - assertTrue(msg + ":" + message, msg.endsWith(message)); - } - -} diff --git a/src/test/java/org/icatproject/ids/integration/util/Setup.java b/src/test/java/org/icatproject/ids/integration/util/Setup.java index cec1cb18..8d74494c 100644 --- a/src/test/java/org/icatproject/ids/integration/util/Setup.java +++ b/src/test/java/org/icatproject/ids/integration/util/Setup.java @@ -42,11 +42,6 @@ public String getStorageUnit() { return storageUnit; } - public Path getErrorLog() { - return errorLog; - } - - private Path errorLog; private String storageUnit; private boolean twoLevel; private String key; @@ -117,8 +112,6 @@ public Setup(String runPropertyFile) throws Exception { storageUnit = storageUnitString.toUpperCase(); } - errorLog = runProperties.getPath("filesCheck.errorLog"); - } public void setReliability(double d) throws IOException { diff --git a/src/test/resources/one.properties b/src/test/resources/one.properties index 7f41ecf2..04bd608a 100644 --- a/src/test/resources/one.properties +++ 
b/src/test/resources/one.properties @@ -11,11 +11,6 @@ reader = db username root password password key = wombat maxIdsInQuery = 1000 -filesCheck.parallelCount = 2 -filesCheck.gapSeconds = 3 -filesCheck.lastIdFile = ${HOME}/data/ids/lastIdFile -filesCheck.errorLog = ${HOME}/data/ids/errorLog - linkLifetimeSeconds = 3600 log.list = READ WRITE INFO LINK MIGRATE PREPARE diff --git a/src/test/resources/two.properties b/src/test/resources/two.properties index c5f2c608..0cb2fa7e 100644 --- a/src/test/resources/two.properties +++ b/src/test/resources/two.properties @@ -19,11 +19,6 @@ stopArchivingLevel1024bytes = 4000000 storageUnit = dataset tidyBlockSize = 500 -filesCheck.parallelCount = 2 -filesCheck.gapSeconds = 3 -filesCheck.lastIdFile = ${HOME}/data/ids/lastIdFile -filesCheck.errorLog = ${HOME}/data/ids/errorLog - linkLifetimeSeconds = 3600 log.list = READ WRITE INFO LINK MIGRATE PREPARE diff --git a/src/test/resources/twodf.properties b/src/test/resources/twodf.properties index 0c89c43e..30c2b0a6 100644 --- a/src/test/resources/twodf.properties +++ b/src/test/resources/twodf.properties @@ -19,11 +19,6 @@ stopArchivingLevel1024bytes = 4000000 storageUnit = datafile tidyBlockSize = 500 -filesCheck.parallelCount = 2 -filesCheck.gapSeconds = 3 -filesCheck.lastIdFile = ${HOME}/data/ids/lastIdFile -filesCheck.errorLog = ${HOME}/data/ids/errorLog - linkLifetimeSeconds = 3600 log.list = READ WRITE INFO LINK MIGRATE PREPARE