From 2e5f166437994f6df0fc95ef8323b1a94c716c09 Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 17 Nov 2025 09:54:24 -0800 Subject: [PATCH 1/7] Disallow negative sample amount --- .../assay/AbstractAssayTsvDataHandler.java | 4 +- .../api/assay/DefaultAssayRunCreator.java | 2556 +++++------ .../api/data/ColumnRenderPropertiesImpl.java | 1 + .../validator/AbstractColumnValidator.java | 3 +- .../api/data/validator/ColumnValidator.java | 66 +- .../api/data/validator/PropertyValidator.java | 10 +- .../api/dataiterator/ValidatorIterator.java | 31 +- .../api/exp/property/IPropertyValidator.java | 115 +- .../api/exp/property/ValidatorKind.java | 189 +- .../api/query/DefaultQueryUpdateService.java | 1906 ++++----- .../test/integration/SampleTypeCrud.ispec.ts | 123 +- .../labkey/experiment/ExpDataIterators.java | 4 +- .../experiment/api/ExpMaterialTableImpl.java | 3731 +++++++++-------- .../api/SampleTypeUpdateServiceDI.java | 23 + .../api/property/LengthValidator.java | 3 +- .../api/property/LookupValidator.java | 594 +-- .../api/property/PropertyValidatorImpl.java | 529 +-- .../api/property/RangeValidator.java | 273 +- .../api/property/RegExValidator.java | 249 +- .../api/property/TextChoiceValidator.java | 3 +- 20 files changed, 5313 insertions(+), 5100 deletions(-) diff --git a/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java b/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java index 9373ee83ee9..825f84d91f4 100644 --- a/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java +++ b/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java @@ -902,7 +902,7 @@ else if (o != remapped) { try { - String error = validator.validate(rowNum, o, validatorContext); + String error = validator.validate(rowNum, o, validatorContext, null); if (error != null) errors.add(new PropertyValidationError(error, pd.getName())); } @@ -1060,7 +1060,7 @@ else if (validatorMap.containsKey(pd)) { for (ColumnValidator validator : validatorMap.get(pd)) { - String error 
= validator.validate(rowNum, o, validatorContext); + String error = validator.validate(rowNum, o, validatorContext, null); if (error != null) errors.add(new PropertyValidationError(error, pd.getName())); } diff --git a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java index f7fbae5aaa8..06f4d4855f3 100644 --- a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java +++ b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java @@ -1,1278 +1,1278 @@ -/* - * Copyright (c) 2011-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.assay; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONArray; -import org.labkey.api.action.ApiUsageException; -import org.labkey.api.assay.actions.AssayRunUploadForm; -import org.labkey.api.assay.pipeline.AssayRunAsyncContext; -import org.labkey.api.assay.pipeline.AssayUploadPipelineJob; -import org.labkey.api.assay.sample.AssaySampleLookupContext; -import org.labkey.api.assay.transform.DataTransformService; -import org.labkey.api.assay.transform.TransformDataHandler; -import org.labkey.api.assay.transform.TransformResult; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.RemapCache; -import org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.XarContext; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataRunInput; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpMaterial; -import 
org.labkey.api.exp.api.ExpObject; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolApplication; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpRunItem; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ProvenanceService; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.Lookup; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.pipeline.PipelineValidationException; -import org.labkey.api.query.AbstractQueryUpdateService; -import org.labkey.api.query.BatchValidationException; -import org.labkey.api.query.PropertyValidationError; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.study.assay.ParticipantVisitResolver; -import org.labkey.api.study.assay.ParticipantVisitResolverType; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.ViewBackgroundInfo; -import org.labkey.api.writer.ContainerUser; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static java.util.Collections.unmodifiableCollection; 
-import static org.labkey.api.assay.AssayFileWriter.TEMP_DIR_NAME; - -public class DefaultAssayRunCreator implements AssayRunCreator -{ - private static final Logger LOG = LogManager.getLogger(DefaultAssayRunCreator.class); - public static final String CROSS_RUN_DATA_INPUT_ROLE = "cross run input"; - - private final ProviderType _provider; - - public DefaultAssayRunCreator(ProviderType provider) - { - _provider = provider; - } - - public TransformResult transform(AssayRunUploadContext context, ExpRun run) throws ValidationException - { - return DataTransformService.get().transformAndValidate(context, run, DataTransformService.TransformOperation.INSERT); - } - /** - * Create and save an experiment run synchronously or asynchronously in a background job depending upon the assay design. - * - * @param context The context used to create and save the batch and run. - * @param batchId if not null, the run group that's already created for this batch. If null, a new one will be created. - * @return Pair of batch and run that were inserted. ExpBatch will not be null, but ExpRun may be null when inserting the run async. - */ - @Override - public Pair saveExperimentRun( - AssayRunUploadContext context, - @Nullable Long batchId, - boolean forceAsync, - Map transactionDetails - ) throws ExperimentException, ValidationException - { - ExpExperiment exp = null; - if (batchId != null) - { - exp = ExperimentService.get().getExpExperiment(batchId); - } - - AssayProvider provider = context.getProvider(); - ExpProtocol protocol = context.getProtocol(); - ExpRun run = null; - - try (DbScope.Transaction transaction = ExperimentService.get().getSchema().getScope().ensureTransaction(ExperimentService.get().getProtocolImportLock())) - { - TransactionAuditProvider.TransactionAuditEvent auditEvent = transaction.getAuditEvent(); - if (auditEvent == null) - { - auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(context.getContainer(), context.getReRunId() == null ? 
QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT, transactionDetails); - AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); - } - context.init(); - // Check if assay protocol is configured to import in the background. - // Issue 26811: If we don't have a view, assume that we are on a background job thread already. - boolean importInBackground = forceAsync || (provider.isBackgroundUpload(protocol) && HttpView.hasCurrentView()); - if (!importInBackground) - { - if ((Object) context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE) instanceof File errFile) - { - throw new ClassCastException("FileLike expected: " + errFile + " context: " + context.getClass() + " " + context); - } - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); - run = AssayService.get().createExperimentRun(context.getName(), context.getContainer(), protocol, null == primaryFile ? 
null : primaryFile.toNioPathForRead().toFile()); - run.setComments(context.getComments()); - run.setWorkflowTaskId(context.getWorkflowTask()); - - exp = saveExperimentRun(context, exp, run, false, transactionDetails); - - // re-fetch the run after it has been fully constructed - run = ExperimentService.get().getExpRun(run.getRowId()); - - context.uploadComplete(run); - } - else - { - context.uploadComplete(null); - context.setTransactionAuditId(transaction.getAuditId()); - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportOptions, "BackgroundImport"); - exp = saveExperimentRunAsync(context, exp); - } - transaction.commit(); - } - - return Pair.of(exp, run); - } - - private ExpExperiment saveExperimentRunAsync(AssayRunUploadContext context, @Nullable ExpExperiment batch) throws ExperimentException - { - try - { - // Whether we need to save batch properties - boolean forceSaveBatchProps = false; - if (batch == null) - { - // No batch yet, so make one - batch = AssayService.get().createStandardBatch(context.getContainer(), null, context.getProtocol()); - batch.save(context.getUser()); - // It's brand new, so we need to eventually set its properties - forceSaveBatchProps = true; - } - - // Queue up a pipeline job to do the actual import in the background - ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); - - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - // Check if the primary file from the previous import is no longer present for a re-run - if (primaryFile == null && !context.getUploadedData().isEmpty()) - { - // Choose another file as the primary - primaryFile = 
context.getUploadedData().entrySet().iterator().next().getValue(); - } - Objects.requireNonNull(primaryFile); - AssayRunAsyncContext asyncContext = context.getProvider().createRunAsyncContext(context); - final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob<>( - asyncContext, - info, - batch, - forceSaveBatchProps, - PipelineService.get().getPipelineRootSetting(context.getContainer()), - primaryFile.toNioPathForRead().toFile() - ); - - context.setPipelineJobGUID(pipelineJob.getJobGUID()); - - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), null, pipelineJob.getJobGUID()); - resultsFileWriter.savePostedFiles(context); - - // Don't queue the job until the transaction is committed, since otherwise the thread - // that's running the job might start before it can access the job's row in the database. - ExperimentService.get().getSchema().getScope().addCommitTask(() -> { - try - { - PipelineService.get().queueJob(pipelineJob, asyncContext.getJobNotificationProvider()); - } - catch (PipelineValidationException e) - { - throw UnexpectedException.wrap(e); - } - }, DbScope.CommitTaskOption.POSTCOMMIT); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - - return batch; - } - - /** - * @param batch if not null, the run group that's already created for this batch. 
If null, a new one needs to be created - * @param run The run to save - * @return the run and batch that were inserted - */ - @Override - public ExpExperiment saveExperimentRun( - final AssayRunUploadContext context, - @Nullable ExpExperiment batch, - @NotNull ExpRun run, - boolean forceSaveBatchProps, - @Nullable Map transactionDetails - ) throws ExperimentException, ValidationException - { - context.setAutoFillDefaultResultColumns(run.getRowId() > 0); // need to setAutoFillDefaultResultColumns before run is saved - - final Container container = context.getContainer(); - - Map inputMaterials = new HashMap<>(); - Map inputDatas = new HashMap<>(); - Map outputMaterials = new HashMap<>(); - Map outputDatas = new HashMap<>(); - Map transformedDatas = new HashMap<>(); - - Map runProperties = context.getRunProperties(); - Map unresolvedRunProperties = context.getUnresolvedRunProperties(); - Map batchProperties = context.getBatchProperties(); - - Map allProperties = new HashMap<>(); - allProperties.putAll(runProperties); - allProperties.putAll(batchProperties); - - ParticipantVisitResolverType resolverType = null; - for (Map.Entry entry : allProperties.entrySet()) - { - if (entry.getKey().getName().equals(AbstractAssayProvider.PARTICIPANT_VISIT_RESOLVER_PROPERTY_NAME)) - { - resolverType = AbstractAssayProvider.findType(entry.getValue(), getProvider().getParticipantVisitResolverTypes()); - if (resolverType != null) - { - resolverType.configureRun(context, run, inputDatas); - } - break; - } - } - - // TODO: Share these RemapCache and materialCache instances with AbstractAssayTsvDataHandler.checkData and ExpressionMatrixDataHandler.importFile - // Cache of resolved alternate lookup keys -> rowId - final RemapCache cache = new RemapCache(true); - // Cache of rowId -> ExpMaterial - final Map materialCache = new LongHashMap<>(); - - addInputMaterials(context, inputMaterials, cache, materialCache); - addInputDatas(context, inputDatas); - addOutputMaterials(context, 
outputMaterials, cache, materialCache); - addOutputDatas(context, inputDatas, outputDatas); - - boolean success = false; - DbScope scope = ExperimentService.get().getSchema().getScope(); - try (DbScope.Transaction transaction = scope.ensureTransaction(ExperimentService.get().getProtocolImportLock())) - { - if (transaction.getAuditId() == null) - { - var auditAction = context.getReRunId() == null ? QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT; - if (context.getTransactionAuditId() != null) - { - var auditEvent = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, context.getTransactionAuditId()); - transaction.setAuditEvent(auditEvent); - } - else - { - var auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(container, auditAction, transactionDetails); - AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); - } - } - boolean saveBatchProps = forceSaveBatchProps; - - // Add any material/data inputs related to the specimen IDs, etc in the incoming data. 
- // Some subclasses may actually create ExpMaterials or do other database changes, so do this inside the - // overall transaction - resolveParticipantVisits(context, inputMaterials, inputDatas, outputMaterials, outputDatas, allProperties, resolverType); - - // Check for circular inputs/outputs - checkForCycles(inputMaterials, outputMaterials); - checkForCycles(inputDatas, outputDatas); - - // Create the batch, if needed - if (batch == null) - { - // Make sure that we have a batch to associate with this run - batch = AssayService.get().createStandardBatch(run.getContainer(), null, context.getProtocol()); - batch.save(context.getUser()); - saveBatchProps = true; - } - run.save(context.getUser()); - // Add the run to the batch so that we can find it when we're loading the data files - batch.addRuns(context.getUser(), run); - assert batch.equals(run.getBatch()) : "Run's batch should be the current batch"; - - ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); - XarContext xarContext = new AssayUploadXarContext("Simple Run Creation", context); - - run = ExperimentService.get().saveSimpleExperimentRun( - run, - inputMaterials, - inputDatas, - outputMaterials, - outputDatas, - transformedDatas, - info, - context.getLogger() != null ? 
context.getLogger() : LOG, - false - ); - - // handle data transformation - TransformResult transformResult = transform(context, run); - - if (transformResult.getWarnings() != null && context instanceof AssayRunUploadForm uploadForm) - { - context.setTransformResult(transformResult); - uploadForm.setName(run.getName()); - uploadForm.setComments(run.getComments()); - throw new ValidationException(" "); - } - - if (saveBatchProps) - saveProperties(context, batch, transformResult.getBatchProperties(), batchProperties); - if (null != transformResult.getAssayId()) - run.setName(transformResult.getAssayId()); - if (null != transformResult.getComments()) - run.setComments(transformResult.getComments()); - saveProperties(context, run, transformResult.getRunProperties(), runProperties); - - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); - resultsFileWriter.savePostedFiles(context); - - Path assayResultsRunDir = AssayResultsFileWriter.getAssayFilesDirectoryPath(run); - if (null != assayResultsRunDir && !FileUtil.hasCloudScheme(assayResultsRunDir)) - { - FileLike assayResultFileRoot = FileSystemLike.wrapFile(assayResultsRunDir); - if (assayResultFileRoot != null) - QueryService.get().setEnvironment(QueryService.Environment.ASSAYFILESPATH, assayResultFileRoot); - } - - importResultData(context, run, inputDatas, outputDatas, info, xarContext, transformResult); - - var reRunId = context.getReRunId(); - if (reRunId != null && getProvider().getReRunSupport() == AssayProvider.ReRunSupport.ReRunAndReplace) - { - final ExpRun replacedRun = ExperimentService.get().getExpRun(reRunId); - if (replacedRun == null) - throw new ExperimentException(String.format("Unable to find run to be replaced (RowId %d)", reRunId)); - - if (replacedRun.getContainer().hasPermission(context.getUser(), UpdatePermission.class)) - { - replacedRun.setReplacedByRun(run); - replacedRun.save(context.getUser()); - } - - String auditMessage = 
String.format("Run id %d was replaced by run id %d", replacedRun.getRowId(), run.getRowId()); - ExperimentService.get().auditRunEvent(context.getUser(), context.getProtocol(), replacedRun, null, auditMessage, context.getAuditUserComment()); - - transaction.addCommitTask(() -> replacedRun.archiveDataFiles(context.getUser()), DbScope.CommitTaskOption.POSTCOMMIT); - // Issue 51710: Remove replaced assay runs from the search index - transaction.addCommitTask(() -> AssayService.get().deindexAssayRuns(List.of(replacedRun)), DbScope.CommitTaskOption.POSTCOMMIT); - } - - AssayService.get().ensureUniqueBatchName(batch, context.getProtocol(), context.getUser()); - - ExperimentService.get().onRunDataCreated(context.getProtocol(), run, container, context.getUser()); - - transaction.commit(); - success = true; - - // Inspect the run properties for a “prov:objectInputs” property that is a list of LSID strings. - // Attach run's starting protocol application with starting input LSIDs. - Object provInputsProperty = unresolvedRunProperties.get(ProvenanceService.PROVENANCE_INPUT_PROPERTY); - if (provInputsProperty != null) - { - ProvenanceService pvs = ProvenanceService.get(); - Set runInputLSIDs = null; - if (provInputsProperty instanceof String provInputs) - { - // parse as a JSONArray of values or a comma-separated list of values - if (provInputs.startsWith("[") && provInputs.endsWith("]")) - provInputsProperty = new JSONArray(provInputs); - else - runInputLSIDs = Set.of(provInputs.split(",")); - } - - if (provInputsProperty instanceof JSONArray jsonArray) - { - runInputLSIDs = jsonArray.toList().stream() - .map(String::valueOf) - .collect(Collectors.toSet()); - } - - if (runInputLSIDs != null && !runInputLSIDs.isEmpty()) - { - ExpProtocolApplication inputProtocolApp = run.getInputProtocolApplication(); - pvs.addProvenanceInputs(container, inputProtocolApp, runInputLSIDs); - } - } - - ExperimentService.get().queueSyncRunEdges(run); - - return batch; - } - catch (IOException | 
ConvertHelper.FileConversionException | BatchValidationException e) - { - // HACK: Rethrowing these as ApiUsageException avoids any upstream consequences of wrapping them in ExperimentException. - // Namely, that they are logged to the server/mothership. There has to be a better way. - if (e instanceof ConvertHelper.FileConversionException fce) - throw new ApiUsageException(fce.getMessage(), fce); - else if (e instanceof BatchValidationException bve) - throw new ApiUsageException(bve.getMessage(), bve); - - throw new ExperimentException(e); - } - finally - { - if (!success) - { - // clean up the run results file dir here if it was created, for non-async imports - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); - resultsFileWriter.cleanupPostedFiles(context.getContainer(), false); - - cleanPrimaryFile(context); - } - } - } - - private void cleanPrimaryFile(AssayRunUploadContext context) throws ExperimentException - { - // Do not clear the primary file for run re-imports - if (context.getReRunId() != null) - return; - - try - { - // Issue 51300: don't keep the primary file if the new run failed to save - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - - // If the uploaded file is in the temp directory, then do not delete it as it may be reused in the next import attempt. 
- if (primaryFile != null && primaryFile.exists() && !primaryFile.getPath().contains(TEMP_DIR_NAME)) - primaryFile.delete(); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - } - - private void resolveParticipantVisits( - AssayRunUploadContext context, - Map inputMaterials, - Map inputDatas, - Map outputMaterials, - Map outputDatas, - Map allProperties, - @Nullable ParticipantVisitResolverType resolverType - ) throws ExperimentException - { - try - { - ParticipantVisitResolver resolver = null; - if (resolverType != null) - { - String targetStudyId = null; - for (Map.Entry property : allProperties.entrySet()) - { - if (AbstractAssayProvider.TARGET_STUDY_PROPERTY_NAME.equals(property.getKey().getName())) - { - targetStudyId = property.getValue(); - break; - } - } - Container targetStudy = null; - if (targetStudyId != null && !targetStudyId.isEmpty()) - targetStudy = ContainerManager.getForId(targetStudyId); - - resolver = resolverType.createResolver( - unmodifiableCollection(inputMaterials.keySet()), - unmodifiableCollection(inputDatas.keySet()), - unmodifiableCollection(outputMaterials.keySet()), - unmodifiableCollection(outputDatas.keySet()), - context.getContainer(), - targetStudy, context.getUser()); - } - - resolveExtraRunData(resolver, context, inputMaterials, inputDatas, outputMaterials, outputDatas); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - } - - protected void importStandardResultData( - AssayRunUploadContext context, - ExpRun run, - Map inputDatas, - Map outputDatas, - ViewBackgroundInfo info, - XarContext xarContext - ) throws ExperimentException, BatchValidationException - { - DataIteratorBuilder rawData = context.getRawData(); - List insertedDatas = new ArrayList<>(); - - if (rawData != null) - { - insertedDatas.addAll(outputDatas.keySet()); - - ExpData primaryData = null; - // Decide which file to treat as the primary, to which the data rows will be attached - for (Map.Entry entry : 
outputDatas.entrySet()) - { - if (ExpDataRunInput.DEFAULT_ROLE.equalsIgnoreCase(entry.getValue())) - { - primaryData = entry.getKey(); - } - } - if (primaryData == null && !insertedDatas.isEmpty()) - primaryData = insertedDatas.get(0); - - if (primaryData != null) - { - TsvDataHandler dataHandler = new TsvDataHandler(); - dataHandler.setAllowEmptyData(true); - dataHandler.importRows(primaryData, context.getUser(), run, context.getProtocol(), getProvider(), rawData, null, context.shouldAutoFillDefaultResultColumns(), context); - } - } - else - { - for (Map.Entry entry : inputDatas.entrySet()) - { - // skip any of the cross run inputData that are already in the outputData - if (CROSS_RUN_DATA_INPUT_ROLE.equals(entry.getValue())) - continue; - - insertedDatas.add(entry.getKey()); - } - - insertedDatas.addAll(outputDatas.keySet()); - - Logger logger = context.getLogger() != null ? context.getLogger() : LOG; - for (ExpData insertedData : insertedDatas) - { - ExperimentDataHandler dataHandler = insertedData.findDataHandler(); - - FileLike fileLike = FileSystemLike.wrapFile(insertedData.getFile()); - if (dataHandler instanceof AbstractAssayTsvDataHandler tsvHandler) - { - tsvHandler.importFile(insertedData, fileLike, info, logger, xarContext, context.isAllowLookupByAlternateKey(), context.shouldAutoFillDefaultResultColumns()); - } - else - { - dataHandler.importFile(insertedData, fileLike, info, logger, xarContext); - } - } - } - } - - private void importResultData( - AssayRunUploadContext context, - ExpRun run, - Map inputDatas, - Map outputDatas, - ViewBackgroundInfo info, - XarContext xarContext, - TransformResult transformResult - ) throws ExperimentException, BatchValidationException - { - if (transformResult.getTransformedData().isEmpty()) - { - importStandardResultData(context, run, inputDatas, outputDatas, info, xarContext); - return; - } - - DataType dataType = context.getProvider().getDataType(); - if (dataType == null) - { - // we know that we are importing 
transformed data at this point - dataType = TsvDataHandler.RELATED_TRANSFORM_FILE_DATA_TYPE; - } - - ExpData data = ExperimentService.get().createData(context.getContainer(), dataType); - ExperimentDataHandler handler = data.findDataHandler(); - - // this should assert to always be true - if (handler instanceof TransformDataHandler transformDataHandler) - { - for (Map.Entry entry : transformResult.getTransformedData().entrySet()) - { - ExpData expData = entry.getKey(); - // The object may have already been claimed by - if (expData.getSourceApplication() == null) - { - expData.setSourceApplication(run.getOutputProtocolApplication()); - } - expData.save(context.getUser()); - - run.getOutputProtocolApplication().addDataInput(context.getUser(), expData, ExpDataRunInput.IMPORTED_DATA_ROLE); - // Add to the cached list of outputs - run.getDataOutputs().add(expData); - - transformDataHandler.importTransformDataMap(expData, context, run, entry.getValue()); - } - } - } - - protected void addInputMaterials( - AssayRunUploadContext context, - Map inputMaterials, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - addMaterials(context, inputMaterials, context.getInputMaterials(), null, cache, materialCache); - - // Find lookups to a SampleType and add the resolved material as an input sample - for (Map.Entry entry : context.getRunProperties().entrySet()) - { - String value = StringUtils.trimToNull(entry.getValue()); - if (value == null) - continue; - - // Lookup must point at "Samples.*", "exp.materials.*", or "exp.Materials" - DomainProperty dp = entry.getKey(); - var sampleLookup = AssaySampleLookupContext.checkSampleLookup(context.getContainer(), context.getUser(), dp); - if (!sampleLookup.isLookup()) - continue; - - String role = AssayService.get().getPropertyInputLineageRole(dp); - addMaterials(context, inputMaterials, Map.of(value, role), sampleLookup.expSampleType(), cache, materialCache); - } - } - - 
protected void addInputDatas( - AssayRunUploadContext context, - @NotNull Map inputDatas - ) throws ExperimentException, ValidationException - { - Logger log = context.getLogger() != null ? context.getLogger() : LOG; - - Map inputs = context.getInputDatas(); - addDatas(context.getContainer(), inputDatas, inputs, log); - - // Inspect the uploaded files which will be added as outputs of the run - if (context.isAllowCrossRunFileInputs()) - { - Map files = context.getUploadedData(); - for (Map.Entry entry : files.entrySet()) - { - String key = entry.getKey(); - if (AssayDataCollector.PRIMARY_FILE.equals(key)) - { - FileLike file = entry.getValue(); - - // Check if the file is created by a run - // Don't use getExpDataByURL(String). That method expects string in a very particular format. - ExpData existingData = ExperimentService.get().getExpDataByURL(file.toNioPathForRead(), context.getContainer()); - if (existingData != null && existingData.getRunId() != null && !inputDatas.containsKey(existingData)) - { - // Add this file as an input to the run. When we add the outputs to the run, we will detect - // that this file was already added as an input and create a new exp.data for the same file - // path and attach it as an output. 
- log.debug("found existing cross run file input: name={}, rowId={}, dataFileUrl={}", existingData.getName(), existingData.getRowId(), existingData.getDataFileUrl()); - inputDatas.put(existingData, CROSS_RUN_DATA_INPUT_ROLE); - } - } - } - } - } - - // CONSIDER: Move this to ExperimentService - // Resolve submitted values into ExpData objects - protected void addDatas(Container c, @NotNull Map resolved, @NotNull Map unresolved, @Nullable Logger log) throws ValidationException - { - for (Map.Entry entry : unresolved.entrySet()) - { - Object o = entry.getKey(); - String role = entry.getValue(); - - if (o instanceof ExpData expData) - { - resolved.put(expData, role); - } - else - { - File file = ExpDataFileConverter.convert(o); - if (file != null) - { - ExpData data = ExperimentService.get().getExpDataByURL(file, c); - if (data == null) - { - DataType dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - data = createData(c, file, file.getName(), dataType, false, true, log); - } - - resolved.put(data, role); - } - } - } - } - - public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData) throws ValidationException - { - return generateResultData(user, container, provider, dataArray, outputData, null); - } - - public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData, @Nullable Logger log) throws ValidationException - { - if (log == null) - log = LOG; - - ExpData newData = null; - - // Don't create an empty result data file if there are other outputs from this run, or if the user didn't - // include any data rows - if (!dataArray.isEmpty() && outputData.isEmpty()) - { - DataType dataType = provider.getDataType(); - if (dataType == null) - dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - - newData = createData(container, "Analysis Results", dataType, log); - newData.save(user); - outputData.put(newData, 
ExpDataRunInput.DEFAULT_ROLE); - } - - return newData; - } - - // Find an existing ExpData for the File or null. - public static @Nullable ExpData findExistingData(Container c, @Nullable File file, @Nullable Logger log) - { - if (file == null) - return null; - - if (log == null) - log = LOG; - - List existing = ExperimentService.get().getAllExpDataByURL(file, c); - if (!existing.isEmpty()) - { - for (ExpData d : existing) - { - log.debug("found existing exp.data for file, rowId={}, runId={}, dataFileUrl={}", d.getRowId(), d.getRunId(), d.getDataFileUrl()); - } - - // pick the most recently created one - return existing.get(0); - } - - return null; - } - - public static @NotNull ExpData createData(Container c, String name, @NotNull DataType dataType, @Nullable Logger log) throws ValidationException - { - // NOTE: reuseExistingData and errorOnDataOwned flags are irrelevant when we aren't providing a File - return createData(c, null, name, dataType, false, false, log); - } - - public static @NotNull ExpData createData( - Container c, - File file, - String name, - @Nullable DataType dataType, - boolean reuseExistingData, - boolean errorIfDataOwned, - @Nullable Logger log - ) throws ValidationException - { - if (log == null) - log = LOG; - - ExpData data = findExistingData(c, file, log); - - ExpRun previousRun; - if (data != null && null != (previousRun = data.getRun())) - { - // There's an existing data, but it's already marked as being created by another run - String msg = "File '" + data.getName() + "' has been previously imported in run '" + previousRun.getName() + "' (" + previousRun.getRowId() + ")"; - if (reuseExistingData && errorIfDataOwned) - throw new ValidationException(msg); - - log.debug(msg); - - // Create a new one for the same path so the new run can claim it as its own - if (!reuseExistingData) - { - log.debug("ignoring existing exp.data, will create a new one"); - data = null; - } - } - - if (data == null) - { - if (dataType == null) - dataType = 
AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - - log.debug("creating assay exp.data for file. dataType={}, file={}", dataType.getNamespacePrefix(), file); - data = ExperimentService.get().createData(c, dataType, name); - data.setLSID(ExperimentService.get().generateGuidLSID(c, dataType)); - if (file != null) - { - data.setDataFileURI(FileUtil.getAbsoluteCaseSensitiveFile(file).toURI()); - } - } - else - { - if (dataType != null && !dataType.matches(new Lsid(data.getLSID()))) - { - // Reset its LSID so that it's the correct type // CONSIDER: creating a new ExpData with the correct type instead - String newLsid = ExperimentService.get().generateGuidLSID(c, dataType); - log.debug("LSID doesn't match desired type. Changed the LSID from '{}' to '{}'", data.getLSID(), newLsid); - data.setLSID(newLsid); - } - } - return data; - } - - protected void addOutputMaterials( - AssayRunUploadContext context, - Map outputMaterials, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - addMaterials(context, outputMaterials, context.getOutputMaterials(), null, cache, materialCache); - } - - protected void addMaterials( - AssayRunUploadContext context, - @NotNull Map resolved, - @NotNull Map unresolved, - @Nullable ExpSampleType sampleType, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - for (Map.Entry entry : unresolved.entrySet()) - { - Object sampleIdentifier = entry.getKey(); - ExpMaterial material = ExperimentService.get().findExpMaterial(context.getContainer(), context.getUser(), sampleIdentifier, sampleType, cache, materialCache); - if (material == null) - throw new ExperimentException("Unable to resolve sample: " + sampleIdentifier); - - if (!resolved.containsKey(material)) - { - if (!material.isOperationPermitted(SampleTypeService.SampleOperations.AddAssayData)) - throw new 
ExperimentException(SampleTypeService.get().getOperationNotPermittedMessage(Collections.singleton(material), SampleTypeService.SampleOperations.AddAssayData)); - if (sampleType == null || sampleType.getLSID().equals(material.getCpasType())) - resolved.put(material, entry.getValue()); - } - } - } - - protected void addOutputDatas( - AssayRunUploadContext context, - Map inputDatas, - Map outputDatas - ) throws ExperimentException, ValidationException - { - Logger log = context.getLogger() != null ? context.getLogger() : LOG; - - // Create set of existing input files - Set inputFiles = new HashSet<>(); - for (ExpData inputData : inputDatas.keySet()) - { - FileLike f = inputData.getFileLike(); - if (f != null) - inputFiles.add(f); - } - - Map files = context.getUploadedData(); - - AssayDataType dataType; - for (Map.Entry entry : files.entrySet()) - { - FileLike file = entry.getValue(); - dataType = context.getProvider().getDataType(); - - // Reuse existing exp.data as the assay output file unless: - // - we are re-importing the run - // - or the output file is already one of the input files and if we are allowing cross-run file inputs - boolean reuseExistingData = true; - if (context.getReRunId() != null) - reuseExistingData = false; - if (context.isAllowCrossRunFileInputs() && inputFiles.contains(file)) - reuseExistingData = false; - - // For Luminex re-import, we want to reuse the existing exp.data but not - // throw an error when we discover that the exp.data is already owned. The - // original run will be duplicated for re-import and then will be deleted. 
- boolean errorIfDataOwned = getProvider().getReRunSupport() != AssayProvider.ReRunSupport.ReRunAndDelete; - - log.debug("adding output data: file={}", file.toNioPathForRead()); - log.debug(" context.getReRunId()={}", context.getReRunId()); - log.debug(" provider.getReRunSupport()={}", getProvider().getReRunSupport()); - log.debug(" context.allowCrossRunFileInputs={}", context.isAllowCrossRunFileInputs()); - log.debug(" inputFiles.contains(file)={}", inputFiles.contains(file)); - log.debug("==> reuseExistingData = {}", reuseExistingData); - log.debug("==> errorIfDataOwned = {}", errorIfDataOwned); - - ExpData data = DefaultAssayRunCreator.createData(context.getContainer(), file.toNioPathForRead().toFile(), file.getName(), dataType, reuseExistingData, errorIfDataOwned, log); - String role = ExpDataRunInput.DEFAULT_ROLE; - if (dataType != null && dataType.getFileType().isType(file)) - { - if (dataType.getRole() != null) - { - role = dataType.getRole(); - } - } - outputDatas.put(data, role); - } - - FileLike primaryFile = files.get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - { - addRelatedOutputDatas(context, inputFiles, outputDatas, primaryFile); - } - - Map outputs = context.getOutputDatas(); - addDatas(context.getContainer(), outputDatas, outputs, log); - } - - /** - * Add files that follow the general naming convention (same basename) as the primary file - */ - public void addRelatedOutputDatas( - AssayRunUploadContext context, - Set inputFiles, - Map outputDatas, - final FileLike primaryFile - ) throws ValidationException - { - AssayDataType dataType = getProvider().getDataType(); - final String baseName = dataType == null ? 
null : dataType.getFileType().getBaseName(primaryFile.toNioPathForRead()); - if (baseName != null) - { - // Grab all the files that are related based on naming convention - File primary = primaryFile.toNioPathForRead().toFile(); - File parent = primary.getParentFile(); - // converting to File land to reuse the FileFilter - File[] relatedFiles = parent.listFiles(getRelatedOutputDataFileFilter(primary, baseName)); - if (relatedFiles != null) - { - for (File f : relatedFiles) - { - FileLike relatedFile = primaryFile.getParent().resolveChild(f.getName()); - // Ignore files already considered inputs to the run - if (inputFiles.contains(relatedFile)) - continue; - - Pair dataOutput = createdRelatedOutputData(context, baseName, f); - if (dataOutput != null) - { - outputDatas.put(dataOutput.getKey(), dataOutput.getValue()); - } - } - } - } - } - - protected void resolveExtraRunData( - ParticipantVisitResolver resolver, - AssayRunUploadContext context, - Map inputMaterials, - Map inputDatas, - Map outputMaterials, - Map outputDatas - ) throws ExperimentException - { - } - - /** - * Create an ExpData object for the file, and figure out what its role name should be - * @return null if the file is already linked to another run - */ - @Nullable - public static Pair createdRelatedOutputData(AssayRunUploadContext context, String baseName, File relatedFile) throws ValidationException - { - String roleName = null; - DataType dataType = null; - for (AssayDataType inputType : context.getProvider().getRelatedDataTypes()) - { - // Check if we recognize it as a specially handled file type - if (inputType.getFileType().isMatch(relatedFile.getName(), baseName)) - { - roleName = inputType.getRole(); - dataType = inputType; - break; - } - } - // If not, make up a new type and role for it - if (roleName == null) - { - roleName = relatedFile.getName().substring(baseName.length()); - while (!roleName.isEmpty() && (roleName.startsWith(".") || roleName.startsWith("-") || roleName.startsWith("_") 
|| roleName.startsWith(" "))) - { - roleName = roleName.substring(1); - } - if (roleName.isEmpty()) - { - roleName = null; - } - } - if (dataType == null) - { - dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - } - - // Find an existing data that isn't owned by another run or create a new own - ExpData data = findExistingData(context.getContainer(), relatedFile, context.getLogger()); - if (data != null) - { - if (data.getSourceApplication() == null) - return new Pair<>(data, roleName); - - // The file is already linked to another run, so this one must have not created it - return null; - } - - data = createData(context.getContainer(), relatedFile, relatedFile.getName(), dataType, true, true, context.getLogger()); - assert data.getSourceApplication() == null; - return Pair.of(data, roleName); - } - - // Disallow creating a run with inputs which are also outputs - protected void checkForCycles( - Map inputs, - Map outputs - ) throws ExperimentException - { - for (ExpRunItem input : inputs.keySet()) - { - if (outputs.containsKey(input)) - { - String role = outputs.get(input); - throw new ExperimentException("Circular input/output '" + input.getName() + "' with role '" + role + "'"); - } - } - } - - private void saveProperties( - final AssayRunUploadContext context, - ExpObject expObject, - Map transformResultProperties, - Map properties - ) throws ValidationException - { - Map propsToSave = transformResultProperties.isEmpty() ? 
properties : transformResultProperties; - List errors = validateProperties(context, propsToSave); - if (!errors.isEmpty()) - throw new ValidationException(errors); - - savePropertyObject(expObject, propsToSave, context.getUser()); - } - - protected void savePropertyObject(ExpObject object, Map properties, User user) throws ValidationException - { - for (Map.Entry entry : properties.entrySet()) - { - DomainProperty pd = entry.getKey(); - String value = entry.getValue(); - - // resolve any file links for batch or run properties - if (PropertyType.FILE_LINK.getTypeUri().equals(pd.getType().getTypeURI())) - { - File resolvedFile = ExpDataFileConverter.convert(value); - if (resolvedFile != null) - value = resolvedFile.getAbsolutePath(); - } - - // Treat the empty string as a null in the database, which is our normal behavior when receiving data - // from HTML forms. - if (StringUtils.trimToNull(value) == null) - { - value = null; - } - if (value != null) - { - object.setProperty(user, pd.getPropertyDescriptor(), value); - } - else - { - // We still need to validate blanks - List errors = new ArrayList<>(); - OntologyManager.validateProperty(pd.getValidators(), pd.getPropertyDescriptor(), new ObjectProperty(object.getLSID(), object.getContainer(), pd.getPropertyDescriptor(), value), errors, new ValidatorContext(pd.getContainer(), user)); - if (!errors.isEmpty()) - throw new ValidationException(errors); - } - } - } - - public static List validateColumnProperties(ContainerUser context, Map properties) - { - List errors = new ArrayList<>(); - RemapCache cache = new RemapCache(); - for (Map.Entry entry : properties.entrySet()) - { - validateProperty(context, entry.getKey(), entry.getValue(), cache, errors); - } - return errors; - } - - public static List validateProperties(ContainerUser context, Map properties) - { - List errors = new ArrayList<>(); - RemapCache cache = new RemapCache(); - for (Map.Entry entry : properties.entrySet()) - { - validateProperty(context, 
entry.getKey(), entry.getValue(), cache, errors); - } - return errors; - } - - private static void validateProperty(ContainerUser context, ColumnInfo columnInfo, String value, RemapCache cache, List errors) - { - Lookup lookup = null; - if (columnInfo.isLookup()) - { - ForeignKey fk = columnInfo.getFk(); - lookup = new Lookup(fk.getLookupContainer(), fk.getLookupSchemaName(), fk.getLookupTableName()); - } - validateProperty(context, ColumnValidators.create(columnInfo, null), value, columnInfo.getName(), - false, lookup, columnInfo.getJavaClass(), cache, errors); - } - - private static void validateProperty(ContainerUser context, DomainProperty dp, String value, RemapCache cache, List errors) - { - String label = dp.getPropertyDescriptor().getNonBlankCaption(); - PropertyType type = dp.getPropertyDescriptor().getPropertyType(); - validateProperty(context, ColumnValidators.create(null, dp), value, label, dp.isRequired(), - dp.getLookup(), type.getJavaType(), cache, errors); - } - - private static void validateProperty( - ContainerUser context, - List validators, - String value, - String label, - Boolean required, - Lookup lookup, - Class type, - RemapCache cache, - List errors - ) - { - boolean missing = (value == null || value.isEmpty()); - int rowNum = 0; - - if (required && missing) - { - errors.add(new SimpleValidationError(label + " is required and must be of type " + ColumnInfo.getFriendlyTypeName(type) + ".")); - } - else if (!missing) - { - try - { - Object o; - if (type == File.class) - o = ExpDataFileConverter.convert(value); - else - o = ConvertUtils.convert(value, type); - ValidatorContext validatorContext = new ValidatorContext(context.getContainer(), context.getUser()); - for (ColumnValidator validator : validators) - { - String msg = validator.validate(rowNum, o, validatorContext); - if (msg != null) - errors.add(new PropertyValidationError(msg, label)); - } - } - catch (ConversionException e) - { - String message; - if (e instanceof 
ConvertHelper.FileConversionException fce) - message = fce.getMessage(); - else - { - message = ConvertHelper.getStandardConversionErrorMessage(value, label, type); - if (e.getCause() instanceof ArithmeticException) - message += ": " + e.getCause().getLocalizedMessage(); - else - message += "."; - } - - // Attempt to resolve lookups by display value - boolean skipError = false; - if (lookup != null) - { - Object remappedValue = OntologyManager.getRemappedValueForLookup(context.getUser(), context.getContainer(), cache, lookup, value); - if (remappedValue != null) - skipError = true; - } - - if (!skipError) - errors.add(new SimpleValidationError(message)); - } - } - } - - protected FileFilter getRelatedOutputDataFileFilter(final File primaryFile, final String baseName) - { - // baseName doesn't include the trailing '.', so add it here. We want to associate myRun.jpg - // with myRun.xls, but we don't want to associate myRun2.xls with myRun.xls (which will happen without - // the trailing dot in the check). - return f -> f.getName().startsWith(baseName + ".") && !primaryFile.equals(f); - } - - protected ProviderType getProvider() - { - return _provider; - } -} +/* + * Copyright (c) 2011-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.assay; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONArray; +import org.labkey.api.action.ApiUsageException; +import org.labkey.api.assay.actions.AssayRunUploadForm; +import org.labkey.api.assay.pipeline.AssayRunAsyncContext; +import org.labkey.api.assay.pipeline.AssayUploadPipelineJob; +import org.labkey.api.assay.sample.AssaySampleLookupContext; +import org.labkey.api.assay.transform.DataTransformService; +import org.labkey.api.assay.transform.TransformDataHandler; +import org.labkey.api.assay.transform.TransformResult; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.RemapCache; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.XarContext; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataRunInput; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpMaterial; +import 
org.labkey.api.exp.api.ExpObject; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolApplication; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpRunItem; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ProvenanceService; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.Lookup; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.pipeline.PipelineValidationException; +import org.labkey.api.query.AbstractQueryUpdateService; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.PropertyValidationError; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.study.assay.ParticipantVisitResolver; +import org.labkey.api.study.assay.ParticipantVisitResolverType; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.api.writer.ContainerUser; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.io.FileFilter; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableCollection; 
+import static org.labkey.api.assay.AssayFileWriter.TEMP_DIR_NAME; + +public class DefaultAssayRunCreator implements AssayRunCreator +{ + private static final Logger LOG = LogManager.getLogger(DefaultAssayRunCreator.class); + public static final String CROSS_RUN_DATA_INPUT_ROLE = "cross run input"; + + private final ProviderType _provider; + + public DefaultAssayRunCreator(ProviderType provider) + { + _provider = provider; + } + + public TransformResult transform(AssayRunUploadContext context, ExpRun run) throws ValidationException + { + return DataTransformService.get().transformAndValidate(context, run, DataTransformService.TransformOperation.INSERT); + } + /** + * Create and save an experiment run synchronously or asynchronously in a background job depending upon the assay design. + * + * @param context The context used to create and save the batch and run. + * @param batchId if not null, the run group that's already created for this batch. If null, a new one will be created. + * @return Pair of batch and run that were inserted. ExpBatch will not be null, but ExpRun may be null when inserting the run async. + */ + @Override + public Pair saveExperimentRun( + AssayRunUploadContext context, + @Nullable Long batchId, + boolean forceAsync, + Map transactionDetails + ) throws ExperimentException, ValidationException + { + ExpExperiment exp = null; + if (batchId != null) + { + exp = ExperimentService.get().getExpExperiment(batchId); + } + + AssayProvider provider = context.getProvider(); + ExpProtocol protocol = context.getProtocol(); + ExpRun run = null; + + try (DbScope.Transaction transaction = ExperimentService.get().getSchema().getScope().ensureTransaction(ExperimentService.get().getProtocolImportLock())) + { + TransactionAuditProvider.TransactionAuditEvent auditEvent = transaction.getAuditEvent(); + if (auditEvent == null) + { + auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(context.getContainer(), context.getReRunId() == null ? 
QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT, transactionDetails); + AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); + } + context.init(); + // Check if assay protocol is configured to import in the background. + // Issue 26811: If we don't have a view, assume that we are on a background job thread already. + boolean importInBackground = forceAsync || (provider.isBackgroundUpload(protocol) && HttpView.hasCurrentView()); + if (!importInBackground) + { + if ((Object) context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE) instanceof File errFile) + { + throw new ClassCastException("FileLike expected: " + errFile + " context: " + context.getClass() + " " + context); + } + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); + run = AssayService.get().createExperimentRun(context.getName(), context.getContainer(), protocol, null == primaryFile ? 
null : primaryFile.toNioPathForRead().toFile()); + run.setComments(context.getComments()); + run.setWorkflowTaskId(context.getWorkflowTask()); + + exp = saveExperimentRun(context, exp, run, false, transactionDetails); + + // re-fetch the run after it has been fully constructed + run = ExperimentService.get().getExpRun(run.getRowId()); + + context.uploadComplete(run); + } + else + { + context.uploadComplete(null); + context.setTransactionAuditId(transaction.getAuditId()); + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportOptions, "BackgroundImport"); + exp = saveExperimentRunAsync(context, exp); + } + transaction.commit(); + } + + return Pair.of(exp, run); + } + + private ExpExperiment saveExperimentRunAsync(AssayRunUploadContext context, @Nullable ExpExperiment batch) throws ExperimentException + { + try + { + // Whether we need to save batch properties + boolean forceSaveBatchProps = false; + if (batch == null) + { + // No batch yet, so make one + batch = AssayService.get().createStandardBatch(context.getContainer(), null, context.getProtocol()); + batch.save(context.getUser()); + // It's brand new, so we need to eventually set its properties + forceSaveBatchProps = true; + } + + // Queue up a pipeline job to do the actual import in the background + ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); + + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + // Check if the primary file from the previous import is no longer present for a re-run + if (primaryFile == null && !context.getUploadedData().isEmpty()) + { + // Choose another file as the primary + primaryFile = 
context.getUploadedData().entrySet().iterator().next().getValue(); + } + Objects.requireNonNull(primaryFile); + AssayRunAsyncContext asyncContext = context.getProvider().createRunAsyncContext(context); + final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob<>( + asyncContext, + info, + batch, + forceSaveBatchProps, + PipelineService.get().getPipelineRootSetting(context.getContainer()), + primaryFile.toNioPathForRead().toFile() + ); + + context.setPipelineJobGUID(pipelineJob.getJobGUID()); + + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), null, pipelineJob.getJobGUID()); + resultsFileWriter.savePostedFiles(context); + + // Don't queue the job until the transaction is committed, since otherwise the thread + // that's running the job might start before it can access the job's row in the database. + ExperimentService.get().getSchema().getScope().addCommitTask(() -> { + try + { + PipelineService.get().queueJob(pipelineJob, asyncContext.getJobNotificationProvider()); + } + catch (PipelineValidationException e) + { + throw UnexpectedException.wrap(e); + } + }, DbScope.CommitTaskOption.POSTCOMMIT); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + + return batch; + } + + /** + * @param batch if not null, the run group that's already created for this batch. 
If null, a new one needs to be created + * @param run The run to save + * @return the run and batch that were inserted + */ + @Override + public ExpExperiment saveExperimentRun( + final AssayRunUploadContext context, + @Nullable ExpExperiment batch, + @NotNull ExpRun run, + boolean forceSaveBatchProps, + @Nullable Map transactionDetails + ) throws ExperimentException, ValidationException + { + context.setAutoFillDefaultResultColumns(run.getRowId() > 0); // need to setAutoFillDefaultResultColumns before run is saved + + final Container container = context.getContainer(); + + Map inputMaterials = new HashMap<>(); + Map inputDatas = new HashMap<>(); + Map outputMaterials = new HashMap<>(); + Map outputDatas = new HashMap<>(); + Map transformedDatas = new HashMap<>(); + + Map runProperties = context.getRunProperties(); + Map unresolvedRunProperties = context.getUnresolvedRunProperties(); + Map batchProperties = context.getBatchProperties(); + + Map allProperties = new HashMap<>(); + allProperties.putAll(runProperties); + allProperties.putAll(batchProperties); + + ParticipantVisitResolverType resolverType = null; + for (Map.Entry entry : allProperties.entrySet()) + { + if (entry.getKey().getName().equals(AbstractAssayProvider.PARTICIPANT_VISIT_RESOLVER_PROPERTY_NAME)) + { + resolverType = AbstractAssayProvider.findType(entry.getValue(), getProvider().getParticipantVisitResolverTypes()); + if (resolverType != null) + { + resolverType.configureRun(context, run, inputDatas); + } + break; + } + } + + // TODO: Share these RemapCache and materialCache instances with AbstractAssayTsvDataHandler.checkData and ExpressionMatrixDataHandler.importFile + // Cache of resolved alternate lookup keys -> rowId + final RemapCache cache = new RemapCache(true); + // Cache of rowId -> ExpMaterial + final Map materialCache = new LongHashMap<>(); + + addInputMaterials(context, inputMaterials, cache, materialCache); + addInputDatas(context, inputDatas); + addOutputMaterials(context, 
outputMaterials, cache, materialCache); + addOutputDatas(context, inputDatas, outputDatas); + + boolean success = false; + DbScope scope = ExperimentService.get().getSchema().getScope(); + try (DbScope.Transaction transaction = scope.ensureTransaction(ExperimentService.get().getProtocolImportLock())) + { + if (transaction.getAuditId() == null) + { + var auditAction = context.getReRunId() == null ? QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT; + if (context.getTransactionAuditId() != null) + { + var auditEvent = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, context.getTransactionAuditId()); + transaction.setAuditEvent(auditEvent); + } + else + { + var auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(container, auditAction, transactionDetails); + AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); + } + } + boolean saveBatchProps = forceSaveBatchProps; + + // Add any material/data inputs related to the specimen IDs, etc in the incoming data. 
+ // Some subclasses may actually create ExpMaterials or do other database changes, so do this inside the + // overall transaction + resolveParticipantVisits(context, inputMaterials, inputDatas, outputMaterials, outputDatas, allProperties, resolverType); + + // Check for circular inputs/outputs + checkForCycles(inputMaterials, outputMaterials); + checkForCycles(inputDatas, outputDatas); + + // Create the batch, if needed + if (batch == null) + { + // Make sure that we have a batch to associate with this run + batch = AssayService.get().createStandardBatch(run.getContainer(), null, context.getProtocol()); + batch.save(context.getUser()); + saveBatchProps = true; + } + run.save(context.getUser()); + // Add the run to the batch so that we can find it when we're loading the data files + batch.addRuns(context.getUser(), run); + assert batch.equals(run.getBatch()) : "Run's batch should be the current batch"; + + ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); + XarContext xarContext = new AssayUploadXarContext("Simple Run Creation", context); + + run = ExperimentService.get().saveSimpleExperimentRun( + run, + inputMaterials, + inputDatas, + outputMaterials, + outputDatas, + transformedDatas, + info, + context.getLogger() != null ? 
context.getLogger() : LOG, + false + ); + + // handle data transformation + TransformResult transformResult = transform(context, run); + + if (transformResult.getWarnings() != null && context instanceof AssayRunUploadForm uploadForm) + { + context.setTransformResult(transformResult); + uploadForm.setName(run.getName()); + uploadForm.setComments(run.getComments()); + throw new ValidationException(" "); + } + + if (saveBatchProps) + saveProperties(context, batch, transformResult.getBatchProperties(), batchProperties); + if (null != transformResult.getAssayId()) + run.setName(transformResult.getAssayId()); + if (null != transformResult.getComments()) + run.setComments(transformResult.getComments()); + saveProperties(context, run, transformResult.getRunProperties(), runProperties); + + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); + resultsFileWriter.savePostedFiles(context); + + Path assayResultsRunDir = AssayResultsFileWriter.getAssayFilesDirectoryPath(run); + if (null != assayResultsRunDir && !FileUtil.hasCloudScheme(assayResultsRunDir)) + { + FileLike assayResultFileRoot = FileSystemLike.wrapFile(assayResultsRunDir); + if (assayResultFileRoot != null) + QueryService.get().setEnvironment(QueryService.Environment.ASSAYFILESPATH, assayResultFileRoot); + } + + importResultData(context, run, inputDatas, outputDatas, info, xarContext, transformResult); + + var reRunId = context.getReRunId(); + if (reRunId != null && getProvider().getReRunSupport() == AssayProvider.ReRunSupport.ReRunAndReplace) + { + final ExpRun replacedRun = ExperimentService.get().getExpRun(reRunId); + if (replacedRun == null) + throw new ExperimentException(String.format("Unable to find run to be replaced (RowId %d)", reRunId)); + + if (replacedRun.getContainer().hasPermission(context.getUser(), UpdatePermission.class)) + { + replacedRun.setReplacedByRun(run); + replacedRun.save(context.getUser()); + } + + String auditMessage = 
String.format("Run id %d was replaced by run id %d", replacedRun.getRowId(), run.getRowId()); + ExperimentService.get().auditRunEvent(context.getUser(), context.getProtocol(), replacedRun, null, auditMessage, context.getAuditUserComment()); + + transaction.addCommitTask(() -> replacedRun.archiveDataFiles(context.getUser()), DbScope.CommitTaskOption.POSTCOMMIT); + // Issue 51710: Remove replaced assay runs from the search index + transaction.addCommitTask(() -> AssayService.get().deindexAssayRuns(List.of(replacedRun)), DbScope.CommitTaskOption.POSTCOMMIT); + } + + AssayService.get().ensureUniqueBatchName(batch, context.getProtocol(), context.getUser()); + + ExperimentService.get().onRunDataCreated(context.getProtocol(), run, container, context.getUser()); + + transaction.commit(); + success = true; + + // Inspect the run properties for a “prov:objectInputs” property that is a list of LSID strings. + // Attach run's starting protocol application with starting input LSIDs. + Object provInputsProperty = unresolvedRunProperties.get(ProvenanceService.PROVENANCE_INPUT_PROPERTY); + if (provInputsProperty != null) + { + ProvenanceService pvs = ProvenanceService.get(); + Set runInputLSIDs = null; + if (provInputsProperty instanceof String provInputs) + { + // parse as a JSONArray of values or a comma-separated list of values + if (provInputs.startsWith("[") && provInputs.endsWith("]")) + provInputsProperty = new JSONArray(provInputs); + else + runInputLSIDs = Set.of(provInputs.split(",")); + } + + if (provInputsProperty instanceof JSONArray jsonArray) + { + runInputLSIDs = jsonArray.toList().stream() + .map(String::valueOf) + .collect(Collectors.toSet()); + } + + if (runInputLSIDs != null && !runInputLSIDs.isEmpty()) + { + ExpProtocolApplication inputProtocolApp = run.getInputProtocolApplication(); + pvs.addProvenanceInputs(container, inputProtocolApp, runInputLSIDs); + } + } + + ExperimentService.get().queueSyncRunEdges(run); + + return batch; + } + catch (IOException | 
ConvertHelper.FileConversionException | BatchValidationException e) + { + // HACK: Rethrowing these as ApiUsageException avoids any upstream consequences of wrapping them in ExperimentException. + // Namely, that they are logged to the server/mothership. There has to be a better way. + if (e instanceof ConvertHelper.FileConversionException fce) + throw new ApiUsageException(fce.getMessage(), fce); + else if (e instanceof BatchValidationException bve) + throw new ApiUsageException(bve.getMessage(), bve); + + throw new ExperimentException(e); + } + finally + { + if (!success) + { + // clean up the run results file dir here if it was created, for non-async imports + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); + resultsFileWriter.cleanupPostedFiles(context.getContainer(), false); + + cleanPrimaryFile(context); + } + } + } + + private void cleanPrimaryFile(AssayRunUploadContext context) throws ExperimentException + { + // Do not clear the primary file for run re-imports + if (context.getReRunId() != null) + return; + + try + { + // Issue 51300: don't keep the primary file if the new run failed to save + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + + // If the uploaded file is in the temp directory, then do not delete it as it may be reused in the next import attempt. 
+ if (primaryFile != null && primaryFile.exists() && !primaryFile.getPath().contains(TEMP_DIR_NAME)) + primaryFile.delete(); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + } + + private void resolveParticipantVisits( + AssayRunUploadContext context, + Map inputMaterials, + Map inputDatas, + Map outputMaterials, + Map outputDatas, + Map allProperties, + @Nullable ParticipantVisitResolverType resolverType + ) throws ExperimentException + { + try + { + ParticipantVisitResolver resolver = null; + if (resolverType != null) + { + String targetStudyId = null; + for (Map.Entry property : allProperties.entrySet()) + { + if (AbstractAssayProvider.TARGET_STUDY_PROPERTY_NAME.equals(property.getKey().getName())) + { + targetStudyId = property.getValue(); + break; + } + } + Container targetStudy = null; + if (targetStudyId != null && !targetStudyId.isEmpty()) + targetStudy = ContainerManager.getForId(targetStudyId); + + resolver = resolverType.createResolver( + unmodifiableCollection(inputMaterials.keySet()), + unmodifiableCollection(inputDatas.keySet()), + unmodifiableCollection(outputMaterials.keySet()), + unmodifiableCollection(outputDatas.keySet()), + context.getContainer(), + targetStudy, context.getUser()); + } + + resolveExtraRunData(resolver, context, inputMaterials, inputDatas, outputMaterials, outputDatas); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + } + + protected void importStandardResultData( + AssayRunUploadContext context, + ExpRun run, + Map inputDatas, + Map outputDatas, + ViewBackgroundInfo info, + XarContext xarContext + ) throws ExperimentException, BatchValidationException + { + DataIteratorBuilder rawData = context.getRawData(); + List insertedDatas = new ArrayList<>(); + + if (rawData != null) + { + insertedDatas.addAll(outputDatas.keySet()); + + ExpData primaryData = null; + // Decide which file to treat as the primary, to which the data rows will be attached + for (Map.Entry entry : 
outputDatas.entrySet()) + { + if (ExpDataRunInput.DEFAULT_ROLE.equalsIgnoreCase(entry.getValue())) + { + primaryData = entry.getKey(); + } + } + if (primaryData == null && !insertedDatas.isEmpty()) + primaryData = insertedDatas.get(0); + + if (primaryData != null) + { + TsvDataHandler dataHandler = new TsvDataHandler(); + dataHandler.setAllowEmptyData(true); + dataHandler.importRows(primaryData, context.getUser(), run, context.getProtocol(), getProvider(), rawData, null, context.shouldAutoFillDefaultResultColumns(), context); + } + } + else + { + for (Map.Entry entry : inputDatas.entrySet()) + { + // skip any of the cross run inputData that are already in the outputData + if (CROSS_RUN_DATA_INPUT_ROLE.equals(entry.getValue())) + continue; + + insertedDatas.add(entry.getKey()); + } + + insertedDatas.addAll(outputDatas.keySet()); + + Logger logger = context.getLogger() != null ? context.getLogger() : LOG; + for (ExpData insertedData : insertedDatas) + { + ExperimentDataHandler dataHandler = insertedData.findDataHandler(); + + FileLike fileLike = FileSystemLike.wrapFile(insertedData.getFile()); + if (dataHandler instanceof AbstractAssayTsvDataHandler tsvHandler) + { + tsvHandler.importFile(insertedData, fileLike, info, logger, xarContext, context.isAllowLookupByAlternateKey(), context.shouldAutoFillDefaultResultColumns()); + } + else + { + dataHandler.importFile(insertedData, fileLike, info, logger, xarContext); + } + } + } + } + + private void importResultData( + AssayRunUploadContext context, + ExpRun run, + Map inputDatas, + Map outputDatas, + ViewBackgroundInfo info, + XarContext xarContext, + TransformResult transformResult + ) throws ExperimentException, BatchValidationException + { + if (transformResult.getTransformedData().isEmpty()) + { + importStandardResultData(context, run, inputDatas, outputDatas, info, xarContext); + return; + } + + DataType dataType = context.getProvider().getDataType(); + if (dataType == null) + { + // we know that we are importing 
transformed data at this point + dataType = TsvDataHandler.RELATED_TRANSFORM_FILE_DATA_TYPE; + } + + ExpData data = ExperimentService.get().createData(context.getContainer(), dataType); + ExperimentDataHandler handler = data.findDataHandler(); + + // this should assert to always be true + if (handler instanceof TransformDataHandler transformDataHandler) + { + for (Map.Entry entry : transformResult.getTransformedData().entrySet()) + { + ExpData expData = entry.getKey(); + // The object may have already been claimed by + if (expData.getSourceApplication() == null) + { + expData.setSourceApplication(run.getOutputProtocolApplication()); + } + expData.save(context.getUser()); + + run.getOutputProtocolApplication().addDataInput(context.getUser(), expData, ExpDataRunInput.IMPORTED_DATA_ROLE); + // Add to the cached list of outputs + run.getDataOutputs().add(expData); + + transformDataHandler.importTransformDataMap(expData, context, run, entry.getValue()); + } + } + } + + protected void addInputMaterials( + AssayRunUploadContext context, + Map inputMaterials, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + addMaterials(context, inputMaterials, context.getInputMaterials(), null, cache, materialCache); + + // Find lookups to a SampleType and add the resolved material as an input sample + for (Map.Entry entry : context.getRunProperties().entrySet()) + { + String value = StringUtils.trimToNull(entry.getValue()); + if (value == null) + continue; + + // Lookup must point at "Samples.*", "exp.materials.*", or "exp.Materials" + DomainProperty dp = entry.getKey(); + var sampleLookup = AssaySampleLookupContext.checkSampleLookup(context.getContainer(), context.getUser(), dp); + if (!sampleLookup.isLookup()) + continue; + + String role = AssayService.get().getPropertyInputLineageRole(dp); + addMaterials(context, inputMaterials, Map.of(value, role), sampleLookup.expSampleType(), cache, materialCache); + } + } + + 
protected void addInputDatas( + AssayRunUploadContext context, + @NotNull Map inputDatas + ) throws ExperimentException, ValidationException + { + Logger log = context.getLogger() != null ? context.getLogger() : LOG; + + Map inputs = context.getInputDatas(); + addDatas(context.getContainer(), inputDatas, inputs, log); + + // Inspect the uploaded files which will be added as outputs of the run + if (context.isAllowCrossRunFileInputs()) + { + Map files = context.getUploadedData(); + for (Map.Entry entry : files.entrySet()) + { + String key = entry.getKey(); + if (AssayDataCollector.PRIMARY_FILE.equals(key)) + { + FileLike file = entry.getValue(); + + // Check if the file is created by a run + // Don't use getExpDataByURL(String). That method expects string in a very particular format. + ExpData existingData = ExperimentService.get().getExpDataByURL(file.toNioPathForRead(), context.getContainer()); + if (existingData != null && existingData.getRunId() != null && !inputDatas.containsKey(existingData)) + { + // Add this file as an input to the run. When we add the outputs to the run, we will detect + // that this file was already added as an input and create a new exp.data for the same file + // path and attach it as an output. 
+ log.debug("found existing cross run file input: name={}, rowId={}, dataFileUrl={}", existingData.getName(), existingData.getRowId(), existingData.getDataFileUrl()); + inputDatas.put(existingData, CROSS_RUN_DATA_INPUT_ROLE); + } + } + } + } + } + + // CONSIDER: Move this to ExperimentService + // Resolve submitted values into ExpData objects + protected void addDatas(Container c, @NotNull Map resolved, @NotNull Map unresolved, @Nullable Logger log) throws ValidationException + { + for (Map.Entry entry : unresolved.entrySet()) + { + Object o = entry.getKey(); + String role = entry.getValue(); + + if (o instanceof ExpData expData) + { + resolved.put(expData, role); + } + else + { + File file = ExpDataFileConverter.convert(o); + if (file != null) + { + ExpData data = ExperimentService.get().getExpDataByURL(file, c); + if (data == null) + { + DataType dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + data = createData(c, file, file.getName(), dataType, false, true, log); + } + + resolved.put(data, role); + } + } + } + } + + public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData) throws ValidationException + { + return generateResultData(user, container, provider, dataArray, outputData, null); + } + + public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData, @Nullable Logger log) throws ValidationException + { + if (log == null) + log = LOG; + + ExpData newData = null; + + // Don't create an empty result data file if there are other outputs from this run, or if the user didn't + // include any data rows + if (!dataArray.isEmpty() && outputData.isEmpty()) + { + DataType dataType = provider.getDataType(); + if (dataType == null) + dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + + newData = createData(container, "Analysis Results", dataType, log); + newData.save(user); + outputData.put(newData, 
ExpDataRunInput.DEFAULT_ROLE); + } + + return newData; + } + + // Find an existing ExpData for the File or null. + public static @Nullable ExpData findExistingData(Container c, @Nullable File file, @Nullable Logger log) + { + if (file == null) + return null; + + if (log == null) + log = LOG; + + List existing = ExperimentService.get().getAllExpDataByURL(file, c); + if (!existing.isEmpty()) + { + for (ExpData d : existing) + { + log.debug("found existing exp.data for file, rowId={}, runId={}, dataFileUrl={}", d.getRowId(), d.getRunId(), d.getDataFileUrl()); + } + + // pick the most recently created one + return existing.get(0); + } + + return null; + } + + public static @NotNull ExpData createData(Container c, String name, @NotNull DataType dataType, @Nullable Logger log) throws ValidationException + { + // NOTE: reuseExistingData and errorOnDataOwned flags are irrelevant when we aren't providing a File + return createData(c, null, name, dataType, false, false, log); + } + + public static @NotNull ExpData createData( + Container c, + File file, + String name, + @Nullable DataType dataType, + boolean reuseExistingData, + boolean errorIfDataOwned, + @Nullable Logger log + ) throws ValidationException + { + if (log == null) + log = LOG; + + ExpData data = findExistingData(c, file, log); + + ExpRun previousRun; + if (data != null && null != (previousRun = data.getRun())) + { + // There's an existing data, but it's already marked as being created by another run + String msg = "File '" + data.getName() + "' has been previously imported in run '" + previousRun.getName() + "' (" + previousRun.getRowId() + ")"; + if (reuseExistingData && errorIfDataOwned) + throw new ValidationException(msg); + + log.debug(msg); + + // Create a new one for the same path so the new run can claim it as its own + if (!reuseExistingData) + { + log.debug("ignoring existing exp.data, will create a new one"); + data = null; + } + } + + if (data == null) + { + if (dataType == null) + dataType = 
AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + + log.debug("creating assay exp.data for file. dataType={}, file={}", dataType.getNamespacePrefix(), file); + data = ExperimentService.get().createData(c, dataType, name); + data.setLSID(ExperimentService.get().generateGuidLSID(c, dataType)); + if (file != null) + { + data.setDataFileURI(FileUtil.getAbsoluteCaseSensitiveFile(file).toURI()); + } + } + else + { + if (dataType != null && !dataType.matches(new Lsid(data.getLSID()))) + { + // Reset its LSID so that it's the correct type // CONSIDER: creating a new ExpData with the correct type instead + String newLsid = ExperimentService.get().generateGuidLSID(c, dataType); + log.debug("LSID doesn't match desired type. Changed the LSID from '{}' to '{}'", data.getLSID(), newLsid); + data.setLSID(newLsid); + } + } + return data; + } + + protected void addOutputMaterials( + AssayRunUploadContext context, + Map outputMaterials, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + addMaterials(context, outputMaterials, context.getOutputMaterials(), null, cache, materialCache); + } + + protected void addMaterials( + AssayRunUploadContext context, + @NotNull Map resolved, + @NotNull Map unresolved, + @Nullable ExpSampleType sampleType, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + for (Map.Entry entry : unresolved.entrySet()) + { + Object sampleIdentifier = entry.getKey(); + ExpMaterial material = ExperimentService.get().findExpMaterial(context.getContainer(), context.getUser(), sampleIdentifier, sampleType, cache, materialCache); + if (material == null) + throw new ExperimentException("Unable to resolve sample: " + sampleIdentifier); + + if (!resolved.containsKey(material)) + { + if (!material.isOperationPermitted(SampleTypeService.SampleOperations.AddAssayData)) + throw new 
ExperimentException(SampleTypeService.get().getOperationNotPermittedMessage(Collections.singleton(material), SampleTypeService.SampleOperations.AddAssayData)); + if (sampleType == null || sampleType.getLSID().equals(material.getCpasType())) + resolved.put(material, entry.getValue()); + } + } + } + + protected void addOutputDatas( + AssayRunUploadContext context, + Map inputDatas, + Map outputDatas + ) throws ExperimentException, ValidationException + { + Logger log = context.getLogger() != null ? context.getLogger() : LOG; + + // Create set of existing input files + Set inputFiles = new HashSet<>(); + for (ExpData inputData : inputDatas.keySet()) + { + FileLike f = inputData.getFileLike(); + if (f != null) + inputFiles.add(f); + } + + Map files = context.getUploadedData(); + + AssayDataType dataType; + for (Map.Entry entry : files.entrySet()) + { + FileLike file = entry.getValue(); + dataType = context.getProvider().getDataType(); + + // Reuse existing exp.data as the assay output file unless: + // - we are re-importing the run + // - or the output file is already one of the input files and if we are allowing cross-run file inputs + boolean reuseExistingData = true; + if (context.getReRunId() != null) + reuseExistingData = false; + if (context.isAllowCrossRunFileInputs() && inputFiles.contains(file)) + reuseExistingData = false; + + // For Luminex re-import, we want to reuse the existing exp.data but not + // throw an error when we discover that the exp.data is already owned. The + // original run will be duplicated for re-import and then will be deleted. 
+ boolean errorIfDataOwned = getProvider().getReRunSupport() != AssayProvider.ReRunSupport.ReRunAndDelete; + + log.debug("adding output data: file={}", file.toNioPathForRead()); + log.debug(" context.getReRunId()={}", context.getReRunId()); + log.debug(" provider.getReRunSupport()={}", getProvider().getReRunSupport()); + log.debug(" context.allowCrossRunFileInputs={}", context.isAllowCrossRunFileInputs()); + log.debug(" inputFiles.contains(file)={}", inputFiles.contains(file)); + log.debug("==> reuseExistingData = {}", reuseExistingData); + log.debug("==> errorIfDataOwned = {}", errorIfDataOwned); + + ExpData data = DefaultAssayRunCreator.createData(context.getContainer(), file.toNioPathForRead().toFile(), file.getName(), dataType, reuseExistingData, errorIfDataOwned, log); + String role = ExpDataRunInput.DEFAULT_ROLE; + if (dataType != null && dataType.getFileType().isType(file)) + { + if (dataType.getRole() != null) + { + role = dataType.getRole(); + } + } + outputDatas.put(data, role); + } + + FileLike primaryFile = files.get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + { + addRelatedOutputDatas(context, inputFiles, outputDatas, primaryFile); + } + + Map outputs = context.getOutputDatas(); + addDatas(context.getContainer(), outputDatas, outputs, log); + } + + /** + * Add files that follow the general naming convention (same basename) as the primary file + */ + public void addRelatedOutputDatas( + AssayRunUploadContext context, + Set inputFiles, + Map outputDatas, + final FileLike primaryFile + ) throws ValidationException + { + AssayDataType dataType = getProvider().getDataType(); + final String baseName = dataType == null ? 
null : dataType.getFileType().getBaseName(primaryFile.toNioPathForRead()); + if (baseName != null) + { + // Grab all the files that are related based on naming convention + File primary = primaryFile.toNioPathForRead().toFile(); + File parent = primary.getParentFile(); + // converting to File land to reuse the FileFilter + File[] relatedFiles = parent.listFiles(getRelatedOutputDataFileFilter(primary, baseName)); + if (relatedFiles != null) + { + for (File f : relatedFiles) + { + FileLike relatedFile = primaryFile.getParent().resolveChild(f.getName()); + // Ignore files already considered inputs to the run + if (inputFiles.contains(relatedFile)) + continue; + + Pair dataOutput = createdRelatedOutputData(context, baseName, f); + if (dataOutput != null) + { + outputDatas.put(dataOutput.getKey(), dataOutput.getValue()); + } + } + } + } + } + + protected void resolveExtraRunData( + ParticipantVisitResolver resolver, + AssayRunUploadContext context, + Map inputMaterials, + Map inputDatas, + Map outputMaterials, + Map outputDatas + ) throws ExperimentException + { + } + + /** + * Create an ExpData object for the file, and figure out what its role name should be + * @return null if the file is already linked to another run + */ + @Nullable + public static Pair createdRelatedOutputData(AssayRunUploadContext context, String baseName, File relatedFile) throws ValidationException + { + String roleName = null; + DataType dataType = null; + for (AssayDataType inputType : context.getProvider().getRelatedDataTypes()) + { + // Check if we recognize it as a specially handled file type + if (inputType.getFileType().isMatch(relatedFile.getName(), baseName)) + { + roleName = inputType.getRole(); + dataType = inputType; + break; + } + } + // If not, make up a new type and role for it + if (roleName == null) + { + roleName = relatedFile.getName().substring(baseName.length()); + while (!roleName.isEmpty() && (roleName.startsWith(".") || roleName.startsWith("-") || roleName.startsWith("_") 
|| roleName.startsWith(" "))) + { + roleName = roleName.substring(1); + } + if (roleName.isEmpty()) + { + roleName = null; + } + } + if (dataType == null) + { + dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + } + + // Find an existing data that isn't owned by another run or create a new own + ExpData data = findExistingData(context.getContainer(), relatedFile, context.getLogger()); + if (data != null) + { + if (data.getSourceApplication() == null) + return new Pair<>(data, roleName); + + // The file is already linked to another run, so this one must have not created it + return null; + } + + data = createData(context.getContainer(), relatedFile, relatedFile.getName(), dataType, true, true, context.getLogger()); + assert data.getSourceApplication() == null; + return Pair.of(data, roleName); + } + + // Disallow creating a run with inputs which are also outputs + protected void checkForCycles( + Map inputs, + Map outputs + ) throws ExperimentException + { + for (ExpRunItem input : inputs.keySet()) + { + if (outputs.containsKey(input)) + { + String role = outputs.get(input); + throw new ExperimentException("Circular input/output '" + input.getName() + "' with role '" + role + "'"); + } + } + } + + private void saveProperties( + final AssayRunUploadContext context, + ExpObject expObject, + Map transformResultProperties, + Map properties + ) throws ValidationException + { + Map propsToSave = transformResultProperties.isEmpty() ? 
properties : transformResultProperties; + List errors = validateProperties(context, propsToSave); + if (!errors.isEmpty()) + throw new ValidationException(errors); + + savePropertyObject(expObject, propsToSave, context.getUser()); + } + + protected void savePropertyObject(ExpObject object, Map properties, User user) throws ValidationException + { + for (Map.Entry entry : properties.entrySet()) + { + DomainProperty pd = entry.getKey(); + String value = entry.getValue(); + + // resolve any file links for batch or run properties + if (PropertyType.FILE_LINK.getTypeUri().equals(pd.getType().getTypeURI())) + { + File resolvedFile = ExpDataFileConverter.convert(value); + if (resolvedFile != null) + value = resolvedFile.getAbsolutePath(); + } + + // Treat the empty string as a null in the database, which is our normal behavior when receiving data + // from HTML forms. + if (StringUtils.trimToNull(value) == null) + { + value = null; + } + if (value != null) + { + object.setProperty(user, pd.getPropertyDescriptor(), value); + } + else + { + // We still need to validate blanks + List errors = new ArrayList<>(); + OntologyManager.validateProperty(pd.getValidators(), pd.getPropertyDescriptor(), new ObjectProperty(object.getLSID(), object.getContainer(), pd.getPropertyDescriptor(), value), errors, new ValidatorContext(pd.getContainer(), user)); + if (!errors.isEmpty()) + throw new ValidationException(errors); + } + } + } + + public static List validateColumnProperties(ContainerUser context, Map properties) + { + List errors = new ArrayList<>(); + RemapCache cache = new RemapCache(); + for (Map.Entry entry : properties.entrySet()) + { + validateProperty(context, entry.getKey(), entry.getValue(), cache, errors); + } + return errors; + } + + public static List validateProperties(ContainerUser context, Map properties) + { + List errors = new ArrayList<>(); + RemapCache cache = new RemapCache(); + for (Map.Entry entry : properties.entrySet()) + { + validateProperty(context, 
entry.getKey(), entry.getValue(), cache, errors); + } + return errors; + } + + private static void validateProperty(ContainerUser context, ColumnInfo columnInfo, String value, RemapCache cache, List errors) + { + Lookup lookup = null; + if (columnInfo.isLookup()) + { + ForeignKey fk = columnInfo.getFk(); + lookup = new Lookup(fk.getLookupContainer(), fk.getLookupSchemaName(), fk.getLookupTableName()); + } + validateProperty(context, ColumnValidators.create(columnInfo, null), value, columnInfo.getName(), + false, lookup, columnInfo.getJavaClass(), cache, errors); + } + + private static void validateProperty(ContainerUser context, DomainProperty dp, String value, RemapCache cache, List errors) + { + String label = dp.getPropertyDescriptor().getNonBlankCaption(); + PropertyType type = dp.getPropertyDescriptor().getPropertyType(); + validateProperty(context, ColumnValidators.create(null, dp), value, label, dp.isRequired(), + dp.getLookup(), type.getJavaType(), cache, errors); + } + + private static void validateProperty( + ContainerUser context, + List validators, + String value, + String label, + Boolean required, + Lookup lookup, + Class type, + RemapCache cache, + List errors + ) + { + boolean missing = (value == null || value.isEmpty()); + int rowNum = 0; + + if (required && missing) + { + errors.add(new SimpleValidationError(label + " is required and must be of type " + ColumnInfo.getFriendlyTypeName(type) + ".")); + } + else if (!missing) + { + try + { + Object o; + if (type == File.class) + o = ExpDataFileConverter.convert(value); + else + o = ConvertUtils.convert(value, type); + ValidatorContext validatorContext = new ValidatorContext(context.getContainer(), context.getUser()); + for (ColumnValidator validator : validators) + { + String msg = validator.validate(rowNum, o, validatorContext, null); + if (msg != null) + errors.add(new PropertyValidationError(msg, label)); + } + } + catch (ConversionException e) + { + String message; + if (e instanceof 
ConvertHelper.FileConversionException fce) + message = fce.getMessage(); + else + { + message = ConvertHelper.getStandardConversionErrorMessage(value, label, type); + if (e.getCause() instanceof ArithmeticException) + message += ": " + e.getCause().getLocalizedMessage(); + else + message += "."; + } + + // Attempt to resolve lookups by display value + boolean skipError = false; + if (lookup != null) + { + Object remappedValue = OntologyManager.getRemappedValueForLookup(context.getUser(), context.getContainer(), cache, lookup, value); + if (remappedValue != null) + skipError = true; + } + + if (!skipError) + errors.add(new SimpleValidationError(message)); + } + } + } + + protected FileFilter getRelatedOutputDataFileFilter(final File primaryFile, final String baseName) + { + // baseName doesn't include the trailing '.', so add it here. We want to associate myRun.jpg + // with myRun.xls, but we don't want to associate myRun2.xls with myRun.xls (which will happen without + // the trailing dot in the check). 
+ return f -> f.getName().startsWith(baseName + ".") && !primaryFile.equals(f); + } + + protected ProviderType getProvider() + { + return _provider; + } +} diff --git a/api/src/org/labkey/api/data/ColumnRenderPropertiesImpl.java b/api/src/org/labkey/api/data/ColumnRenderPropertiesImpl.java index aa30afeb5d1..25b989cf011 100644 --- a/api/src/org/labkey/api/data/ColumnRenderPropertiesImpl.java +++ b/api/src/org/labkey/api/data/ColumnRenderPropertiesImpl.java @@ -48,6 +48,7 @@ public abstract class ColumnRenderPropertiesImpl implements MutableColumnRenderP public static final String STORAGE_UNIQUE_ID_CONCEPT_URI = "http://www.labkey.org/types#storageUniqueId"; public static final String STORAGE_UNIQUE_ID_SEQUENCE_PREFIX = "org.labkey.api.StorageUniqueId"; public static final String TEXT_CHOICE_CONCEPT_URI = "http://www.labkey.org/types#textChoice"; + public static final String NON_NEGATIVE_NUMBER_CONCEPT_URI = "http://www.labkey.org/types#nonNegativeNumber"; protected SortDirection _sortDirection = SortDirection.ASC; protected String _inputType; diff --git a/api/src/org/labkey/api/data/validator/AbstractColumnValidator.java b/api/src/org/labkey/api/data/validator/AbstractColumnValidator.java index d28db959727..a36fd97b5b6 100644 --- a/api/src/org/labkey/api/data/validator/AbstractColumnValidator.java +++ b/api/src/org/labkey/api/data/validator/AbstractColumnValidator.java @@ -15,6 +15,7 @@ */ package org.labkey.api.data.validator; +import org.jetbrains.annotations.Nullable; import org.labkey.api.exp.MvFieldWrapper; import org.labkey.api.exp.property.ValidatorContext; @@ -37,7 +38,7 @@ public String validate(int rowNum, Object o) } @Override - public String validate(int rowNum, Object value, ValidatorContext validatorContext) + public String validate(int rowNum, Object value, ValidatorContext validatorContext, @Nullable Object providedValue) { return validate(rowNum, value); } diff --git a/api/src/org/labkey/api/data/validator/ColumnValidator.java 
b/api/src/org/labkey/api/data/validator/ColumnValidator.java index 4c0cc8c8626..56fad6acc14 100644 --- a/api/src/org/labkey/api/data/validator/ColumnValidator.java +++ b/api/src/org/labkey/api/data/validator/ColumnValidator.java @@ -1,35 +1,31 @@ -/* - * Copyright (c) 2014-2016 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.data.validator; - -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.exp.property.ValidatorContext; - -/** - * Column-level value validation run just before insert or update. - */ -public interface ColumnValidator -{ - String validate(int rowNum, Object value); - - String validate(int rowNum, Object value, ValidatorContext validatorContext); - - default String validate(int rowNum, Object value, DataIterator data) - { - return validate(rowNum, value); - } - -} +/* + * Copyright (c) 2014-2016 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.data.validator; + +import org.jetbrains.annotations.Nullable; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.exp.property.ValidatorContext; + +/** + * Column-level value validation run just before insert or update. + */ +public interface ColumnValidator +{ + String validate(int rowNum, Object value); + + String validate(int rowNum, Object value, ValidatorContext validatorContext, @Nullable Object providedValue); + +} diff --git a/api/src/org/labkey/api/data/validator/PropertyValidator.java b/api/src/org/labkey/api/data/validator/PropertyValidator.java index fc8a91b8314..83b4fa24c3f 100644 --- a/api/src/org/labkey/api/data/validator/PropertyValidator.java +++ b/api/src/org/labkey/api/data/validator/PropertyValidator.java @@ -15,6 +15,7 @@ */ package org.labkey.api.data.validator; +import org.jetbrains.annotations.Nullable; import org.labkey.api.data.ColumnRenderProperties; import org.labkey.api.exp.property.IPropertyValidator; import org.labkey.api.exp.property.ValidatorContext; @@ -51,12 +52,12 @@ public String validate(int rowNum, Object value) } @Override - public String validate(int rowNum, Object value, ValidatorContext validatorContext) + public String validate(int rowNum, Object value, ValidatorContext validatorContext, @Nullable Object providedValue) { // Don't validate null values, #15683, #19352 if (null == value) return null; - if (kind.validate(propertyValidator, _columnRenderProperties , value, errors, validatorContext)) + if (kind.validate(propertyValidator, _columnRenderProperties , value, errors, validatorContext, providedValue)) return null; if (errors.isEmpty()) return null; @@ -64,4 +65,9 @@ public String validate(int rowNum, Object value, ValidatorContext validatorConte errors.clear(); return msg; } + + public IPropertyValidator getPropertyValidator() + { + return propertyValidator; + } } diff --git a/api/src/org/labkey/api/dataiterator/ValidatorIterator.java 
b/api/src/org/labkey/api/dataiterator/ValidatorIterator.java index f729d0ad3d9..ca63069bfd4 100644 --- a/api/src/org/labkey/api/dataiterator/ValidatorIterator.java +++ b/api/src/org/labkey/api/dataiterator/ValidatorIterator.java @@ -166,7 +166,32 @@ public boolean next() throws BatchValidationException for (ColumnValidator v : l) { Object value = _data.get(i); - String msg = validate(v, rowNum, value, _data); + Object providedDataValue = value; + if (v instanceof PropertyValidator pv) + { + if (pv.getPropertyValidator() != null) + { + // Use :::provided:::Amount in non-negative validator message, instead of converted Amount + String providedDataColumn = pv.getPropertyValidator().getColumnNameProvidedData(); + if (providedDataColumn != null) + { + // Get the value from the provided data column + int providedDataColIndex = -1; + for (int colIndex = 0; colIndex < _data.getColumnCount(); colIndex++) + { + ColumnInfo colInfo = _data.getColumnInfo(colIndex); + if (colInfo != null && providedDataColumn.equalsIgnoreCase(colInfo.getName())) + { + providedDataColIndex = colIndex; + break; + } + } + if (providedDataColIndex != -1) + providedDataValue = _data.get(providedDataColIndex); + } + } + } + String msg = validate(v, rowNum, value, _data, providedDataValue); if (null != msg) { @@ -206,12 +231,12 @@ public boolean next() throws BatchValidationException return true; } - protected String validate(ColumnValidator v, int rowNum, Object value, DataIterator data) + protected String validate(ColumnValidator v, int rowNum, Object value, DataIterator data, @Nullable Object providedValue) { String msg; // CONSIDER: add validatorContext to ColumnValidator.validate() always if (v instanceof PropertyValidator) - msg = v.validate(rowNum, value, validatorContext); + msg = v.validate(rowNum, value, validatorContext, providedValue); else msg = v.validate(rowNum, value); diff --git a/api/src/org/labkey/api/exp/property/IPropertyValidator.java 
b/api/src/org/labkey/api/exp/property/IPropertyValidator.java index dadd18f8036..9e0c9d44f81 100644 --- a/api/src/org/labkey/api/exp/property/IPropertyValidator.java +++ b/api/src/org/labkey/api/exp/property/IPropertyValidator.java @@ -1,57 +1,60 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.exp.property; - -import org.labkey.api.data.Container; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; - -import java.util.List; -import java.util.Map; - -/* -* User: Karl Lum -* Date: Aug 8, 2008 -* Time: 9:17:09 AM -*/ -public interface IPropertyValidator -{ - int getPropertyId(); - long getRowId(); - String getName(); - String getTypeURI(); - String getDescription(); - String getExpressionValue(); - String getErrorMessage(); - Map getProperties(); - - Container getContainer(); - - ValidatorKind getType(); - - void setPropertyId(int propertyId); - void setName(String name); - void setDescription(String description); - void setExpressionValue(String expression); - void setErrorMessage(String message); - void setProperty(String key, String value); - - IPropertyValidator save(User user, Container container) throws ValidationException; - - boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache); +/* + * Copyright 
(c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.exp.property; + +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.Container; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; + +import java.util.List; +import java.util.Map; + +/* +* User: Karl Lum +* Date: Aug 8, 2008 +* Time: 9:17:09 AM +*/ +public interface IPropertyValidator +{ + int getPropertyId(); + long getRowId(); + String getName(); + String getTypeURI(); + String getDescription(); + String getExpressionValue(); + String getErrorMessage(); + Map getProperties(); + + Container getContainer(); + + ValidatorKind getType(); + + void setPropertyId(int propertyId); + void setName(String name); + void setDescription(String description); + void setExpressionValue(String expression); + void setErrorMessage(String message); + void setProperty(String key, String value); + void setColumnNameProvidedData(String columnNameProvidedData); + @Nullable String getColumnNameProvidedData(); + + IPropertyValidator save(User user, Container container) throws ValidationException; + + boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache); } \ No newline at end of file diff --git a/api/src/org/labkey/api/exp/property/ValidatorKind.java 
b/api/src/org/labkey/api/exp/property/ValidatorKind.java index 6ca092e83ac..d09f296f85b 100644 --- a/api/src/org/labkey/api/exp/property/ValidatorKind.java +++ b/api/src/org/labkey/api/exp/property/ValidatorKind.java @@ -1,95 +1,96 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.exp.property; - -import org.jetbrains.annotations.NotNull; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.query.ValidationError; -import org.labkey.data.xml.ValidatorPropertyType; -import org.labkey.data.xml.ValidatorType; -import org.labkey.data.xml.ValidatorsType; - -import java.util.LinkedList; -import java.util.List; - -/* -* User: Karl Lum -* Date: Aug 8, 2008 -* Time: 10:45:38 AM -*/ -public interface ValidatorKind -{ - String NAMESPACE = "PropertyValidator"; - - String getName(); - String getTypeURI(); - String getDescription(); - - IPropertyValidator createInstance(); - boolean isValid(IPropertyValidator validator, List errors); - boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, List errors, ValidatorContext validatorCache); - - // Standard save-validator-to-XML method. ValidatorKind implementations can customize this by overriding. 
- default void convertToXml(IPropertyValidator v, ValidatorsType validatorsXml) - { - ValidatorType validatorType = validatorsXml.addNewValidator(); - validatorType.setTypeURI(v.getTypeURI()); - validatorType.setName(v.getName()); - - if (null != v.getDescription()) - validatorType.setDescription(v.getDescription()); - if (null != v.getErrorMessage()) - validatorType.setErrorMessage(v.getErrorMessage()); - if (null != v.getExpressionValue()) - validatorType.setExpression(v.getExpressionValue()); - - v.getProperties().forEach((name, value) -> { - ValidatorPropertyType pv = validatorType.addNewProperty(); - pv.setName(name); - pv.setValue(value); - }); - } - - static List convertFromXML(ValidatorsType validatorsXml) - { - List list = new LinkedList<>(); - - if (null != validatorsXml) - { - ValidatorType[] validators = validatorsXml.getValidatorArray(); - - for (ValidatorType v : validators) - { - IPropertyValidator pv = PropertyService.get().createValidator(v.getTypeURI()); - pv.setName(v.getName()); - - if (null != v.getDescription()) - pv.setDescription(v.getDescription()); - if (null != v.getErrorMessage()) - pv.setErrorMessage(v.getErrorMessage()); - if (null != v.getExpression()) - pv.setExpressionValue(v.getExpression()); - - for (ValidatorPropertyType prop : v.getPropertyArray()) - pv.setProperty(prop.getName(), prop.getValue()); - - list.add(pv); - } - } - - return list; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.exp.property; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.query.ValidationError; +import org.labkey.data.xml.ValidatorPropertyType; +import org.labkey.data.xml.ValidatorType; +import org.labkey.data.xml.ValidatorsType; + +import java.util.LinkedList; +import java.util.List; + +/* +* User: Karl Lum +* Date: Aug 8, 2008 +* Time: 10:45:38 AM +*/ +public interface ValidatorKind +{ + String NAMESPACE = "PropertyValidator"; + + String getName(); + String getTypeURI(); + String getDescription(); + + IPropertyValidator createInstance(); + boolean isValid(IPropertyValidator validator, List errors); + boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, List errors, ValidatorContext validatorCache, @Nullable Object providedValue); + + // Standard save-validator-to-XML method. ValidatorKind implementations can customize this by overriding. 
+ default void convertToXml(IPropertyValidator v, ValidatorsType validatorsXml) + { + ValidatorType validatorType = validatorsXml.addNewValidator(); + validatorType.setTypeURI(v.getTypeURI()); + validatorType.setName(v.getName()); + + if (null != v.getDescription()) + validatorType.setDescription(v.getDescription()); + if (null != v.getErrorMessage()) + validatorType.setErrorMessage(v.getErrorMessage()); + if (null != v.getExpressionValue()) + validatorType.setExpression(v.getExpressionValue()); + + v.getProperties().forEach((name, value) -> { + ValidatorPropertyType pv = validatorType.addNewProperty(); + pv.setName(name); + pv.setValue(value); + }); + } + + static List convertFromXML(ValidatorsType validatorsXml) + { + List list = new LinkedList<>(); + + if (null != validatorsXml) + { + ValidatorType[] validators = validatorsXml.getValidatorArray(); + + for (ValidatorType v : validators) + { + IPropertyValidator pv = PropertyService.get().createValidator(v.getTypeURI()); + pv.setName(v.getName()); + + if (null != v.getDescription()) + pv.setDescription(v.getDescription()); + if (null != v.getErrorMessage()) + pv.setErrorMessage(v.getErrorMessage()); + if (null != v.getExpression()) + pv.setExpressionValue(v.getExpression()); + + for (ValidatorPropertyType prop : v.getPropertyArray()) + pv.setProperty(prop.getName(), prop.getValue()); + + list.add(pv); + } + } + + return list; + } } \ No newline at end of file diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index 46d9fa2d61e..07512ab4ef9 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,953 +1,953 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import 
org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value); - } - } - } - - private void validateUpdateRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - 
// Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have not existed in the row before the update. 
- lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = ConvertUtils.convert(pkValue.toString(), pk.getJavaObjectClass()); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getDbTable(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = null==value ? null : convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().apply(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters != null) - { - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - } - -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = ConvertUtils.convert(pkValue.toString(), pk.getJavaObjectClass()); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = null==value ? null : convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().apply(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters != null) + { + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + } + +} diff --git a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts index 9be2539267c..5c55f84a970 100644 --- a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts +++ b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts @@ -103,7 +103,7 @@ beforeAll(async () => { name: SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME, aliquotNameExpression: "", nameExpression: "", - metricUnit: 'mL' + metricUnit: 'g' } }; await server.post('property', 'createDomain', createPayload, {...topFolderOptions, ...designerReaderOptions}).expect((result) => { @@ -958,3 +958,124 @@ describe('Aliquot crud', () => { }); +describe('Amount/Unit CRUD', () => { + it ("Test Amounts/Units validation on insert/import/update/merge", async () => { + const dataType = SAMPLE_ALIQUOT_IMPORT_NO_NAME_PATTERN_NAME; + const NO_UNIT_ERROR = 'A Units value must be provided when Amounts are provided.'; + const NO_AMOUNT_ERROR = 'An Amount value must be provided when Units are provided.'; + const INCOMPATIBLE_ERROR = 'Units value (L) is not compatible with the ' + dataType + ' display units (g).'; + const NEGATIVE_ERROR = "Value '-1.1' for field 'Amount' is invalid. 
Amounts must be non-negative."; + + const dataName = "S-amountCrud"; + + let errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\nData1\t1\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_UNIT_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tUnits\nData1\tkg\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_AMOUNT_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t1.1\tL\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(INCOMPATIBLE_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t-1.1\tkg\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + await server.post('query', 'insertRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + name: dataName, + amount: -1.1, + units: 'kg', + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(NEGATIVE_ERROR); + }); + const sampleRows = await ExperimentCRUDUtils.insertRows(server, [{ + name: dataName, + amount: 123, + units: 'kg', + }], 'samples', dataType, topFolderOptions, editorUserOptions); + + const sampleRowId = caseInsensitive(sampleRows[0], 'rowId'); + + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tAmount\n" + dataName + "\t321", dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_UNIT_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\n" + dataName + "\t321", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_UNIT_ERROR); + await server.post('query', 'updateRows', 
{ + schemaName: 'samples', + queryName: dataType, + rows: [{ + Amount: 321, + rowId: sampleRowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(NO_UNIT_ERROR); + }); + await server.post('query', 'updateRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + StoredAmount: 321, + rowId: sampleRowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(NO_UNIT_ERROR); + }); + + + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tUnits\n" + dataName + "\tg", dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_AMOUNT_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tUnits\n" + dataName + "\tg", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NO_AMOUNT_ERROR); + await server.post('query', 'updateRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + Units: 'kg', + rowId: sampleRowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(NO_AMOUNT_ERROR); + }); + + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tAmount\tUnits\n" + dataName + "\t321\tL", dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(INCOMPATIBLE_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\n" + dataName + "\t321\tL", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(INCOMPATIBLE_ERROR); + await server.post('query', 'updateRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + Amount: 321, + Units: 'L', + rowId: sampleRowId + }] + }, { 
...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(INCOMPATIBLE_ERROR); + }); + + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tAmount\tUnits\n" + dataName + "\t-1.1\tkg", dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\n" + dataName + "\t-1.1\tkg", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + await server.post('query', 'updateRows', { + schemaName: 'samples', + queryName: dataType, + rows: [{ + Amount: -1, + Units: 'kg', + rowId: sampleRowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + // Note that the row by row update error is different from DIB. This is OK for now since we are planning to deprecate row by row updates. + expect(errorResp['exception']).toContain("Value '-1000.0 (g)' for field 'Amount' is invalid. 
Amounts must be non-negative."); + }); + + }); + +}); + diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index 716bda48f0d..f470308fb57 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -303,10 +303,10 @@ else if (isUpdateOnly && columnNameMap.containsKey(AliquotedFromLSID.name())) } @Override - protected String validate(ColumnValidator v, int rowNum, Object value, DataIterator data) + protected String validate(ColumnValidator v, int rowNum, Object value, DataIterator data, Object providedValue) { if (!(v instanceof RequiredValidator) || _aliquotedFromColIdx < 0) - return super.validate(v, rowNum, value, data); + return super.validate(v, rowNum, value, data, providedValue); String aliquotedFromValue = null; Object aliquotedFromObj = data.get(_aliquotedFromColIdx); diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index 80393dc07d5..e49d411e5b1 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -1,1856 +1,1875 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.collections4.ListUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.math3.util.Precision; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.assay.plate.AssayPlateMetadataService; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.cache.BlockingCache; -import org.labkey.api.cache.CacheManager; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.compliance.TableRules; -import org.labkey.api.compliance.TableRulesManager; -import org.labkey.api.data.ColumnHeaderType; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DataColumn; -import org.labkey.api.data.DataRegion; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DisplayColumn; -import org.labkey.api.data.DisplayColumnFactory; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MaterializedQueryHelper; -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.data.PHI; -import org.labkey.api.data.RenderContext; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.UnionContainerFilter; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.LoggingDataIterator; -import org.labkey.api.dataiterator.SimpleTranslator; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.OntologyManager; -import 
org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ExperimentUrls; -import org.labkey.api.exp.api.NameExpressionOptionService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.DomainUtil; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpMaterialTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.inventory.InventoryService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.qc.SampleStatusService; -import org.labkey.api.query.AliasedColumn; -import org.labkey.api.query.DetailsURL; -import org.labkey.api.query.ExprColumn; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.LookupForeignKey; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryForeignKey; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.QueryUrls; -import org.labkey.api.query.RowIdForeignKey; -import org.labkey.api.query.SchemaKey; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.column.BuiltInColumnTypes; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import 
org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HeartBeat; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.StringExpression; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import org.labkey.experiment.ExpDataIterators; -import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.lineage.LineageMethod; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLETYPE_FILE_DIRECTORY; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; 
-import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; -import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; -import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; - -public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable -{ - ExpSampleTypeImpl _ss; - Set _uniqueIdFields; - boolean _supportTableRules = true; - - public static final Set MATERIAL_ALT_MERGE_KEYS; - public static final Set MATERIAL_ALT_UPDATE_KEYS; - static { - MATERIAL_ALT_MERGE_KEYS = Set.of(Column.MaterialSourceId.name(), Column.Name.name()); - MATERIAL_ALT_UPDATE_KEYS = Set.of(Column.LSID.name()); - } - - public ExpMaterialTableImpl(UserSchema schema, ContainerFilter cf, @Nullable ExpSampleType sampleType) - { - super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); - setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); - setPublicSchemaName(ExpSchema.SCHEMA_NAME); - addAllowablePermission(InsertPermission.class); - addAllowablePermission(UpdatePermission.class); - addAllowablePermission(MoveEntitiesPermission.class); - setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); - setSampleType(sampleType); - } - - public Set getUniqueIdFields() - { - if (_uniqueIdFields == null) - { - _uniqueIdFields = new CaseInsensitiveHashSet(); - _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); - } - return _uniqueIdFields; - } - - @Override - protected ColumnInfo 
resolveColumn(String name) - { - ColumnInfo result = super.resolveColumn(name); - if (result == null) - { - if ("CpasType".equalsIgnoreCase(name)) - result = createColumn(Column.SampleSet.name(), Column.SampleSet); - else if (Column.Property.name().equalsIgnoreCase(name)) - result = createPropertyColumn(Column.Property.name()); - else if (Column.QueryableInputs.name().equalsIgnoreCase(name)) - result = createColumn(Column.QueryableInputs.name(), Column.QueryableInputs); - } - return result; - } - - @Override - public ColumnInfo getExpObjectColumn() - { - var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); - ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); - return ret; - } - - @Override - public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) - { - if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) - { - // Special case sample auditing to help build a useful timeline view - return SampleTypeServiceImpl.get(); - } - - return super.getAuditHandler(auditBehaviorType); - } - - @Override - public MutableColumnInfo createColumn(String alias, Column column) - { - switch (column) - { - case Folder -> - { - return wrapColumn(alias, _rootTable.getColumn("Container")); - } - case LSID -> - { - return wrapColumn(alias, _rootTable.getColumn(Column.LSID.name())); - } - case MaterialSourceId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.MaterialSourceId.name())); - columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } 
- }); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case RootMaterialRowId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.RootMaterialRowId.name())); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.RowId.name())); - columnInfo.setLabel("Root Material"); - columnInfo.setUserEditable(false); - - // NK: Here we mark the column as not required AND nullable which is the opposite of the database where - // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. - // See ExpMaterialValidatorIterator. - columnInfo.setRequired(false); - columnInfo.setNullable(true); - - return columnInfo; - } - case AliquotedFromLSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.AliquotedFromLSID.name())); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.LSID.name())); - columnInfo.setLabel("Aliquoted From Parent"); - return columnInfo; - } - case IsAliquot -> - { - String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RootMaterialRowId.name(); - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ Column.RowId.name(); - ExprColumn columnInfo = new ExprColumn(this, FieldKey.fromParts(Column.IsAliquot.name()), new SQLFragment( - "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) - .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 - .append(" ELSE NULL END)"), JdbcType.BOOLEAN); - columnInfo.setLabel("Is Aliquot"); - columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(false); - return columnInfo; - } - case Name -> - { - var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); - // shut off this field in insert and update views if user specified names are not allowed - if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) - { - nameCol.setShownInInsertView(false); - nameCol.setShownInUpdateView(false); - } - return nameCol; - } - case RawAmount -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - return columnInfo; - } - case StoredAmount -> - { - String label = StoredAmount.label(); - Set importAliases = Set.of(label, "Stored Amount"); - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - 
columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setLabel(label); - columnInfo.setImportAliasesSet(importAliases); - columnInfo.setDescription("The amount of this sample currently on hand."); - return columnInfo; - } - } - case RawUnits -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - return columnInfo; - } - case Units -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Column.Units.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); - columnInfo.setFk(fk); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - return columnInfo; - } - } - case Description -> - { - return wrapColumn(alias, 
_rootTable.getColumn(Column.Description.name())); - } - case SampleSet -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); - // NOTE: populateColumns() overwrites this with a QueryForeignKey. Can this be removed? - columnInfo.setFk(new LookupForeignKey(getContainerFilter(), null, null, null, null, "LSID", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } - }); - return columnInfo; - } - case SourceProtocolLSID -> - { - // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module - ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( - "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + - " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); - columnInfo.setLabel("Source Protocol"); - columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case SourceProtocolApplication -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); - columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - 
columnInfo.setAutoIncrement(false); - return columnInfo; - } - case SourceApplicationInput -> - { - var col = createEdgeColumn(alias, Column.SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); - col.setHidden(true); - return col; - } - case RunApplication -> - { - SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") - .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") - .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") - .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) - .append(")"); - - var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); - col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); - col.setUserEditable(false); - col.setReadOnly(true); - col.setHidden(true); - return col; - } - case RunApplicationOutput -> - { - var col = createEdgeColumn(alias, Column.RunApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); - return col; - } - case Run -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); - ret.setReadOnly(true); - return ret; - } - case RowId -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); - // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary key's default sort direction - ret.setSortDirection(Sort.SortDirection.DESC); - ret.setFk(new RowIdForeignKey(ret)); - ret.setUserEditable(false); - ret.setHidden(true); - ret.setShownInInsertView(false); - ret.setHasDbSequence(true); - ret.setIsRootDbSequence(true); - return ret; - } - case Property -> - { - return 
createPropertyColumn(alias); - } - case Flag -> - { - return createFlagColumn(alias); - } - case Created -> - { - return wrapColumn(alias, _rootTable.getColumn("Created")); - } - case CreatedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("CreatedBy")); - } - case Modified -> - { - return wrapColumn(alias, _rootTable.getColumn("Modified")); - } - case ModifiedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); - } - case Alias -> - { - return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); - } - case Inputs -> - { - return createLineageColumn(this, alias, true, false); - } - case QueryableInputs -> - { - return createLineageColumn(this, alias, true, true); - } - case Outputs -> - { - return createLineageColumn(this, alias, false, false); - } - case Properties -> - { - return createPropertiesColumn(alias); - } - case SampleState -> - { - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); - var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); - ret.setLabel("Status"); - ret.setHidden(!statusEnabled); - ret.setShownInDetailsView(statusEnabled); - ret.setShownInInsertView(statusEnabled); - ret.setShownInUpdateView(statusEnabled); - ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - return ret; - } - case AliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); - ret.setLabel(ALIQUOT_COUNT_LABEL); - return ret; - } - case AliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(ALIQUOT_VOLUME_LABEL); - return ret; - } - case AvailableAliquotVolume -> - { - var ret = wrapColumn(alias, 
_rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); - return ret; - } - case AvailableAliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); - ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); - return ret; - } - case AliquotUnit -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); - ret.setShownInDetailsView(false); - return ret; - } - case MaterialExpDate -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("MaterialExpDate")); - ret.setLabel("Expiration Date"); - ret.setShownInDetailsView(true); - ret.setShownInInsertView(true); - ret.setShownInUpdateView(true); - return ret; - } - default -> throw new IllegalArgumentException("Unknown column " + column); - } - } - - @Override - public MutableColumnInfo createPropertyColumn(String alias) - { - var ret = super.createPropertyColumn(alias); - if (_ss != null) - { - final TableInfo t = _ss.getTinfo(); - if (t != null) - { - ret.setFk(new LookupForeignKey() - { - @Override - public TableInfo getLookupTableInfo() - { - return t; - } - - @Override - protected ColumnInfo getPkColumn(TableInfo table) - { - return t.getColumn("lsid"); - } - }); - } - } - ret.setIsUnselectable(true); - ret.setDescription("A holder for any custom fields associated with this sample"); - ret.setHidden(true); - return ret; - } - - private Unit getSampleTypeUnit() - { - Unit typeUnit = null; - if (_ss != null && _ss.getMetricUnit() != null) - typeUnit = Unit.fromName(_ss.getMetricUnit()); - return typeUnit; - } - - private void setSampleType(@Nullable ExpSampleType st) - { - checkLocked(); - if (_ss != null) - { - throw new IllegalStateException("Cannot unset sample type"); - } - if (st != null && !(st instanceof ExpSampleTypeImpl)) - { - throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); - } - _ss = (ExpSampleTypeImpl) 
st; - if (_ss != null) - { - setPublicSchemaName(SamplesSchema.SCHEMA_NAME); - setName(st.getName()); - - String description = _ss.getDescription(); - if (StringUtils.isEmpty(description)) - description = "Contains one row per sample in the " + _ss.getName() + " sample type"; - setDescription(description); - - if (canUserAccessPhi()) - { - ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); - setImportURL(new DetailsURL(url)); - } - } - } - - public ExpSampleType getSampleType() - { - return _ss; - } - - @Override - protected void populateColumns() - { - var st = getSampleType(); - var rowIdCol = addColumn(Column.RowId); - addColumn(Column.MaterialSourceId); - addColumn(Column.SourceProtocolApplication); - addColumn(Column.SourceApplicationInput); - addColumn(Column.RunApplication); - addColumn(Column.RunApplicationOutput); - addColumn(Column.SourceProtocolLSID); - - var nameCol = addColumn(Column.Name); - if (st != null && st.hasNameAsIdCol()) - { - // Show the Name field but don't mark is as required when using name expressions - if (st.hasNameExpression()) - { - var nameExpression = st.getNameExpression(); - nameCol.setNameExpression(nameExpression); - nameCol.setNullable(true); - String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); - String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); - nameCol.setDescription(desc); - } - else - { - nameCol.setNullable(false); - } - } - else - { - nameCol.setReadOnly(true); - nameCol.setShownInInsertView(false); - } - - addColumn(Column.Alias); - addColumn(Column.Description); - - var typeColumnInfo = addColumn(Column.SampleSet); - typeColumnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) - { - @Override - 
protected ContainerFilter getLookupContainerFilter() - { - // Be sure that we can resolve the sample type if it's defined in a separate container. - // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. - // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has - // parents in current sample type and a sample type in the parent container - Set containers = new HashSet<>(); - if (null != st) - containers.add(st.getContainer()); - containers.add(getContainer()); - if (getContainer().getProject() != null) - containers.add(getContainer().getProject()); - containers.add(ContainerManager.getSharedContainer()); - ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); - - if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) - cf = new UnionContainerFilter(_containerFilter, cf); - return cf; - } - }); - - typeColumnInfo.setReadOnly(true); - typeColumnInfo.setUserEditable(false); - typeColumnInfo.setShownInInsertView(false); - - addColumn(Column.MaterialExpDate); - addContainerColumn(Column.Folder, null); - var runCol = addColumn(Column.Run); - runCol.setFk(new ExpSchema(_userSchema.getUser(), getContainer()).getRunIdForeignKey(getContainerFilter())); - runCol.setShownInInsertView(false); - runCol.setShownInUpdateView(false); - - var colLSID = addColumn(Column.LSID); - colLSID.setHidden(true); - colLSID.setReadOnly(true); - colLSID.setUserEditable(false); - colLSID.setShownInInsertView(false); - colLSID.setShownInDetailsView(false); - colLSID.setShownInUpdateView(false); - - var rootRowId = addColumn(Column.RootMaterialRowId); - rootRowId.setHidden(true); - rootRowId.setReadOnly(true); - rootRowId.setUserEditable(false); - rootRowId.setShownInInsertView(false); - rootRowId.setShownInDetailsView(false); - rootRowId.setShownInUpdateView(false); - - var aliquotParentLSID = 
addColumn(Column.AliquotedFromLSID); - aliquotParentLSID.setHidden(true); - aliquotParentLSID.setReadOnly(true); - aliquotParentLSID.setUserEditable(false); - aliquotParentLSID.setShownInInsertView(false); - aliquotParentLSID.setShownInDetailsView(false); - aliquotParentLSID.setShownInUpdateView(false); - - addColumn(Column.IsAliquot); - addColumn(Column.Created); - addColumn(Column.CreatedBy); - addColumn(Column.Modified); - addColumn(Column.ModifiedBy); - - List defaultCols = new ArrayList<>(); - defaultCols.add(FieldKey.fromParts(Column.Name)); - defaultCols.add(FieldKey.fromParts(Column.MaterialExpDate)); - boolean hasProductFolders = getContainer().hasProductFolders(); - if (hasProductFolders) - defaultCols.add(FieldKey.fromParts(Column.Folder)); - defaultCols.add(FieldKey.fromParts(Column.Run)); - - if (st == null) - defaultCols.add(FieldKey.fromParts(Column.SampleSet)); - - addColumn(Column.Flag); - - var statusColInfo = addColumn(Column.SampleState); - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); - statusColInfo.setShownInDetailsView(statusEnabled); - statusColInfo.setShownInInsertView(statusEnabled); - statusColInfo.setShownInUpdateView(statusEnabled); - statusColInfo.setHidden(!statusEnabled); - statusColInfo.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - if (statusEnabled) - defaultCols.add(FieldKey.fromParts(Column.SampleState)); - statusColInfo.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - - // TODO is this a real Domain??? 
- if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) - { - defaultCols.add(FieldKey.fromParts(Column.Flag)); - addSampleTypeColumns(st, defaultCols); - - setName(_ss.getName()); - - ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); - gridUrl.addParameter("rowId", st.getRowId()); - setGridURL(new DetailsURL(gridUrl)); - } - - List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); - defaultCols.addAll(calculatedFieldKeys); - - addColumn(Column.AliquotCount); - addColumn(Column.AliquotVolume); - addColumn(Column.AliquotUnit); - addColumn(Column.AvailableAliquotCount); - addColumn(Column.AvailableAliquotVolume); - - addColumn(Column.StoredAmount); - defaultCols.add(FieldKey.fromParts(Column.StoredAmount)); - - addColumn(Column.Units); - defaultCols.add(FieldKey.fromParts(Column.Units)); - - var rawAmountColumn = addColumn(Column.RawAmount); - rawAmountColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.StoredAmount)); - - } - }; - } - }); - rawAmountColumn.setHidden(true); - rawAmountColumn.setShownInDetailsView(false); - rawAmountColumn.setShownInInsertView(false); - rawAmountColumn.setShownInUpdateView(false); - - var rawUnitsColumn = addColumn(Column.RawUnits); - rawUnitsColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.Units)); - - } - }; - } - }); - rawUnitsColumn.setHidden(true); - rawUnitsColumn.setShownInDetailsView(false); - 
rawUnitsColumn.setShownInInsertView(false); - rawUnitsColumn.setShownInUpdateView(false); - - if (InventoryService.get() != null && (st == null || !st.isMedia())) - defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); - - SQLFragment sql; - UserSchema plateUserSchema; - // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset - // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying - // against tables in that schema. - if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) - plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); - else - plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); - - if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) - { - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ Column.RowId.name(); - SQLFragment existsSubquery = new SQLFragment() - .append("SELECT 1 FROM ") - .append(plateUserSchema.getTable("Well"), "well") - .append(" WHERE well.sampleid = ").append(rowIdField); - - sql = new SQLFragment() - .append("CASE WHEN EXISTS (") - .append(existsSubquery) - .append(") THEN 'Plated' ") - .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 - .append("ELSE NULL END"); - } - else - { - sql = new SQLFragment("(SELECT NULL)"); - } - var col = new ExprColumn(this, Column.IsPlated.name(), sql, JdbcType.VARCHAR); - col.setDescription("Whether the sample that has been plated, if plating is supported."); - col.setUserEditable(false); - col.setReadOnly(true); - col.setShownInDetailsView(false); - col.setShownInInsertView(false); - col.setShownInUpdateView(false); - if (plateUserSchema != null) - col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + Column.RowId.name() + "}")); - addColumn(col); - - addVocabularyDomains(); - - addColumn(Column.Properties); - - var colInputs = addColumn(Column.Inputs); - addMethod("Inputs", new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); - - var colOutputs = addColumn(Column.Outputs); - addMethod("Outputs", new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); - - addExpObjectMethod(); - - ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); - DetailsURL url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); - nameCol.setURL(url); - rowIdCol.setURL(url); - setDetailsURL(url); - - if (canUserAccessPhi()) - { - ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); - setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); - - ActionURL insertActionURL = 
PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); - setInsertURL(new DetailsURL(insertActionURL)); - } - else - { - setImportURL(LINK_DISABLER); - setInsertURL(LINK_DISABLER); - setUpdateURL(LINK_DISABLER); - } - - setTitleColumn(Column.Name.toString()); - - setDefaultVisibleColumns(defaultCols); - - MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn("rowid"), _ss, true); - addColumn(lineageLookup); - } - - private ContainerFilter getSampleStatusLookupContainerFilter() - { - // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared - // for the sample status lookup since in the app project context we want to share status definitions across - // a given project instead of creating duplicate statuses in each subfolder project. - ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); - type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; - return type.create(getUserSchema()); - } - - @Override - public Domain getDomain() - { - return getDomain(false); - } - - @Override - public Domain getDomain(boolean forUpdate) - { - return _ss == null ? 
null : _ss.getDomain(forUpdate); - } - - - public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) - { - if (nameExpression == null) - return currentDescription; - - StringBuilder sb = new StringBuilder(); - if (currentDescription != null && !currentDescription.isEmpty()) - { - sb.append(currentDescription); - if (!currentDescription.endsWith(".")) - sb.append("."); - sb.append("\n"); - } - - sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); - sb.append(nameExpression); - sb.append("."); - if (!StringUtils.isEmpty(nameExpressionPreview)) - { - sb.append("\nExample of name that will be generated from the current pattern: \n"); - sb.append(nameExpressionPreview); - } - - return sb.toString(); - } - - private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) - { - TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); - if (null == dbTable) - return; - - UserSchema schema = getUserSchema(); - Domain domain = st.getDomain(); - ColumnInfo rowIdColumn = getColumn(Column.RowId); - ColumnInfo lsidColumn = getColumn(Column.LSID); - ColumnInfo nameColumn = getColumn(Column.Name); - - visibleColumns.remove(FieldKey.fromParts(Column.Run.name())); - - // When not using name expressions, mark the ID columns as required. - // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. - final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); - - Set mvColumns = domain.getProperties().stream() - .filter(ImportAliasable::isMvEnabled) - .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) - .collect(Collectors.toSet()); - - for (ColumnInfo dbColumn : dbTable.getColumns()) - { - // Don't include PHI columns in full text search index - // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() - if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) - continue; - - if ( - rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - lsidColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) - ) - { - continue; - } - - var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); - - // TODO missing values? comments? flags? - DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); - var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); - if (propColumn.getName().equalsIgnoreCase("genid")) - { - propColumn.setHidden(true); - propColumn.setUserEditable(false); - propColumn.setShownInDetailsView(false); - propColumn.setShownInInsertView(false); - propColumn.setShownInUpdateView(false); - } - if (null != dp) - { - PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), - SchemaKey.fromParts("samples"), st.getName(), FieldKey.fromParts("RowId"), null, getLookupContainerFilter()); - - if (idCols.contains(dp)) - { - propColumn.setNullable(false); - propColumn.setDisplayColumnFactory(new IdColumnRendererFactory()); - } - - // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected - if (!propColumn.isHidden()) - { - visibleColumns.add(propColumn.getFieldKey()); - } - - if (propColumn.isMvEnabled()) - { - // The column in the physical table has a "_MVIndicator" suffix, but we want to expose - // it with a "MVIndicator" suffix (no underscore) - var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, - StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); - mvColumn.setLabel(dp.getLabel() != null ? 
dp.getLabel() : dp.getName() + " MV Indicator"); - mvColumn.setSqlTypeName("VARCHAR"); - mvColumn.setPropertyURI(dp.getPropertyURI()); - mvColumn.setNullable(true); - mvColumn.setUserEditable(false); - mvColumn.setHidden(true); - mvColumn.setMvIndicatorColumn(true); - - addColumn(mvColumn); - propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); - } - } - - if (!mvColumns.contains(propColumn.getFieldKey())) - addColumn(propColumn); - - } - - setDefaultVisibleColumns(visibleColumns); - } - - // These are mostly fields that are wrapped by fields with different names (see createColumn()) - // we could handle each case separately, but this is easier - static final Set wrappedFieldKeys = Set.of( - new FieldKey(null, "objectid"), - new FieldKey(null, "RowId"), - new FieldKey(null, "LSID"), // Flag - new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication - new FieldKey(null, "runId"), // Run, RunApplication - new FieldKey(null, "CpasType")); // SampleSet - static final Set ALL_COLUMNS = Set.of(); - - private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) - { - if (null == selectedColumns) - return ALL_COLUMNS; - selectedColumns = new TreeSet<>(selectedColumns); - if (selectedColumns.contains(new FieldKey(null, StoredAmount))) - selectedColumns.add(new FieldKey(null, Units)); - if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) - selectedColumns.add(new FieldKey(null, Column.AliquotedFromLSID.name())); - if (selectedColumns.contains(new FieldKey(null, Column.IsAliquot.name()))) - selectedColumns.add(new FieldKey(null, Column.RootMaterialRowId.name())); - selectedColumns.addAll(wrappedFieldKeys); - if (null != getFilter()) - selectedColumns.addAll(getFilter().getAllFieldKeys()); - return selectedColumns; - } - - @NotNull - @Override - public SQLFragment getFromSQL(String alias) - { - return getFromSQL(alias, null); - } - - @Override - public SQLFragment 
getFromSQLExpanded(String alias, Set selectedColumns) - { - SQLFragment sql = new SQLFragment("("); - boolean usedMaterialized; - - - // SELECT FROM - /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) - * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. - * Also, we may use RemapCache for material lookup outside a transaction - */ - boolean onlyMaterialColums = false; - if (null != selectedColumns && !selectedColumns.isEmpty()) - onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); - if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) - { - sql.append(getMaterializedSQL()); - usedMaterialized = true; - } - else - { - sql.append(getJoinSQL(selectedColumns)); - usedMaterialized = false; - } - - // WHERE - SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); - sql.append("\n").append(filterFrag); - if (_ss != null && !usedMaterialized) - { - if (!filterFrag.isEmpty()) - sql.append(" AND "); - else - sql.append(" WHERE "); - sql.append("CpasType = ").appendValue(_ss.getLSID()); - } - sql.append(") ").appendIdentifier(alias); - - return getTransformedFromSQL(sql); - } - - @Override - public void setSupportTableRules(boolean b) - { - this._supportTableRules = b; - } - - @Override - public boolean supportTableRules() // intentional override - { - return _supportTableRules; - } - - @Override - protected @NotNull TableRules findTableRules() - { - Container definitionContainer = getUserSchema().getContainer(); - if (null != _ss) - definitionContainer = _ss.getContainer(); - return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); - } - - - static class InvalidationCounters - { - public final AtomicLong update, insert, delete, 
rollup; - InvalidationCounters() - { - long l = System.currentTimeMillis(); - update = new AtomicLong(l); - insert = new AtomicLong(l); - delete = new AtomicLong(l); - rollup = new AtomicLong(l); - } - } - - static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); - static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); - static final AtomicBoolean initializedListeners = new AtomicBoolean(false); - - // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() - public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - var scope = ExperimentServiceImpl.getExpSchema().getScope(); - var runnable = new RefreshMaterializedViewRunnable(lsid, reason); - scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); - } - - private static class RefreshMaterializedViewRunnable implements Runnable - { - private final String _lsid; - private final SampleTypeServiceImpl.SampleChangeType _reason; - - public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - _lsid = lsid; - _reason = reason; - } - - @Override - public void run() - { - if (_reason == schema) - { - /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. - * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the - * MQH from the cache to force the SQL to be regenerated. 
- */ - _materializedQueries.remove(_lsid); - return; - } - var counters = getInvalidateCounters(_lsid); - switch (_reason) - { - case insert -> counters.insert.incrementAndGet(); - case rollup -> counters.rollup.incrementAndGet(); - case update -> counters.update.incrementAndGet(); - case delete -> counters.delete.incrementAndGet(); - default -> throw new IllegalStateException("Unexpected value: " + _reason); - } - } - - @Override - public boolean equals(Object obj) - { - return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); - } - } - - private static InvalidationCounters getInvalidateCounters(String lsid) - { - if (!initializedListeners.getAndSet(true)) - { - CacheManager.addListener(_invalidationCounters::clear); - } - return _invalidationCounters.computeIfAbsent(lsid, (unused) -> - new InvalidationCounters() - ); - } - - /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ - private SQLFragment getMaterializedSQL() - { - if (null == _ss) - return getJoinSQL(null); - - var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> - { - /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. - * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, - * even if just to help with race-conditions. - * - * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
- */ - SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); - return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") - .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") - .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") - .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) - .build(); - }); - return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); - } - - - /** - * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. - * It does not help with incremental updates (except for providing the upsert() method). - * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
- */ - static class _MaterializedQueryHelper extends MaterializedQueryHelper - { - final String _lsid; - - static class Builder extends MaterializedQueryHelper.Builder - { - String _lsid; - - public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) - { - super(prefix, scope, select); - this._lsid = lsid; - } - - @Override - public _MaterializedQueryHelper build() - { - return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); - } - } - - _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, - boolean isSelectIntoSql) - { - super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); - this._lsid = lsid; - } - - @Override - protected Materialized createMaterialized(String txCacheKey) - { - DbSchema temp = DbSchema.getTemp(); - String name = _prefix + "_" + GUID.makeHash(); - _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); - initMaterialized(materialized); - return materialized; - } - - @Override - protected void incrementalUpdateBeforeSelect(Materialized m) - { - _Materialized materialized = (_Materialized) m; - - boolean lockAcquired = false; - try - { - lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); - if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) - throw materialized._loadException; - - if (!materialized.incrementalDeleteCheck.stillValid(0)) - executeIncrementalDelete(); - if (!materialized.incrementalRollupCheck.stillValid(0)) - executeIncrementalRollup(); - if (!materialized.incrementalInsertCheck.stillValid(0)) - executeIncrementalInsert(); - } - catch (RuntimeException|InterruptedException ex) - { - RuntimeException rex = UnexpectedException.wrap(ex); - 
materialized.setError(rex); - // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. - - // Ensure that next refresh starts clean - _materializedQueries.remove(_lsid); - getInvalidateCounters(_lsid).update.incrementAndGet(); - throw rex; - } - finally - { - if (lockAcquired) - materialized.getLock().unlock(); - } - } - - void upsertWithRetry(SQLFragment sql) - { - // not actually read-only, but we don't want to start an explicit transaction - _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); - } - - void executeIncrementalInsert() - { - SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") - .append("SELECT * FROM (") - .append(getViewSourceSql()).append(") viewsource_\n") - .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); - upsertWithRetry(incremental); - } - - void executeIncrementalDelete() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) - // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) - SQLFragment incremental = new SQLFragment() - .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") - .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); - upsertWithRetry(incremental); - } - - void executeIncrementalRollup() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - SQLFragment incremental = new SQLFragment(); - if (d.isPostgreSQL()) - { - incremental - .append("UPDATE temp.${NAME} AS st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM exp.Material AS expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") - .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") - .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") - .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") - .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") - .append(")"); - } - else - { - // SQL Server 2022 supports IS DISTINCT FROM - incremental - .append("UPDATE st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM temp.${NAME} st, exp.Material expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") - .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") - .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") - .append(")"); - } - upsertWithRetry(incremental); - } - } - - static class _Materialized extends MaterializedQueryHelper.Materialized - { - final MaterializedQueryHelper.Invalidator incrementalInsertCheck; - final MaterializedQueryHelper.Invalidator incrementalRollupCheck; - final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; - - _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) - { - super(mqh, tableName, cacheKey, created, sql); - final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); - incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); - incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); - incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); - } - - @Override - public void reset() - { - super.reset(); - long now = HeartBeat.currentTimeMillis(); - incrementalInsertCheck.stillValid(now); - incrementalRollupCheck.stillValid(now); - incrementalDeleteCheck.stillValid(now); - } - - Lock getLock() - { - return _loadingLock; - } - } - - - /* SELECT and JOIN, does not include WHERE */ - private SQLFragment getJoinSQL(Set selectedColumns) - { - TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); - Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); - provisionedCols.remove(Column.RowId.name()); - provisionedCols.remove(Column.LSID.name()); - provisionedCols.remove(Column.Name.name()); - boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); - - boolean hasSampleColumns = false; - boolean hasAliquotColumns = false; - - Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); - selectedColumns = computeInnerSelectedColumns(selectedColumns); - - SQLFragment sql = new SQLFragment(); - sql.appendComment("", getSqlDialect()); - sql.append("SELECT "); - String comma = ""; - for (String materialCol : materialCols) - { - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) - { - sql.append(comma).append("m.").appendIdentifier(materialCol); - comma = ", "; - } - } - if (null != provisioned && hasProvisionedColumns) - { - for (ColumnInfo propertyColumn : provisioned.getColumns()) - { - // don't select twice - if ( - Column.RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.LSID.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) - ) - { - continue; - } - - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) - { - sql.append(comma); - boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) - || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); - if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - 
hasAliquotColumns = true; - } - else if (rootField) - { - sql.append(propertyColumn.getValueSql("m_sample")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasSampleColumns = true; - } - else - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - comma = ", "; - } - } - } - - sql.append("\nFROM "); - sql.append(_rootTable, "m"); - if (hasSampleColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); - if (hasAliquotColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); - - sql.appendComment("", getSqlDialect()); - return sql; - } - - private class IdColumnRendererFactory implements DisplayColumnFactory - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new IdColumnRenderer(colInfo); - } - } - - private static class IdColumnRenderer extends DataColumn - { - public IdColumnRenderer(ColumnInfo col) - { - super(col); - } - - @Override - protected boolean isDisabledInput(RenderContext ctx) - { - return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; - } - } - - private static class SampleTypeAmountDisplayColumn extends ExprColumn - { - public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) - { - super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" / ? AS ") - .append(parent.getSqlDialect().isPostgreSQL() ? 
"DECIMAL" : "DOUBLE PRECISION") - .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.getValue()), - JdbcType.DOUBLE); - - setLabel(label); - setImportAliasesSet(importAliases); - } - } - - private static class SampleTypeUnitDisplayColumn extends ExprColumn - { - public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) - { - super(parent, FieldKey.fromParts(Column.Units.name()), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? THEN ? ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.toString()), - JdbcType.VARCHAR); - } - } - - @Override - public QueryUpdateService getUpdateService() - { - return new SampleTypeUpdateServiceDI(this, _ss); - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) - { - if (_ss == null) - { - // Allow read and delete for exp.Materials. - // Don't allow insert/update on exp.Materials without a sample type. - if (perm == DeletePermission.class || perm == ReadPermission.class) - return getContainer().hasPermission(user, perm); - return false; - } - - if (_ss.isMedia() && perm == ReadPermission.class) - return getContainer().hasPermission(user, MediaReadPermission.class); - - return super.hasPermission(user, perm); - } - - @NotNull - @Override - public List getUniqueIndices() - { - // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" - // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. - // Some FKs to ExpMaterialTable don't include the "Name" column (e.g. 
NabBaseTable.Specimen) - String indexName = "idx_material_ak"; - List ret = new ArrayList<>(super.getUniqueIndices()); - if (getColumn("Name") != null) - ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); - else - ret.removeIf( def -> def.name().equals(indexName)); - return Collections.unmodifiableList(ret); - } - - - // - // UpdatableTableInfo - // - - - @Override - public @Nullable Long getOwnerObjectId() - { - return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); - } - - @Nullable - @Override - public CaseInsensitiveHashMap remapSchemaColumns() - { - CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); - - if (null != getRealTable().getColumn("container") && null != getColumn("folder")) - { - m.put("container", "folder"); - } - - for (ColumnInfo col : getColumns()) - { - if (col.getMvColumnName() != null) - m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); - } - - return m; - } - - @Override - public Set getAltMergeKeys(DataIteratorContext context) - { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); - - return MATERIAL_ALT_MERGE_KEYS; - } - - @NotNull - @Override - public Set getAltKeysForUpdate() - { - return MATERIAL_ALT_UPDATE_KEYS; - } - - @Override - @NotNull - public List> getAdditionalRequiredInsertColumns() - { - if (getSampleType() == null) - return Collections.emptyList(); - - try - { - return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); - } - catch (IOException e) - { - return Collections.emptyList(); - } - } - - @Override - public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) - { - TableInfo propertiesTable = _ss.getTinfo(); - - // The specimens sample type doesn't have a properties table - if (propertiesTable == null) - { - return data; - } - 
- long sampleTypeObjectId = requireNonNull(getOwnerObjectId()); - - // TODO: subclass PersistDataIteratorBuilder to index Materials! not DataClass! - try - { - var persist = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, getUserSchema().getContainer(), getUserSchema().getUser(), _ss.getImportAliasesIncludingAliquot(), sampleTypeObjectId) - .setFileLinkDirectory(SAMPLETYPE_FILE_DIRECTORY); - ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); - - persist.setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> - { - List lsids = searchIndexDataKeys.lsids(); - List orderedRowIds = searchIndexDataKeys.orderedRowIds(); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterials(sublist)) - expMaterial.index(q, this); - }) - ); - - ListUtils.partition(lsids, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterialsByLsid(sublist)) - expMaterial.index(q, this); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT) - ); - - DataIteratorBuilder builder = LoggingDataIterator.wrap(persist); - return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(builder, getUserSchema().getContainer(), getUserSchema().getUser(), ExperimentService.get().getTinfoMaterialAliasMap(), _ss, true)); - } - catch (IOException e) - { - throw new UncheckedIOException(e); - } - } - - @Override - @NotNull - public AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.DETAILED; - } - - static final Set excludeFromDetailedAuditField; - static - { - var set = new CaseInsensitiveHashSet(); - 
set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); - set.addAll(ExpDataIterators.NOT_FOR_UPDATE); - // We don't want the inventory columns to show up in the sample timeline audit record; - // they are captured in their own audit record. - set.addAll(InventoryService.InventoryStatusColumn.names()); - excludeFromDetailedAuditField = Collections.unmodifiableSet(set); - } - - @Override - public @NotNull Set getExcludedDetailedUpdateAuditFields() - { - // uniqueId fields don't change in reality, so exclude them from the audit updates - Set excluded = new CaseInsensitiveHashSet(); - excluded.addAll(this.getUniqueIdFields()); - excluded.addAll(excludeFromDetailedAuditField); - return excluded; - } - - @Override - public List> getImportTemplates(ViewContext ctx) - { - // respect any metadata overrides - if (getRawImportTemplates() != null) - return super.getImportTemplates(ctx); - - List> templates = new ArrayList<>(); - ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); - url.addParameter("headerType", ColumnHeaderType.ImportField.name()); - try - { - if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) - { - for (String aliasKey : getSampleType().getImportAliases().keySet()) - url.addParameter("includeColumn", aliasKey); - } - } - catch (IOException e) - {} - templates.add(Pair.of("Download Template", url.toString())); - return templates; - } - - @Override - public void overlayMetadata(String tableName, UserSchema schema, Collection errors) - { - if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) - { - Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); - if (null != metadata) - { - overlayMetadata(metadata, schema, errors); - } - } - super.overlayMetadata(tableName, schema, errors); - } - - static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn - 
{ - private Unit typeUnit; - private boolean applySampleTypePrecision = true; - - public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) { - super(col, false); - this.typeUnit = typeUnit; - this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user - } - - @Override - public Object getDisplayValue(RenderContext ctx) - { - Object value = super.getDisplayValue(ctx); - if (this.applySampleTypePrecision && value != null) - { - int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); - value = Precision.round(Double.valueOf(value.toString()), scale); - } - return value; - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.experiment.api; + +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Precision; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.assay.plate.AssayPlateMetadataService; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.cache.BlockingCache; +import org.labkey.api.cache.CacheManager; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.compliance.TableRules; +import org.labkey.api.compliance.TableRulesManager; +import org.labkey.api.data.ColumnHeaderType; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DataColumn; +import org.labkey.api.data.DataRegion; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DisplayColumn; +import org.labkey.api.data.DisplayColumnFactory; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MaterializedQueryHelper; +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.data.PHI; +import org.labkey.api.data.RenderContext; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.UnionContainerFilter; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.LoggingDataIterator; +import org.labkey.api.dataiterator.SimpleTranslator; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.OntologyManager; 
+import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ExperimentUrls; +import org.labkey.api.exp.api.NameExpressionOptionService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.DomainUtil; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpMaterialTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.inventory.InventoryService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.qc.SampleStatusService; +import org.labkey.api.query.AliasedColumn; +import org.labkey.api.query.DetailsURL; +import org.labkey.api.query.ExprColumn; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.LookupForeignKey; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryForeignKey; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.QueryUrls; +import org.labkey.api.query.RowIdForeignKey; +import org.labkey.api.query.SchemaKey; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.column.BuiltInColumnTypes; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.UserPrincipal; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HeartBeat; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.StringExpression; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.experiment.ExpDataIterators; +import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.lineage.LineageMethod; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; 
+import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLETYPE_FILE_DIRECTORY; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; +import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; +import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; + +public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable +{ + ExpSampleTypeImpl _ss; + Set _uniqueIdFields; + boolean _supportTableRules = true; + + public static final Set MATERIAL_ALT_MERGE_KEYS; + public static final Set MATERIAL_ALT_UPDATE_KEYS; + public static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); + static { + MATERIAL_ALT_MERGE_KEYS = Set.of(Column.MaterialSourceId.name(), Column.Name.name()); + MATERIAL_ALT_UPDATE_KEYS = Set.of(Column.LSID.name()); + + Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); + IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); + amountValidator.setExpressionValue("~gte=0"); + amountValidator.setErrorMessage("Amounts must be non-negative."); + amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); + AMOUNT_RANGE_VALIDATORS.add(amountValidator); + } + + public ExpMaterialTableImpl(UserSchema schema, 
ContainerFilter cf, @Nullable ExpSampleType sampleType) + { + super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); + setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); + setPublicSchemaName(ExpSchema.SCHEMA_NAME); + addAllowablePermission(InsertPermission.class); + addAllowablePermission(UpdatePermission.class); + addAllowablePermission(MoveEntitiesPermission.class); + setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); + setSampleType(sampleType); + } + + public Set getUniqueIdFields() + { + if (_uniqueIdFields == null) + { + _uniqueIdFields = new CaseInsensitiveHashSet(); + _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); + } + return _uniqueIdFields; + } + + @Override + protected ColumnInfo resolveColumn(String name) + { + ColumnInfo result = super.resolveColumn(name); + if (result == null) + { + if ("CpasType".equalsIgnoreCase(name)) + result = createColumn(Column.SampleSet.name(), Column.SampleSet); + else if (Column.Property.name().equalsIgnoreCase(name)) + result = createPropertyColumn(Column.Property.name()); + else if (Column.QueryableInputs.name().equalsIgnoreCase(name)) + result = createColumn(Column.QueryableInputs.name(), Column.QueryableInputs); + } + return result; + } + + @Override + public ColumnInfo getExpObjectColumn() + { + var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); + ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); + return ret; + } + + @Override + public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) + { + if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) + { + // Special case sample auditing to help build a useful timeline view + return SampleTypeServiceImpl.get(); + } + + 
return super.getAuditHandler(auditBehaviorType); + } + + @Override + public MutableColumnInfo createColumn(String alias, Column column) + { + switch (column) + { + case Folder -> + { + return wrapColumn(alias, _rootTable.getColumn("Container")); + } + case LSID -> + { + return wrapColumn(alias, _rootTable.getColumn(Column.LSID.name())); + } + case MaterialSourceId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.MaterialSourceId.name())); + columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case RootMaterialRowId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.RootMaterialRowId.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.RowId.name())); + columnInfo.setLabel("Root Material"); + columnInfo.setUserEditable(false); + + // NK: Here we mark the column as not required AND nullable which is the opposite of the database where + // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. + // See ExpMaterialValidatorIterator. 
+ columnInfo.setRequired(false); + columnInfo.setNullable(true); + + return columnInfo; + } + case AliquotedFromLSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.AliquotedFromLSID.name())); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.LSID.name())); + columnInfo.setLabel("Aliquoted From Parent"); + return columnInfo; + } + case IsAliquot -> + { + String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RootMaterialRowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RowId.name(); + ExprColumn columnInfo = new ExprColumn(this, FieldKey.fromParts(Column.IsAliquot.name()), new SQLFragment( + "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) + .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 + .append(" ELSE NULL END)"), JdbcType.BOOLEAN); + columnInfo.setLabel("Is Aliquot"); + columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(false); + return columnInfo; + } + case Name -> + { + var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); + // shut off this field in insert and update views if user specified names are not allowed + if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) + { + nameCol.setShownInInsertView(false); + nameCol.setShownInUpdateView(false); + } + return nameCol; + } + case RawAmount -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit 
(if defined), currently on hand."); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + case StoredAmount -> + { + String label = StoredAmount.label(); + Set importAliases = Set.of(label, "Stored Amount"); + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setLabel(label); + columnInfo.setImportAliasesSet(importAliases); + columnInfo.setDescription("The amount of this sample currently on hand."); + return columnInfo; + } + } + case RawUnits -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + return columnInfo; + } + case Units -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return 
getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Column.Units.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + columnInfo.setFk(fk); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + return columnInfo; + } + } + case Description -> + { + return wrapColumn(alias, _rootTable.getColumn(Column.Description.name())); + } + case SampleSet -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); + // NOTE: populateColumns() overwrites this with a QueryForeignKey. Can this be removed? 
+ columnInfo.setFk(new LookupForeignKey(getContainerFilter(), null, null, null, null, "LSID", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + return columnInfo; + } + case SourceProtocolLSID -> + { + // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module + ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( + "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + + " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); + columnInfo.setLabel("Source Protocol"); + columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case SourceProtocolApplication -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); + columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + columnInfo.setAutoIncrement(false); + return columnInfo; + } + case SourceApplicationInput -> + { + var col = createEdgeColumn(alias, Column.SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a 
reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); + col.setHidden(true); + return col; + } + case RunApplication -> + { + SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") + .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") + .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") + .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) + .append(")"); + + var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); + col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); + col.setUserEditable(false); + col.setReadOnly(true); + col.setHidden(true); + return col; + } + case RunApplicationOutput -> + { + var col = createEdgeColumn(alias, Column.RunApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); + return col; + } + case Run -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); + ret.setReadOnly(true); + return ret; + } + case RowId -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); + // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary key's default sort direction + ret.setSortDirection(Sort.SortDirection.DESC); + ret.setFk(new RowIdForeignKey(ret)); + ret.setUserEditable(false); + ret.setHidden(true); + ret.setShownInInsertView(false); + ret.setHasDbSequence(true); + ret.setIsRootDbSequence(true); + return ret; + } + case Property -> + { + return createPropertyColumn(alias); + } + case Flag -> + { + return createFlagColumn(alias); + } + case Created -> + { + return wrapColumn(alias, _rootTable.getColumn("Created")); + } + case CreatedBy -> + { + return createUserColumn(alias, 
_rootTable.getColumn("CreatedBy")); + } + case Modified -> + { + return wrapColumn(alias, _rootTable.getColumn("Modified")); + } + case ModifiedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); + } + case Alias -> + { + return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); + } + case Inputs -> + { + return createLineageColumn(this, alias, true, false); + } + case QueryableInputs -> + { + return createLineageColumn(this, alias, true, true); + } + case Outputs -> + { + return createLineageColumn(this, alias, false, false); + } + case Properties -> + { + return createPropertiesColumn(alias); + } + case SampleState -> + { + boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); + var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); + ret.setLabel("Status"); + ret.setHidden(!statusEnabled); + ret.setShownInDetailsView(statusEnabled); + ret.setShownInInsertView(statusEnabled); + ret.setShownInUpdateView(statusEnabled); + ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + return ret; + } + case AliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); + ret.setLabel(ALIQUOT_COUNT_LABEL); + return ret; + } + case AliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(ALIQUOT_VOLUME_LABEL); + return ret; + } + case AvailableAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); + return ret; + } + case AvailableAliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); + 
ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); + return ret; + } + case AliquotUnit -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); + ret.setShownInDetailsView(false); + return ret; + } + case MaterialExpDate -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("MaterialExpDate")); + ret.setLabel("Expiration Date"); + ret.setShownInDetailsView(true); + ret.setShownInInsertView(true); + ret.setShownInUpdateView(true); + return ret; + } + default -> throw new IllegalArgumentException("Unknown column " + column); + } + } + + @Override + public MutableColumnInfo createPropertyColumn(String alias) + { + var ret = super.createPropertyColumn(alias); + if (_ss != null) + { + final TableInfo t = _ss.getTinfo(); + if (t != null) + { + ret.setFk(new LookupForeignKey() + { + @Override + public TableInfo getLookupTableInfo() + { + return t; + } + + @Override + protected ColumnInfo getPkColumn(TableInfo table) + { + return t.getColumn("lsid"); + } + }); + } + } + ret.setIsUnselectable(true); + ret.setDescription("A holder for any custom fields associated with this sample"); + ret.setHidden(true); + return ret; + } + + private Unit getSampleTypeUnit() + { + Unit typeUnit = null; + if (_ss != null && _ss.getMetricUnit() != null) + typeUnit = Unit.fromName(_ss.getMetricUnit()); + return typeUnit; + } + + private void setSampleType(@Nullable ExpSampleType st) + { + checkLocked(); + if (_ss != null) + { + throw new IllegalStateException("Cannot unset sample type"); + } + if (st != null && !(st instanceof ExpSampleTypeImpl)) + { + throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); + } + _ss = (ExpSampleTypeImpl) st; + if (_ss != null) + { + setPublicSchemaName(SamplesSchema.SCHEMA_NAME); + setName(st.getName()); + + String description = _ss.getDescription(); + if (StringUtils.isEmpty(description)) + description = "Contains one row per sample in the 
" + _ss.getName() + " sample type"; + setDescription(description); + + if (canUserAccessPhi()) + { + ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); + setImportURL(new DetailsURL(url)); + } + } + } + + public ExpSampleType getSampleType() + { + return _ss; + } + + @Override + protected void populateColumns() + { + var st = getSampleType(); + var rowIdCol = addColumn(Column.RowId); + addColumn(Column.MaterialSourceId); + addColumn(Column.SourceProtocolApplication); + addColumn(Column.SourceApplicationInput); + addColumn(Column.RunApplication); + addColumn(Column.RunApplicationOutput); + addColumn(Column.SourceProtocolLSID); + + var nameCol = addColumn(Column.Name); + if (st != null && st.hasNameAsIdCol()) + { + // Show the Name field but don't mark is as required when using name expressions + if (st.hasNameExpression()) + { + var nameExpression = st.getNameExpression(); + nameCol.setNameExpression(nameExpression); + nameCol.setNullable(true); + String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); + String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); + nameCol.setDescription(desc); + } + else + { + nameCol.setNullable(false); + } + } + else + { + nameCol.setReadOnly(true); + nameCol.setShownInInsertView(false); + } + + addColumn(Column.Alias); + addColumn(Column.Description); + + var typeColumnInfo = addColumn(Column.SampleSet); + typeColumnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) + { + @Override + protected ContainerFilter getLookupContainerFilter() + { + // Be sure that we can resolve the sample type if it's defined in a separate container. + // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
+ // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has + // parents in current sample type and a sample type in the parent container + Set containers = new HashSet<>(); + if (null != st) + containers.add(st.getContainer()); + containers.add(getContainer()); + if (getContainer().getProject() != null) + containers.add(getContainer().getProject()); + containers.add(ContainerManager.getSharedContainer()); + ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); + + if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) + cf = new UnionContainerFilter(_containerFilter, cf); + return cf; + } + }); + + typeColumnInfo.setReadOnly(true); + typeColumnInfo.setUserEditable(false); + typeColumnInfo.setShownInInsertView(false); + + addColumn(Column.MaterialExpDate); + addContainerColumn(Column.Folder, null); + var runCol = addColumn(Column.Run); + runCol.setFk(new ExpSchema(_userSchema.getUser(), getContainer()).getRunIdForeignKey(getContainerFilter())); + runCol.setShownInInsertView(false); + runCol.setShownInUpdateView(false); + + var colLSID = addColumn(Column.LSID); + colLSID.setHidden(true); + colLSID.setReadOnly(true); + colLSID.setUserEditable(false); + colLSID.setShownInInsertView(false); + colLSID.setShownInDetailsView(false); + colLSID.setShownInUpdateView(false); + + var rootRowId = addColumn(Column.RootMaterialRowId); + rootRowId.setHidden(true); + rootRowId.setReadOnly(true); + rootRowId.setUserEditable(false); + rootRowId.setShownInInsertView(false); + rootRowId.setShownInDetailsView(false); + rootRowId.setShownInUpdateView(false); + + var aliquotParentLSID = addColumn(Column.AliquotedFromLSID); + aliquotParentLSID.setHidden(true); + aliquotParentLSID.setReadOnly(true); + aliquotParentLSID.setUserEditable(false); + aliquotParentLSID.setShownInInsertView(false); + aliquotParentLSID.setShownInDetailsView(false); + 
aliquotParentLSID.setShownInUpdateView(false); + + addColumn(Column.IsAliquot); + addColumn(Column.Created); + addColumn(Column.CreatedBy); + addColumn(Column.Modified); + addColumn(Column.ModifiedBy); + + List defaultCols = new ArrayList<>(); + defaultCols.add(FieldKey.fromParts(Column.Name)); + defaultCols.add(FieldKey.fromParts(Column.MaterialExpDate)); + boolean hasProductFolders = getContainer().hasProductFolders(); + if (hasProductFolders) + defaultCols.add(FieldKey.fromParts(Column.Folder)); + defaultCols.add(FieldKey.fromParts(Column.Run)); + + if (st == null) + defaultCols.add(FieldKey.fromParts(Column.SampleSet)); + + addColumn(Column.Flag); + + var statusColInfo = addColumn(Column.SampleState); + boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); + statusColInfo.setShownInDetailsView(statusEnabled); + statusColInfo.setShownInInsertView(statusEnabled); + statusColInfo.setShownInUpdateView(statusEnabled); + statusColInfo.setHidden(!statusEnabled); + statusColInfo.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + if (statusEnabled) + defaultCols.add(FieldKey.fromParts(Column.SampleState)); + statusColInfo.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + + // TODO is this a real Domain??? 
+ if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) + { + defaultCols.add(FieldKey.fromParts(Column.Flag)); + addSampleTypeColumns(st, defaultCols); + + setName(_ss.getName()); + + ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); + gridUrl.addParameter("rowId", st.getRowId()); + setGridURL(new DetailsURL(gridUrl)); + } + + List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); + defaultCols.addAll(calculatedFieldKeys); + + addColumn(Column.AliquotCount); + addColumn(Column.AliquotVolume); + addColumn(Column.AliquotUnit); + addColumn(Column.AvailableAliquotCount); + addColumn(Column.AvailableAliquotVolume); + + addColumn(Column.StoredAmount); + defaultCols.add(FieldKey.fromParts(Column.StoredAmount)); + + addColumn(Column.Units); + defaultCols.add(FieldKey.fromParts(Column.Units)); + + var rawAmountColumn = addColumn(Column.RawAmount); + rawAmountColumn.setDisplayColumnFactory(new DisplayColumnFactory() + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new DataColumn(colInfo) + { + @Override + public void addQueryFieldKeys(Set keys) + { + super.addQueryFieldKeys(keys); + keys.add(FieldKey.fromParts(Column.StoredAmount)); + + } + }; + } + }); + rawAmountColumn.setHidden(true); + rawAmountColumn.setShownInDetailsView(false); + rawAmountColumn.setShownInInsertView(false); + rawAmountColumn.setShownInUpdateView(false); + + var rawUnitsColumn = addColumn(Column.RawUnits); + rawUnitsColumn.setDisplayColumnFactory(new DisplayColumnFactory() + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new DataColumn(colInfo) + { + @Override + public void addQueryFieldKeys(Set keys) + { + super.addQueryFieldKeys(keys); + keys.add(FieldKey.fromParts(Column.Units)); + + } + }; + } + }); + rawUnitsColumn.setHidden(true); + rawUnitsColumn.setShownInDetailsView(false); + 
rawUnitsColumn.setShownInInsertView(false); + rawUnitsColumn.setShownInUpdateView(false); + + if (InventoryService.get() != null && (st == null || !st.isMedia())) + defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); + + SQLFragment sql; + UserSchema plateUserSchema; + // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset + // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying + // against tables in that schema. + if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) + plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); + else + plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); + + if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) + { + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ Column.RowId.name(); + SQLFragment existsSubquery = new SQLFragment() + .append("SELECT 1 FROM ") + .append(plateUserSchema.getTable("Well"), "well") + .append(" WHERE well.sampleid = ").append(rowIdField); + + sql = new SQLFragment() + .append("CASE WHEN EXISTS (") + .append(existsSubquery) + .append(") THEN 'Plated' ") + .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 + .append("ELSE NULL END"); + } + else + { + sql = new SQLFragment("(SELECT NULL)"); + } + var col = new ExprColumn(this, Column.IsPlated.name(), sql, JdbcType.VARCHAR); + col.setDescription("Whether the sample that has been plated, if plating is supported."); + col.setUserEditable(false); + col.setReadOnly(true); + col.setShownInDetailsView(false); + col.setShownInInsertView(false); + col.setShownInUpdateView(false); + if (plateUserSchema != null) + col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + Column.RowId.name() + "}")); + addColumn(col); + + addVocabularyDomains(); + + addColumn(Column.Properties); + + var colInputs = addColumn(Column.Inputs); + addMethod("Inputs", new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); + + var colOutputs = addColumn(Column.Outputs); + addMethod("Outputs", new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); + + addExpObjectMethod(); + + ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); + DetailsURL url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); + nameCol.setURL(url); + rowIdCol.setURL(url); + setDetailsURL(url); + + if (canUserAccessPhi()) + { + ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); + setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); + + ActionURL insertActionURL = 
PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); + setInsertURL(new DetailsURL(insertActionURL)); + } + else + { + setImportURL(LINK_DISABLER); + setInsertURL(LINK_DISABLER); + setUpdateURL(LINK_DISABLER); + } + + setTitleColumn(Column.Name.toString()); + + setDefaultVisibleColumns(defaultCols); + + MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn("rowid"), _ss, true); + addColumn(lineageLookup); + } + + private ContainerFilter getSampleStatusLookupContainerFilter() + { + // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared + // for the sample status lookup since in the app project context we want to share status definitions across + // a given project instead of creating duplicate statuses in each subfolder project. + ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); + type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; + return type.create(getUserSchema()); + } + + @Override + public Domain getDomain() + { + return getDomain(false); + } + + @Override + public Domain getDomain(boolean forUpdate) + { + return _ss == null ? 
null : _ss.getDomain(forUpdate); + } + + + public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) + { + if (nameExpression == null) + return currentDescription; + + StringBuilder sb = new StringBuilder(); + if (currentDescription != null && !currentDescription.isEmpty()) + { + sb.append(currentDescription); + if (!currentDescription.endsWith(".")) + sb.append("."); + sb.append("\n"); + } + + sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); + sb.append(nameExpression); + sb.append("."); + if (!StringUtils.isEmpty(nameExpressionPreview)) + { + sb.append("\nExample of name that will be generated from the current pattern: \n"); + sb.append(nameExpressionPreview); + } + + return sb.toString(); + } + + private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) + { + TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); + if (null == dbTable) + return; + + UserSchema schema = getUserSchema(); + Domain domain = st.getDomain(); + ColumnInfo rowIdColumn = getColumn(Column.RowId); + ColumnInfo lsidColumn = getColumn(Column.LSID); + ColumnInfo nameColumn = getColumn(Column.Name); + + visibleColumns.remove(FieldKey.fromParts(Column.Run.name())); + + // When not using name expressions, mark the ID columns as required. + // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. + final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); + + Set mvColumns = domain.getProperties().stream() + .filter(ImportAliasable::isMvEnabled) + .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) + .collect(Collectors.toSet()); + + for (ColumnInfo dbColumn : dbTable.getColumns()) + { + // Don't include PHI columns in full text search index + // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() + if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) + continue; + + if ( + rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + lsidColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) + ) + { + continue; + } + + var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); + + // TODO missing values? comments? flags? + DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); + var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); + if (propColumn.getName().equalsIgnoreCase("genid")) + { + propColumn.setHidden(true); + propColumn.setUserEditable(false); + propColumn.setShownInDetailsView(false); + propColumn.setShownInInsertView(false); + propColumn.setShownInUpdateView(false); + } + if (null != dp) + { + PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), + SchemaKey.fromParts("samples"), st.getName(), FieldKey.fromParts("RowId"), null, getLookupContainerFilter()); + + if (idCols.contains(dp)) + { + propColumn.setNullable(false); + propColumn.setDisplayColumnFactory(new IdColumnRendererFactory()); + } + + // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected + if (!propColumn.isHidden()) + { + visibleColumns.add(propColumn.getFieldKey()); + } + + if (propColumn.isMvEnabled()) + { + // The column in the physical table has a "_MVIndicator" suffix, but we want to expose + // it with a "MVIndicator" suffix (no underscore) + var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, + StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); + mvColumn.setLabel(dp.getLabel() != null ? 
dp.getLabel() : dp.getName() + " MV Indicator"); + mvColumn.setSqlTypeName("VARCHAR"); + mvColumn.setPropertyURI(dp.getPropertyURI()); + mvColumn.setNullable(true); + mvColumn.setUserEditable(false); + mvColumn.setHidden(true); + mvColumn.setMvIndicatorColumn(true); + + addColumn(mvColumn); + propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); + } + } + + if (!mvColumns.contains(propColumn.getFieldKey())) + addColumn(propColumn); + + } + + setDefaultVisibleColumns(visibleColumns); + } + + // These are mostly fields that are wrapped by fields with different names (see createColumn()) + // we could handle each case separately, but this is easier + static final Set wrappedFieldKeys = Set.of( + new FieldKey(null, "objectid"), + new FieldKey(null, "RowId"), + new FieldKey(null, "LSID"), // Flag + new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication + new FieldKey(null, "runId"), // Run, RunApplication + new FieldKey(null, "CpasType")); // SampleSet + static final Set ALL_COLUMNS = Set.of(); + + private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) + { + if (null == selectedColumns) + return ALL_COLUMNS; + selectedColumns = new TreeSet<>(selectedColumns); + if (selectedColumns.contains(new FieldKey(null, StoredAmount))) + selectedColumns.add(new FieldKey(null, Units)); + if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) + selectedColumns.add(new FieldKey(null, Column.AliquotedFromLSID.name())); + if (selectedColumns.contains(new FieldKey(null, Column.IsAliquot.name()))) + selectedColumns.add(new FieldKey(null, Column.RootMaterialRowId.name())); + selectedColumns.addAll(wrappedFieldKeys); + if (null != getFilter()) + selectedColumns.addAll(getFilter().getAllFieldKeys()); + return selectedColumns; + } + + @NotNull + @Override + public SQLFragment getFromSQL(String alias) + { + return getFromSQL(alias, null); + } + + @Override + public SQLFragment 
getFromSQLExpanded(String alias, Set selectedColumns) + { + SQLFragment sql = new SQLFragment("("); + boolean usedMaterialized; + + + // SELECT FROM + /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) + * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. + * Also, we may use RemapCache for material lookup outside a transaction + */ + boolean onlyMaterialColums = false; + if (null != selectedColumns && !selectedColumns.isEmpty()) + onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); + if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) + { + sql.append(getMaterializedSQL()); + usedMaterialized = true; + } + else + { + sql.append(getJoinSQL(selectedColumns)); + usedMaterialized = false; + } + + // WHERE + SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); + sql.append("\n").append(filterFrag); + if (_ss != null && !usedMaterialized) + { + if (!filterFrag.isEmpty()) + sql.append(" AND "); + else + sql.append(" WHERE "); + sql.append("CpasType = ").appendValue(_ss.getLSID()); + } + sql.append(") ").appendIdentifier(alias); + + return getTransformedFromSQL(sql); + } + + @Override + public void setSupportTableRules(boolean b) + { + this._supportTableRules = b; + } + + @Override + public boolean supportTableRules() // intentional override + { + return _supportTableRules; + } + + @Override + protected @NotNull TableRules findTableRules() + { + Container definitionContainer = getUserSchema().getContainer(); + if (null != _ss) + definitionContainer = _ss.getContainer(); + return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); + } + + + static class InvalidationCounters + { + public final AtomicLong update, insert, delete, 
rollup; + InvalidationCounters() + { + long l = System.currentTimeMillis(); + update = new AtomicLong(l); + insert = new AtomicLong(l); + delete = new AtomicLong(l); + rollup = new AtomicLong(l); + } + } + + static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); + static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); + static final AtomicBoolean initializedListeners = new AtomicBoolean(false); + + // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() + public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + var scope = ExperimentServiceImpl.getExpSchema().getScope(); + var runnable = new RefreshMaterializedViewRunnable(lsid, reason); + scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); + } + + private static class RefreshMaterializedViewRunnable implements Runnable + { + private final String _lsid; + private final SampleTypeServiceImpl.SampleChangeType _reason; + + public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + _lsid = lsid; + _reason = reason; + } + + @Override + public void run() + { + if (_reason == schema) + { + /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. + * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the + * MQH from the cache to force the SQL to be regenerated. 
+ */ + _materializedQueries.remove(_lsid); + return; + } + var counters = getInvalidateCounters(_lsid); + switch (_reason) + { + case insert -> counters.insert.incrementAndGet(); + case rollup -> counters.rollup.incrementAndGet(); + case update -> counters.update.incrementAndGet(); + case delete -> counters.delete.incrementAndGet(); + default -> throw new IllegalStateException("Unexpected value: " + _reason); + } + } + + @Override + public boolean equals(Object obj) + { + return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); + } + } + + private static InvalidationCounters getInvalidateCounters(String lsid) + { + if (!initializedListeners.getAndSet(true)) + { + CacheManager.addListener(_invalidationCounters::clear); + } + return _invalidationCounters.computeIfAbsent(lsid, (unused) -> + new InvalidationCounters() + ); + } + + /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ + private SQLFragment getMaterializedSQL() + { + if (null == _ss) + return getJoinSQL(null); + + var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> + { + /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. + * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, + * even if just to help with race-conditions. + * + * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
+ */ + SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); + return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") + .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") + .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") + .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) + .build(); + }); + return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); + } + + + /** + * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. + * It does not help with incremental updates (except for providing the upsert() method). + * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
+ */ + static class _MaterializedQueryHelper extends MaterializedQueryHelper + { + final String _lsid; + + static class Builder extends MaterializedQueryHelper.Builder + { + String _lsid; + + public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) + { + super(prefix, scope, select); + this._lsid = lsid; + } + + @Override + public _MaterializedQueryHelper build() + { + return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); + } + } + + _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, + boolean isSelectIntoSql) + { + super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); + this._lsid = lsid; + } + + @Override + protected Materialized createMaterialized(String txCacheKey) + { + DbSchema temp = DbSchema.getTemp(); + String name = _prefix + "_" + GUID.makeHash(); + _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); + initMaterialized(materialized); + return materialized; + } + + @Override + protected void incrementalUpdateBeforeSelect(Materialized m) + { + _Materialized materialized = (_Materialized) m; + + boolean lockAcquired = false; + try + { + lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); + if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) + throw materialized._loadException; + + if (!materialized.incrementalDeleteCheck.stillValid(0)) + executeIncrementalDelete(); + if (!materialized.incrementalRollupCheck.stillValid(0)) + executeIncrementalRollup(); + if (!materialized.incrementalInsertCheck.stillValid(0)) + executeIncrementalInsert(); + } + catch (RuntimeException|InterruptedException ex) + { + RuntimeException rex = UnexpectedException.wrap(ex); + 
materialized.setError(rex); + // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. + + // Ensure that next refresh starts clean + _materializedQueries.remove(_lsid); + getInvalidateCounters(_lsid).update.incrementAndGet(); + throw rex; + } + finally + { + if (lockAcquired) + materialized.getLock().unlock(); + } + } + + void upsertWithRetry(SQLFragment sql) + { + // not actually read-only, but we don't want to start an explicit transaction + _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); + } + + void executeIncrementalInsert() + { + SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") + .append("SELECT * FROM (") + .append(getViewSourceSql()).append(") viewsource_\n") + .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); + upsertWithRetry(incremental); + } + + void executeIncrementalDelete() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) + // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) + SQLFragment incremental = new SQLFragment() + .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") + .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); + upsertWithRetry(incremental); + } + + void executeIncrementalRollup() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + SQLFragment incremental = new SQLFragment(); + if (d.isPostgreSQL()) + { + incremental + .append("UPDATE temp.${NAME} AS st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM exp.Material AS expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") + .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") + .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") + .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") + .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") + .append(")"); + } + else + { + // SQL Server 2022 supports IS DISTINCT FROM + incremental + .append("UPDATE st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM temp.${NAME} st, exp.Material expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") + .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") + .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") + .append(")"); + } + upsertWithRetry(incremental); + } + } + + static class _Materialized extends MaterializedQueryHelper.Materialized + { + final MaterializedQueryHelper.Invalidator incrementalInsertCheck; + final MaterializedQueryHelper.Invalidator incrementalRollupCheck; + final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; + + _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) + { + super(mqh, tableName, cacheKey, created, sql); + final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); + incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); + incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); + incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); + } + + @Override + public void reset() + { + super.reset(); + long now = HeartBeat.currentTimeMillis(); + incrementalInsertCheck.stillValid(now); + incrementalRollupCheck.stillValid(now); + incrementalDeleteCheck.stillValid(now); + } + + Lock getLock() + { + return _loadingLock; + } + } + + + /* SELECT and JOIN, does not include WHERE */ + private SQLFragment getJoinSQL(Set selectedColumns) + { + TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); + Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); + provisionedCols.remove(Column.RowId.name()); + provisionedCols.remove(Column.LSID.name()); + provisionedCols.remove(Column.Name.name()); + boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); + + boolean hasSampleColumns = false; + boolean hasAliquotColumns = false; + + Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); + selectedColumns = computeInnerSelectedColumns(selectedColumns); + + SQLFragment sql = new SQLFragment(); + sql.appendComment("", getSqlDialect()); + sql.append("SELECT "); + String comma = ""; + for (String materialCol : materialCols) + { + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) + { + sql.append(comma).append("m.").appendIdentifier(materialCol); + comma = ", "; + } + } + if (null != provisioned && hasProvisionedColumns) + { + for (ColumnInfo propertyColumn : provisioned.getColumns()) + { + // don't select twice + if ( + Column.RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Column.LSID.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Column.Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) + ) + { + continue; + } + + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) + { + sql.append(comma); + boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) + || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); + if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + 
hasAliquotColumns = true; + } + else if (rootField) + { + sql.append(propertyColumn.getValueSql("m_sample")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasSampleColumns = true; + } + else + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + comma = ", "; + } + } + } + + sql.append("\nFROM "); + sql.append(_rootTable, "m"); + if (hasSampleColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); + if (hasAliquotColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); + + sql.appendComment("", getSqlDialect()); + return sql; + } + + private class IdColumnRendererFactory implements DisplayColumnFactory + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new IdColumnRenderer(colInfo); + } + } + + private static class IdColumnRenderer extends DataColumn + { + public IdColumnRenderer(ColumnInfo col) + { + super(col); + } + + @Override + protected boolean isDisabledInput(RenderContext ctx) + { + return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; + } + } + + private static class SampleTypeAmountDisplayColumn extends ExprColumn + { + public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) + { + super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" / ? AS ") + .append(parent.getSqlDialect().isPostgreSQL() ? 
"DECIMAL" : "DOUBLE PRECISION") + .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.getValue()), + JdbcType.DOUBLE); + + setLabel(label); + setImportAliasesSet(importAliases); + } + } + + private static class SampleTypeUnitDisplayColumn extends ExprColumn + { + public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) + { + super(parent, FieldKey.fromParts(Column.Units.name()), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? THEN ? ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.toString()), + JdbcType.VARCHAR); + } + } + + @Override + public QueryUpdateService getUpdateService() + { + return new SampleTypeUpdateServiceDI(this, _ss); + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) + { + if (_ss == null) + { + // Allow read and delete for exp.Materials. + // Don't allow insert/update on exp.Materials without a sample type. + if (perm == DeletePermission.class || perm == ReadPermission.class) + return getContainer().hasPermission(user, perm); + return false; + } + + if (_ss.isMedia() && perm == ReadPermission.class) + return getContainer().hasPermission(user, MediaReadPermission.class); + + return super.hasPermission(user, perm); + } + + @NotNull + @Override + public List getUniqueIndices() + { + // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" + // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. + // Some FKs to ExpMaterialTable don't include the "Name" column (e.g. 
NabBaseTable.Specimen) + String indexName = "idx_material_ak"; + List ret = new ArrayList<>(super.getUniqueIndices()); + if (getColumn("Name") != null) + ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); + else + ret.removeIf( def -> def.name().equals(indexName)); + return Collections.unmodifiableList(ret); + } + + + // + // UpdatableTableInfo + // + + + @Override + public @Nullable Long getOwnerObjectId() + { + return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); + } + + @Nullable + @Override + public CaseInsensitiveHashMap remapSchemaColumns() + { + CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); + + if (null != getRealTable().getColumn("container") && null != getColumn("folder")) + { + m.put("container", "folder"); + } + + for (ColumnInfo col : getColumns()) + { + if (col.getMvColumnName() != null) + m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); + } + + return m; + } + + @Override + public Set getAltMergeKeys(DataIteratorContext context) + { + if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) + return getAltKeysForUpdate(); + + return MATERIAL_ALT_MERGE_KEYS; + } + + @NotNull + @Override + public Set getAltKeysForUpdate() + { + return MATERIAL_ALT_UPDATE_KEYS; + } + + @Override + @NotNull + public List> getAdditionalRequiredInsertColumns() + { + if (getSampleType() == null) + return Collections.emptyList(); + + try + { + return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); + } + catch (IOException e) + { + return Collections.emptyList(); + } + } + + @Override + public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) + { + TableInfo propertiesTable = _ss.getTinfo(); + + // The specimens sample type doesn't have a properties table + if (propertiesTable == null) + { + return data; + } + 
+ long sampleTypeObjectId = requireNonNull(getOwnerObjectId()); + + // TODO: subclass PersistDataIteratorBuilder to index Materials! not DataClass! + try + { + var persist = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, getUserSchema().getContainer(), getUserSchema().getUser(), _ss.getImportAliasesIncludingAliquot(), sampleTypeObjectId) + .setFileLinkDirectory(SAMPLETYPE_FILE_DIRECTORY); + ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); + SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); + + persist.setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> + { + List lsids = searchIndexDataKeys.lsids(); + List orderedRowIds = searchIndexDataKeys.orderedRowIds(); + + // Issue 51263: order by RowId to reduce deadlock + ListUtils.partition(orderedRowIds, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterials(sublist)) + expMaterial.index(q, this); + }) + ); + + ListUtils.partition(lsids, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterialsByLsid(sublist)) + expMaterial.index(q, this); + }) + ); + }, DbScope.CommitTaskOption.POSTCOMMIT) + ); + + DataIteratorBuilder builder = LoggingDataIterator.wrap(persist); + return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(builder, getUserSchema().getContainer(), getUserSchema().getUser(), ExperimentService.get().getTinfoMaterialAliasMap(), _ss, true)); + } + catch (IOException e) + { + throw new UncheckedIOException(e); + } + } + + @Override + @NotNull + public AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.DETAILED; + } + + static final Set excludeFromDetailedAuditField; + static + { + var set = new CaseInsensitiveHashSet(); + 
set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); + set.addAll(ExpDataIterators.NOT_FOR_UPDATE); + // We don't want the inventory columns to show up in the sample timeline audit record; + // they are captured in their own audit record. + set.addAll(InventoryService.InventoryStatusColumn.names()); + excludeFromDetailedAuditField = Collections.unmodifiableSet(set); + } + + @Override + public @NotNull Set getExcludedDetailedUpdateAuditFields() + { + // uniqueId fields don't change in reality, so exclude them from the audit updates + Set excluded = new CaseInsensitiveHashSet(); + excluded.addAll(this.getUniqueIdFields()); + excluded.addAll(excludeFromDetailedAuditField); + return excluded; + } + + @Override + public List> getImportTemplates(ViewContext ctx) + { + // respect any metadata overrides + if (getRawImportTemplates() != null) + return super.getImportTemplates(ctx); + + List> templates = new ArrayList<>(); + ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); + url.addParameter("headerType", ColumnHeaderType.ImportField.name()); + try + { + if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) + { + for (String aliasKey : getSampleType().getImportAliases().keySet()) + url.addParameter("includeColumn", aliasKey); + } + } + catch (IOException e) + {} + templates.add(Pair.of("Download Template", url.toString())); + return templates; + } + + @Override + public void overlayMetadata(String tableName, UserSchema schema, Collection errors) + { + if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) + { + Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); + if (null != metadata) + { + overlayMetadata(metadata, schema, errors); + } + } + super.overlayMetadata(tableName, schema, errors); + } + + static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn + 
{ + private Unit typeUnit; + private boolean applySampleTypePrecision = true; + + public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) { + super(col, false); + this.typeUnit = typeUnit; + this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user + } + + @Override + public Object getDisplayValue(RenderContext ctx) + { + Object value = super.getDisplayValue(ctx); + if (this.applySampleTypePrecision && value != null) + { + int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); + value = Precision.round(Double.valueOf(value.toString()), scale); + } + return value; + } + } +} diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 9cb45a4f468..aaf440a7466 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -832,6 +832,29 @@ public static boolean isAliquotStatusChangeNeedRecalc(Collection available return false; } + // Customize negative amount error message when the provided unit doesn't match sample type unit. + // For example, provided value of "-1 kg" would have been converted to "-1000 mg" by now. + // This updateRow (going to be deprecated) inconsistent with the data iterator code path, which use provided value "-1" in error message. 
+ // TODO: remove this override when consolidating sample update method to remove row by row update + @Override + protected void validateUpdateRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + if (row.containsKey(col.getColumnName())) + { + // if provided value is present, validate provided + Object value = row.get(col.getColumnName()); + Object providedValue = null; + if (_sampleType != null && value != null && (StoredAmount.name().equalsIgnoreCase(col.getColumnName()) || "Amount".equalsIgnoreCase(col.getColumnName()))) + { + providedValue = value + " (" + _sampleType.getMetricUnit() + ")"; + } + validateValue(col, value, providedValue); + } + } + } + @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException diff --git a/experiment/src/org/labkey/experiment/api/property/LengthValidator.java b/experiment/src/org/labkey/experiment/api/property/LengthValidator.java index 63af14bd54f..2da37e79bc7 100644 --- a/experiment/src/org/labkey/experiment/api/property/LengthValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/LengthValidator.java @@ -16,6 +16,7 @@ package org.labkey.experiment.api.property; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.data.ColumnRenderProperties; import org.labkey.api.exp.property.DefaultPropertyValidator; import org.labkey.api.exp.property.IPropertyValidator; @@ -68,7 +69,7 @@ public boolean isValid(IPropertyValidator validator, List error @Override public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache) + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) { assert value != null : "Shouldn't be validating a null 
value"; String[] parts = validator.getExpressionValue().split("="); diff --git a/experiment/src/org/labkey/experiment/api/property/LookupValidator.java b/experiment/src/org/labkey/experiment/api/property/LookupValidator.java index 0cd1a54b988..ba31803df1f 100644 --- a/experiment/src/org/labkey/experiment/api/property/LookupValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/LookupValidator.java @@ -1,296 +1,298 @@ -/* - * Copyright (c) 2010-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment.api.property; - -import org.jetbrains.annotations.NotNull; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.PropertyValidationError; -import org.labkey.api.query.QuerySchema; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.security.User; -import org.labkey.experiment.api.ExpMaterialTableImpl; - -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -/** - * User: jeckels - * Date: Jan 26, 2010 - */ -public class LookupValidator extends DefaultPropertyValidator implements ValidatorKind -{ - @Override - public String getName() - { - return "Lookup Property Validator"; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.Lookup).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - return true; - } - - private static class LookupKey - { 
- private final String _schema; - private final String _query; - private final String _container; - private final JdbcType _type; - - public LookupKey(PropertyDescriptor field) - { - _schema = field.getLookupSchema(); - _query = field.getLookupQuery(); - _container = field.getLookupContainer(); - _type = field.getJdbcType(); - } - - public LookupKey(ForeignKey fk, JdbcType jdbcType) - { - _schema = fk.getLookupSchemaName(); - _query = fk.getLookupTableName(); - _container = (null == fk.getLookupContainer() ? null : fk.getLookupContainer().getId()); - _type = jdbcType; - } - - @Override - public boolean equals(Object o) - { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - LookupKey that = (LookupKey) o; - - if (_container != null ? !_container.equals(that._container) : that._container != null) return false; - if (_query != null ? !_query.equals(that._query) : that._query != null) return false; - if (_type != null ? !_type.equals(that._type) : that._type != null) return false; - return !(_schema != null ? !_schema.equals(that._schema) : that._schema != null); - } - - @Override - public int hashCode() - { - int result = _schema != null ? _schema.hashCode() : 0; - result = 31 * result + (_query != null ? _query.hashCode() : 0); - result = 31 * result + (_container != null ? _container.hashCode() : 0); - result = 31 * result + (_type != null ? 
_type.hashCode() : 0); - return result; - } - } - - private static class LookupValues extends HashSet - { - final private Container _container; - - public LookupValues(ColumnInfo field, Container defaultContainer, List errors) - { - if (field.getFk().getLookupContainer() != null) - { - _container = field.getFk().getLookupContainer(); - } - else - { - _container = defaultContainer; - } - - processTableInfo(field.getFk().getLookupTableInfo(), field.getJdbcType(), field.getFk().getLookupTableName(), field.getNonBlankCaption(), errors); - } - - public LookupValues(PropertyDescriptor field, Container defaultContainer, User user, List errors) - { - if (field.getLookupContainer() != null) - { - _container = ContainerManager.getForId(field.getLookupContainer()); - } - else - { - _container = defaultContainer; - } - - if (user == null) - { - throw new IllegalArgumentException("Must supply a user"); - } - - if (_container == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target folder for field '" + field.getNonBlankCaption() + "'")); - } - else - { - QuerySchema userSchema = QueryService.get().getUserSchema(user, _container, field.getLookupSchema()); - if (userSchema == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target schema ('" + field.getLookupSchema() + "') for field '" + field.getNonBlankCaption() + "'")); - } - else - { - processTableInfo(userSchema.getTableForInsert(field.getLookupQuery()), field.getJdbcType(), field.getLookupQuery(), field.getNonBlankCaption(), errors); - } - } - } - - private void processTableInfo(TableInfo tableInfo, JdbcType jdbcType, String queryName, String label, List errors) - { - if (tableInfo == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target query ('" + queryName + "') for field '" + label + "'")); - } - else - { - List keyCols = tableInfo.getPkColumns(); - - if (keyCols.size() != 1) - { - errors.add(new SimpleValidationError("Could not 
validate target query ('" + queryName + "') because it has " + keyCols.size() + " columns instead of one for the field '" + label + "'")); - } - else - { - ColumnInfo lookupTargetCol = keyCols.get(0); - // Hack for sample types - see also revision 37612 - if (lookupTargetCol.getJdbcType() != jdbcType && jdbcType.isText() && tableInfo instanceof ExpMaterialTableImpl) - { - ColumnInfo nameCol = tableInfo.getColumn(ExpMaterialTableImpl.Column.Name.toString()); - assert nameCol != null : "Could not find Name column in SampleType table"; - if (nameCol != null) - { - lookupTargetCol = nameCol; - } - } - Collection keys = new TableSelector(lookupTargetCol).getCollection(lookupTargetCol.getJavaObjectClass()); - addAll(keys); - } - } - } - - public Container getContainer() - { - return _container; - } - } - - @Override - public boolean validate(IPropertyValidator validator, - ColumnRenderProperties crpField, - @NotNull Object value, - List errors, - ValidatorContext validatorCache) - { - //noinspection ConstantConditions - assert value != null : "Shouldn't be validating a null value"; - - if (value != null) - value = ConvertHelper.convert(value, crpField.getJavaObjectClass()); - - if (crpField instanceof PropertyDescriptor field) - { - if (field.getLookupQuery() != null && field.getLookupSchema() != null) - { - LookupKey key = new LookupKey(field); - - LookupValues validValues = (LookupValues) validatorCache.get(LookupValidator.class, key); - if (validValues == null) - { - validValues = new LookupValues(field, validatorCache.getContainer(), validatorCache.getUser(), errors); - } - return isLookupValid(value, errors, validatorCache, key, field.getLookupSchema(), - field.getLookupQuery(), field.getNonBlankCaption(), validValues); - } - } - else if (crpField instanceof ColumnInfo field) - { - if (field.getFk() != null) - { - LookupKey key = new LookupKey(field.getFk(), field.getJdbcType()); - LookupValues validValues = (LookupValues) validatorCache.get(LookupValidator.class, 
key); - - if (validValues == null) - { - validValues = new LookupValues(field, validatorCache.getContainer(), errors); - } - return isLookupValid(value, errors, validatorCache, key, field.getFk().getLookupSchemaName(), - field.getFk().getLookupTableName(), field.getNonBlankCaption(), validValues); - } - } - else - { - throw new IllegalArgumentException("Unknown column type : '" + crpField.getClass() + "'"); - } - - return true; - } - - private boolean isLookupValid(@NotNull Object value, - List errors, - ValidatorContext validatorCache, - LookupKey key, - String schemaName, - String queryName, - String label, - LookupValues validValues) - { - validatorCache.put(LookupValidator.class, key, validValues); - - if (validValues.contains(value)) - { - return true; - } - - errors.add(new PropertyValidationError("Value '" + value + "' was not present in lookup target '" + - schemaName + "." + queryName + "' for field '" + label + "'",label)); - - return false; - } -} +/* + * Copyright (c) 2010-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.experiment.api.property; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.PropertyValidationError; +import org.labkey.api.query.QuerySchema; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.security.User; +import org.labkey.experiment.api.ExpMaterialTableImpl; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; + +/** + * User: jeckels + * Date: Jan 26, 2010 + */ +public class LookupValidator extends DefaultPropertyValidator implements ValidatorKind +{ + @Override + public String getName() + { + return "Lookup Property Validator"; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.Lookup).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + return 
true; + } + + private static class LookupKey + { + private final String _schema; + private final String _query; + private final String _container; + private final JdbcType _type; + + public LookupKey(PropertyDescriptor field) + { + _schema = field.getLookupSchema(); + _query = field.getLookupQuery(); + _container = field.getLookupContainer(); + _type = field.getJdbcType(); + } + + public LookupKey(ForeignKey fk, JdbcType jdbcType) + { + _schema = fk.getLookupSchemaName(); + _query = fk.getLookupTableName(); + _container = (null == fk.getLookupContainer() ? null : fk.getLookupContainer().getId()); + _type = jdbcType; + } + + @Override + public boolean equals(Object o) + { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + LookupKey that = (LookupKey) o; + + if (_container != null ? !_container.equals(that._container) : that._container != null) return false; + if (_query != null ? !_query.equals(that._query) : that._query != null) return false; + if (_type != null ? !_type.equals(that._type) : that._type != null) return false; + return !(_schema != null ? !_schema.equals(that._schema) : that._schema != null); + } + + @Override + public int hashCode() + { + int result = _schema != null ? _schema.hashCode() : 0; + result = 31 * result + (_query != null ? _query.hashCode() : 0); + result = 31 * result + (_container != null ? _container.hashCode() : 0); + result = 31 * result + (_type != null ? 
_type.hashCode() : 0); + return result; + } + } + + private static class LookupValues extends HashSet + { + final private Container _container; + + public LookupValues(ColumnInfo field, Container defaultContainer, List errors) + { + if (field.getFk().getLookupContainer() != null) + { + _container = field.getFk().getLookupContainer(); + } + else + { + _container = defaultContainer; + } + + processTableInfo(field.getFk().getLookupTableInfo(), field.getJdbcType(), field.getFk().getLookupTableName(), field.getNonBlankCaption(), errors); + } + + public LookupValues(PropertyDescriptor field, Container defaultContainer, User user, List errors) + { + if (field.getLookupContainer() != null) + { + _container = ContainerManager.getForId(field.getLookupContainer()); + } + else + { + _container = defaultContainer; + } + + if (user == null) + { + throw new IllegalArgumentException("Must supply a user"); + } + + if (_container == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target folder for field '" + field.getNonBlankCaption() + "'")); + } + else + { + QuerySchema userSchema = QueryService.get().getUserSchema(user, _container, field.getLookupSchema()); + if (userSchema == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target schema ('" + field.getLookupSchema() + "') for field '" + field.getNonBlankCaption() + "'")); + } + else + { + processTableInfo(userSchema.getTableForInsert(field.getLookupQuery()), field.getJdbcType(), field.getLookupQuery(), field.getNonBlankCaption(), errors); + } + } + } + + private void processTableInfo(TableInfo tableInfo, JdbcType jdbcType, String queryName, String label, List errors) + { + if (tableInfo == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target query ('" + queryName + "') for field '" + label + "'")); + } + else + { + List keyCols = tableInfo.getPkColumns(); + + if (keyCols.size() != 1) + { + errors.add(new SimpleValidationError("Could not 
validate target query ('" + queryName + "') because it has " + keyCols.size() + " columns instead of one for the field '" + label + "'")); + } + else + { + ColumnInfo lookupTargetCol = keyCols.get(0); + // Hack for sample types - see also revision 37612 + if (lookupTargetCol.getJdbcType() != jdbcType && jdbcType.isText() && tableInfo instanceof ExpMaterialTableImpl) + { + ColumnInfo nameCol = tableInfo.getColumn(ExpMaterialTableImpl.Column.Name.toString()); + assert nameCol != null : "Could not find Name column in SampleType table"; + if (nameCol != null) + { + lookupTargetCol = nameCol; + } + } + Collection keys = new TableSelector(lookupTargetCol).getCollection(lookupTargetCol.getJavaObjectClass()); + addAll(keys); + } + } + } + + public Container getContainer() + { + return _container; + } + } + + @Override + public boolean validate(IPropertyValidator validator, + ColumnRenderProperties crpField, + @NotNull Object value, + List errors, + ValidatorContext validatorCache, + @Nullable Object providedValue) + { + //noinspection ConstantConditions + assert value != null : "Shouldn't be validating a null value"; + + if (value != null) + value = ConvertHelper.convert(value, crpField.getJavaObjectClass()); + + if (crpField instanceof PropertyDescriptor field) + { + if (field.getLookupQuery() != null && field.getLookupSchema() != null) + { + LookupKey key = new LookupKey(field); + + LookupValues validValues = (LookupValues) validatorCache.get(LookupValidator.class, key); + if (validValues == null) + { + validValues = new LookupValues(field, validatorCache.getContainer(), validatorCache.getUser(), errors); + } + return isLookupValid(value, errors, validatorCache, key, field.getLookupSchema(), + field.getLookupQuery(), field.getNonBlankCaption(), validValues); + } + } + else if (crpField instanceof ColumnInfo field) + { + if (field.getFk() != null) + { + LookupKey key = new LookupKey(field.getFk(), field.getJdbcType()); + LookupValues validValues = (LookupValues) 
validatorCache.get(LookupValidator.class, key); + + if (validValues == null) + { + validValues = new LookupValues(field, validatorCache.getContainer(), errors); + } + return isLookupValid(value, errors, validatorCache, key, field.getFk().getLookupSchemaName(), + field.getFk().getLookupTableName(), field.getNonBlankCaption(), validValues); + } + } + else + { + throw new IllegalArgumentException("Unknown column type : '" + crpField.getClass() + "'"); + } + + return true; + } + + private boolean isLookupValid(@NotNull Object value, + List errors, + ValidatorContext validatorCache, + LookupKey key, + String schemaName, + String queryName, + String label, + LookupValues validValues) + { + validatorCache.put(LookupValidator.class, key, validValues); + + if (validValues.contains(value)) + { + return true; + } + + errors.add(new PropertyValidationError("Value '" + value + "' was not present in lookup target '" + + schemaName + "." + queryName + "' for field '" + label + "'",label)); + + return false; + } +} diff --git a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java index 9adfefd64f3..f5f21a331e2 100644 --- a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java +++ b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java @@ -1,260 +1,271 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Strings; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.Table; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; -import org.labkey.api.util.PageFlowUtil; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public class PropertyValidatorImpl implements IPropertyValidator -{ - private PropertyValidator _validator; - private PropertyValidator _validatorOld; - private boolean _deleted; - - public PropertyValidatorImpl(PropertyValidator validator) - { - _validator = validator; - } - - @Override - public int getPropertyId() - { - return _validator.getPropertyId(); - } - - @Override - public void setPropertyId(int propertyId) - { - edit().setPropertyId(propertyId); - } - - @Override - public String getName() - { - return _validator.getName(); - } - - @Override - public void setName(String name) - { - if (Strings.CS.equals(name, getName())) - return; - - edit().setName(name); - } - - @Override - public String getDescription() - { - return _validator.getDescription(); - } - - @Override - public void setDescription(String description) - { - if (Strings.CS.equals(description, getDescription())) - return; - edit().setDescription(description); - } - - @Override - public String getTypeURI() - { - return _validator.getTypeURI(); - } - - public void 
setTypeURI(String typeURI) - { - edit().setTypeURI(typeURI); - } - - @Override - public String getExpressionValue() - { - return _validator.getExpression(); - } - - @Override - public void setExpressionValue(String expression) - { - if (Strings.CS.equals(getExpressionValue(), expression)) - return; - - edit().setExpression(expression); - } - - @Override - public Container getContainer() - { - return ContainerManager.getForId(_validator.getContainer()); - } - - public void setContainer(String container) - { - edit().setContainer(container); - } - - public void setRowId(long rowId) - { - edit().setRowId(rowId); - } - - @Override - public long getRowId() - { - return _validator.getRowId(); - } - - @Override - public String getErrorMessage() - { - return _validator.getErrorMessage(); - } - - @Override - public Map getProperties() - { - return PageFlowUtil.mapFromQueryString(_validator.getProperties()); - } - - @Override - public void setErrorMessage(String message) - { - if (Strings.CS.equals(getErrorMessage(), message)) - return; - - edit().setErrorMessage(message); - } - - @Override - public void setProperty(String key, String value) - { - Map props = getProperties(); - if (Strings.CS.equals(props.get(key), value)) - return; - - props.put(key, value); - edit().setProperties(PageFlowUtil.toQueryString(props.entrySet())); - } - - @Override - public ValidatorKind getType() - { - return PropertyService.get().getValidatorKind(getTypeURI()); - } - - @Override - public IPropertyValidator save(User user, Container container) throws ValidationException - { - ValidatorKind kind = getType(); - List errors = new ArrayList<>(); - - if (kind != null && !kind.isValid(this, errors)) - { - throw new ValidationException(errors); - } - - if (isNew()) - { - if (0 == _validator.getPropertyId()) - throw new IllegalStateException("Validator requires a valid propertyId"); - setContainer(container.getId()); - return new PropertyValidatorImpl(Table.insert(user, 
DomainPropertyManager.getTinfoValidator(), _validator)); - } - else - { - String cid = _validator.getContainer(); - int propid = _validator.getPropertyId(); - long rowid = _validator.getRowId(); - return new PropertyValidatorImpl(Table.update(user, DomainPropertyManager.getTinfoValidator(), _validator, new Object[] {cid, propid, rowid})); - } - } - - public void delete() - { - _deleted = true; - } - - public boolean isDeleted() - { - return _deleted; - } - - @Override - public boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache) - { - // Don't validate null values, #15683 - if (null == value) - return true; - - ValidatorKind kind = getType(); - - if (kind != null) - return kind.validate(this, prop, value, errors, validatorCache); - else - errors.add(new SimpleValidationError("Validator type : " + getTypeURI() + " does not exist.")); - - return false; - } - - public boolean isNew() - { - return _validator.getRowId() == 0; - } - - public boolean isDirty() - { - return _validatorOld != null || _deleted; - } - - private PropertyValidator edit() - { - if (getRowId() == 0) - return _validator; - if (_validatorOld == null) - { - _validatorOld = _validator; - _validator = _validatorOld.clone(); - } - return _validator; - } - - public String toString() - { - StringBuilder sb = new StringBuilder(_validator.getName()); - if (_validator.getDescription() != null) - sb.append(" (").append(_validator.getDescription()).append(")"); - - return sb.toString(); - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Strings; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.Table; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; +import org.labkey.api.util.PageFlowUtil; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class PropertyValidatorImpl implements IPropertyValidator +{ + private PropertyValidator _validator; + private PropertyValidator _validatorOld; + private boolean _deleted; + private String columnNameProvidedData; + + public PropertyValidatorImpl(PropertyValidator validator) + { + _validator = validator; + } + + @Override + public int getPropertyId() + { + return _validator.getPropertyId(); + } + + @Override + public void setPropertyId(int propertyId) + { + edit().setPropertyId(propertyId); + } + + @Override + public String getName() + { + return _validator.getName(); + } + + @Override + public void setName(String name) + { + if (Strings.CS.equals(name, getName())) + return; + + edit().setName(name); + } + + @Override + public 
String getDescription() + { + return _validator.getDescription(); + } + + @Override + public void setDescription(String description) + { + if (Strings.CS.equals(description, getDescription())) + return; + edit().setDescription(description); + } + + @Override + public String getTypeURI() + { + return _validator.getTypeURI(); + } + + public void setTypeURI(String typeURI) + { + edit().setTypeURI(typeURI); + } + + @Override + public String getExpressionValue() + { + return _validator.getExpression(); + } + + @Override + public void setExpressionValue(String expression) + { + if (Strings.CS.equals(getExpressionValue(), expression)) + return; + + edit().setExpression(expression); + } + + @Override + public Container getContainer() + { + return ContainerManager.getForId(_validator.getContainer()); + } + + public void setContainer(String container) + { + edit().setContainer(container); + } + + public void setRowId(long rowId) + { + edit().setRowId(rowId); + } + + @Override + public long getRowId() + { + return _validator.getRowId(); + } + + @Override + public String getErrorMessage() + { + return _validator.getErrorMessage(); + } + + @Override + public Map getProperties() + { + return PageFlowUtil.mapFromQueryString(_validator.getProperties()); + } + + @Override + public void setErrorMessage(String message) + { + if (Strings.CS.equals(getErrorMessage(), message)) + return; + + edit().setErrorMessage(message); + } + + @Override + public void setProperty(String key, String value) + { + Map props = getProperties(); + if (Strings.CS.equals(props.get(key), value)) + return; + + props.put(key, value); + edit().setProperties(PageFlowUtil.toQueryString(props.entrySet())); + } + + @Override + public ValidatorKind getType() + { + return PropertyService.get().getValidatorKind(getTypeURI()); + } + + public String getColumnNameProvidedData() + { + return columnNameProvidedData; + } + + public void setColumnNameProvidedData(String columnNameProvidedData) + { + 
this.columnNameProvidedData = columnNameProvidedData; + } + + @Override + public IPropertyValidator save(User user, Container container) throws ValidationException + { + ValidatorKind kind = getType(); + List errors = new ArrayList<>(); + + if (kind != null && !kind.isValid(this, errors)) + { + throw new ValidationException(errors); + } + + if (isNew()) + { + if (0 == _validator.getPropertyId()) + throw new IllegalStateException("Validator requires a valid propertyId"); + setContainer(container.getId()); + return new PropertyValidatorImpl(Table.insert(user, DomainPropertyManager.getTinfoValidator(), _validator)); + } + else + { + String cid = _validator.getContainer(); + int propid = _validator.getPropertyId(); + long rowid = _validator.getRowId(); + return new PropertyValidatorImpl(Table.update(user, DomainPropertyManager.getTinfoValidator(), _validator, new Object[] {cid, propid, rowid})); + } + } + + public void delete() + { + _deleted = true; + } + + public boolean isDeleted() + { + return _deleted; + } + + @Override + public boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache) + { + // Don't validate null values, #15683 + if (null == value) + return true; + + ValidatorKind kind = getType(); + + if (kind != null) + return kind.validate(this, prop, value, errors, validatorCache, null); + else + errors.add(new SimpleValidationError("Validator type : " + getTypeURI() + " does not exist.")); + + return false; + } + + public boolean isNew() + { + return _validator.getRowId() == 0; + } + + public boolean isDirty() + { + return _validatorOld != null || _deleted; + } + + private PropertyValidator edit() + { + if (getRowId() == 0) + return _validator; + if (_validatorOld == null) + { + _validatorOld = _validator; + _validator = _validatorOld.clone(); + } + return _validator; + } + + public String toString() + { + StringBuilder sb = new StringBuilder(_validator.getName()); + if (_validator.getDescription() != null) + 
sb.append(" (").append(_validator.getDescription()).append(")"); + + return sb.toString(); + } } \ No newline at end of file diff --git a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java index 0b3a241dcf5..1dbb8ad72a8 100644 --- a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java @@ -1,137 +1,138 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.ValidationError; -import org.labkey.api.util.DateUtil; -import org.labkey.api.util.Pair; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -/* -* User: Karl Lum -* Date: Aug 17, 2008 -* Time: 12:43:30 PM -*/ -public class RangeValidator extends DefaultPropertyValidator implements ValidatorKind -{ - @Override - public String getName() - { - return "Range Property Validator"; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.Range).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - return true; - } - - @Override - public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache) - { - //noinspection ConstantConditions - assert value != null : "Shouldn't be validating a null value"; - - for (Pair constraint : parseExpression(validator.getExpressionValue())) - { - if (!isValid(value, constraint)) - { - createErrorMessage(validator, field, value, errors); - return false; - } - } - return true; - } - - @SuppressWarnings("unchecked") - private Pair[] 
parseExpression(String expression) - { - List> constraints = new ArrayList<>(); - String[] parts = expression.split("&"); - for (String part : parts) - { - Pair constraint = parsePart(part); - if (constraint != null) - constraints.add(constraint); - } - return constraints.toArray(new Pair[0]); - } - - private Pair parsePart(String expression) - { - String[] parts = expression.split("="); - if (parts.length == 2) - { - return new Pair<>(parts[0], parts[1]); - } - return null; - } - - private boolean isValid(Object value, Pair constraint) - { - if (NumberUtils.isCreatable(String.valueOf(value))) - { - int comparison = Double.compare(NumberUtils.toDouble(String.valueOf(value)), NumberUtils.toDouble(constraint.getValue())); - return comparisonValid(comparison, constraint.getKey()); - } - else if (value instanceof Date) - { - Date dateConstraint = new Date(DateUtil.parseDateTime(constraint.getValue())); - int comparison = ((Date) value).compareTo(dateConstraint); - - // Issue 46094: handle "~date" based filter types (i.e. ~dateeq, ~dategt, etc.) - String type = constraint.getKey(); - if (type != null && type.startsWith("~date")) - type = type.replace("~date", "~"); - - return comparisonValid(comparison, type); - } - return false; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.ValidationError; +import org.labkey.api.util.DateUtil; +import org.labkey.api.util.Pair; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +/* +* User: Karl Lum +* Date: Aug 17, 2008 +* Time: 12:43:30 PM +*/ +public class RangeValidator extends DefaultPropertyValidator implements ValidatorKind +{ + @Override + public String getName() + { + return "Range Property Validator"; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.Range).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + return true; + } + + @Override + public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) + { + //noinspection ConstantConditions + assert value != null : "Shouldn't be validating a null value"; + + for (Pair constraint : parseExpression(validator.getExpressionValue())) + { + if (!isValid(value, constraint)) + { + createErrorMessage(validator, field, providedValue == null ? 
value : providedValue, errors); + return false; + } + } + return true; + } + + @SuppressWarnings("unchecked") + private Pair[] parseExpression(String expression) + { + List> constraints = new ArrayList<>(); + String[] parts = expression.split("&"); + for (String part : parts) + { + Pair constraint = parsePart(part); + if (constraint != null) + constraints.add(constraint); + } + return constraints.toArray(new Pair[0]); + } + + private Pair parsePart(String expression) + { + String[] parts = expression.split("="); + if (parts.length == 2) + { + return new Pair<>(parts[0], parts[1]); + } + return null; + } + + private boolean isValid(Object value, Pair constraint) + { + if (NumberUtils.isCreatable(String.valueOf(value))) + { + int comparison = Double.compare(NumberUtils.toDouble(String.valueOf(value)), NumberUtils.toDouble(constraint.getValue())); + return comparisonValid(comparison, constraint.getKey()); + } + else if (value instanceof Date) + { + Date dateConstraint = new Date(DateUtil.parseDateTime(constraint.getValue())); + int comparison = ((Date) value).compareTo(dateConstraint); + + // Issue 46094: handle "~date" based filter types (i.e. ~dateeq, ~dategt, etc.) + String type = constraint.getKey(); + if (type != null && type.startsWith("~date")) + type = type.replace("~date", "~"); + + return comparisonValid(comparison, type); + } + return false; + } } \ No newline at end of file diff --git a/experiment/src/org/labkey/experiment/api/property/RegExValidator.java b/experiment/src/org/labkey/experiment/api/property/RegExValidator.java index 9e35c478cd1..b3c953c1227 100644 --- a/experiment/src/org/labkey/experiment/api/property/RegExValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/RegExValidator.java @@ -1,125 +1,126 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.BooleanUtils; -import org.jetbrains.annotations.NotNull; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; - -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; - -/* -* User: Karl Lum -* Date: Aug 11, 2008 -* Time: 10:52:22 AM -*/ -public class RegExValidator extends DefaultPropertyValidator implements ValidatorKind -{ - public static final String FAIL_ON_MATCH = "failOnMatch"; - - @Override - public String getName() - { - return "Regular Expression Property Validator"; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.RegEx).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - try - { - //noinspection 
ResultOfMethodCallIgnored - Pattern.compile(validator.getExpressionValue()); - return true; - } - catch (PatternSyntaxException se) - { - String sb = "The " + - getName() + - ": '" + - validator.getName() + - "' has a syntax error : " + - se.getMessage(); - - errors.add(new SimpleValidationError(sb)); - } - return false; - } - - @Override - public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache) - { - assert value != null : "Shouldn't be validating a null value"; - - try - { - Pattern expression = (Pattern)validatorCache.get(RegExValidator.class, validator.getExpressionValue()); - if (expression == null) - { - expression = Pattern.compile(validator.getExpressionValue()); - // Cache the pattern so that it can be reused - validatorCache.put(RegExValidator.class, validator.getExpressionValue(), expression); - } - Matcher matcher = expression.matcher(String.valueOf(value)); - boolean failOnMatch = BooleanUtils.toBoolean(validator.getProperties().get(FAIL_ON_MATCH)); - boolean matched = matcher.matches(); - - if ((matched && failOnMatch) || (!matched && !failOnMatch)) - { - createErrorMessage(validator, field, value, errors); - return false; - } - return true; - } - catch (PatternSyntaxException se) - { - errors.add(new SimpleValidationError(se.getMessage())); - } - return false; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.BooleanUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; + +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +/* +* User: Karl Lum +* Date: Aug 11, 2008 +* Time: 10:52:22 AM +*/ +public class RegExValidator extends DefaultPropertyValidator implements ValidatorKind +{ + public static final String FAIL_ON_MATCH = "failOnMatch"; + + @Override + public String getName() + { + return "Regular Expression Property Validator"; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.RegEx).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + try + { + //noinspection ResultOfMethodCallIgnored + Pattern.compile(validator.getExpressionValue()); + return true; + } + catch (PatternSyntaxException se) + { + String sb = "The " + + getName() + + ": '" + + validator.getName() + + "' has a syntax error : " + + se.getMessage(); + + errors.add(new 
SimpleValidationError(sb)); + } + return false; + } + + @Override + public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) + { + assert value != null : "Shouldn't be validating a null value"; + + try + { + Pattern expression = (Pattern)validatorCache.get(RegExValidator.class, validator.getExpressionValue()); + if (expression == null) + { + expression = Pattern.compile(validator.getExpressionValue()); + // Cache the pattern so that it can be reused + validatorCache.put(RegExValidator.class, validator.getExpressionValue(), expression); + } + Matcher matcher = expression.matcher(String.valueOf(value)); + boolean failOnMatch = BooleanUtils.toBoolean(validator.getProperties().get(FAIL_ON_MATCH)); + boolean matched = matcher.matches(); + + if ((matched && failOnMatch) || (!matched && !failOnMatch)) + { + createErrorMessage(validator, field, value, errors); + return false; + } + return true; + } + catch (PatternSyntaxException se) + { + errors.add(new SimpleValidationError(se.getMessage())); + } + return false; + } } \ No newline at end of file diff --git a/experiment/src/org/labkey/experiment/api/property/TextChoiceValidator.java b/experiment/src/org/labkey/experiment/api/property/TextChoiceValidator.java index da946740ab8..5c4be86ac7e 100644 --- a/experiment/src/org/labkey/experiment/api/property/TextChoiceValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/TextChoiceValidator.java @@ -16,6 +16,7 @@ package org.labkey.experiment.api.property; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.data.ColumnRenderProperties; import org.labkey.api.data.MultiChoice; import org.labkey.api.exp.property.IPropertyValidator; @@ -53,7 +54,7 @@ public boolean isValid(IPropertyValidator validator, List error @Override public boolean validate(IPropertyValidator validator, 
ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache) + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) { assert value != null : "Shouldn't be validating a null value"; From 58e53e101e434d57be8009e6e4145cf05ee9578f Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 17 Nov 2025 10:08:48 -0800 Subject: [PATCH 2/7] CRLF --- .../api/assay/DefaultAssayRunCreator.java | 2556 +++++------ .../api/data/validator/ColumnValidator.java | 62 +- .../api/exp/property/IPropertyValidator.java | 118 +- .../api/exp/property/ValidatorKind.java | 190 +- .../api/query/DefaultQueryUpdateService.java | 1906 ++++----- .../experiment/api/ExpMaterialTableImpl.java | 3750 ++++++++--------- .../api/property/LookupValidator.java | 596 +-- .../api/property/PropertyValidatorImpl.java | 540 +-- .../api/property/RangeValidator.java | 274 +- .../api/property/RegExValidator.java | 250 +- 10 files changed, 5121 insertions(+), 5121 deletions(-) diff --git a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java index 06f4d4855f3..53cd6ffbc4c 100644 --- a/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java +++ b/api/src/org/labkey/api/assay/DefaultAssayRunCreator.java @@ -1,1278 +1,1278 @@ -/* - * Copyright (c) 2011-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.assay; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONArray; -import org.labkey.api.action.ApiUsageException; -import org.labkey.api.assay.actions.AssayRunUploadForm; -import org.labkey.api.assay.pipeline.AssayRunAsyncContext; -import org.labkey.api.assay.pipeline.AssayUploadPipelineJob; -import org.labkey.api.assay.sample.AssaySampleLookupContext; -import org.labkey.api.assay.transform.DataTransformService; -import org.labkey.api.assay.transform.TransformDataHandler; -import org.labkey.api.assay.transform.TransformResult; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.RemapCache; -import org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.XarContext; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataRunInput; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpMaterial; -import 
org.labkey.api.exp.api.ExpObject; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolApplication; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpRunItem; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ProvenanceService; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.Lookup; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.pipeline.PipelineValidationException; -import org.labkey.api.query.AbstractQueryUpdateService; -import org.labkey.api.query.BatchValidationException; -import org.labkey.api.query.PropertyValidationError; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.study.assay.ParticipantVisitResolver; -import org.labkey.api.study.assay.ParticipantVisitResolverType; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.ViewBackgroundInfo; -import org.labkey.api.writer.ContainerUser; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.io.FileFilter; -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static java.util.Collections.unmodifiableCollection; 
-import static org.labkey.api.assay.AssayFileWriter.TEMP_DIR_NAME; - -public class DefaultAssayRunCreator implements AssayRunCreator -{ - private static final Logger LOG = LogManager.getLogger(DefaultAssayRunCreator.class); - public static final String CROSS_RUN_DATA_INPUT_ROLE = "cross run input"; - - private final ProviderType _provider; - - public DefaultAssayRunCreator(ProviderType provider) - { - _provider = provider; - } - - public TransformResult transform(AssayRunUploadContext context, ExpRun run) throws ValidationException - { - return DataTransformService.get().transformAndValidate(context, run, DataTransformService.TransformOperation.INSERT); - } - /** - * Create and save an experiment run synchronously or asynchronously in a background job depending upon the assay design. - * - * @param context The context used to create and save the batch and run. - * @param batchId if not null, the run group that's already created for this batch. If null, a new one will be created. - * @return Pair of batch and run that were inserted. ExpBatch will not be null, but ExpRun may be null when inserting the run async. - */ - @Override - public Pair saveExperimentRun( - AssayRunUploadContext context, - @Nullable Long batchId, - boolean forceAsync, - Map transactionDetails - ) throws ExperimentException, ValidationException - { - ExpExperiment exp = null; - if (batchId != null) - { - exp = ExperimentService.get().getExpExperiment(batchId); - } - - AssayProvider provider = context.getProvider(); - ExpProtocol protocol = context.getProtocol(); - ExpRun run = null; - - try (DbScope.Transaction transaction = ExperimentService.get().getSchema().getScope().ensureTransaction(ExperimentService.get().getProtocolImportLock())) - { - TransactionAuditProvider.TransactionAuditEvent auditEvent = transaction.getAuditEvent(); - if (auditEvent == null) - { - auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(context.getContainer(), context.getReRunId() == null ? 
QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT, transactionDetails); - AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); - } - context.init(); - // Check if assay protocol is configured to import in the background. - // Issue 26811: If we don't have a view, assume that we are on a background job thread already. - boolean importInBackground = forceAsync || (provider.isBackgroundUpload(protocol) && HttpView.hasCurrentView()); - if (!importInBackground) - { - if ((Object) context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE) instanceof File errFile) - { - throw new ClassCastException("FileLike expected: " + errFile + " context: " + context.getClass() + " " + context); - } - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); - run = AssayService.get().createExperimentRun(context.getName(), context.getContainer(), protocol, null == primaryFile ? 
null : primaryFile.toNioPathForRead().toFile()); - run.setComments(context.getComments()); - run.setWorkflowTaskId(context.getWorkflowTask()); - - exp = saveExperimentRun(context, exp, run, false, transactionDetails); - - // re-fetch the run after it has been fully constructed - run = ExperimentService.get().getExpRun(run.getRowId()); - - context.uploadComplete(run); - } - else - { - context.uploadComplete(null); - context.setTransactionAuditId(transaction.getAuditId()); - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportOptions, "BackgroundImport"); - exp = saveExperimentRunAsync(context, exp); - } - transaction.commit(); - } - - return Pair.of(exp, run); - } - - private ExpExperiment saveExperimentRunAsync(AssayRunUploadContext context, @Nullable ExpExperiment batch) throws ExperimentException - { - try - { - // Whether we need to save batch properties - boolean forceSaveBatchProps = false; - if (batch == null) - { - // No batch yet, so make one - batch = AssayService.get().createStandardBatch(context.getContainer(), null, context.getProtocol()); - batch.save(context.getUser()); - // It's brand new, so we need to eventually set its properties - forceSaveBatchProps = true; - } - - // Queue up a pipeline job to do the actual import in the background - ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); - - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - // Check if the primary file from the previous import is no longer present for a re-run - if (primaryFile == null && !context.getUploadedData().isEmpty()) - { - // Choose another file as the primary - primaryFile = 
context.getUploadedData().entrySet().iterator().next().getValue(); - } - Objects.requireNonNull(primaryFile); - AssayRunAsyncContext asyncContext = context.getProvider().createRunAsyncContext(context); - final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob<>( - asyncContext, - info, - batch, - forceSaveBatchProps, - PipelineService.get().getPipelineRootSetting(context.getContainer()), - primaryFile.toNioPathForRead().toFile() - ); - - context.setPipelineJobGUID(pipelineJob.getJobGUID()); - - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), null, pipelineJob.getJobGUID()); - resultsFileWriter.savePostedFiles(context); - - // Don't queue the job until the transaction is committed, since otherwise the thread - // that's running the job might start before it can access the job's row in the database. - ExperimentService.get().getSchema().getScope().addCommitTask(() -> { - try - { - PipelineService.get().queueJob(pipelineJob, asyncContext.getJobNotificationProvider()); - } - catch (PipelineValidationException e) - { - throw UnexpectedException.wrap(e); - } - }, DbScope.CommitTaskOption.POSTCOMMIT); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - - return batch; - } - - /** - * @param batch if not null, the run group that's already created for this batch. 
If null, a new one needs to be created - * @param run The run to save - * @return the run and batch that were inserted - */ - @Override - public ExpExperiment saveExperimentRun( - final AssayRunUploadContext context, - @Nullable ExpExperiment batch, - @NotNull ExpRun run, - boolean forceSaveBatchProps, - @Nullable Map transactionDetails - ) throws ExperimentException, ValidationException - { - context.setAutoFillDefaultResultColumns(run.getRowId() > 0); // need to setAutoFillDefaultResultColumns before run is saved - - final Container container = context.getContainer(); - - Map inputMaterials = new HashMap<>(); - Map inputDatas = new HashMap<>(); - Map outputMaterials = new HashMap<>(); - Map outputDatas = new HashMap<>(); - Map transformedDatas = new HashMap<>(); - - Map runProperties = context.getRunProperties(); - Map unresolvedRunProperties = context.getUnresolvedRunProperties(); - Map batchProperties = context.getBatchProperties(); - - Map allProperties = new HashMap<>(); - allProperties.putAll(runProperties); - allProperties.putAll(batchProperties); - - ParticipantVisitResolverType resolverType = null; - for (Map.Entry entry : allProperties.entrySet()) - { - if (entry.getKey().getName().equals(AbstractAssayProvider.PARTICIPANT_VISIT_RESOLVER_PROPERTY_NAME)) - { - resolverType = AbstractAssayProvider.findType(entry.getValue(), getProvider().getParticipantVisitResolverTypes()); - if (resolverType != null) - { - resolverType.configureRun(context, run, inputDatas); - } - break; - } - } - - // TODO: Share these RemapCache and materialCache instances with AbstractAssayTsvDataHandler.checkData and ExpressionMatrixDataHandler.importFile - // Cache of resolved alternate lookup keys -> rowId - final RemapCache cache = new RemapCache(true); - // Cache of rowId -> ExpMaterial - final Map materialCache = new LongHashMap<>(); - - addInputMaterials(context, inputMaterials, cache, materialCache); - addInputDatas(context, inputDatas); - addOutputMaterials(context, 
outputMaterials, cache, materialCache); - addOutputDatas(context, inputDatas, outputDatas); - - boolean success = false; - DbScope scope = ExperimentService.get().getSchema().getScope(); - try (DbScope.Transaction transaction = scope.ensureTransaction(ExperimentService.get().getProtocolImportLock())) - { - if (transaction.getAuditId() == null) - { - var auditAction = context.getReRunId() == null ? QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT; - if (context.getTransactionAuditId() != null) - { - var auditEvent = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, context.getTransactionAuditId()); - transaction.setAuditEvent(auditEvent); - } - else - { - var auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(container, auditAction, transactionDetails); - AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); - } - } - boolean saveBatchProps = forceSaveBatchProps; - - // Add any material/data inputs related to the specimen IDs, etc in the incoming data. 
- // Some subclasses may actually create ExpMaterials or do other database changes, so do this inside the - // overall transaction - resolveParticipantVisits(context, inputMaterials, inputDatas, outputMaterials, outputDatas, allProperties, resolverType); - - // Check for circular inputs/outputs - checkForCycles(inputMaterials, outputMaterials); - checkForCycles(inputDatas, outputDatas); - - // Create the batch, if needed - if (batch == null) - { - // Make sure that we have a batch to associate with this run - batch = AssayService.get().createStandardBatch(run.getContainer(), null, context.getProtocol()); - batch.save(context.getUser()); - saveBatchProps = true; - } - run.save(context.getUser()); - // Add the run to the batch so that we can find it when we're loading the data files - batch.addRuns(context.getUser(), run); - assert batch.equals(run.getBatch()) : "Run's batch should be the current batch"; - - ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); - XarContext xarContext = new AssayUploadXarContext("Simple Run Creation", context); - - run = ExperimentService.get().saveSimpleExperimentRun( - run, - inputMaterials, - inputDatas, - outputMaterials, - outputDatas, - transformedDatas, - info, - context.getLogger() != null ? 
context.getLogger() : LOG, - false - ); - - // handle data transformation - TransformResult transformResult = transform(context, run); - - if (transformResult.getWarnings() != null && context instanceof AssayRunUploadForm uploadForm) - { - context.setTransformResult(transformResult); - uploadForm.setName(run.getName()); - uploadForm.setComments(run.getComments()); - throw new ValidationException(" "); - } - - if (saveBatchProps) - saveProperties(context, batch, transformResult.getBatchProperties(), batchProperties); - if (null != transformResult.getAssayId()) - run.setName(transformResult.getAssayId()); - if (null != transformResult.getComments()) - run.setComments(transformResult.getComments()); - saveProperties(context, run, transformResult.getRunProperties(), runProperties); - - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); - resultsFileWriter.savePostedFiles(context); - - Path assayResultsRunDir = AssayResultsFileWriter.getAssayFilesDirectoryPath(run); - if (null != assayResultsRunDir && !FileUtil.hasCloudScheme(assayResultsRunDir)) - { - FileLike assayResultFileRoot = FileSystemLike.wrapFile(assayResultsRunDir); - if (assayResultFileRoot != null) - QueryService.get().setEnvironment(QueryService.Environment.ASSAYFILESPATH, assayResultFileRoot); - } - - importResultData(context, run, inputDatas, outputDatas, info, xarContext, transformResult); - - var reRunId = context.getReRunId(); - if (reRunId != null && getProvider().getReRunSupport() == AssayProvider.ReRunSupport.ReRunAndReplace) - { - final ExpRun replacedRun = ExperimentService.get().getExpRun(reRunId); - if (replacedRun == null) - throw new ExperimentException(String.format("Unable to find run to be replaced (RowId %d)", reRunId)); - - if (replacedRun.getContainer().hasPermission(context.getUser(), UpdatePermission.class)) - { - replacedRun.setReplacedByRun(run); - replacedRun.save(context.getUser()); - } - - String auditMessage = 
String.format("Run id %d was replaced by run id %d", replacedRun.getRowId(), run.getRowId()); - ExperimentService.get().auditRunEvent(context.getUser(), context.getProtocol(), replacedRun, null, auditMessage, context.getAuditUserComment()); - - transaction.addCommitTask(() -> replacedRun.archiveDataFiles(context.getUser()), DbScope.CommitTaskOption.POSTCOMMIT); - // Issue 51710: Remove replaced assay runs from the search index - transaction.addCommitTask(() -> AssayService.get().deindexAssayRuns(List.of(replacedRun)), DbScope.CommitTaskOption.POSTCOMMIT); - } - - AssayService.get().ensureUniqueBatchName(batch, context.getProtocol(), context.getUser()); - - ExperimentService.get().onRunDataCreated(context.getProtocol(), run, container, context.getUser()); - - transaction.commit(); - success = true; - - // Inspect the run properties for a “prov:objectInputs” property that is a list of LSID strings. - // Attach run's starting protocol application with starting input LSIDs. - Object provInputsProperty = unresolvedRunProperties.get(ProvenanceService.PROVENANCE_INPUT_PROPERTY); - if (provInputsProperty != null) - { - ProvenanceService pvs = ProvenanceService.get(); - Set runInputLSIDs = null; - if (provInputsProperty instanceof String provInputs) - { - // parse as a JSONArray of values or a comma-separated list of values - if (provInputs.startsWith("[") && provInputs.endsWith("]")) - provInputsProperty = new JSONArray(provInputs); - else - runInputLSIDs = Set.of(provInputs.split(",")); - } - - if (provInputsProperty instanceof JSONArray jsonArray) - { - runInputLSIDs = jsonArray.toList().stream() - .map(String::valueOf) - .collect(Collectors.toSet()); - } - - if (runInputLSIDs != null && !runInputLSIDs.isEmpty()) - { - ExpProtocolApplication inputProtocolApp = run.getInputProtocolApplication(); - pvs.addProvenanceInputs(container, inputProtocolApp, runInputLSIDs); - } - } - - ExperimentService.get().queueSyncRunEdges(run); - - return batch; - } - catch (IOException | 
ConvertHelper.FileConversionException | BatchValidationException e) - { - // HACK: Rethrowing these as ApiUsageException avoids any upstream consequences of wrapping them in ExperimentException. - // Namely, that they are logged to the server/mothership. There has to be a better way. - if (e instanceof ConvertHelper.FileConversionException fce) - throw new ApiUsageException(fce.getMessage(), fce); - else if (e instanceof BatchValidationException bve) - throw new ApiUsageException(bve.getMessage(), bve); - - throw new ExperimentException(e); - } - finally - { - if (!success) - { - // clean up the run results file dir here if it was created, for non-async imports - AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); - resultsFileWriter.cleanupPostedFiles(context.getContainer(), false); - - cleanPrimaryFile(context); - } - } - } - - private void cleanPrimaryFile(AssayRunUploadContext context) throws ExperimentException - { - // Do not clear the primary file for run re-imports - if (context.getReRunId() != null) - return; - - try - { - // Issue 51300: don't keep the primary file if the new run failed to save - FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); - - // If the uploaded file is in the temp directory, then do not delete it as it may be reused in the next import attempt. 
- if (primaryFile != null && primaryFile.exists() && !primaryFile.getPath().contains(TEMP_DIR_NAME)) - primaryFile.delete(); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - } - - private void resolveParticipantVisits( - AssayRunUploadContext context, - Map inputMaterials, - Map inputDatas, - Map outputMaterials, - Map outputDatas, - Map allProperties, - @Nullable ParticipantVisitResolverType resolverType - ) throws ExperimentException - { - try - { - ParticipantVisitResolver resolver = null; - if (resolverType != null) - { - String targetStudyId = null; - for (Map.Entry property : allProperties.entrySet()) - { - if (AbstractAssayProvider.TARGET_STUDY_PROPERTY_NAME.equals(property.getKey().getName())) - { - targetStudyId = property.getValue(); - break; - } - } - Container targetStudy = null; - if (targetStudyId != null && !targetStudyId.isEmpty()) - targetStudy = ContainerManager.getForId(targetStudyId); - - resolver = resolverType.createResolver( - unmodifiableCollection(inputMaterials.keySet()), - unmodifiableCollection(inputDatas.keySet()), - unmodifiableCollection(outputMaterials.keySet()), - unmodifiableCollection(outputDatas.keySet()), - context.getContainer(), - targetStudy, context.getUser()); - } - - resolveExtraRunData(resolver, context, inputMaterials, inputDatas, outputMaterials, outputDatas); - } - catch (IOException e) - { - throw new ExperimentException(e); - } - } - - protected void importStandardResultData( - AssayRunUploadContext context, - ExpRun run, - Map inputDatas, - Map outputDatas, - ViewBackgroundInfo info, - XarContext xarContext - ) throws ExperimentException, BatchValidationException - { - DataIteratorBuilder rawData = context.getRawData(); - List insertedDatas = new ArrayList<>(); - - if (rawData != null) - { - insertedDatas.addAll(outputDatas.keySet()); - - ExpData primaryData = null; - // Decide which file to treat as the primary, to which the data rows will be attached - for (Map.Entry entry : 
outputDatas.entrySet()) - { - if (ExpDataRunInput.DEFAULT_ROLE.equalsIgnoreCase(entry.getValue())) - { - primaryData = entry.getKey(); - } - } - if (primaryData == null && !insertedDatas.isEmpty()) - primaryData = insertedDatas.get(0); - - if (primaryData != null) - { - TsvDataHandler dataHandler = new TsvDataHandler(); - dataHandler.setAllowEmptyData(true); - dataHandler.importRows(primaryData, context.getUser(), run, context.getProtocol(), getProvider(), rawData, null, context.shouldAutoFillDefaultResultColumns(), context); - } - } - else - { - for (Map.Entry entry : inputDatas.entrySet()) - { - // skip any of the cross run inputData that are already in the outputData - if (CROSS_RUN_DATA_INPUT_ROLE.equals(entry.getValue())) - continue; - - insertedDatas.add(entry.getKey()); - } - - insertedDatas.addAll(outputDatas.keySet()); - - Logger logger = context.getLogger() != null ? context.getLogger() : LOG; - for (ExpData insertedData : insertedDatas) - { - ExperimentDataHandler dataHandler = insertedData.findDataHandler(); - - FileLike fileLike = FileSystemLike.wrapFile(insertedData.getFile()); - if (dataHandler instanceof AbstractAssayTsvDataHandler tsvHandler) - { - tsvHandler.importFile(insertedData, fileLike, info, logger, xarContext, context.isAllowLookupByAlternateKey(), context.shouldAutoFillDefaultResultColumns()); - } - else - { - dataHandler.importFile(insertedData, fileLike, info, logger, xarContext); - } - } - } - } - - private void importResultData( - AssayRunUploadContext context, - ExpRun run, - Map inputDatas, - Map outputDatas, - ViewBackgroundInfo info, - XarContext xarContext, - TransformResult transformResult - ) throws ExperimentException, BatchValidationException - { - if (transformResult.getTransformedData().isEmpty()) - { - importStandardResultData(context, run, inputDatas, outputDatas, info, xarContext); - return; - } - - DataType dataType = context.getProvider().getDataType(); - if (dataType == null) - { - // we know that we are importing 
transformed data at this point - dataType = TsvDataHandler.RELATED_TRANSFORM_FILE_DATA_TYPE; - } - - ExpData data = ExperimentService.get().createData(context.getContainer(), dataType); - ExperimentDataHandler handler = data.findDataHandler(); - - // this should assert to always be true - if (handler instanceof TransformDataHandler transformDataHandler) - { - for (Map.Entry entry : transformResult.getTransformedData().entrySet()) - { - ExpData expData = entry.getKey(); - // The object may have already been claimed by - if (expData.getSourceApplication() == null) - { - expData.setSourceApplication(run.getOutputProtocolApplication()); - } - expData.save(context.getUser()); - - run.getOutputProtocolApplication().addDataInput(context.getUser(), expData, ExpDataRunInput.IMPORTED_DATA_ROLE); - // Add to the cached list of outputs - run.getDataOutputs().add(expData); - - transformDataHandler.importTransformDataMap(expData, context, run, entry.getValue()); - } - } - } - - protected void addInputMaterials( - AssayRunUploadContext context, - Map inputMaterials, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - addMaterials(context, inputMaterials, context.getInputMaterials(), null, cache, materialCache); - - // Find lookups to a SampleType and add the resolved material as an input sample - for (Map.Entry entry : context.getRunProperties().entrySet()) - { - String value = StringUtils.trimToNull(entry.getValue()); - if (value == null) - continue; - - // Lookup must point at "Samples.*", "exp.materials.*", or "exp.Materials" - DomainProperty dp = entry.getKey(); - var sampleLookup = AssaySampleLookupContext.checkSampleLookup(context.getContainer(), context.getUser(), dp); - if (!sampleLookup.isLookup()) - continue; - - String role = AssayService.get().getPropertyInputLineageRole(dp); - addMaterials(context, inputMaterials, Map.of(value, role), sampleLookup.expSampleType(), cache, materialCache); - } - } - - 
protected void addInputDatas( - AssayRunUploadContext context, - @NotNull Map inputDatas - ) throws ExperimentException, ValidationException - { - Logger log = context.getLogger() != null ? context.getLogger() : LOG; - - Map inputs = context.getInputDatas(); - addDatas(context.getContainer(), inputDatas, inputs, log); - - // Inspect the uploaded files which will be added as outputs of the run - if (context.isAllowCrossRunFileInputs()) - { - Map files = context.getUploadedData(); - for (Map.Entry entry : files.entrySet()) - { - String key = entry.getKey(); - if (AssayDataCollector.PRIMARY_FILE.equals(key)) - { - FileLike file = entry.getValue(); - - // Check if the file is created by a run - // Don't use getExpDataByURL(String). That method expects string in a very particular format. - ExpData existingData = ExperimentService.get().getExpDataByURL(file.toNioPathForRead(), context.getContainer()); - if (existingData != null && existingData.getRunId() != null && !inputDatas.containsKey(existingData)) - { - // Add this file as an input to the run. When we add the outputs to the run, we will detect - // that this file was already added as an input and create a new exp.data for the same file - // path and attach it as an output. 
- log.debug("found existing cross run file input: name={}, rowId={}, dataFileUrl={}", existingData.getName(), existingData.getRowId(), existingData.getDataFileUrl()); - inputDatas.put(existingData, CROSS_RUN_DATA_INPUT_ROLE); - } - } - } - } - } - - // CONSIDER: Move this to ExperimentService - // Resolve submitted values into ExpData objects - protected void addDatas(Container c, @NotNull Map resolved, @NotNull Map unresolved, @Nullable Logger log) throws ValidationException - { - for (Map.Entry entry : unresolved.entrySet()) - { - Object o = entry.getKey(); - String role = entry.getValue(); - - if (o instanceof ExpData expData) - { - resolved.put(expData, role); - } - else - { - File file = ExpDataFileConverter.convert(o); - if (file != null) - { - ExpData data = ExperimentService.get().getExpDataByURL(file, c); - if (data == null) - { - DataType dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - data = createData(c, file, file.getName(), dataType, false, true, log); - } - - resolved.put(data, role); - } - } - } - } - - public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData) throws ValidationException - { - return generateResultData(user, container, provider, dataArray, outputData, null); - } - - public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData, @Nullable Logger log) throws ValidationException - { - if (log == null) - log = LOG; - - ExpData newData = null; - - // Don't create an empty result data file if there are other outputs from this run, or if the user didn't - // include any data rows - if (!dataArray.isEmpty() && outputData.isEmpty()) - { - DataType dataType = provider.getDataType(); - if (dataType == null) - dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - - newData = createData(container, "Analysis Results", dataType, log); - newData.save(user); - outputData.put(newData, 
ExpDataRunInput.DEFAULT_ROLE); - } - - return newData; - } - - // Find an existing ExpData for the File or null. - public static @Nullable ExpData findExistingData(Container c, @Nullable File file, @Nullable Logger log) - { - if (file == null) - return null; - - if (log == null) - log = LOG; - - List existing = ExperimentService.get().getAllExpDataByURL(file, c); - if (!existing.isEmpty()) - { - for (ExpData d : existing) - { - log.debug("found existing exp.data for file, rowId={}, runId={}, dataFileUrl={}", d.getRowId(), d.getRunId(), d.getDataFileUrl()); - } - - // pick the most recently created one - return existing.get(0); - } - - return null; - } - - public static @NotNull ExpData createData(Container c, String name, @NotNull DataType dataType, @Nullable Logger log) throws ValidationException - { - // NOTE: reuseExistingData and errorOnDataOwned flags are irrelevant when we aren't providing a File - return createData(c, null, name, dataType, false, false, log); - } - - public static @NotNull ExpData createData( - Container c, - File file, - String name, - @Nullable DataType dataType, - boolean reuseExistingData, - boolean errorIfDataOwned, - @Nullable Logger log - ) throws ValidationException - { - if (log == null) - log = LOG; - - ExpData data = findExistingData(c, file, log); - - ExpRun previousRun; - if (data != null && null != (previousRun = data.getRun())) - { - // There's an existing data, but it's already marked as being created by another run - String msg = "File '" + data.getName() + "' has been previously imported in run '" + previousRun.getName() + "' (" + previousRun.getRowId() + ")"; - if (reuseExistingData && errorIfDataOwned) - throw new ValidationException(msg); - - log.debug(msg); - - // Create a new one for the same path so the new run can claim it as its own - if (!reuseExistingData) - { - log.debug("ignoring existing exp.data, will create a new one"); - data = null; - } - } - - if (data == null) - { - if (dataType == null) - dataType = 
AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - - log.debug("creating assay exp.data for file. dataType={}, file={}", dataType.getNamespacePrefix(), file); - data = ExperimentService.get().createData(c, dataType, name); - data.setLSID(ExperimentService.get().generateGuidLSID(c, dataType)); - if (file != null) - { - data.setDataFileURI(FileUtil.getAbsoluteCaseSensitiveFile(file).toURI()); - } - } - else - { - if (dataType != null && !dataType.matches(new Lsid(data.getLSID()))) - { - // Reset its LSID so that it's the correct type // CONSIDER: creating a new ExpData with the correct type instead - String newLsid = ExperimentService.get().generateGuidLSID(c, dataType); - log.debug("LSID doesn't match desired type. Changed the LSID from '{}' to '{}'", data.getLSID(), newLsid); - data.setLSID(newLsid); - } - } - return data; - } - - protected void addOutputMaterials( - AssayRunUploadContext context, - Map outputMaterials, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - addMaterials(context, outputMaterials, context.getOutputMaterials(), null, cache, materialCache); - } - - protected void addMaterials( - AssayRunUploadContext context, - @NotNull Map resolved, - @NotNull Map unresolved, - @Nullable ExpSampleType sampleType, - @NotNull RemapCache cache, - @NotNull Map materialCache - ) throws ExperimentException, ValidationException - { - for (Map.Entry entry : unresolved.entrySet()) - { - Object sampleIdentifier = entry.getKey(); - ExpMaterial material = ExperimentService.get().findExpMaterial(context.getContainer(), context.getUser(), sampleIdentifier, sampleType, cache, materialCache); - if (material == null) - throw new ExperimentException("Unable to resolve sample: " + sampleIdentifier); - - if (!resolved.containsKey(material)) - { - if (!material.isOperationPermitted(SampleTypeService.SampleOperations.AddAssayData)) - throw new 
ExperimentException(SampleTypeService.get().getOperationNotPermittedMessage(Collections.singleton(material), SampleTypeService.SampleOperations.AddAssayData)); - if (sampleType == null || sampleType.getLSID().equals(material.getCpasType())) - resolved.put(material, entry.getValue()); - } - } - } - - protected void addOutputDatas( - AssayRunUploadContext context, - Map inputDatas, - Map outputDatas - ) throws ExperimentException, ValidationException - { - Logger log = context.getLogger() != null ? context.getLogger() : LOG; - - // Create set of existing input files - Set inputFiles = new HashSet<>(); - for (ExpData inputData : inputDatas.keySet()) - { - FileLike f = inputData.getFileLike(); - if (f != null) - inputFiles.add(f); - } - - Map files = context.getUploadedData(); - - AssayDataType dataType; - for (Map.Entry entry : files.entrySet()) - { - FileLike file = entry.getValue(); - dataType = context.getProvider().getDataType(); - - // Reuse existing exp.data as the assay output file unless: - // - we are re-importing the run - // - or the output file is already one of the input files and if we are allowing cross-run file inputs - boolean reuseExistingData = true; - if (context.getReRunId() != null) - reuseExistingData = false; - if (context.isAllowCrossRunFileInputs() && inputFiles.contains(file)) - reuseExistingData = false; - - // For Luminex re-import, we want to reuse the existing exp.data but not - // throw an error when we discover that the exp.data is already owned. The - // original run will be duplicated for re-import and then will be deleted. 
- boolean errorIfDataOwned = getProvider().getReRunSupport() != AssayProvider.ReRunSupport.ReRunAndDelete; - - log.debug("adding output data: file={}", file.toNioPathForRead()); - log.debug(" context.getReRunId()={}", context.getReRunId()); - log.debug(" provider.getReRunSupport()={}", getProvider().getReRunSupport()); - log.debug(" context.allowCrossRunFileInputs={}", context.isAllowCrossRunFileInputs()); - log.debug(" inputFiles.contains(file)={}", inputFiles.contains(file)); - log.debug("==> reuseExistingData = {}", reuseExistingData); - log.debug("==> errorIfDataOwned = {}", errorIfDataOwned); - - ExpData data = DefaultAssayRunCreator.createData(context.getContainer(), file.toNioPathForRead().toFile(), file.getName(), dataType, reuseExistingData, errorIfDataOwned, log); - String role = ExpDataRunInput.DEFAULT_ROLE; - if (dataType != null && dataType.getFileType().isType(file)) - { - if (dataType.getRole() != null) - { - role = dataType.getRole(); - } - } - outputDatas.put(data, role); - } - - FileLike primaryFile = files.get(AssayDataCollector.PRIMARY_FILE); - if (primaryFile != null) - { - addRelatedOutputDatas(context, inputFiles, outputDatas, primaryFile); - } - - Map outputs = context.getOutputDatas(); - addDatas(context.getContainer(), outputDatas, outputs, log); - } - - /** - * Add files that follow the general naming convention (same basename) as the primary file - */ - public void addRelatedOutputDatas( - AssayRunUploadContext context, - Set inputFiles, - Map outputDatas, - final FileLike primaryFile - ) throws ValidationException - { - AssayDataType dataType = getProvider().getDataType(); - final String baseName = dataType == null ? 
null : dataType.getFileType().getBaseName(primaryFile.toNioPathForRead()); - if (baseName != null) - { - // Grab all the files that are related based on naming convention - File primary = primaryFile.toNioPathForRead().toFile(); - File parent = primary.getParentFile(); - // converting to File land to reuse the FileFilter - File[] relatedFiles = parent.listFiles(getRelatedOutputDataFileFilter(primary, baseName)); - if (relatedFiles != null) - { - for (File f : relatedFiles) - { - FileLike relatedFile = primaryFile.getParent().resolveChild(f.getName()); - // Ignore files already considered inputs to the run - if (inputFiles.contains(relatedFile)) - continue; - - Pair dataOutput = createdRelatedOutputData(context, baseName, f); - if (dataOutput != null) - { - outputDatas.put(dataOutput.getKey(), dataOutput.getValue()); - } - } - } - } - } - - protected void resolveExtraRunData( - ParticipantVisitResolver resolver, - AssayRunUploadContext context, - Map inputMaterials, - Map inputDatas, - Map outputMaterials, - Map outputDatas - ) throws ExperimentException - { - } - - /** - * Create an ExpData object for the file, and figure out what its role name should be - * @return null if the file is already linked to another run - */ - @Nullable - public static Pair createdRelatedOutputData(AssayRunUploadContext context, String baseName, File relatedFile) throws ValidationException - { - String roleName = null; - DataType dataType = null; - for (AssayDataType inputType : context.getProvider().getRelatedDataTypes()) - { - // Check if we recognize it as a specially handled file type - if (inputType.getFileType().isMatch(relatedFile.getName(), baseName)) - { - roleName = inputType.getRole(); - dataType = inputType; - break; - } - } - // If not, make up a new type and role for it - if (roleName == null) - { - roleName = relatedFile.getName().substring(baseName.length()); - while (!roleName.isEmpty() && (roleName.startsWith(".") || roleName.startsWith("-") || roleName.startsWith("_") 
|| roleName.startsWith(" "))) - { - roleName = roleName.substring(1); - } - if (roleName.isEmpty()) - { - roleName = null; - } - } - if (dataType == null) - { - dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; - } - - // Find an existing data that isn't owned by another run or create a new own - ExpData data = findExistingData(context.getContainer(), relatedFile, context.getLogger()); - if (data != null) - { - if (data.getSourceApplication() == null) - return new Pair<>(data, roleName); - - // The file is already linked to another run, so this one must have not created it - return null; - } - - data = createData(context.getContainer(), relatedFile, relatedFile.getName(), dataType, true, true, context.getLogger()); - assert data.getSourceApplication() == null; - return Pair.of(data, roleName); - } - - // Disallow creating a run with inputs which are also outputs - protected void checkForCycles( - Map inputs, - Map outputs - ) throws ExperimentException - { - for (ExpRunItem input : inputs.keySet()) - { - if (outputs.containsKey(input)) - { - String role = outputs.get(input); - throw new ExperimentException("Circular input/output '" + input.getName() + "' with role '" + role + "'"); - } - } - } - - private void saveProperties( - final AssayRunUploadContext context, - ExpObject expObject, - Map transformResultProperties, - Map properties - ) throws ValidationException - { - Map propsToSave = transformResultProperties.isEmpty() ? 
properties : transformResultProperties; - List errors = validateProperties(context, propsToSave); - if (!errors.isEmpty()) - throw new ValidationException(errors); - - savePropertyObject(expObject, propsToSave, context.getUser()); - } - - protected void savePropertyObject(ExpObject object, Map properties, User user) throws ValidationException - { - for (Map.Entry entry : properties.entrySet()) - { - DomainProperty pd = entry.getKey(); - String value = entry.getValue(); - - // resolve any file links for batch or run properties - if (PropertyType.FILE_LINK.getTypeUri().equals(pd.getType().getTypeURI())) - { - File resolvedFile = ExpDataFileConverter.convert(value); - if (resolvedFile != null) - value = resolvedFile.getAbsolutePath(); - } - - // Treat the empty string as a null in the database, which is our normal behavior when receiving data - // from HTML forms. - if (StringUtils.trimToNull(value) == null) - { - value = null; - } - if (value != null) - { - object.setProperty(user, pd.getPropertyDescriptor(), value); - } - else - { - // We still need to validate blanks - List errors = new ArrayList<>(); - OntologyManager.validateProperty(pd.getValidators(), pd.getPropertyDescriptor(), new ObjectProperty(object.getLSID(), object.getContainer(), pd.getPropertyDescriptor(), value), errors, new ValidatorContext(pd.getContainer(), user)); - if (!errors.isEmpty()) - throw new ValidationException(errors); - } - } - } - - public static List validateColumnProperties(ContainerUser context, Map properties) - { - List errors = new ArrayList<>(); - RemapCache cache = new RemapCache(); - for (Map.Entry entry : properties.entrySet()) - { - validateProperty(context, entry.getKey(), entry.getValue(), cache, errors); - } - return errors; - } - - public static List validateProperties(ContainerUser context, Map properties) - { - List errors = new ArrayList<>(); - RemapCache cache = new RemapCache(); - for (Map.Entry entry : properties.entrySet()) - { - validateProperty(context, 
entry.getKey(), entry.getValue(), cache, errors); - } - return errors; - } - - private static void validateProperty(ContainerUser context, ColumnInfo columnInfo, String value, RemapCache cache, List errors) - { - Lookup lookup = null; - if (columnInfo.isLookup()) - { - ForeignKey fk = columnInfo.getFk(); - lookup = new Lookup(fk.getLookupContainer(), fk.getLookupSchemaName(), fk.getLookupTableName()); - } - validateProperty(context, ColumnValidators.create(columnInfo, null), value, columnInfo.getName(), - false, lookup, columnInfo.getJavaClass(), cache, errors); - } - - private static void validateProperty(ContainerUser context, DomainProperty dp, String value, RemapCache cache, List errors) - { - String label = dp.getPropertyDescriptor().getNonBlankCaption(); - PropertyType type = dp.getPropertyDescriptor().getPropertyType(); - validateProperty(context, ColumnValidators.create(null, dp), value, label, dp.isRequired(), - dp.getLookup(), type.getJavaType(), cache, errors); - } - - private static void validateProperty( - ContainerUser context, - List validators, - String value, - String label, - Boolean required, - Lookup lookup, - Class type, - RemapCache cache, - List errors - ) - { - boolean missing = (value == null || value.isEmpty()); - int rowNum = 0; - - if (required && missing) - { - errors.add(new SimpleValidationError(label + " is required and must be of type " + ColumnInfo.getFriendlyTypeName(type) + ".")); - } - else if (!missing) - { - try - { - Object o; - if (type == File.class) - o = ExpDataFileConverter.convert(value); - else - o = ConvertUtils.convert(value, type); - ValidatorContext validatorContext = new ValidatorContext(context.getContainer(), context.getUser()); - for (ColumnValidator validator : validators) - { - String msg = validator.validate(rowNum, o, validatorContext, null); - if (msg != null) - errors.add(new PropertyValidationError(msg, label)); - } - } - catch (ConversionException e) - { - String message; - if (e instanceof 
ConvertHelper.FileConversionException fce) - message = fce.getMessage(); - else - { - message = ConvertHelper.getStandardConversionErrorMessage(value, label, type); - if (e.getCause() instanceof ArithmeticException) - message += ": " + e.getCause().getLocalizedMessage(); - else - message += "."; - } - - // Attempt to resolve lookups by display value - boolean skipError = false; - if (lookup != null) - { - Object remappedValue = OntologyManager.getRemappedValueForLookup(context.getUser(), context.getContainer(), cache, lookup, value); - if (remappedValue != null) - skipError = true; - } - - if (!skipError) - errors.add(new SimpleValidationError(message)); - } - } - } - - protected FileFilter getRelatedOutputDataFileFilter(final File primaryFile, final String baseName) - { - // baseName doesn't include the trailing '.', so add it here. We want to associate myRun.jpg - // with myRun.xls, but we don't want to associate myRun2.xls with myRun.xls (which will happen without - // the trailing dot in the check). - return f -> f.getName().startsWith(baseName + ".") && !primaryFile.equals(f); - } - - protected ProviderType getProvider() - { - return _provider; - } -} +/* + * Copyright (c) 2011-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.assay; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONArray; +import org.labkey.api.action.ApiUsageException; +import org.labkey.api.assay.actions.AssayRunUploadForm; +import org.labkey.api.assay.pipeline.AssayRunAsyncContext; +import org.labkey.api.assay.pipeline.AssayUploadPipelineJob; +import org.labkey.api.assay.sample.AssaySampleLookupContext; +import org.labkey.api.assay.transform.DataTransformService; +import org.labkey.api.assay.transform.TransformDataHandler; +import org.labkey.api.assay.transform.TransformResult; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.RemapCache; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.XarContext; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataRunInput; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpMaterial; +import 
org.labkey.api.exp.api.ExpObject; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolApplication; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpRunItem; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ProvenanceService; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.Lookup; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.pipeline.PipelineValidationException; +import org.labkey.api.query.AbstractQueryUpdateService; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.PropertyValidationError; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.study.assay.ParticipantVisitResolver; +import org.labkey.api.study.assay.ParticipantVisitResolverType; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.ViewBackgroundInfo; +import org.labkey.api.writer.ContainerUser; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.io.FileFilter; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static java.util.Collections.unmodifiableCollection; 
+import static org.labkey.api.assay.AssayFileWriter.TEMP_DIR_NAME; + +public class DefaultAssayRunCreator implements AssayRunCreator +{ + private static final Logger LOG = LogManager.getLogger(DefaultAssayRunCreator.class); + public static final String CROSS_RUN_DATA_INPUT_ROLE = "cross run input"; + + private final ProviderType _provider; + + public DefaultAssayRunCreator(ProviderType provider) + { + _provider = provider; + } + + public TransformResult transform(AssayRunUploadContext context, ExpRun run) throws ValidationException + { + return DataTransformService.get().transformAndValidate(context, run, DataTransformService.TransformOperation.INSERT); + } + /** + * Create and save an experiment run synchronously or asynchronously in a background job depending upon the assay design. + * + * @param context The context used to create and save the batch and run. + * @param batchId if not null, the run group that's already created for this batch. If null, a new one will be created. + * @return Pair of batch and run that were inserted. ExpBatch will not be null, but ExpRun may be null when inserting the run async. + */ + @Override + public Pair saveExperimentRun( + AssayRunUploadContext context, + @Nullable Long batchId, + boolean forceAsync, + Map transactionDetails + ) throws ExperimentException, ValidationException + { + ExpExperiment exp = null; + if (batchId != null) + { + exp = ExperimentService.get().getExpExperiment(batchId); + } + + AssayProvider provider = context.getProvider(); + ExpProtocol protocol = context.getProtocol(); + ExpRun run = null; + + try (DbScope.Transaction transaction = ExperimentService.get().getSchema().getScope().ensureTransaction(ExperimentService.get().getProtocolImportLock())) + { + TransactionAuditProvider.TransactionAuditEvent auditEvent = transaction.getAuditEvent(); + if (auditEvent == null) + { + auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(context.getContainer(), context.getReRunId() == null ? 
QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT, transactionDetails); + AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); + } + context.init(); + // Check if assay protocol is configured to import in the background. + // Issue 26811: If we don't have a view, assume that we are on a background job thread already. + boolean importInBackground = forceAsync || (provider.isBackgroundUpload(protocol) && HttpView.hasCurrentView()); + if (!importInBackground) + { + if ((Object) context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE) instanceof File errFile) + { + throw new ClassCastException("FileLike expected: " + errFile + " context: " + context.getClass() + " " + context); + } + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); + run = AssayService.get().createExperimentRun(context.getName(), context.getContainer(), protocol, null == primaryFile ? 
null : primaryFile.toNioPathForRead().toFile()); + run.setComments(context.getComments()); + run.setWorkflowTaskId(context.getWorkflowTask()); + + exp = saveExperimentRun(context, exp, run, false, transactionDetails); + + // re-fetch the run after it has been fully constructed + run = ExperimentService.get().getExpRun(run.getRowId()); + + context.uploadComplete(run); + } + else + { + context.uploadComplete(null); + context.setTransactionAuditId(transaction.getAuditId()); + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportFileName, primaryFile.getName()); + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.ImportOptions, "BackgroundImport"); + exp = saveExperimentRunAsync(context, exp); + } + transaction.commit(); + } + + return Pair.of(exp, run); + } + + private ExpExperiment saveExperimentRunAsync(AssayRunUploadContext context, @Nullable ExpExperiment batch) throws ExperimentException + { + try + { + // Whether we need to save batch properties + boolean forceSaveBatchProps = false; + if (batch == null) + { + // No batch yet, so make one + batch = AssayService.get().createStandardBatch(context.getContainer(), null, context.getProtocol()); + batch.save(context.getUser()); + // It's brand new, so we need to eventually set its properties + forceSaveBatchProps = true; + } + + // Queue up a pipeline job to do the actual import in the background + ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); + + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + // Check if the primary file from the previous import is no longer present for a re-run + if (primaryFile == null && !context.getUploadedData().isEmpty()) + { + // Choose another file as the primary + primaryFile = 
context.getUploadedData().entrySet().iterator().next().getValue(); + } + Objects.requireNonNull(primaryFile); + AssayRunAsyncContext asyncContext = context.getProvider().createRunAsyncContext(context); + final AssayUploadPipelineJob pipelineJob = new AssayUploadPipelineJob<>( + asyncContext, + info, + batch, + forceSaveBatchProps, + PipelineService.get().getPipelineRootSetting(context.getContainer()), + primaryFile.toNioPathForRead().toFile() + ); + + context.setPipelineJobGUID(pipelineJob.getJobGUID()); + + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), null, pipelineJob.getJobGUID()); + resultsFileWriter.savePostedFiles(context); + + // Don't queue the job until the transaction is committed, since otherwise the thread + // that's running the job might start before it can access the job's row in the database. + ExperimentService.get().getSchema().getScope().addCommitTask(() -> { + try + { + PipelineService.get().queueJob(pipelineJob, asyncContext.getJobNotificationProvider()); + } + catch (PipelineValidationException e) + { + throw UnexpectedException.wrap(e); + } + }, DbScope.CommitTaskOption.POSTCOMMIT); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + + return batch; + } + + /** + * @param batch if not null, the run group that's already created for this batch. 
If null, a new one needs to be created + * @param run The run to save + * @return the run and batch that were inserted + */ + @Override + public ExpExperiment saveExperimentRun( + final AssayRunUploadContext context, + @Nullable ExpExperiment batch, + @NotNull ExpRun run, + boolean forceSaveBatchProps, + @Nullable Map transactionDetails + ) throws ExperimentException, ValidationException + { + context.setAutoFillDefaultResultColumns(run.getRowId() > 0); // need to setAutoFillDefaultResultColumns before run is saved + + final Container container = context.getContainer(); + + Map inputMaterials = new HashMap<>(); + Map inputDatas = new HashMap<>(); + Map outputMaterials = new HashMap<>(); + Map outputDatas = new HashMap<>(); + Map transformedDatas = new HashMap<>(); + + Map runProperties = context.getRunProperties(); + Map unresolvedRunProperties = context.getUnresolvedRunProperties(); + Map batchProperties = context.getBatchProperties(); + + Map allProperties = new HashMap<>(); + allProperties.putAll(runProperties); + allProperties.putAll(batchProperties); + + ParticipantVisitResolverType resolverType = null; + for (Map.Entry entry : allProperties.entrySet()) + { + if (entry.getKey().getName().equals(AbstractAssayProvider.PARTICIPANT_VISIT_RESOLVER_PROPERTY_NAME)) + { + resolverType = AbstractAssayProvider.findType(entry.getValue(), getProvider().getParticipantVisitResolverTypes()); + if (resolverType != null) + { + resolverType.configureRun(context, run, inputDatas); + } + break; + } + } + + // TODO: Share these RemapCache and materialCache instances with AbstractAssayTsvDataHandler.checkData and ExpressionMatrixDataHandler.importFile + // Cache of resolved alternate lookup keys -> rowId + final RemapCache cache = new RemapCache(true); + // Cache of rowId -> ExpMaterial + final Map materialCache = new LongHashMap<>(); + + addInputMaterials(context, inputMaterials, cache, materialCache); + addInputDatas(context, inputDatas); + addOutputMaterials(context, 
outputMaterials, cache, materialCache); + addOutputDatas(context, inputDatas, outputDatas); + + boolean success = false; + DbScope scope = ExperimentService.get().getSchema().getScope(); + try (DbScope.Transaction transaction = scope.ensureTransaction(ExperimentService.get().getProtocolImportLock())) + { + if (transaction.getAuditId() == null) + { + var auditAction = context.getReRunId() == null ? QueryService.AuditAction.UPDATE : QueryService.AuditAction.INSERT; + if (context.getTransactionAuditId() != null) + { + var auditEvent = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, context.getTransactionAuditId()); + transaction.setAuditEvent(auditEvent); + } + else + { + var auditEvent = AbstractQueryUpdateService.createTransactionAuditEvent(container, auditAction, transactionDetails); + AbstractQueryUpdateService.addTransactionAuditEvent(transaction, context.getUser(), auditEvent); + } + } + boolean saveBatchProps = forceSaveBatchProps; + + // Add any material/data inputs related to the specimen IDs, etc in the incoming data. 
+ // Some subclasses may actually create ExpMaterials or do other database changes, so do this inside the + // overall transaction + resolveParticipantVisits(context, inputMaterials, inputDatas, outputMaterials, outputDatas, allProperties, resolverType); + + // Check for circular inputs/outputs + checkForCycles(inputMaterials, outputMaterials); + checkForCycles(inputDatas, outputDatas); + + // Create the batch, if needed + if (batch == null) + { + // Make sure that we have a batch to associate with this run + batch = AssayService.get().createStandardBatch(run.getContainer(), null, context.getProtocol()); + batch.save(context.getUser()); + saveBatchProps = true; + } + run.save(context.getUser()); + // Add the run to the batch so that we can find it when we're loading the data files + batch.addRuns(context.getUser(), run); + assert batch.equals(run.getBatch()) : "Run's batch should be the current batch"; + + ViewBackgroundInfo info = new ViewBackgroundInfo(context.getContainer(), context.getUser(), context.getActionURL()); + XarContext xarContext = new AssayUploadXarContext("Simple Run Creation", context); + + run = ExperimentService.get().saveSimpleExperimentRun( + run, + inputMaterials, + inputDatas, + outputMaterials, + outputDatas, + transformedDatas, + info, + context.getLogger() != null ? 
context.getLogger() : LOG, + false + ); + + // handle data transformation + TransformResult transformResult = transform(context, run); + + if (transformResult.getWarnings() != null && context instanceof AssayRunUploadForm uploadForm) + { + context.setTransformResult(transformResult); + uploadForm.setName(run.getName()); + uploadForm.setComments(run.getComments()); + throw new ValidationException(" "); + } + + if (saveBatchProps) + saveProperties(context, batch, transformResult.getBatchProperties(), batchProperties); + if (null != transformResult.getAssayId()) + run.setName(transformResult.getAssayId()); + if (null != transformResult.getComments()) + run.setComments(transformResult.getComments()); + saveProperties(context, run, transformResult.getRunProperties(), runProperties); + + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); + resultsFileWriter.savePostedFiles(context); + + Path assayResultsRunDir = AssayResultsFileWriter.getAssayFilesDirectoryPath(run); + if (null != assayResultsRunDir && !FileUtil.hasCloudScheme(assayResultsRunDir)) + { + FileLike assayResultFileRoot = FileSystemLike.wrapFile(assayResultsRunDir); + if (assayResultFileRoot != null) + QueryService.get().setEnvironment(QueryService.Environment.ASSAYFILESPATH, assayResultFileRoot); + } + + importResultData(context, run, inputDatas, outputDatas, info, xarContext, transformResult); + + var reRunId = context.getReRunId(); + if (reRunId != null && getProvider().getReRunSupport() == AssayProvider.ReRunSupport.ReRunAndReplace) + { + final ExpRun replacedRun = ExperimentService.get().getExpRun(reRunId); + if (replacedRun == null) + throw new ExperimentException(String.format("Unable to find run to be replaced (RowId %d)", reRunId)); + + if (replacedRun.getContainer().hasPermission(context.getUser(), UpdatePermission.class)) + { + replacedRun.setReplacedByRun(run); + replacedRun.save(context.getUser()); + } + + String auditMessage = 
String.format("Run id %d was replaced by run id %d", replacedRun.getRowId(), run.getRowId()); + ExperimentService.get().auditRunEvent(context.getUser(), context.getProtocol(), replacedRun, null, auditMessage, context.getAuditUserComment()); + + transaction.addCommitTask(() -> replacedRun.archiveDataFiles(context.getUser()), DbScope.CommitTaskOption.POSTCOMMIT); + // Issue 51710: Remove replaced assay runs from the search index + transaction.addCommitTask(() -> AssayService.get().deindexAssayRuns(List.of(replacedRun)), DbScope.CommitTaskOption.POSTCOMMIT); + } + + AssayService.get().ensureUniqueBatchName(batch, context.getProtocol(), context.getUser()); + + ExperimentService.get().onRunDataCreated(context.getProtocol(), run, container, context.getUser()); + + transaction.commit(); + success = true; + + // Inspect the run properties for a “prov:objectInputs” property that is a list of LSID strings. + // Attach run's starting protocol application with starting input LSIDs. + Object provInputsProperty = unresolvedRunProperties.get(ProvenanceService.PROVENANCE_INPUT_PROPERTY); + if (provInputsProperty != null) + { + ProvenanceService pvs = ProvenanceService.get(); + Set runInputLSIDs = null; + if (provInputsProperty instanceof String provInputs) + { + // parse as a JSONArray of values or a comma-separated list of values + if (provInputs.startsWith("[") && provInputs.endsWith("]")) + provInputsProperty = new JSONArray(provInputs); + else + runInputLSIDs = Set.of(provInputs.split(",")); + } + + if (provInputsProperty instanceof JSONArray jsonArray) + { + runInputLSIDs = jsonArray.toList().stream() + .map(String::valueOf) + .collect(Collectors.toSet()); + } + + if (runInputLSIDs != null && !runInputLSIDs.isEmpty()) + { + ExpProtocolApplication inputProtocolApp = run.getInputProtocolApplication(); + pvs.addProvenanceInputs(container, inputProtocolApp, runInputLSIDs); + } + } + + ExperimentService.get().queueSyncRunEdges(run); + + return batch; + } + catch (IOException | 
ConvertHelper.FileConversionException | BatchValidationException e) + { + // HACK: Rethrowing these as ApiUsageException avoids any upstream consequences of wrapping them in ExperimentException. + // Namely, that they are logged to the server/mothership. There has to be a better way. + if (e instanceof ConvertHelper.FileConversionException fce) + throw new ApiUsageException(fce.getMessage(), fce); + else if (e instanceof BatchValidationException bve) + throw new ApiUsageException(bve.getMessage(), bve); + + throw new ExperimentException(e); + } + finally + { + if (!success) + { + // clean up the run results file dir here if it was created, for non-async imports + AssayResultsFileWriter> resultsFileWriter = new AssayResultsFileWriter<>(context.getProtocol(), run, null); + resultsFileWriter.cleanupPostedFiles(context.getContainer(), false); + + cleanPrimaryFile(context); + } + } + } + + private void cleanPrimaryFile(AssayRunUploadContext context) throws ExperimentException + { + // Do not clear the primary file for run re-imports + if (context.getReRunId() != null) + return; + + try + { + // Issue 51300: don't keep the primary file if the new run failed to save + FileLike primaryFile = context.getUploadedData().get(AssayDataCollector.PRIMARY_FILE); + + // If the uploaded file is in the temp directory, then do not delete it as it may be reused in the next import attempt. 
+ if (primaryFile != null && primaryFile.exists() && !primaryFile.getPath().contains(TEMP_DIR_NAME)) + primaryFile.delete(); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + } + + private void resolveParticipantVisits( + AssayRunUploadContext context, + Map inputMaterials, + Map inputDatas, + Map outputMaterials, + Map outputDatas, + Map allProperties, + @Nullable ParticipantVisitResolverType resolverType + ) throws ExperimentException + { + try + { + ParticipantVisitResolver resolver = null; + if (resolverType != null) + { + String targetStudyId = null; + for (Map.Entry property : allProperties.entrySet()) + { + if (AbstractAssayProvider.TARGET_STUDY_PROPERTY_NAME.equals(property.getKey().getName())) + { + targetStudyId = property.getValue(); + break; + } + } + Container targetStudy = null; + if (targetStudyId != null && !targetStudyId.isEmpty()) + targetStudy = ContainerManager.getForId(targetStudyId); + + resolver = resolverType.createResolver( + unmodifiableCollection(inputMaterials.keySet()), + unmodifiableCollection(inputDatas.keySet()), + unmodifiableCollection(outputMaterials.keySet()), + unmodifiableCollection(outputDatas.keySet()), + context.getContainer(), + targetStudy, context.getUser()); + } + + resolveExtraRunData(resolver, context, inputMaterials, inputDatas, outputMaterials, outputDatas); + } + catch (IOException e) + { + throw new ExperimentException(e); + } + } + + protected void importStandardResultData( + AssayRunUploadContext context, + ExpRun run, + Map inputDatas, + Map outputDatas, + ViewBackgroundInfo info, + XarContext xarContext + ) throws ExperimentException, BatchValidationException + { + DataIteratorBuilder rawData = context.getRawData(); + List insertedDatas = new ArrayList<>(); + + if (rawData != null) + { + insertedDatas.addAll(outputDatas.keySet()); + + ExpData primaryData = null; + // Decide which file to treat as the primary, to which the data rows will be attached + for (Map.Entry entry : 
outputDatas.entrySet()) + { + if (ExpDataRunInput.DEFAULT_ROLE.equalsIgnoreCase(entry.getValue())) + { + primaryData = entry.getKey(); + } + } + if (primaryData == null && !insertedDatas.isEmpty()) + primaryData = insertedDatas.get(0); + + if (primaryData != null) + { + TsvDataHandler dataHandler = new TsvDataHandler(); + dataHandler.setAllowEmptyData(true); + dataHandler.importRows(primaryData, context.getUser(), run, context.getProtocol(), getProvider(), rawData, null, context.shouldAutoFillDefaultResultColumns(), context); + } + } + else + { + for (Map.Entry entry : inputDatas.entrySet()) + { + // skip any of the cross run inputData that are already in the outputData + if (CROSS_RUN_DATA_INPUT_ROLE.equals(entry.getValue())) + continue; + + insertedDatas.add(entry.getKey()); + } + + insertedDatas.addAll(outputDatas.keySet()); + + Logger logger = context.getLogger() != null ? context.getLogger() : LOG; + for (ExpData insertedData : insertedDatas) + { + ExperimentDataHandler dataHandler = insertedData.findDataHandler(); + + FileLike fileLike = FileSystemLike.wrapFile(insertedData.getFile()); + if (dataHandler instanceof AbstractAssayTsvDataHandler tsvHandler) + { + tsvHandler.importFile(insertedData, fileLike, info, logger, xarContext, context.isAllowLookupByAlternateKey(), context.shouldAutoFillDefaultResultColumns()); + } + else + { + dataHandler.importFile(insertedData, fileLike, info, logger, xarContext); + } + } + } + } + + private void importResultData( + AssayRunUploadContext context, + ExpRun run, + Map inputDatas, + Map outputDatas, + ViewBackgroundInfo info, + XarContext xarContext, + TransformResult transformResult + ) throws ExperimentException, BatchValidationException + { + if (transformResult.getTransformedData().isEmpty()) + { + importStandardResultData(context, run, inputDatas, outputDatas, info, xarContext); + return; + } + + DataType dataType = context.getProvider().getDataType(); + if (dataType == null) + { + // we know that we are importing 
transformed data at this point + dataType = TsvDataHandler.RELATED_TRANSFORM_FILE_DATA_TYPE; + } + + ExpData data = ExperimentService.get().createData(context.getContainer(), dataType); + ExperimentDataHandler handler = data.findDataHandler(); + + // this should assert to always be true + if (handler instanceof TransformDataHandler transformDataHandler) + { + for (Map.Entry entry : transformResult.getTransformedData().entrySet()) + { + ExpData expData = entry.getKey(); + // The object may have already been claimed by + if (expData.getSourceApplication() == null) + { + expData.setSourceApplication(run.getOutputProtocolApplication()); + } + expData.save(context.getUser()); + + run.getOutputProtocolApplication().addDataInput(context.getUser(), expData, ExpDataRunInput.IMPORTED_DATA_ROLE); + // Add to the cached list of outputs + run.getDataOutputs().add(expData); + + transformDataHandler.importTransformDataMap(expData, context, run, entry.getValue()); + } + } + } + + protected void addInputMaterials( + AssayRunUploadContext context, + Map inputMaterials, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + addMaterials(context, inputMaterials, context.getInputMaterials(), null, cache, materialCache); + + // Find lookups to a SampleType and add the resolved material as an input sample + for (Map.Entry entry : context.getRunProperties().entrySet()) + { + String value = StringUtils.trimToNull(entry.getValue()); + if (value == null) + continue; + + // Lookup must point at "Samples.*", "exp.materials.*", or "exp.Materials" + DomainProperty dp = entry.getKey(); + var sampleLookup = AssaySampleLookupContext.checkSampleLookup(context.getContainer(), context.getUser(), dp); + if (!sampleLookup.isLookup()) + continue; + + String role = AssayService.get().getPropertyInputLineageRole(dp); + addMaterials(context, inputMaterials, Map.of(value, role), sampleLookup.expSampleType(), cache, materialCache); + } + } + + 
protected void addInputDatas( + AssayRunUploadContext context, + @NotNull Map inputDatas + ) throws ExperimentException, ValidationException + { + Logger log = context.getLogger() != null ? context.getLogger() : LOG; + + Map inputs = context.getInputDatas(); + addDatas(context.getContainer(), inputDatas, inputs, log); + + // Inspect the uploaded files which will be added as outputs of the run + if (context.isAllowCrossRunFileInputs()) + { + Map files = context.getUploadedData(); + for (Map.Entry entry : files.entrySet()) + { + String key = entry.getKey(); + if (AssayDataCollector.PRIMARY_FILE.equals(key)) + { + FileLike file = entry.getValue(); + + // Check if the file is created by a run + // Don't use getExpDataByURL(String). That method expects string in a very particular format. + ExpData existingData = ExperimentService.get().getExpDataByURL(file.toNioPathForRead(), context.getContainer()); + if (existingData != null && existingData.getRunId() != null && !inputDatas.containsKey(existingData)) + { + // Add this file as an input to the run. When we add the outputs to the run, we will detect + // that this file was already added as an input and create a new exp.data for the same file + // path and attach it as an output. 
+ log.debug("found existing cross run file input: name={}, rowId={}, dataFileUrl={}", existingData.getName(), existingData.getRowId(), existingData.getDataFileUrl()); + inputDatas.put(existingData, CROSS_RUN_DATA_INPUT_ROLE); + } + } + } + } + } + + // CONSIDER: Move this to ExperimentService + // Resolve submitted values into ExpData objects + protected void addDatas(Container c, @NotNull Map resolved, @NotNull Map unresolved, @Nullable Logger log) throws ValidationException + { + for (Map.Entry entry : unresolved.entrySet()) + { + Object o = entry.getKey(); + String role = entry.getValue(); + + if (o instanceof ExpData expData) + { + resolved.put(expData, role); + } + else + { + File file = ExpDataFileConverter.convert(o); + if (file != null) + { + ExpData data = ExperimentService.get().getExpDataByURL(file, c); + if (data == null) + { + DataType dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + data = createData(c, file, file.getName(), dataType, false, true, log); + } + + resolved.put(data, role); + } + } + } + } + + public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData) throws ValidationException + { + return generateResultData(user, container, provider, dataArray, outputData, null); + } + + public static ExpData generateResultData(User user, Container container, AssayProvider provider, List> dataArray, Map outputData, @Nullable Logger log) throws ValidationException + { + if (log == null) + log = LOG; + + ExpData newData = null; + + // Don't create an empty result data file if there are other outputs from this run, or if the user didn't + // include any data rows + if (!dataArray.isEmpty() && outputData.isEmpty()) + { + DataType dataType = provider.getDataType(); + if (dataType == null) + dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + + newData = createData(container, "Analysis Results", dataType, log); + newData.save(user); + outputData.put(newData, 
ExpDataRunInput.DEFAULT_ROLE); + } + + return newData; + } + + // Find an existing ExpData for the File or null. + public static @Nullable ExpData findExistingData(Container c, @Nullable File file, @Nullable Logger log) + { + if (file == null) + return null; + + if (log == null) + log = LOG; + + List existing = ExperimentService.get().getAllExpDataByURL(file, c); + if (!existing.isEmpty()) + { + for (ExpData d : existing) + { + log.debug("found existing exp.data for file, rowId={}, runId={}, dataFileUrl={}", d.getRowId(), d.getRunId(), d.getDataFileUrl()); + } + + // pick the most recently created one + return existing.get(0); + } + + return null; + } + + public static @NotNull ExpData createData(Container c, String name, @NotNull DataType dataType, @Nullable Logger log) throws ValidationException + { + // NOTE: reuseExistingData and errorOnDataOwned flags are irrelevant when we aren't providing a File + return createData(c, null, name, dataType, false, false, log); + } + + public static @NotNull ExpData createData( + Container c, + File file, + String name, + @Nullable DataType dataType, + boolean reuseExistingData, + boolean errorIfDataOwned, + @Nullable Logger log + ) throws ValidationException + { + if (log == null) + log = LOG; + + ExpData data = findExistingData(c, file, log); + + ExpRun previousRun; + if (data != null && null != (previousRun = data.getRun())) + { + // There's an existing data, but it's already marked as being created by another run + String msg = "File '" + data.getName() + "' has been previously imported in run '" + previousRun.getName() + "' (" + previousRun.getRowId() + ")"; + if (reuseExistingData && errorIfDataOwned) + throw new ValidationException(msg); + + log.debug(msg); + + // Create a new one for the same path so the new run can claim it as its own + if (!reuseExistingData) + { + log.debug("ignoring existing exp.data, will create a new one"); + data = null; + } + } + + if (data == null) + { + if (dataType == null) + dataType = 
AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + + log.debug("creating assay exp.data for file. dataType={}, file={}", dataType.getNamespacePrefix(), file); + data = ExperimentService.get().createData(c, dataType, name); + data.setLSID(ExperimentService.get().generateGuidLSID(c, dataType)); + if (file != null) + { + data.setDataFileURI(FileUtil.getAbsoluteCaseSensitiveFile(file).toURI()); + } + } + else + { + if (dataType != null && !dataType.matches(new Lsid(data.getLSID()))) + { + // Reset its LSID so that it's the correct type // CONSIDER: creating a new ExpData with the correct type instead + String newLsid = ExperimentService.get().generateGuidLSID(c, dataType); + log.debug("LSID doesn't match desired type. Changed the LSID from '{}' to '{}'", data.getLSID(), newLsid); + data.setLSID(newLsid); + } + } + return data; + } + + protected void addOutputMaterials( + AssayRunUploadContext context, + Map outputMaterials, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + addMaterials(context, outputMaterials, context.getOutputMaterials(), null, cache, materialCache); + } + + protected void addMaterials( + AssayRunUploadContext context, + @NotNull Map resolved, + @NotNull Map unresolved, + @Nullable ExpSampleType sampleType, + @NotNull RemapCache cache, + @NotNull Map materialCache + ) throws ExperimentException, ValidationException + { + for (Map.Entry entry : unresolved.entrySet()) + { + Object sampleIdentifier = entry.getKey(); + ExpMaterial material = ExperimentService.get().findExpMaterial(context.getContainer(), context.getUser(), sampleIdentifier, sampleType, cache, materialCache); + if (material == null) + throw new ExperimentException("Unable to resolve sample: " + sampleIdentifier); + + if (!resolved.containsKey(material)) + { + if (!material.isOperationPermitted(SampleTypeService.SampleOperations.AddAssayData)) + throw new 
ExperimentException(SampleTypeService.get().getOperationNotPermittedMessage(Collections.singleton(material), SampleTypeService.SampleOperations.AddAssayData)); + if (sampleType == null || sampleType.getLSID().equals(material.getCpasType())) + resolved.put(material, entry.getValue()); + } + } + } + + protected void addOutputDatas( + AssayRunUploadContext context, + Map inputDatas, + Map outputDatas + ) throws ExperimentException, ValidationException + { + Logger log = context.getLogger() != null ? context.getLogger() : LOG; + + // Create set of existing input files + Set inputFiles = new HashSet<>(); + for (ExpData inputData : inputDatas.keySet()) + { + FileLike f = inputData.getFileLike(); + if (f != null) + inputFiles.add(f); + } + + Map files = context.getUploadedData(); + + AssayDataType dataType; + for (Map.Entry entry : files.entrySet()) + { + FileLike file = entry.getValue(); + dataType = context.getProvider().getDataType(); + + // Reuse existing exp.data as the assay output file unless: + // - we are re-importing the run + // - or the output file is already one of the input files and if we are allowing cross-run file inputs + boolean reuseExistingData = true; + if (context.getReRunId() != null) + reuseExistingData = false; + if (context.isAllowCrossRunFileInputs() && inputFiles.contains(file)) + reuseExistingData = false; + + // For Luminex re-import, we want to reuse the existing exp.data but not + // throw an error when we discover that the exp.data is already owned. The + // original run will be duplicated for re-import and then will be deleted. 
+ boolean errorIfDataOwned = getProvider().getReRunSupport() != AssayProvider.ReRunSupport.ReRunAndDelete; + + log.debug("adding output data: file={}", file.toNioPathForRead()); + log.debug(" context.getReRunId()={}", context.getReRunId()); + log.debug(" provider.getReRunSupport()={}", getProvider().getReRunSupport()); + log.debug(" context.allowCrossRunFileInputs={}", context.isAllowCrossRunFileInputs()); + log.debug(" inputFiles.contains(file)={}", inputFiles.contains(file)); + log.debug("==> reuseExistingData = {}", reuseExistingData); + log.debug("==> errorIfDataOwned = {}", errorIfDataOwned); + + ExpData data = DefaultAssayRunCreator.createData(context.getContainer(), file.toNioPathForRead().toFile(), file.getName(), dataType, reuseExistingData, errorIfDataOwned, log); + String role = ExpDataRunInput.DEFAULT_ROLE; + if (dataType != null && dataType.getFileType().isType(file)) + { + if (dataType.getRole() != null) + { + role = dataType.getRole(); + } + } + outputDatas.put(data, role); + } + + FileLike primaryFile = files.get(AssayDataCollector.PRIMARY_FILE); + if (primaryFile != null) + { + addRelatedOutputDatas(context, inputFiles, outputDatas, primaryFile); + } + + Map outputs = context.getOutputDatas(); + addDatas(context.getContainer(), outputDatas, outputs, log); + } + + /** + * Add files that follow the general naming convention (same basename) as the primary file + */ + public void addRelatedOutputDatas( + AssayRunUploadContext context, + Set inputFiles, + Map outputDatas, + final FileLike primaryFile + ) throws ValidationException + { + AssayDataType dataType = getProvider().getDataType(); + final String baseName = dataType == null ? 
null : dataType.getFileType().getBaseName(primaryFile.toNioPathForRead()); + if (baseName != null) + { + // Grab all the files that are related based on naming convention + File primary = primaryFile.toNioPathForRead().toFile(); + File parent = primary.getParentFile(); + // converting to File land to reuse the FileFilter + File[] relatedFiles = parent.listFiles(getRelatedOutputDataFileFilter(primary, baseName)); + if (relatedFiles != null) + { + for (File f : relatedFiles) + { + FileLike relatedFile = primaryFile.getParent().resolveChild(f.getName()); + // Ignore files already considered inputs to the run + if (inputFiles.contains(relatedFile)) + continue; + + Pair dataOutput = createdRelatedOutputData(context, baseName, f); + if (dataOutput != null) + { + outputDatas.put(dataOutput.getKey(), dataOutput.getValue()); + } + } + } + } + } + + protected void resolveExtraRunData( + ParticipantVisitResolver resolver, + AssayRunUploadContext context, + Map inputMaterials, + Map inputDatas, + Map outputMaterials, + Map outputDatas + ) throws ExperimentException + { + } + + /** + * Create an ExpData object for the file, and figure out what its role name should be + * @return null if the file is already linked to another run + */ + @Nullable + public static Pair createdRelatedOutputData(AssayRunUploadContext context, String baseName, File relatedFile) throws ValidationException + { + String roleName = null; + DataType dataType = null; + for (AssayDataType inputType : context.getProvider().getRelatedDataTypes()) + { + // Check if we recognize it as a specially handled file type + if (inputType.getFileType().isMatch(relatedFile.getName(), baseName)) + { + roleName = inputType.getRole(); + dataType = inputType; + break; + } + } + // If not, make up a new type and role for it + if (roleName == null) + { + roleName = relatedFile.getName().substring(baseName.length()); + while (!roleName.isEmpty() && (roleName.startsWith(".") || roleName.startsWith("-") || roleName.startsWith("_") 
|| roleName.startsWith(" "))) + { + roleName = roleName.substring(1); + } + if (roleName.isEmpty()) + { + roleName = null; + } + } + if (dataType == null) + { + dataType = AbstractAssayProvider.RELATED_FILE_DATA_TYPE; + } + + // Find an existing data that isn't owned by another run or create a new one + ExpData data = findExistingData(context.getContainer(), relatedFile, context.getLogger()); + if (data != null) + { + if (data.getSourceApplication() == null) + return new Pair<>(data, roleName); + + // The file is already linked to another run, so this one must not have created it + return null; + } + + data = createData(context.getContainer(), relatedFile, relatedFile.getName(), dataType, true, true, context.getLogger()); + assert data.getSourceApplication() == null; + return Pair.of(data, roleName); + } + + // Disallow creating a run with inputs which are also outputs + protected void checkForCycles( + Map inputs, + Map outputs + ) throws ExperimentException + { + for (ExpRunItem input : inputs.keySet()) + { + if (outputs.containsKey(input)) + { + String role = outputs.get(input); + throw new ExperimentException("Circular input/output '" + input.getName() + "' with role '" + role + "'"); + } + } + } + + private void saveProperties( + final AssayRunUploadContext context, + ExpObject expObject, + Map transformResultProperties, + Map properties + ) throws ValidationException + { + Map propsToSave = transformResultProperties.isEmpty() ? 
properties : transformResultProperties; + List errors = validateProperties(context, propsToSave); + if (!errors.isEmpty()) + throw new ValidationException(errors); + + savePropertyObject(expObject, propsToSave, context.getUser()); + } + + protected void savePropertyObject(ExpObject object, Map properties, User user) throws ValidationException + { + for (Map.Entry entry : properties.entrySet()) + { + DomainProperty pd = entry.getKey(); + String value = entry.getValue(); + + // resolve any file links for batch or run properties + if (PropertyType.FILE_LINK.getTypeUri().equals(pd.getType().getTypeURI())) + { + File resolvedFile = ExpDataFileConverter.convert(value); + if (resolvedFile != null) + value = resolvedFile.getAbsolutePath(); + } + + // Treat the empty string as a null in the database, which is our normal behavior when receiving data + // from HTML forms. + if (StringUtils.trimToNull(value) == null) + { + value = null; + } + if (value != null) + { + object.setProperty(user, pd.getPropertyDescriptor(), value); + } + else + { + // We still need to validate blanks + List errors = new ArrayList<>(); + OntologyManager.validateProperty(pd.getValidators(), pd.getPropertyDescriptor(), new ObjectProperty(object.getLSID(), object.getContainer(), pd.getPropertyDescriptor(), value), errors, new ValidatorContext(pd.getContainer(), user)); + if (!errors.isEmpty()) + throw new ValidationException(errors); + } + } + } + + public static List validateColumnProperties(ContainerUser context, Map properties) + { + List errors = new ArrayList<>(); + RemapCache cache = new RemapCache(); + for (Map.Entry entry : properties.entrySet()) + { + validateProperty(context, entry.getKey(), entry.getValue(), cache, errors); + } + return errors; + } + + public static List validateProperties(ContainerUser context, Map properties) + { + List errors = new ArrayList<>(); + RemapCache cache = new RemapCache(); + for (Map.Entry entry : properties.entrySet()) + { + validateProperty(context, 
entry.getKey(), entry.getValue(), cache, errors); + } + return errors; + } + + private static void validateProperty(ContainerUser context, ColumnInfo columnInfo, String value, RemapCache cache, List errors) + { + Lookup lookup = null; + if (columnInfo.isLookup()) + { + ForeignKey fk = columnInfo.getFk(); + lookup = new Lookup(fk.getLookupContainer(), fk.getLookupSchemaName(), fk.getLookupTableName()); + } + validateProperty(context, ColumnValidators.create(columnInfo, null), value, columnInfo.getName(), + false, lookup, columnInfo.getJavaClass(), cache, errors); + } + + private static void validateProperty(ContainerUser context, DomainProperty dp, String value, RemapCache cache, List errors) + { + String label = dp.getPropertyDescriptor().getNonBlankCaption(); + PropertyType type = dp.getPropertyDescriptor().getPropertyType(); + validateProperty(context, ColumnValidators.create(null, dp), value, label, dp.isRequired(), + dp.getLookup(), type.getJavaType(), cache, errors); + } + + private static void validateProperty( + ContainerUser context, + List validators, + String value, + String label, + Boolean required, + Lookup lookup, + Class type, + RemapCache cache, + List errors + ) + { + boolean missing = (value == null || value.isEmpty()); + int rowNum = 0; + + if (required && missing) + { + errors.add(new SimpleValidationError(label + " is required and must be of type " + ColumnInfo.getFriendlyTypeName(type) + ".")); + } + else if (!missing) + { + try + { + Object o; + if (type == File.class) + o = ExpDataFileConverter.convert(value); + else + o = ConvertUtils.convert(value, type); + ValidatorContext validatorContext = new ValidatorContext(context.getContainer(), context.getUser()); + for (ColumnValidator validator : validators) + { + String msg = validator.validate(rowNum, o, validatorContext, null); + if (msg != null) + errors.add(new PropertyValidationError(msg, label)); + } + } + catch (ConversionException e) + { + String message; + if (e instanceof 
ConvertHelper.FileConversionException fce) + message = fce.getMessage(); + else + { + message = ConvertHelper.getStandardConversionErrorMessage(value, label, type); + if (e.getCause() instanceof ArithmeticException) + message += ": " + e.getCause().getLocalizedMessage(); + else + message += "."; + } + + // Attempt to resolve lookups by display value + boolean skipError = false; + if (lookup != null) + { + Object remappedValue = OntologyManager.getRemappedValueForLookup(context.getUser(), context.getContainer(), cache, lookup, value); + if (remappedValue != null) + skipError = true; + } + + if (!skipError) + errors.add(new SimpleValidationError(message)); + } + } + } + + protected FileFilter getRelatedOutputDataFileFilter(final File primaryFile, final String baseName) + { + // baseName doesn't include the trailing '.', so add it here. We want to associate myRun.jpg + // with myRun.xls, but we don't want to associate myRun2.xls with myRun.xls (which will happen without + // the trailing dot in the check). + return f -> f.getName().startsWith(baseName + ".") && !primaryFile.equals(f); + } + + protected ProviderType getProvider() + { + return _provider; + } +} diff --git a/api/src/org/labkey/api/data/validator/ColumnValidator.java b/api/src/org/labkey/api/data/validator/ColumnValidator.java index 56fad6acc14..8a8f1d511fa 100644 --- a/api/src/org/labkey/api/data/validator/ColumnValidator.java +++ b/api/src/org/labkey/api/data/validator/ColumnValidator.java @@ -1,31 +1,31 @@ -/* - * Copyright (c) 2014-2016 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.data.validator; - -import org.jetbrains.annotations.Nullable; -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.exp.property.ValidatorContext; - -/** - * Column-level value validation run just before insert or update. - */ -public interface ColumnValidator -{ - String validate(int rowNum, Object value); - - String validate(int rowNum, Object value, ValidatorContext validatorContext, @Nullable Object providedValue); - -} +/* + * Copyright (c) 2014-2016 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.data.validator; + +import org.jetbrains.annotations.Nullable; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.exp.property.ValidatorContext; + +/** + * Column-level value validation run just before insert or update. 
+ */ +public interface ColumnValidator +{ + String validate(int rowNum, Object value); + + String validate(int rowNum, Object value, ValidatorContext validatorContext, @Nullable Object providedValue); + +} diff --git a/api/src/org/labkey/api/exp/property/IPropertyValidator.java b/api/src/org/labkey/api/exp/property/IPropertyValidator.java index 9e0c9d44f81..e43dad5541e 100644 --- a/api/src/org/labkey/api/exp/property/IPropertyValidator.java +++ b/api/src/org/labkey/api/exp/property/IPropertyValidator.java @@ -1,60 +1,60 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.exp.property; - -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.Container; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; - -import java.util.List; -import java.util.Map; - -/* -* User: Karl Lum -* Date: Aug 8, 2008 -* Time: 9:17:09 AM -*/ -public interface IPropertyValidator -{ - int getPropertyId(); - long getRowId(); - String getName(); - String getTypeURI(); - String getDescription(); - String getExpressionValue(); - String getErrorMessage(); - Map getProperties(); - - Container getContainer(); - - ValidatorKind getType(); - - void setPropertyId(int propertyId); - void setName(String name); - void setDescription(String description); - void setExpressionValue(String expression); - void setErrorMessage(String message); - void setProperty(String key, String value); - void setColumnNameProvidedData(String columnNameProvidedData); - @Nullable String getColumnNameProvidedData(); - - IPropertyValidator save(User user, Container container) throws ValidationException; - - boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache); +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.exp.property; + +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.Container; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; + +import java.util.List; +import java.util.Map; + +/* +* User: Karl Lum +* Date: Aug 8, 2008 +* Time: 9:17:09 AM +*/ +public interface IPropertyValidator +{ + int getPropertyId(); + long getRowId(); + String getName(); + String getTypeURI(); + String getDescription(); + String getExpressionValue(); + String getErrorMessage(); + Map getProperties(); + + Container getContainer(); + + ValidatorKind getType(); + + void setPropertyId(int propertyId); + void setName(String name); + void setDescription(String description); + void setExpressionValue(String expression); + void setErrorMessage(String message); + void setProperty(String key, String value); + void setColumnNameProvidedData(String columnNameProvidedData); + @Nullable String getColumnNameProvidedData(); + + IPropertyValidator save(User user, Container container) throws ValidationException; + + boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache); } \ No newline at end of file diff --git a/api/src/org/labkey/api/exp/property/ValidatorKind.java b/api/src/org/labkey/api/exp/property/ValidatorKind.java index d09f296f85b..4f5eeea17b1 100644 --- a/api/src/org/labkey/api/exp/property/ValidatorKind.java +++ b/api/src/org/labkey/api/exp/property/ValidatorKind.java @@ -1,96 +1,96 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.exp.property; - -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.query.ValidationError; -import org.labkey.data.xml.ValidatorPropertyType; -import org.labkey.data.xml.ValidatorType; -import org.labkey.data.xml.ValidatorsType; - -import java.util.LinkedList; -import java.util.List; - -/* -* User: Karl Lum -* Date: Aug 8, 2008 -* Time: 10:45:38 AM -*/ -public interface ValidatorKind -{ - String NAMESPACE = "PropertyValidator"; - - String getName(); - String getTypeURI(); - String getDescription(); - - IPropertyValidator createInstance(); - boolean isValid(IPropertyValidator validator, List errors); - boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, List errors, ValidatorContext validatorCache, @Nullable Object providedValue); - - // Standard save-validator-to-XML method. ValidatorKind implementations can customize this by overriding. 
- default void convertToXml(IPropertyValidator v, ValidatorsType validatorsXml) - { - ValidatorType validatorType = validatorsXml.addNewValidator(); - validatorType.setTypeURI(v.getTypeURI()); - validatorType.setName(v.getName()); - - if (null != v.getDescription()) - validatorType.setDescription(v.getDescription()); - if (null != v.getErrorMessage()) - validatorType.setErrorMessage(v.getErrorMessage()); - if (null != v.getExpressionValue()) - validatorType.setExpression(v.getExpressionValue()); - - v.getProperties().forEach((name, value) -> { - ValidatorPropertyType pv = validatorType.addNewProperty(); - pv.setName(name); - pv.setValue(value); - }); - } - - static List convertFromXML(ValidatorsType validatorsXml) - { - List list = new LinkedList<>(); - - if (null != validatorsXml) - { - ValidatorType[] validators = validatorsXml.getValidatorArray(); - - for (ValidatorType v : validators) - { - IPropertyValidator pv = PropertyService.get().createValidator(v.getTypeURI()); - pv.setName(v.getName()); - - if (null != v.getDescription()) - pv.setDescription(v.getDescription()); - if (null != v.getErrorMessage()) - pv.setErrorMessage(v.getErrorMessage()); - if (null != v.getExpression()) - pv.setExpressionValue(v.getExpression()); - - for (ValidatorPropertyType prop : v.getPropertyArray()) - pv.setProperty(prop.getName(), prop.getValue()); - - list.add(pv); - } - } - - return list; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.exp.property; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.query.ValidationError; +import org.labkey.data.xml.ValidatorPropertyType; +import org.labkey.data.xml.ValidatorType; +import org.labkey.data.xml.ValidatorsType; + +import java.util.LinkedList; +import java.util.List; + +/* +* User: Karl Lum +* Date: Aug 8, 2008 +* Time: 10:45:38 AM +*/ +public interface ValidatorKind +{ + String NAMESPACE = "PropertyValidator"; + + String getName(); + String getTypeURI(); + String getDescription(); + + IPropertyValidator createInstance(); + boolean isValid(IPropertyValidator validator, List errors); + boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, List errors, ValidatorContext validatorCache, @Nullable Object providedValue); + + // Standard save-validator-to-XML method. ValidatorKind implementations can customize this by overriding. 
+ default void convertToXml(IPropertyValidator v, ValidatorsType validatorsXml) + { + ValidatorType validatorType = validatorsXml.addNewValidator(); + validatorType.setTypeURI(v.getTypeURI()); + validatorType.setName(v.getName()); + + if (null != v.getDescription()) + validatorType.setDescription(v.getDescription()); + if (null != v.getErrorMessage()) + validatorType.setErrorMessage(v.getErrorMessage()); + if (null != v.getExpressionValue()) + validatorType.setExpression(v.getExpressionValue()); + + v.getProperties().forEach((name, value) -> { + ValidatorPropertyType pv = validatorType.addNewProperty(); + pv.setName(name); + pv.setValue(value); + }); + } + + static List convertFromXML(ValidatorsType validatorsXml) + { + List list = new LinkedList<>(); + + if (null != validatorsXml) + { + ValidatorType[] validators = validatorsXml.getValidatorArray(); + + for (ValidatorType v : validators) + { + IPropertyValidator pv = PropertyService.get().createValidator(v.getTypeURI()); + pv.setName(v.getName()); + + if (null != v.getDescription()) + pv.setDescription(v.getDescription()); + if (null != v.getErrorMessage()) + pv.setErrorMessage(v.getErrorMessage()); + if (null != v.getExpression()) + pv.setExpressionValue(v.getExpression()); + + for (ValidatorPropertyType prop : v.getPropertyArray()) + pv.setProperty(prop.getName(), prop.getValue()); + + list.add(pv); + } + } + + return list; + } } \ No newline at end of file diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index 07512ab4ef9..adb096bcb33 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,953 +1,953 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import 
org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext, providedValue); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value, null); - } - } - } - - protected void validateUpdateRow(Map row) throws ValidationException - { - for 
(ColumnInfo col : getQueryTable().getColumns()) - { - // Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value, null); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have 
not existed in the row before the update. - lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = ConvertUtils.convert(pkValue.toString(), pk.getJavaObjectClass()); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getDbTable(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = null==value ? null : convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().apply(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters != null) - { - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - } - -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = ConvertUtils.convert(pkValue.toString(), pk.getJavaObjectClass()); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = null==value ? null : convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().apply(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters != null) + { + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + } + +} diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index e49d411e5b1..a4c01c9786c 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -1,1875 +1,1875 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.collections4.ListUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.math3.util.Precision; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.assay.plate.AssayPlateMetadataService; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.cache.BlockingCache; -import org.labkey.api.cache.CacheManager; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.compliance.TableRules; -import org.labkey.api.compliance.TableRulesManager; -import org.labkey.api.data.ColumnHeaderType; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DataColumn; -import org.labkey.api.data.DataRegion; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DisplayColumn; -import org.labkey.api.data.DisplayColumnFactory; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MaterializedQueryHelper; -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.data.PHI; -import org.labkey.api.data.RenderContext; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.UnionContainerFilter; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.LoggingDataIterator; -import org.labkey.api.dataiterator.SimpleTranslator; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.OntologyManager; 
-import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ExperimentUrls; -import org.labkey.api.exp.api.NameExpressionOptionService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.DomainUtil; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpMaterialTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.inventory.InventoryService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.qc.SampleStatusService; -import org.labkey.api.query.AliasedColumn; -import org.labkey.api.query.DetailsURL; -import org.labkey.api.query.ExprColumn; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.LookupForeignKey; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryForeignKey; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.QueryUrls; -import org.labkey.api.query.RowIdForeignKey; -import org.labkey.api.query.SchemaKey; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.column.BuiltInColumnTypes; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.UserPrincipal; -import 
org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HeartBeat; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.StringExpression; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import org.labkey.experiment.ExpDataIterators; -import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.lineage.LineageMethod; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; 
-import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLETYPE_FILE_DIRECTORY; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; -import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; -import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; - -public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable -{ - ExpSampleTypeImpl _ss; - Set _uniqueIdFields; - boolean _supportTableRules = true; - - public static final Set MATERIAL_ALT_MERGE_KEYS; - public static final Set MATERIAL_ALT_UPDATE_KEYS; - public static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); - static { - MATERIAL_ALT_MERGE_KEYS = Set.of(Column.MaterialSourceId.name(), Column.Name.name()); - MATERIAL_ALT_UPDATE_KEYS = Set.of(Column.LSID.name()); - - Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); - IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); - amountValidator.setExpressionValue("~gte=0"); - amountValidator.setErrorMessage("Amounts must be non-negative."); - amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); - AMOUNT_RANGE_VALIDATORS.add(amountValidator); - } - - public ExpMaterialTableImpl(UserSchema schema, 
ContainerFilter cf, @Nullable ExpSampleType sampleType) - { - super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); - setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); - setPublicSchemaName(ExpSchema.SCHEMA_NAME); - addAllowablePermission(InsertPermission.class); - addAllowablePermission(UpdatePermission.class); - addAllowablePermission(MoveEntitiesPermission.class); - setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); - setSampleType(sampleType); - } - - public Set getUniqueIdFields() - { - if (_uniqueIdFields == null) - { - _uniqueIdFields = new CaseInsensitiveHashSet(); - _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); - } - return _uniqueIdFields; - } - - @Override - protected ColumnInfo resolveColumn(String name) - { - ColumnInfo result = super.resolveColumn(name); - if (result == null) - { - if ("CpasType".equalsIgnoreCase(name)) - result = createColumn(Column.SampleSet.name(), Column.SampleSet); - else if (Column.Property.name().equalsIgnoreCase(name)) - result = createPropertyColumn(Column.Property.name()); - else if (Column.QueryableInputs.name().equalsIgnoreCase(name)) - result = createColumn(Column.QueryableInputs.name(), Column.QueryableInputs); - } - return result; - } - - @Override - public ColumnInfo getExpObjectColumn() - { - var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); - ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); - return ret; - } - - @Override - public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) - { - if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) - { - // Special case sample auditing to help build a useful timeline view - return SampleTypeServiceImpl.get(); - } - - 
return super.getAuditHandler(auditBehaviorType); - } - - @Override - public MutableColumnInfo createColumn(String alias, Column column) - { - switch (column) - { - case Folder -> - { - return wrapColumn(alias, _rootTable.getColumn("Container")); - } - case LSID -> - { - return wrapColumn(alias, _rootTable.getColumn(Column.LSID.name())); - } - case MaterialSourceId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.MaterialSourceId.name())); - columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } - }); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case RootMaterialRowId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.RootMaterialRowId.name())); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.RowId.name())); - columnInfo.setLabel("Root Material"); - columnInfo.setUserEditable(false); - - // NK: Here we mark the column as not required AND nullable which is the opposite of the database where - // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. - // See ExpMaterialValidatorIterator. 
- columnInfo.setRequired(false); - columnInfo.setNullable(true); - - return columnInfo; - } - case AliquotedFromLSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.AliquotedFromLSID.name())); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.LSID.name())); - columnInfo.setLabel("Aliquoted From Parent"); - return columnInfo; - } - case IsAliquot -> - { - String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RootMaterialRowId.name(); - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RowId.name(); - ExprColumn columnInfo = new ExprColumn(this, FieldKey.fromParts(Column.IsAliquot.name()), new SQLFragment( - "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) - .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 - .append(" ELSE NULL END)"), JdbcType.BOOLEAN); - columnInfo.setLabel("Is Aliquot"); - columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(false); - return columnInfo; - } - case Name -> - { - var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); - // shut off this field in insert and update views if user specified names are not allowed - if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) - { - nameCol.setShownInInsertView(false); - nameCol.setShownInUpdateView(false); - } - return nameCol; - } - case RawAmount -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit 
(if defined), currently on hand."); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - case StoredAmount -> - { - String label = StoredAmount.label(); - Set importAliases = Set.of(label, "Stored Amount"); - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setLabel(label); - columnInfo.setImportAliasesSet(importAliases); - columnInfo.setDescription("The amount of this sample currently on hand."); - return columnInfo; - } - } - case RawUnits -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - return columnInfo; - } - case Units -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return 
getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Column.Units.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); - columnInfo.setFk(fk); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - return columnInfo; - } - } - case Description -> - { - return wrapColumn(alias, _rootTable.getColumn(Column.Description.name())); - } - case SampleSet -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); - // NOTE: populateColumns() overwrites this with a QueryForeignKey. Can this be removed? 
- columnInfo.setFk(new LookupForeignKey(getContainerFilter(), null, null, null, null, "LSID", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } - }); - return columnInfo; - } - case SourceProtocolLSID -> - { - // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module - ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( - "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + - " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); - columnInfo.setLabel("Source Protocol"); - columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case SourceProtocolApplication -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); - columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - columnInfo.setAutoIncrement(false); - return columnInfo; - } - case SourceApplicationInput -> - { - var col = createEdgeColumn(alias, Column.SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a 
reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); - col.setHidden(true); - return col; - } - case RunApplication -> - { - SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") - .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") - .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") - .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) - .append(")"); - - var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); - col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); - col.setUserEditable(false); - col.setReadOnly(true); - col.setHidden(true); - return col; - } - case RunApplicationOutput -> - { - var col = createEdgeColumn(alias, Column.RunApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); - return col; - } - case Run -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); - ret.setReadOnly(true); - return ret; - } - case RowId -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); - // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary key's default sort direction - ret.setSortDirection(Sort.SortDirection.DESC); - ret.setFk(new RowIdForeignKey(ret)); - ret.setUserEditable(false); - ret.setHidden(true); - ret.setShownInInsertView(false); - ret.setHasDbSequence(true); - ret.setIsRootDbSequence(true); - return ret; - } - case Property -> - { - return createPropertyColumn(alias); - } - case Flag -> - { - return createFlagColumn(alias); - } - case Created -> - { - return wrapColumn(alias, _rootTable.getColumn("Created")); - } - case CreatedBy -> - { - return createUserColumn(alias, 
_rootTable.getColumn("CreatedBy")); - } - case Modified -> - { - return wrapColumn(alias, _rootTable.getColumn("Modified")); - } - case ModifiedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); - } - case Alias -> - { - return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); - } - case Inputs -> - { - return createLineageColumn(this, alias, true, false); - } - case QueryableInputs -> - { - return createLineageColumn(this, alias, true, true); - } - case Outputs -> - { - return createLineageColumn(this, alias, false, false); - } - case Properties -> - { - return createPropertiesColumn(alias); - } - case SampleState -> - { - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); - var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); - ret.setLabel("Status"); - ret.setHidden(!statusEnabled); - ret.setShownInDetailsView(statusEnabled); - ret.setShownInInsertView(statusEnabled); - ret.setShownInUpdateView(statusEnabled); - ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - return ret; - } - case AliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); - ret.setLabel(ALIQUOT_COUNT_LABEL); - return ret; - } - case AliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(ALIQUOT_VOLUME_LABEL); - return ret; - } - case AvailableAliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); - return ret; - } - case AvailableAliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); - 
ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); - return ret; - } - case AliquotUnit -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); - ret.setShownInDetailsView(false); - return ret; - } - case MaterialExpDate -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("MaterialExpDate")); - ret.setLabel("Expiration Date"); - ret.setShownInDetailsView(true); - ret.setShownInInsertView(true); - ret.setShownInUpdateView(true); - return ret; - } - default -> throw new IllegalArgumentException("Unknown column " + column); - } - } - - @Override - public MutableColumnInfo createPropertyColumn(String alias) - { - var ret = super.createPropertyColumn(alias); - if (_ss != null) - { - final TableInfo t = _ss.getTinfo(); - if (t != null) - { - ret.setFk(new LookupForeignKey() - { - @Override - public TableInfo getLookupTableInfo() - { - return t; - } - - @Override - protected ColumnInfo getPkColumn(TableInfo table) - { - return t.getColumn("lsid"); - } - }); - } - } - ret.setIsUnselectable(true); - ret.setDescription("A holder for any custom fields associated with this sample"); - ret.setHidden(true); - return ret; - } - - private Unit getSampleTypeUnit() - { - Unit typeUnit = null; - if (_ss != null && _ss.getMetricUnit() != null) - typeUnit = Unit.fromName(_ss.getMetricUnit()); - return typeUnit; - } - - private void setSampleType(@Nullable ExpSampleType st) - { - checkLocked(); - if (_ss != null) - { - throw new IllegalStateException("Cannot unset sample type"); - } - if (st != null && !(st instanceof ExpSampleTypeImpl)) - { - throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); - } - _ss = (ExpSampleTypeImpl) st; - if (_ss != null) - { - setPublicSchemaName(SamplesSchema.SCHEMA_NAME); - setName(st.getName()); - - String description = _ss.getDescription(); - if (StringUtils.isEmpty(description)) - description = "Contains one row per sample in the 
" + _ss.getName() + " sample type"; - setDescription(description); - - if (canUserAccessPhi()) - { - ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); - setImportURL(new DetailsURL(url)); - } - } - } - - public ExpSampleType getSampleType() - { - return _ss; - } - - @Override - protected void populateColumns() - { - var st = getSampleType(); - var rowIdCol = addColumn(Column.RowId); - addColumn(Column.MaterialSourceId); - addColumn(Column.SourceProtocolApplication); - addColumn(Column.SourceApplicationInput); - addColumn(Column.RunApplication); - addColumn(Column.RunApplicationOutput); - addColumn(Column.SourceProtocolLSID); - - var nameCol = addColumn(Column.Name); - if (st != null && st.hasNameAsIdCol()) - { - // Show the Name field but don't mark is as required when using name expressions - if (st.hasNameExpression()) - { - var nameExpression = st.getNameExpression(); - nameCol.setNameExpression(nameExpression); - nameCol.setNullable(true); - String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); - String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); - nameCol.setDescription(desc); - } - else - { - nameCol.setNullable(false); - } - } - else - { - nameCol.setReadOnly(true); - nameCol.setShownInInsertView(false); - } - - addColumn(Column.Alias); - addColumn(Column.Description); - - var typeColumnInfo = addColumn(Column.SampleSet); - typeColumnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) - { - @Override - protected ContainerFilter getLookupContainerFilter() - { - // Be sure that we can resolve the sample type if it's defined in a separate container. - // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
- // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has - // parents in current sample type and a sample type in the parent container - Set containers = new HashSet<>(); - if (null != st) - containers.add(st.getContainer()); - containers.add(getContainer()); - if (getContainer().getProject() != null) - containers.add(getContainer().getProject()); - containers.add(ContainerManager.getSharedContainer()); - ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); - - if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) - cf = new UnionContainerFilter(_containerFilter, cf); - return cf; - } - }); - - typeColumnInfo.setReadOnly(true); - typeColumnInfo.setUserEditable(false); - typeColumnInfo.setShownInInsertView(false); - - addColumn(Column.MaterialExpDate); - addContainerColumn(Column.Folder, null); - var runCol = addColumn(Column.Run); - runCol.setFk(new ExpSchema(_userSchema.getUser(), getContainer()).getRunIdForeignKey(getContainerFilter())); - runCol.setShownInInsertView(false); - runCol.setShownInUpdateView(false); - - var colLSID = addColumn(Column.LSID); - colLSID.setHidden(true); - colLSID.setReadOnly(true); - colLSID.setUserEditable(false); - colLSID.setShownInInsertView(false); - colLSID.setShownInDetailsView(false); - colLSID.setShownInUpdateView(false); - - var rootRowId = addColumn(Column.RootMaterialRowId); - rootRowId.setHidden(true); - rootRowId.setReadOnly(true); - rootRowId.setUserEditable(false); - rootRowId.setShownInInsertView(false); - rootRowId.setShownInDetailsView(false); - rootRowId.setShownInUpdateView(false); - - var aliquotParentLSID = addColumn(Column.AliquotedFromLSID); - aliquotParentLSID.setHidden(true); - aliquotParentLSID.setReadOnly(true); - aliquotParentLSID.setUserEditable(false); - aliquotParentLSID.setShownInInsertView(false); - aliquotParentLSID.setShownInDetailsView(false); - 
aliquotParentLSID.setShownInUpdateView(false); - - addColumn(Column.IsAliquot); - addColumn(Column.Created); - addColumn(Column.CreatedBy); - addColumn(Column.Modified); - addColumn(Column.ModifiedBy); - - List defaultCols = new ArrayList<>(); - defaultCols.add(FieldKey.fromParts(Column.Name)); - defaultCols.add(FieldKey.fromParts(Column.MaterialExpDate)); - boolean hasProductFolders = getContainer().hasProductFolders(); - if (hasProductFolders) - defaultCols.add(FieldKey.fromParts(Column.Folder)); - defaultCols.add(FieldKey.fromParts(Column.Run)); - - if (st == null) - defaultCols.add(FieldKey.fromParts(Column.SampleSet)); - - addColumn(Column.Flag); - - var statusColInfo = addColumn(Column.SampleState); - boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); - statusColInfo.setShownInDetailsView(statusEnabled); - statusColInfo.setShownInInsertView(statusEnabled); - statusColInfo.setShownInUpdateView(statusEnabled); - statusColInfo.setHidden(!statusEnabled); - statusColInfo.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - if (statusEnabled) - defaultCols.add(FieldKey.fromParts(Column.SampleState)); - statusColInfo.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - - // TODO is this a real Domain??? 
- if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) - { - defaultCols.add(FieldKey.fromParts(Column.Flag)); - addSampleTypeColumns(st, defaultCols); - - setName(_ss.getName()); - - ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); - gridUrl.addParameter("rowId", st.getRowId()); - setGridURL(new DetailsURL(gridUrl)); - } - - List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); - defaultCols.addAll(calculatedFieldKeys); - - addColumn(Column.AliquotCount); - addColumn(Column.AliquotVolume); - addColumn(Column.AliquotUnit); - addColumn(Column.AvailableAliquotCount); - addColumn(Column.AvailableAliquotVolume); - - addColumn(Column.StoredAmount); - defaultCols.add(FieldKey.fromParts(Column.StoredAmount)); - - addColumn(Column.Units); - defaultCols.add(FieldKey.fromParts(Column.Units)); - - var rawAmountColumn = addColumn(Column.RawAmount); - rawAmountColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.StoredAmount)); - - } - }; - } - }); - rawAmountColumn.setHidden(true); - rawAmountColumn.setShownInDetailsView(false); - rawAmountColumn.setShownInInsertView(false); - rawAmountColumn.setShownInUpdateView(false); - - var rawUnitsColumn = addColumn(Column.RawUnits); - rawUnitsColumn.setDisplayColumnFactory(new DisplayColumnFactory() - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new DataColumn(colInfo) - { - @Override - public void addQueryFieldKeys(Set keys) - { - super.addQueryFieldKeys(keys); - keys.add(FieldKey.fromParts(Column.Units)); - - } - }; - } - }); - rawUnitsColumn.setHidden(true); - rawUnitsColumn.setShownInDetailsView(false); - 
rawUnitsColumn.setShownInInsertView(false); - rawUnitsColumn.setShownInUpdateView(false); - - if (InventoryService.get() != null && (st == null || !st.isMedia())) - defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); - - SQLFragment sql; - UserSchema plateUserSchema; - // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset - // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying - // against tables in that schema. - if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) - plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); - else - plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); - - if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) - { - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ Column.RowId.name(); - SQLFragment existsSubquery = new SQLFragment() - .append("SELECT 1 FROM ") - .append(plateUserSchema.getTable("Well"), "well") - .append(" WHERE well.sampleid = ").append(rowIdField); - - sql = new SQLFragment() - .append("CASE WHEN EXISTS (") - .append(existsSubquery) - .append(") THEN 'Plated' ") - .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 - .append("ELSE NULL END"); - } - else - { - sql = new SQLFragment("(SELECT NULL)"); - } - var col = new ExprColumn(this, Column.IsPlated.name(), sql, JdbcType.VARCHAR); - col.setDescription("Whether the sample that has been plated, if plating is supported."); - col.setUserEditable(false); - col.setReadOnly(true); - col.setShownInDetailsView(false); - col.setShownInInsertView(false); - col.setShownInUpdateView(false); - if (plateUserSchema != null) - col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + Column.RowId.name() + "}")); - addColumn(col); - - addVocabularyDomains(); - - addColumn(Column.Properties); - - var colInputs = addColumn(Column.Inputs); - addMethod("Inputs", new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); - - var colOutputs = addColumn(Column.Outputs); - addMethod("Outputs", new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); - - addExpObjectMethod(); - - ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); - DetailsURL url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); - nameCol.setURL(url); - rowIdCol.setURL(url); - setDetailsURL(url); - - if (canUserAccessPhi()) - { - ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); - setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); - - ActionURL insertActionURL = 
PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); - setInsertURL(new DetailsURL(insertActionURL)); - } - else - { - setImportURL(LINK_DISABLER); - setInsertURL(LINK_DISABLER); - setUpdateURL(LINK_DISABLER); - } - - setTitleColumn(Column.Name.toString()); - - setDefaultVisibleColumns(defaultCols); - - MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn("rowid"), _ss, true); - addColumn(lineageLookup); - } - - private ContainerFilter getSampleStatusLookupContainerFilter() - { - // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared - // for the sample status lookup since in the app project context we want to share status definitions across - // a given project instead of creating duplicate statuses in each subfolder project. - ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); - type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; - return type.create(getUserSchema()); - } - - @Override - public Domain getDomain() - { - return getDomain(false); - } - - @Override - public Domain getDomain(boolean forUpdate) - { - return _ss == null ? 
null : _ss.getDomain(forUpdate); - } - - - public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) - { - if (nameExpression == null) - return currentDescription; - - StringBuilder sb = new StringBuilder(); - if (currentDescription != null && !currentDescription.isEmpty()) - { - sb.append(currentDescription); - if (!currentDescription.endsWith(".")) - sb.append("."); - sb.append("\n"); - } - - sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); - sb.append(nameExpression); - sb.append("."); - if (!StringUtils.isEmpty(nameExpressionPreview)) - { - sb.append("\nExample of name that will be generated from the current pattern: \n"); - sb.append(nameExpressionPreview); - } - - return sb.toString(); - } - - private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) - { - TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); - if (null == dbTable) - return; - - UserSchema schema = getUserSchema(); - Domain domain = st.getDomain(); - ColumnInfo rowIdColumn = getColumn(Column.RowId); - ColumnInfo lsidColumn = getColumn(Column.LSID); - ColumnInfo nameColumn = getColumn(Column.Name); - - visibleColumns.remove(FieldKey.fromParts(Column.Run.name())); - - // When not using name expressions, mark the ID columns as required. - // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. - final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); - - Set mvColumns = domain.getProperties().stream() - .filter(ImportAliasable::isMvEnabled) - .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) - .collect(Collectors.toSet()); - - for (ColumnInfo dbColumn : dbTable.getColumns()) - { - // Don't include PHI columns in full text search index - // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() - if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) - continue; - - if ( - rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - lsidColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) - ) - { - continue; - } - - var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); - - // TODO missing values? comments? flags? - DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); - var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); - if (propColumn.getName().equalsIgnoreCase("genid")) - { - propColumn.setHidden(true); - propColumn.setUserEditable(false); - propColumn.setShownInDetailsView(false); - propColumn.setShownInInsertView(false); - propColumn.setShownInUpdateView(false); - } - if (null != dp) - { - PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), - SchemaKey.fromParts("samples"), st.getName(), FieldKey.fromParts("RowId"), null, getLookupContainerFilter()); - - if (idCols.contains(dp)) - { - propColumn.setNullable(false); - propColumn.setDisplayColumnFactory(new IdColumnRendererFactory()); - } - - // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected - if (!propColumn.isHidden()) - { - visibleColumns.add(propColumn.getFieldKey()); - } - - if (propColumn.isMvEnabled()) - { - // The column in the physical table has a "_MVIndicator" suffix, but we want to expose - // it with a "MVIndicator" suffix (no underscore) - var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, - StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); - mvColumn.setLabel(dp.getLabel() != null ? 
dp.getLabel() : dp.getName() + " MV Indicator"); - mvColumn.setSqlTypeName("VARCHAR"); - mvColumn.setPropertyURI(dp.getPropertyURI()); - mvColumn.setNullable(true); - mvColumn.setUserEditable(false); - mvColumn.setHidden(true); - mvColumn.setMvIndicatorColumn(true); - - addColumn(mvColumn); - propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); - } - } - - if (!mvColumns.contains(propColumn.getFieldKey())) - addColumn(propColumn); - - } - - setDefaultVisibleColumns(visibleColumns); - } - - // These are mostly fields that are wrapped by fields with different names (see createColumn()) - // we could handle each case separately, but this is easier - static final Set wrappedFieldKeys = Set.of( - new FieldKey(null, "objectid"), - new FieldKey(null, "RowId"), - new FieldKey(null, "LSID"), // Flag - new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication - new FieldKey(null, "runId"), // Run, RunApplication - new FieldKey(null, "CpasType")); // SampleSet - static final Set ALL_COLUMNS = Set.of(); - - private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) - { - if (null == selectedColumns) - return ALL_COLUMNS; - selectedColumns = new TreeSet<>(selectedColumns); - if (selectedColumns.contains(new FieldKey(null, StoredAmount))) - selectedColumns.add(new FieldKey(null, Units)); - if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) - selectedColumns.add(new FieldKey(null, Column.AliquotedFromLSID.name())); - if (selectedColumns.contains(new FieldKey(null, Column.IsAliquot.name()))) - selectedColumns.add(new FieldKey(null, Column.RootMaterialRowId.name())); - selectedColumns.addAll(wrappedFieldKeys); - if (null != getFilter()) - selectedColumns.addAll(getFilter().getAllFieldKeys()); - return selectedColumns; - } - - @NotNull - @Override - public SQLFragment getFromSQL(String alias) - { - return getFromSQL(alias, null); - } - - @Override - public SQLFragment 
getFromSQLExpanded(String alias, Set selectedColumns) - { - SQLFragment sql = new SQLFragment("("); - boolean usedMaterialized; - - - // SELECT FROM - /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) - * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. - * Also, we may use RemapCache for material lookup outside a transaction - */ - boolean onlyMaterialColums = false; - if (null != selectedColumns && !selectedColumns.isEmpty()) - onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); - if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) - { - sql.append(getMaterializedSQL()); - usedMaterialized = true; - } - else - { - sql.append(getJoinSQL(selectedColumns)); - usedMaterialized = false; - } - - // WHERE - SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); - sql.append("\n").append(filterFrag); - if (_ss != null && !usedMaterialized) - { - if (!filterFrag.isEmpty()) - sql.append(" AND "); - else - sql.append(" WHERE "); - sql.append("CpasType = ").appendValue(_ss.getLSID()); - } - sql.append(") ").appendIdentifier(alias); - - return getTransformedFromSQL(sql); - } - - @Override - public void setSupportTableRules(boolean b) - { - this._supportTableRules = b; - } - - @Override - public boolean supportTableRules() // intentional override - { - return _supportTableRules; - } - - @Override - protected @NotNull TableRules findTableRules() - { - Container definitionContainer = getUserSchema().getContainer(); - if (null != _ss) - definitionContainer = _ss.getContainer(); - return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); - } - - - static class InvalidationCounters - { - public final AtomicLong update, insert, delete, 
rollup; - InvalidationCounters() - { - long l = System.currentTimeMillis(); - update = new AtomicLong(l); - insert = new AtomicLong(l); - delete = new AtomicLong(l); - rollup = new AtomicLong(l); - } - } - - static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); - static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); - static final AtomicBoolean initializedListeners = new AtomicBoolean(false); - - // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() - public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - var scope = ExperimentServiceImpl.getExpSchema().getScope(); - var runnable = new RefreshMaterializedViewRunnable(lsid, reason); - scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); - } - - private static class RefreshMaterializedViewRunnable implements Runnable - { - private final String _lsid; - private final SampleTypeServiceImpl.SampleChangeType _reason; - - public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - _lsid = lsid; - _reason = reason; - } - - @Override - public void run() - { - if (_reason == schema) - { - /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. - * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the - * MQH from the cache to force the SQL to be regenerated. 
- */ - _materializedQueries.remove(_lsid); - return; - } - var counters = getInvalidateCounters(_lsid); - switch (_reason) - { - case insert -> counters.insert.incrementAndGet(); - case rollup -> counters.rollup.incrementAndGet(); - case update -> counters.update.incrementAndGet(); - case delete -> counters.delete.incrementAndGet(); - default -> throw new IllegalStateException("Unexpected value: " + _reason); - } - } - - @Override - public boolean equals(Object obj) - { - return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); - } - } - - private static InvalidationCounters getInvalidateCounters(String lsid) - { - if (!initializedListeners.getAndSet(true)) - { - CacheManager.addListener(_invalidationCounters::clear); - } - return _invalidationCounters.computeIfAbsent(lsid, (unused) -> - new InvalidationCounters() - ); - } - - /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ - private SQLFragment getMaterializedSQL() - { - if (null == _ss) - return getJoinSQL(null); - - var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> - { - /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. - * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, - * even if just to help with race-conditions. - * - * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
- */ - SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); - return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") - .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") - .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") - .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) - .build(); - }); - return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); - } - - - /** - * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. - * It does not help with incremental updates (except for providing the upsert() method). - * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
- */ - static class _MaterializedQueryHelper extends MaterializedQueryHelper - { - final String _lsid; - - static class Builder extends MaterializedQueryHelper.Builder - { - String _lsid; - - public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) - { - super(prefix, scope, select); - this._lsid = lsid; - } - - @Override - public _MaterializedQueryHelper build() - { - return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); - } - } - - _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, - boolean isSelectIntoSql) - { - super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); - this._lsid = lsid; - } - - @Override - protected Materialized createMaterialized(String txCacheKey) - { - DbSchema temp = DbSchema.getTemp(); - String name = _prefix + "_" + GUID.makeHash(); - _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); - initMaterialized(materialized); - return materialized; - } - - @Override - protected void incrementalUpdateBeforeSelect(Materialized m) - { - _Materialized materialized = (_Materialized) m; - - boolean lockAcquired = false; - try - { - lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); - if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) - throw materialized._loadException; - - if (!materialized.incrementalDeleteCheck.stillValid(0)) - executeIncrementalDelete(); - if (!materialized.incrementalRollupCheck.stillValid(0)) - executeIncrementalRollup(); - if (!materialized.incrementalInsertCheck.stillValid(0)) - executeIncrementalInsert(); - } - catch (RuntimeException|InterruptedException ex) - { - RuntimeException rex = UnexpectedException.wrap(ex); - 
materialized.setError(rex); - // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. - - // Ensure that next refresh starts clean - _materializedQueries.remove(_lsid); - getInvalidateCounters(_lsid).update.incrementAndGet(); - throw rex; - } - finally - { - if (lockAcquired) - materialized.getLock().unlock(); - } - } - - void upsertWithRetry(SQLFragment sql) - { - // not actually read-only, but we don't want to start an explicit transaction - _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); - } - - void executeIncrementalInsert() - { - SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") - .append("SELECT * FROM (") - .append(getViewSourceSql()).append(") viewsource_\n") - .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); - upsertWithRetry(incremental); - } - - void executeIncrementalDelete() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) - // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) - SQLFragment incremental = new SQLFragment() - .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") - .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); - upsertWithRetry(incremental); - } - - void executeIncrementalRollup() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - SQLFragment incremental = new SQLFragment(); - if (d.isPostgreSQL()) - { - incremental - .append("UPDATE temp.${NAME} AS st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM exp.Material AS expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") - .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") - .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") - .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") - .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") - .append(")"); - } - else - { - // SQL Server 2022 supports IS DISTINCT FROM - incremental - .append("UPDATE st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM temp.${NAME} st, exp.Material expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") - .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") - .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") - .append(")"); - } - upsertWithRetry(incremental); - } - } - - static class _Materialized extends MaterializedQueryHelper.Materialized - { - final MaterializedQueryHelper.Invalidator incrementalInsertCheck; - final MaterializedQueryHelper.Invalidator incrementalRollupCheck; - final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; - - _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) - { - super(mqh, tableName, cacheKey, created, sql); - final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); - incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); - incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); - incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); - } - - @Override - public void reset() - { - super.reset(); - long now = HeartBeat.currentTimeMillis(); - incrementalInsertCheck.stillValid(now); - incrementalRollupCheck.stillValid(now); - incrementalDeleteCheck.stillValid(now); - } - - Lock getLock() - { - return _loadingLock; - } - } - - - /* SELECT and JOIN, does not include WHERE */ - private SQLFragment getJoinSQL(Set selectedColumns) - { - TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); - Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); - provisionedCols.remove(Column.RowId.name()); - provisionedCols.remove(Column.LSID.name()); - provisionedCols.remove(Column.Name.name()); - boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); - - boolean hasSampleColumns = false; - boolean hasAliquotColumns = false; - - Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); - selectedColumns = computeInnerSelectedColumns(selectedColumns); - - SQLFragment sql = new SQLFragment(); - sql.appendComment("", getSqlDialect()); - sql.append("SELECT "); - String comma = ""; - for (String materialCol : materialCols) - { - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) - { - sql.append(comma).append("m.").appendIdentifier(materialCol); - comma = ", "; - } - } - if (null != provisioned && hasProvisionedColumns) - { - for (ColumnInfo propertyColumn : provisioned.getColumns()) - { - // don't select twice - if ( - Column.RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.LSID.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Column.Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) - ) - { - continue; - } - - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) - { - sql.append(comma); - boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) - || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); - if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - 
hasAliquotColumns = true; - } - else if (rootField) - { - sql.append(propertyColumn.getValueSql("m_sample")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasSampleColumns = true; - } - else - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - comma = ", "; - } - } - } - - sql.append("\nFROM "); - sql.append(_rootTable, "m"); - if (hasSampleColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); - if (hasAliquotColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); - - sql.appendComment("", getSqlDialect()); - return sql; - } - - private class IdColumnRendererFactory implements DisplayColumnFactory - { - @Override - public DisplayColumn createRenderer(ColumnInfo colInfo) - { - return new IdColumnRenderer(colInfo); - } - } - - private static class IdColumnRenderer extends DataColumn - { - public IdColumnRenderer(ColumnInfo col) - { - super(col); - } - - @Override - protected boolean isDisabledInput(RenderContext ctx) - { - return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; - } - } - - private static class SampleTypeAmountDisplayColumn extends ExprColumn - { - public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) - { - super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" / ? AS ") - .append(parent.getSqlDialect().isPostgreSQL() ? 
"DECIMAL" : "DOUBLE PRECISION") - .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.getValue()), - JdbcType.DOUBLE); - - setLabel(label); - setImportAliasesSet(importAliases); - } - } - - private static class SampleTypeUnitDisplayColumn extends ExprColumn - { - public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) - { - super(parent, FieldKey.fromParts(Column.Units.name()), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? THEN ? ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.toString()), - JdbcType.VARCHAR); - } - } - - @Override - public QueryUpdateService getUpdateService() - { - return new SampleTypeUpdateServiceDI(this, _ss); - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) - { - if (_ss == null) - { - // Allow read and delete for exp.Materials. - // Don't allow insert/update on exp.Materials without a sample type. - if (perm == DeletePermission.class || perm == ReadPermission.class) - return getContainer().hasPermission(user, perm); - return false; - } - - if (_ss.isMedia() && perm == ReadPermission.class) - return getContainer().hasPermission(user, MediaReadPermission.class); - - return super.hasPermission(user, perm); - } - - @NotNull - @Override - public List getUniqueIndices() - { - // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" - // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. - // Some FKs to ExpMaterialTable don't include the "Name" column (e.g. 
NabBaseTable.Specimen) - String indexName = "idx_material_ak"; - List ret = new ArrayList<>(super.getUniqueIndices()); - if (getColumn("Name") != null) - ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); - else - ret.removeIf( def -> def.name().equals(indexName)); - return Collections.unmodifiableList(ret); - } - - - // - // UpdatableTableInfo - // - - - @Override - public @Nullable Long getOwnerObjectId() - { - return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); - } - - @Nullable - @Override - public CaseInsensitiveHashMap remapSchemaColumns() - { - CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); - - if (null != getRealTable().getColumn("container") && null != getColumn("folder")) - { - m.put("container", "folder"); - } - - for (ColumnInfo col : getColumns()) - { - if (col.getMvColumnName() != null) - m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); - } - - return m; - } - - @Override - public Set getAltMergeKeys(DataIteratorContext context) - { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); - - return MATERIAL_ALT_MERGE_KEYS; - } - - @NotNull - @Override - public Set getAltKeysForUpdate() - { - return MATERIAL_ALT_UPDATE_KEYS; - } - - @Override - @NotNull - public List> getAdditionalRequiredInsertColumns() - { - if (getSampleType() == null) - return Collections.emptyList(); - - try - { - return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); - } - catch (IOException e) - { - return Collections.emptyList(); - } - } - - @Override - public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) - { - TableInfo propertiesTable = _ss.getTinfo(); - - // The specimens sample type doesn't have a properties table - if (propertiesTable == null) - { - return data; - } - 
- long sampleTypeObjectId = requireNonNull(getOwnerObjectId()); - - // TODO: subclass PersistDataIteratorBuilder to index Materials! not DataClass! - try - { - var persist = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, getUserSchema().getContainer(), getUserSchema().getUser(), _ss.getImportAliasesIncludingAliquot(), sampleTypeObjectId) - .setFileLinkDirectory(SAMPLETYPE_FILE_DIRECTORY); - ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); - - persist.setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> - { - List lsids = searchIndexDataKeys.lsids(); - List orderedRowIds = searchIndexDataKeys.orderedRowIds(); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterials(sublist)) - expMaterial.index(q, this); - }) - ); - - ListUtils.partition(lsids, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterialsByLsid(sublist)) - expMaterial.index(q, this); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT) - ); - - DataIteratorBuilder builder = LoggingDataIterator.wrap(persist); - return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(builder, getUserSchema().getContainer(), getUserSchema().getUser(), ExperimentService.get().getTinfoMaterialAliasMap(), _ss, true)); - } - catch (IOException e) - { - throw new UncheckedIOException(e); - } - } - - @Override - @NotNull - public AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.DETAILED; - } - - static final Set excludeFromDetailedAuditField; - static - { - var set = new CaseInsensitiveHashSet(); - 
set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); - set.addAll(ExpDataIterators.NOT_FOR_UPDATE); - // We don't want the inventory columns to show up in the sample timeline audit record; - // they are captured in their own audit record. - set.addAll(InventoryService.InventoryStatusColumn.names()); - excludeFromDetailedAuditField = Collections.unmodifiableSet(set); - } - - @Override - public @NotNull Set getExcludedDetailedUpdateAuditFields() - { - // uniqueId fields don't change in reality, so exclude them from the audit updates - Set excluded = new CaseInsensitiveHashSet(); - excluded.addAll(this.getUniqueIdFields()); - excluded.addAll(excludeFromDetailedAuditField); - return excluded; - } - - @Override - public List> getImportTemplates(ViewContext ctx) - { - // respect any metadata overrides - if (getRawImportTemplates() != null) - return super.getImportTemplates(ctx); - - List> templates = new ArrayList<>(); - ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); - url.addParameter("headerType", ColumnHeaderType.ImportField.name()); - try - { - if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) - { - for (String aliasKey : getSampleType().getImportAliases().keySet()) - url.addParameter("includeColumn", aliasKey); - } - } - catch (IOException e) - {} - templates.add(Pair.of("Download Template", url.toString())); - return templates; - } - - @Override - public void overlayMetadata(String tableName, UserSchema schema, Collection errors) - { - if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) - { - Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); - if (null != metadata) - { - overlayMetadata(metadata, schema, errors); - } - } - super.overlayMetadata(tableName, schema, errors); - } - - static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn - 
{ - private Unit typeUnit; - private boolean applySampleTypePrecision = true; - - public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) { - super(col, false); - this.typeUnit = typeUnit; - this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user - } - - @Override - public Object getDisplayValue(RenderContext ctx) - { - Object value = super.getDisplayValue(ctx); - if (this.applySampleTypePrecision && value != null) - { - int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); - value = Precision.round(Double.valueOf(value.toString()), scale); - } - return value; - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.experiment.api; + +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Precision; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.assay.plate.AssayPlateMetadataService; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.cache.BlockingCache; +import org.labkey.api.cache.CacheManager; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.compliance.TableRules; +import org.labkey.api.compliance.TableRulesManager; +import org.labkey.api.data.ColumnHeaderType; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DataColumn; +import org.labkey.api.data.DataRegion; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DisplayColumn; +import org.labkey.api.data.DisplayColumnFactory; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MaterializedQueryHelper; +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.data.PHI; +import org.labkey.api.data.RenderContext; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.UnionContainerFilter; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.LoggingDataIterator; +import org.labkey.api.dataiterator.SimpleTranslator; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.OntologyManager; 
+import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ExperimentUrls; +import org.labkey.api.exp.api.NameExpressionOptionService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.DomainUtil; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpMaterialTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.inventory.InventoryService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.qc.SampleStatusService; +import org.labkey.api.query.AliasedColumn; +import org.labkey.api.query.DetailsURL; +import org.labkey.api.query.ExprColumn; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.LookupForeignKey; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryForeignKey; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.QueryUrls; +import org.labkey.api.query.RowIdForeignKey; +import org.labkey.api.query.SchemaKey; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.column.BuiltInColumnTypes; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.UserPrincipal; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HeartBeat; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.StringExpression; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.experiment.ExpDataIterators; +import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.lineage.LineageMethod; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; 
+import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLETYPE_FILE_DIRECTORY; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotCount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AliquotVolume; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotCount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.AvailableAliquotVolume; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.StoredAmount; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Units; +import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; +import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; + +public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable +{ + ExpSampleTypeImpl _ss; + Set _uniqueIdFields; + boolean _supportTableRules = true; + + public static final Set MATERIAL_ALT_MERGE_KEYS; + public static final Set MATERIAL_ALT_UPDATE_KEYS; + public static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); + static { + MATERIAL_ALT_MERGE_KEYS = Set.of(Column.MaterialSourceId.name(), Column.Name.name()); + MATERIAL_ALT_UPDATE_KEYS = Set.of(Column.LSID.name()); + + Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); + IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); + amountValidator.setExpressionValue("~gte=0"); + amountValidator.setErrorMessage("Amounts must be non-negative."); + amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); + AMOUNT_RANGE_VALIDATORS.add(amountValidator); + } + + public ExpMaterialTableImpl(UserSchema schema, 
ContainerFilter cf, @Nullable ExpSampleType sampleType) + { + super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); + setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); + setPublicSchemaName(ExpSchema.SCHEMA_NAME); + addAllowablePermission(InsertPermission.class); + addAllowablePermission(UpdatePermission.class); + addAllowablePermission(MoveEntitiesPermission.class); + setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); + setSampleType(sampleType); + } + + public Set getUniqueIdFields() + { + if (_uniqueIdFields == null) + { + _uniqueIdFields = new CaseInsensitiveHashSet(); + _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); + } + return _uniqueIdFields; + } + + @Override + protected ColumnInfo resolveColumn(String name) + { + ColumnInfo result = super.resolveColumn(name); + if (result == null) + { + if ("CpasType".equalsIgnoreCase(name)) + result = createColumn(Column.SampleSet.name(), Column.SampleSet); + else if (Column.Property.name().equalsIgnoreCase(name)) + result = createPropertyColumn(Column.Property.name()); + else if (Column.QueryableInputs.name().equalsIgnoreCase(name)) + result = createColumn(Column.QueryableInputs.name(), Column.QueryableInputs); + } + return result; + } + + @Override + public ColumnInfo getExpObjectColumn() + { + var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); + ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); + return ret; + } + + @Override + public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) + { + if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) + { + // Special case sample auditing to help build a useful timeline view + return SampleTypeServiceImpl.get(); + } + + 
return super.getAuditHandler(auditBehaviorType); + } + + @Override + public MutableColumnInfo createColumn(String alias, Column column) + { + switch (column) + { + case Folder -> + { + return wrapColumn(alias, _rootTable.getColumn("Container")); + } + case LSID -> + { + return wrapColumn(alias, _rootTable.getColumn(Column.LSID.name())); + } + case MaterialSourceId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.MaterialSourceId.name())); + columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case RootMaterialRowId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.RootMaterialRowId.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.RowId.name())); + columnInfo.setLabel("Root Material"); + columnInfo.setUserEditable(false); + + // NK: Here we mark the column as not required AND nullable which is the opposite of the database where + // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. + // See ExpMaterialValidatorIterator. 
+ columnInfo.setRequired(false); + columnInfo.setNullable(true); + + return columnInfo; + } + case AliquotedFromLSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.AliquotedFromLSID.name())); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), Column.LSID.name())); + columnInfo.setLabel("Aliquoted From Parent"); + return columnInfo; + } + case IsAliquot -> + { + String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RootMaterialRowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + Column.RowId.name(); + ExprColumn columnInfo = new ExprColumn(this, FieldKey.fromParts(Column.IsAliquot.name()), new SQLFragment( + "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) + .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 + .append(" ELSE NULL END)"), JdbcType.BOOLEAN); + columnInfo.setLabel("Is Aliquot"); + columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(false); + return columnInfo; + } + case Name -> + { + var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); + // shut off this field in insert and update views if user specified names are not allowed + if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) + { + nameCol.setShownInInsertView(false); + nameCol.setShownInUpdateView(false); + } + return nameCol; + } + case RawAmount -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit 
(if defined), currently on hand."); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + case StoredAmount -> + { + String label = StoredAmount.label(); + Set importAliases = Set.of(label, "Stored Amount"); + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setLabel(label); + columnInfo.setImportAliasesSet(importAliases); + columnInfo.setDescription("The amount of this sample currently on hand."); + return columnInfo; + } + } + case RawUnits -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + return columnInfo; + } + case Units -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return 
getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Column.Units.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Column.Units.name())); + columnInfo.setFk(fk); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + return columnInfo; + } + } + case Description -> + { + return wrapColumn(alias, _rootTable.getColumn(Column.Description.name())); + } + case SampleSet -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); + // NOTE: populateColumns() overwrites this with a QueryForeignKey. Can this be removed? 
+ columnInfo.setFk(new LookupForeignKey(getContainerFilter(), null, null, null, null, "LSID", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + return columnInfo; + } + case SourceProtocolLSID -> + { + // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module + ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( + "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + + " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); + columnInfo.setLabel("Source Protocol"); + columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case SourceProtocolApplication -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); + columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + columnInfo.setAutoIncrement(false); + return columnInfo; + } + case SourceApplicationInput -> + { + var col = createEdgeColumn(alias, Column.SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a 
reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); + col.setHidden(true); + return col; + } + case RunApplication -> + { + SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") + .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") + .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") + .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) + .append(")"); + + var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); + col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); + col.setUserEditable(false); + col.setReadOnly(true); + col.setHidden(true); + return col; + } + case RunApplicationOutput -> + { + var col = createEdgeColumn(alias, Column.RunApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); + return col; + } + case Run -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); + ret.setReadOnly(true); + return ret; + } + case RowId -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); + // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary key's default sort direction + ret.setSortDirection(Sort.SortDirection.DESC); + ret.setFk(new RowIdForeignKey(ret)); + ret.setUserEditable(false); + ret.setHidden(true); + ret.setShownInInsertView(false); + ret.setHasDbSequence(true); + ret.setIsRootDbSequence(true); + return ret; + } + case Property -> + { + return createPropertyColumn(alias); + } + case Flag -> + { + return createFlagColumn(alias); + } + case Created -> + { + return wrapColumn(alias, _rootTable.getColumn("Created")); + } + case CreatedBy -> + { + return createUserColumn(alias, 
_rootTable.getColumn("CreatedBy")); + } + case Modified -> + { + return wrapColumn(alias, _rootTable.getColumn("Modified")); + } + case ModifiedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); + } + case Alias -> + { + return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); + } + case Inputs -> + { + return createLineageColumn(this, alias, true, false); + } + case QueryableInputs -> + { + return createLineageColumn(this, alias, true, true); + } + case Outputs -> + { + return createLineageColumn(this, alias, false, false); + } + case Properties -> + { + return createPropertiesColumn(alias); + } + case SampleState -> + { + boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); + var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); + ret.setLabel("Status"); + ret.setHidden(!statusEnabled); + ret.setShownInDetailsView(statusEnabled); + ret.setShownInInsertView(statusEnabled); + ret.setShownInUpdateView(statusEnabled); + ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + return ret; + } + case AliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); + ret.setLabel(ALIQUOT_COUNT_LABEL); + return ret; + } + case AliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(ALIQUOT_VOLUME_LABEL); + return ret; + } + case AvailableAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); + return ret; + } + case AvailableAliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); + 
ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); + return ret; + } + case AliquotUnit -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("AliquotUnit")); + ret.setShownInDetailsView(false); + return ret; + } + case MaterialExpDate -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("MaterialExpDate")); + ret.setLabel("Expiration Date"); + ret.setShownInDetailsView(true); + ret.setShownInInsertView(true); + ret.setShownInUpdateView(true); + return ret; + } + default -> throw new IllegalArgumentException("Unknown column " + column); + } + } + + @Override + public MutableColumnInfo createPropertyColumn(String alias) + { + var ret = super.createPropertyColumn(alias); + if (_ss != null) + { + final TableInfo t = _ss.getTinfo(); + if (t != null) + { + ret.setFk(new LookupForeignKey() + { + @Override + public TableInfo getLookupTableInfo() + { + return t; + } + + @Override + protected ColumnInfo getPkColumn(TableInfo table) + { + return t.getColumn("lsid"); + } + }); + } + } + ret.setIsUnselectable(true); + ret.setDescription("A holder for any custom fields associated with this sample"); + ret.setHidden(true); + return ret; + } + + private Unit getSampleTypeUnit() + { + Unit typeUnit = null; + if (_ss != null && _ss.getMetricUnit() != null) + typeUnit = Unit.fromName(_ss.getMetricUnit()); + return typeUnit; + } + + private void setSampleType(@Nullable ExpSampleType st) + { + checkLocked(); + if (_ss != null) + { + throw new IllegalStateException("Cannot unset sample type"); + } + if (st != null && !(st instanceof ExpSampleTypeImpl)) + { + throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); + } + _ss = (ExpSampleTypeImpl) st; + if (_ss != null) + { + setPublicSchemaName(SamplesSchema.SCHEMA_NAME); + setName(st.getName()); + + String description = _ss.getDescription(); + if (StringUtils.isEmpty(description)) + description = "Contains one row per sample in the 
" + _ss.getName() + " sample type"; + setDescription(description); + + if (canUserAccessPhi()) + { + ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); + setImportURL(new DetailsURL(url)); + } + } + } + + public ExpSampleType getSampleType() + { + return _ss; + } + + @Override + protected void populateColumns() + { + var st = getSampleType(); + var rowIdCol = addColumn(Column.RowId); + addColumn(Column.MaterialSourceId); + addColumn(Column.SourceProtocolApplication); + addColumn(Column.SourceApplicationInput); + addColumn(Column.RunApplication); + addColumn(Column.RunApplicationOutput); + addColumn(Column.SourceProtocolLSID); + + var nameCol = addColumn(Column.Name); + if (st != null && st.hasNameAsIdCol()) + { + // Show the Name field but don't mark is as required when using name expressions + if (st.hasNameExpression()) + { + var nameExpression = st.getNameExpression(); + nameCol.setNameExpression(nameExpression); + nameCol.setNullable(true); + String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); + String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); + nameCol.setDescription(desc); + } + else + { + nameCol.setNullable(false); + } + } + else + { + nameCol.setReadOnly(true); + nameCol.setShownInInsertView(false); + } + + addColumn(Column.Alias); + addColumn(Column.Description); + + var typeColumnInfo = addColumn(Column.SampleSet); + typeColumnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) + { + @Override + protected ContainerFilter getLookupContainerFilter() + { + // Be sure that we can resolve the sample type if it's defined in a separate container. + // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
+ // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has + // parents in current sample type and a sample type in the parent container + Set containers = new HashSet<>(); + if (null != st) + containers.add(st.getContainer()); + containers.add(getContainer()); + if (getContainer().getProject() != null) + containers.add(getContainer().getProject()); + containers.add(ContainerManager.getSharedContainer()); + ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); + + if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) + cf = new UnionContainerFilter(_containerFilter, cf); + return cf; + } + }); + + typeColumnInfo.setReadOnly(true); + typeColumnInfo.setUserEditable(false); + typeColumnInfo.setShownInInsertView(false); + + addColumn(Column.MaterialExpDate); + addContainerColumn(Column.Folder, null); + var runCol = addColumn(Column.Run); + runCol.setFk(new ExpSchema(_userSchema.getUser(), getContainer()).getRunIdForeignKey(getContainerFilter())); + runCol.setShownInInsertView(false); + runCol.setShownInUpdateView(false); + + var colLSID = addColumn(Column.LSID); + colLSID.setHidden(true); + colLSID.setReadOnly(true); + colLSID.setUserEditable(false); + colLSID.setShownInInsertView(false); + colLSID.setShownInDetailsView(false); + colLSID.setShownInUpdateView(false); + + var rootRowId = addColumn(Column.RootMaterialRowId); + rootRowId.setHidden(true); + rootRowId.setReadOnly(true); + rootRowId.setUserEditable(false); + rootRowId.setShownInInsertView(false); + rootRowId.setShownInDetailsView(false); + rootRowId.setShownInUpdateView(false); + + var aliquotParentLSID = addColumn(Column.AliquotedFromLSID); + aliquotParentLSID.setHidden(true); + aliquotParentLSID.setReadOnly(true); + aliquotParentLSID.setUserEditable(false); + aliquotParentLSID.setShownInInsertView(false); + aliquotParentLSID.setShownInDetailsView(false); + 
aliquotParentLSID.setShownInUpdateView(false); + + addColumn(Column.IsAliquot); + addColumn(Column.Created); + addColumn(Column.CreatedBy); + addColumn(Column.Modified); + addColumn(Column.ModifiedBy); + + List defaultCols = new ArrayList<>(); + defaultCols.add(FieldKey.fromParts(Column.Name)); + defaultCols.add(FieldKey.fromParts(Column.MaterialExpDate)); + boolean hasProductFolders = getContainer().hasProductFolders(); + if (hasProductFolders) + defaultCols.add(FieldKey.fromParts(Column.Folder)); + defaultCols.add(FieldKey.fromParts(Column.Run)); + + if (st == null) + defaultCols.add(FieldKey.fromParts(Column.SampleSet)); + + addColumn(Column.Flag); + + var statusColInfo = addColumn(Column.SampleState); + boolean statusEnabled = SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(getContainer()).isEmpty(); + statusColInfo.setShownInDetailsView(statusEnabled); + statusColInfo.setShownInInsertView(statusEnabled); + statusColInfo.setShownInUpdateView(statusEnabled); + statusColInfo.setHidden(!statusEnabled); + statusColInfo.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + if (statusEnabled) + defaultCols.add(FieldKey.fromParts(Column.SampleState)); + statusColInfo.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + + // TODO is this a real Domain??? 
+ if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) + { + defaultCols.add(FieldKey.fromParts(Column.Flag)); + addSampleTypeColumns(st, defaultCols); + + setName(_ss.getName()); + + ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); + gridUrl.addParameter("rowId", st.getRowId()); + setGridURL(new DetailsURL(gridUrl)); + } + + List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); + defaultCols.addAll(calculatedFieldKeys); + + addColumn(Column.AliquotCount); + addColumn(Column.AliquotVolume); + addColumn(Column.AliquotUnit); + addColumn(Column.AvailableAliquotCount); + addColumn(Column.AvailableAliquotVolume); + + addColumn(Column.StoredAmount); + defaultCols.add(FieldKey.fromParts(Column.StoredAmount)); + + addColumn(Column.Units); + defaultCols.add(FieldKey.fromParts(Column.Units)); + + var rawAmountColumn = addColumn(Column.RawAmount); + rawAmountColumn.setDisplayColumnFactory(new DisplayColumnFactory() + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new DataColumn(colInfo) + { + @Override + public void addQueryFieldKeys(Set keys) + { + super.addQueryFieldKeys(keys); + keys.add(FieldKey.fromParts(Column.StoredAmount)); + + } + }; + } + }); + rawAmountColumn.setHidden(true); + rawAmountColumn.setShownInDetailsView(false); + rawAmountColumn.setShownInInsertView(false); + rawAmountColumn.setShownInUpdateView(false); + + var rawUnitsColumn = addColumn(Column.RawUnits); + rawUnitsColumn.setDisplayColumnFactory(new DisplayColumnFactory() + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new DataColumn(colInfo) + { + @Override + public void addQueryFieldKeys(Set keys) + { + super.addQueryFieldKeys(keys); + keys.add(FieldKey.fromParts(Column.Units)); + + } + }; + } + }); + rawUnitsColumn.setHidden(true); + rawUnitsColumn.setShownInDetailsView(false); + 
rawUnitsColumn.setShownInInsertView(false); + rawUnitsColumn.setShownInUpdateView(false); + + if (InventoryService.get() != null && (st == null || !st.isMedia())) + defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); + + SQLFragment sql; + UserSchema plateUserSchema; + // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset + // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying + // against tables in that schema. + if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) + plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); + else + plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); + + if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) + { + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." 
+ Column.RowId.name(); + SQLFragment existsSubquery = new SQLFragment() + .append("SELECT 1 FROM ") + .append(plateUserSchema.getTable("Well"), "well") + .append(" WHERE well.sampleid = ").append(rowIdField); + + sql = new SQLFragment() + .append("CASE WHEN EXISTS (") + .append(existsSubquery) + .append(") THEN 'Plated' ") + .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 + .append("ELSE NULL END"); + } + else + { + sql = new SQLFragment("(SELECT NULL)"); + } + var col = new ExprColumn(this, Column.IsPlated.name(), sql, JdbcType.VARCHAR); + col.setDescription("Whether the sample that has been plated, if plating is supported."); + col.setUserEditable(false); + col.setReadOnly(true); + col.setShownInDetailsView(false); + col.setShownInInsertView(false); + col.setShownInUpdateView(false); + if (plateUserSchema != null) + col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + Column.RowId.name() + "}")); + addColumn(col); + + addVocabularyDomains(); + + addColumn(Column.Properties); + + var colInputs = addColumn(Column.Inputs); + addMethod("Inputs", new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); + + var colOutputs = addColumn(Column.Outputs); + addMethod("Outputs", new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); + + addExpObjectMethod(); + + ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); + DetailsURL url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); + nameCol.setURL(url); + rowIdCol.setURL(url); + setDetailsURL(url); + + if (canUserAccessPhi()) + { + ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); + setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); + + ActionURL insertActionURL = 
PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); + setInsertURL(new DetailsURL(insertActionURL)); + } + else + { + setImportURL(LINK_DISABLER); + setInsertURL(LINK_DISABLER); + setUpdateURL(LINK_DISABLER); + } + + setTitleColumn(Column.Name.toString()); + + setDefaultVisibleColumns(defaultCols); + + MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn("rowid"), _ss, true); + addColumn(lineageLookup); + } + + private ContainerFilter getSampleStatusLookupContainerFilter() + { + // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared + // for the sample status lookup since in the app project context we want to share status definitions across + // a given project instead of creating duplicate statuses in each subfolder project. + ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); + type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; + return type.create(getUserSchema()); + } + + @Override + public Domain getDomain() + { + return getDomain(false); + } + + @Override + public Domain getDomain(boolean forUpdate) + { + return _ss == null ? 
null : _ss.getDomain(forUpdate); + } + + + public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) + { + if (nameExpression == null) + return currentDescription; + + StringBuilder sb = new StringBuilder(); + if (currentDescription != null && !currentDescription.isEmpty()) + { + sb.append(currentDescription); + if (!currentDescription.endsWith(".")) + sb.append("."); + sb.append("\n"); + } + + sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); + sb.append(nameExpression); + sb.append("."); + if (!StringUtils.isEmpty(nameExpressionPreview)) + { + sb.append("\nExample of name that will be generated from the current pattern: \n"); + sb.append(nameExpressionPreview); + } + + return sb.toString(); + } + + private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) + { + TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); + if (null == dbTable) + return; + + UserSchema schema = getUserSchema(); + Domain domain = st.getDomain(); + ColumnInfo rowIdColumn = getColumn(Column.RowId); + ColumnInfo lsidColumn = getColumn(Column.LSID); + ColumnInfo nameColumn = getColumn(Column.Name); + + visibleColumns.remove(FieldKey.fromParts(Column.Run.name())); + + // When not using name expressions, mark the ID columns as required. + // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. + final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); + + Set mvColumns = domain.getProperties().stream() + .filter(ImportAliasable::isMvEnabled) + .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) + .collect(Collectors.toSet()); + + for (ColumnInfo dbColumn : dbTable.getColumns()) + { + // Don't include PHI columns in full text search index + // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() + if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) + continue; + + if ( + rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + lsidColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) + ) + { + continue; + } + + var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); + + // TODO missing values? comments? flags? + DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); + var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); + if (propColumn.getName().equalsIgnoreCase("genid")) + { + propColumn.setHidden(true); + propColumn.setUserEditable(false); + propColumn.setShownInDetailsView(false); + propColumn.setShownInInsertView(false); + propColumn.setShownInUpdateView(false); + } + if (null != dp) + { + PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), + SchemaKey.fromParts("samples"), st.getName(), FieldKey.fromParts("RowId"), null, getLookupContainerFilter()); + + if (idCols.contains(dp)) + { + propColumn.setNullable(false); + propColumn.setDisplayColumnFactory(new IdColumnRendererFactory()); + } + + // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected + if (!propColumn.isHidden()) + { + visibleColumns.add(propColumn.getFieldKey()); + } + + if (propColumn.isMvEnabled()) + { + // The column in the physical table has a "_MVIndicator" suffix, but we want to expose + // it with a "MVIndicator" suffix (no underscore) + var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, + StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); + mvColumn.setLabel(dp.getLabel() != null ? 
dp.getLabel() : dp.getName() + " MV Indicator"); + mvColumn.setSqlTypeName("VARCHAR"); + mvColumn.setPropertyURI(dp.getPropertyURI()); + mvColumn.setNullable(true); + mvColumn.setUserEditable(false); + mvColumn.setHidden(true); + mvColumn.setMvIndicatorColumn(true); + + addColumn(mvColumn); + propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); + } + } + + if (!mvColumns.contains(propColumn.getFieldKey())) + addColumn(propColumn); + + } + + setDefaultVisibleColumns(visibleColumns); + } + + // These are mostly fields that are wrapped by fields with different names (see createColumn()) + // we could handle each case separately, but this is easier + static final Set wrappedFieldKeys = Set.of( + new FieldKey(null, "objectid"), + new FieldKey(null, "RowId"), + new FieldKey(null, "LSID"), // Flag + new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication + new FieldKey(null, "runId"), // Run, RunApplication + new FieldKey(null, "CpasType")); // SampleSet + static final Set ALL_COLUMNS = Set.of(); + + private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) + { + if (null == selectedColumns) + return ALL_COLUMNS; + selectedColumns = new TreeSet<>(selectedColumns); + if (selectedColumns.contains(new FieldKey(null, StoredAmount))) + selectedColumns.add(new FieldKey(null, Units)); + if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) + selectedColumns.add(new FieldKey(null, Column.AliquotedFromLSID.name())); + if (selectedColumns.contains(new FieldKey(null, Column.IsAliquot.name()))) + selectedColumns.add(new FieldKey(null, Column.RootMaterialRowId.name())); + selectedColumns.addAll(wrappedFieldKeys); + if (null != getFilter()) + selectedColumns.addAll(getFilter().getAllFieldKeys()); + return selectedColumns; + } + + @NotNull + @Override + public SQLFragment getFromSQL(String alias) + { + return getFromSQL(alias, null); + } + + @Override + public SQLFragment 
getFromSQLExpanded(String alias, Set selectedColumns) + { + SQLFragment sql = new SQLFragment("("); + boolean usedMaterialized; + + + // SELECT FROM + /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) + * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. + * Also, we may use RemapCache for material lookup outside a transaction + */ + boolean onlyMaterialColums = false; + if (null != selectedColumns && !selectedColumns.isEmpty()) + onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); + if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) + { + sql.append(getMaterializedSQL()); + usedMaterialized = true; + } + else + { + sql.append(getJoinSQL(selectedColumns)); + usedMaterialized = false; + } + + // WHERE + SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); + sql.append("\n").append(filterFrag); + if (_ss != null && !usedMaterialized) + { + if (!filterFrag.isEmpty()) + sql.append(" AND "); + else + sql.append(" WHERE "); + sql.append("CpasType = ").appendValue(_ss.getLSID()); + } + sql.append(") ").appendIdentifier(alias); + + return getTransformedFromSQL(sql); + } + + @Override + public void setSupportTableRules(boolean b) + { + this._supportTableRules = b; + } + + @Override + public boolean supportTableRules() // intentional override + { + return _supportTableRules; + } + + @Override + protected @NotNull TableRules findTableRules() + { + Container definitionContainer = getUserSchema().getContainer(); + if (null != _ss) + definitionContainer = _ss.getContainer(); + return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); + } + + + static class InvalidationCounters + { + public final AtomicLong update, insert, delete, 
rollup; + InvalidationCounters() + { + long l = System.currentTimeMillis(); + update = new AtomicLong(l); + insert = new AtomicLong(l); + delete = new AtomicLong(l); + rollup = new AtomicLong(l); + } + } + + static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); + static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); + static final AtomicBoolean initializedListeners = new AtomicBoolean(false); + + // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() + public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + var scope = ExperimentServiceImpl.getExpSchema().getScope(); + var runnable = new RefreshMaterializedViewRunnable(lsid, reason); + scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); + } + + private static class RefreshMaterializedViewRunnable implements Runnable + { + private final String _lsid; + private final SampleTypeServiceImpl.SampleChangeType _reason; + + public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + _lsid = lsid; + _reason = reason; + } + + @Override + public void run() + { + if (_reason == schema) + { + /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. + * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the + * MQH from the cache to force the SQL to be regenerated. 
+ */ + _materializedQueries.remove(_lsid); + return; + } + var counters = getInvalidateCounters(_lsid); + switch (_reason) + { + case insert -> counters.insert.incrementAndGet(); + case rollup -> counters.rollup.incrementAndGet(); + case update -> counters.update.incrementAndGet(); + case delete -> counters.delete.incrementAndGet(); + default -> throw new IllegalStateException("Unexpected value: " + _reason); + } + } + + @Override + public boolean equals(Object obj) + { + return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); + } + } + + private static InvalidationCounters getInvalidateCounters(String lsid) + { + if (!initializedListeners.getAndSet(true)) + { + CacheManager.addListener(_invalidationCounters::clear); + } + return _invalidationCounters.computeIfAbsent(lsid, (unused) -> + new InvalidationCounters() + ); + } + + /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ + private SQLFragment getMaterializedSQL() + { + if (null == _ss) + return getJoinSQL(null); + + var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> + { + /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. + * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, + * even if just to help with race-conditions. + * + * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
+ */ + SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); + return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") + .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") + .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") + .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) + .build(); + }); + return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); + } + + + /** + * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. + * It does not help with incremental updates (except for providing the upsert() method). + * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
+ */ + static class _MaterializedQueryHelper extends MaterializedQueryHelper + { + final String _lsid; + + static class Builder extends MaterializedQueryHelper.Builder + { + String _lsid; + + public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) + { + super(prefix, scope, select); + this._lsid = lsid; + } + + @Override + public _MaterializedQueryHelper build() + { + return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); + } + } + + _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, + boolean isSelectIntoSql) + { + super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); + this._lsid = lsid; + } + + @Override + protected Materialized createMaterialized(String txCacheKey) + { + DbSchema temp = DbSchema.getTemp(); + String name = _prefix + "_" + GUID.makeHash(); + _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); + initMaterialized(materialized); + return materialized; + } + + @Override + protected void incrementalUpdateBeforeSelect(Materialized m) + { + _Materialized materialized = (_Materialized) m; + + boolean lockAcquired = false; + try + { + lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); + if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) + throw materialized._loadException; + + if (!materialized.incrementalDeleteCheck.stillValid(0)) + executeIncrementalDelete(); + if (!materialized.incrementalRollupCheck.stillValid(0)) + executeIncrementalRollup(); + if (!materialized.incrementalInsertCheck.stillValid(0)) + executeIncrementalInsert(); + } + catch (RuntimeException|InterruptedException ex) + { + RuntimeException rex = UnexpectedException.wrap(ex); + 
materialized.setError(rex); + // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. + + // Ensure that next refresh starts clean + _materializedQueries.remove(_lsid); + getInvalidateCounters(_lsid).update.incrementAndGet(); + throw rex; + } + finally + { + if (lockAcquired) + materialized.getLock().unlock(); + } + } + + void upsertWithRetry(SQLFragment sql) + { + // not actually read-only, but we don't want to start an explicit transaction + _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); + } + + void executeIncrementalInsert() + { + SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") + .append("SELECT * FROM (") + .append(getViewSourceSql()).append(") viewsource_\n") + .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); + upsertWithRetry(incremental); + } + + void executeIncrementalDelete() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) + // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) + SQLFragment incremental = new SQLFragment() + .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") + .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); + upsertWithRetry(incremental); + } + + void executeIncrementalRollup() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + SQLFragment incremental = new SQLFragment(); + if (d.isPostgreSQL()) + { + incremental + .append("UPDATE temp.${NAME} AS st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM exp.Material AS expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") + .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") + .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") + .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") + .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") + .append(")"); + } + else + { + // SQL Server 2022 supports IS DISTINCT FROM + incremental + .append("UPDATE st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM temp.${NAME} st, exp.Material expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") + .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") + .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") + .append(")"); + } + upsertWithRetry(incremental); + } + } + + static class _Materialized extends MaterializedQueryHelper.Materialized + { + final MaterializedQueryHelper.Invalidator incrementalInsertCheck; + final MaterializedQueryHelper.Invalidator incrementalRollupCheck; + final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; + + _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) + { + super(mqh, tableName, cacheKey, created, sql); + final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); + incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); + incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); + incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); + } + + @Override + public void reset() + { + super.reset(); + long now = HeartBeat.currentTimeMillis(); + incrementalInsertCheck.stillValid(now); + incrementalRollupCheck.stillValid(now); + incrementalDeleteCheck.stillValid(now); + } + + Lock getLock() + { + return _loadingLock; + } + } + + + /* SELECT and JOIN, does not include WHERE */ + private SQLFragment getJoinSQL(Set selectedColumns) + { + TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); + Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); + provisionedCols.remove(Column.RowId.name()); + provisionedCols.remove(Column.LSID.name()); + provisionedCols.remove(Column.Name.name()); + boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); + + boolean hasSampleColumns = false; + boolean hasAliquotColumns = false; + + Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); + selectedColumns = computeInnerSelectedColumns(selectedColumns); + + SQLFragment sql = new SQLFragment(); + sql.appendComment("", getSqlDialect()); + sql.append("SELECT "); + String comma = ""; + for (String materialCol : materialCols) + { + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) + { + sql.append(comma).append("m.").appendIdentifier(materialCol); + comma = ", "; + } + } + if (null != provisioned && hasProvisionedColumns) + { + for (ColumnInfo propertyColumn : provisioned.getColumns()) + { + // don't select twice + if ( + Column.RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Column.LSID.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Column.Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) + ) + { + continue; + } + + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) + { + sql.append(comma); + boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) + || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); + if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + 
hasAliquotColumns = true; + } + else if (rootField) + { + sql.append(propertyColumn.getValueSql("m_sample")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasSampleColumns = true; + } + else + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + comma = ", "; + } + } + } + + sql.append("\nFROM "); + sql.append(_rootTable, "m"); + if (hasSampleColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); + if (hasAliquotColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); + + sql.appendComment("", getSqlDialect()); + return sql; + } + + private class IdColumnRendererFactory implements DisplayColumnFactory + { + @Override + public DisplayColumn createRenderer(ColumnInfo colInfo) + { + return new IdColumnRenderer(colInfo); + } + } + + private static class IdColumnRenderer extends DataColumn + { + public IdColumnRenderer(ColumnInfo col) + { + super(col); + } + + @Override + protected boolean isDisabledInput(RenderContext ctx) + { + return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; + } + } + + private static class SampleTypeAmountDisplayColumn extends ExprColumn + { + public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) + { + super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" / ? AS ") + .append(parent.getSqlDialect().isPostgreSQL() ? 
"DECIMAL" : "DOUBLE PRECISION") + .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.getValue()), + JdbcType.DOUBLE); + + setLabel(label); + setImportAliasesSet(importAliases); + } + } + + private static class SampleTypeUnitDisplayColumn extends ExprColumn + { + public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) + { + super(parent, FieldKey.fromParts(Column.Units.name()), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? THEN ? ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.toString()), + JdbcType.VARCHAR); + } + } + + @Override + public QueryUpdateService getUpdateService() + { + return new SampleTypeUpdateServiceDI(this, _ss); + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) + { + if (_ss == null) + { + // Allow read and delete for exp.Materials. + // Don't allow insert/update on exp.Materials without a sample type. + if (perm == DeletePermission.class || perm == ReadPermission.class) + return getContainer().hasPermission(user, perm); + return false; + } + + if (_ss.isMedia() && perm == ReadPermission.class) + return getContainer().hasPermission(user, MediaReadPermission.class); + + return super.hasPermission(user, perm); + } + + @NotNull + @Override + public List getUniqueIndices() + { + // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" + // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. + // Some FKs to ExpMaterialTable don't include the "Name" column (e.g. 
NabBaseTable.Specimen) + String indexName = "idx_material_ak"; + List ret = new ArrayList<>(super.getUniqueIndices()); + if (getColumn("Name") != null) + ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); + else + ret.removeIf( def -> def.name().equals(indexName)); + return Collections.unmodifiableList(ret); + } + + + // + // UpdatableTableInfo + // + + + @Override + public @Nullable Long getOwnerObjectId() + { + return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); + } + + @Nullable + @Override + public CaseInsensitiveHashMap remapSchemaColumns() + { + CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); + + if (null != getRealTable().getColumn("container") && null != getColumn("folder")) + { + m.put("container", "folder"); + } + + for (ColumnInfo col : getColumns()) + { + if (col.getMvColumnName() != null) + m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); + } + + return m; + } + + @Override + public Set getAltMergeKeys(DataIteratorContext context) + { + if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) + return getAltKeysForUpdate(); + + return MATERIAL_ALT_MERGE_KEYS; + } + + @NotNull + @Override + public Set getAltKeysForUpdate() + { + return MATERIAL_ALT_UPDATE_KEYS; + } + + @Override + @NotNull + public List> getAdditionalRequiredInsertColumns() + { + if (getSampleType() == null) + return Collections.emptyList(); + + try + { + return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); + } + catch (IOException e) + { + return Collections.emptyList(); + } + } + + @Override + public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) + { + TableInfo propertiesTable = _ss.getTinfo(); + + // The specimens sample type doesn't have a properties table + if (propertiesTable == null) + { + return data; + } + 
+ long sampleTypeObjectId = requireNonNull(getOwnerObjectId()); + + // TODO: subclass PersistDataIteratorBuilder to index Materials! not DataClass! + try + { + var persist = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, getUserSchema().getContainer(), getUserSchema().getUser(), _ss.getImportAliasesIncludingAliquot(), sampleTypeObjectId) + .setFileLinkDirectory(SAMPLETYPE_FILE_DIRECTORY); + ExperimentServiceImpl experimentServiceImpl = ExperimentServiceImpl.get(); + SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified); + + persist.setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> + { + List lsids = searchIndexDataKeys.lsids(); + List orderedRowIds = searchIndexDataKeys.orderedRowIds(); + + // Issue 51263: order by RowId to reduce deadlock + ListUtils.partition(orderedRowIds, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterials(sublist)) + expMaterial.index(q, this); + }) + ); + + ListUtils.partition(lsids, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : experimentServiceImpl.getExpMaterialsByLsid(sublist)) + expMaterial.index(q, this); + }) + ); + }, DbScope.CommitTaskOption.POSTCOMMIT) + ); + + DataIteratorBuilder builder = LoggingDataIterator.wrap(persist); + return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(builder, getUserSchema().getContainer(), getUserSchema().getUser(), ExperimentService.get().getTinfoMaterialAliasMap(), _ss, true)); + } + catch (IOException e) + { + throw new UncheckedIOException(e); + } + } + + @Override + @NotNull + public AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.DETAILED; + } + + static final Set excludeFromDetailedAuditField; + static + { + var set = new CaseInsensitiveHashSet(); + 
set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); + set.addAll(ExpDataIterators.NOT_FOR_UPDATE); + // We don't want the inventory columns to show up in the sample timeline audit record; + // they are captured in their own audit record. + set.addAll(InventoryService.InventoryStatusColumn.names()); + excludeFromDetailedAuditField = Collections.unmodifiableSet(set); + } + + @Override + public @NotNull Set getExcludedDetailedUpdateAuditFields() + { + // uniqueId fields don't change in reality, so exclude them from the audit updates + Set excluded = new CaseInsensitiveHashSet(); + excluded.addAll(this.getUniqueIdFields()); + excluded.addAll(excludeFromDetailedAuditField); + return excluded; + } + + @Override + public List> getImportTemplates(ViewContext ctx) + { + // respect any metadata overrides + if (getRawImportTemplates() != null) + return super.getImportTemplates(ctx); + + List> templates = new ArrayList<>(); + ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); + url.addParameter("headerType", ColumnHeaderType.ImportField.name()); + try + { + if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) + { + for (String aliasKey : getSampleType().getImportAliases().keySet()) + url.addParameter("includeColumn", aliasKey); + } + } + catch (IOException e) + {} + templates.add(Pair.of("Download Template", url.toString())); + return templates; + } + + @Override + public void overlayMetadata(String tableName, UserSchema schema, Collection errors) + { + if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) + { + Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); + if (null != metadata) + { + overlayMetadata(metadata, schema, errors); + } + } + super.overlayMetadata(tableName, schema, errors); + } + + static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn + 
{ + private Unit typeUnit; + private boolean applySampleTypePrecision = true; + + public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) { + super(col, false); + this.typeUnit = typeUnit; + this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user + } + + @Override + public Object getDisplayValue(RenderContext ctx) + { + Object value = super.getDisplayValue(ctx); + if (this.applySampleTypePrecision && value != null) + { + int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); + value = Precision.round(Double.valueOf(value.toString()), scale); + } + return value; + } + } +} diff --git a/experiment/src/org/labkey/experiment/api/property/LookupValidator.java b/experiment/src/org/labkey/experiment/api/property/LookupValidator.java index ba31803df1f..ee33a451f0c 100644 --- a/experiment/src/org/labkey/experiment/api/property/LookupValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/LookupValidator.java @@ -1,298 +1,298 @@ -/* - * Copyright (c) 2010-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment.api.property; - -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.PropertyValidationError; -import org.labkey.api.query.QuerySchema; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.security.User; -import org.labkey.experiment.api.ExpMaterialTableImpl; - -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -/** - * User: jeckels - * Date: Jan 26, 2010 - */ -public class LookupValidator extends DefaultPropertyValidator implements ValidatorKind -{ - @Override - public String getName() - { - return "Lookup Property Validator"; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.Lookup).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - return 
true; - } - - private static class LookupKey - { - private final String _schema; - private final String _query; - private final String _container; - private final JdbcType _type; - - public LookupKey(PropertyDescriptor field) - { - _schema = field.getLookupSchema(); - _query = field.getLookupQuery(); - _container = field.getLookupContainer(); - _type = field.getJdbcType(); - } - - public LookupKey(ForeignKey fk, JdbcType jdbcType) - { - _schema = fk.getLookupSchemaName(); - _query = fk.getLookupTableName(); - _container = (null == fk.getLookupContainer() ? null : fk.getLookupContainer().getId()); - _type = jdbcType; - } - - @Override - public boolean equals(Object o) - { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - LookupKey that = (LookupKey) o; - - if (_container != null ? !_container.equals(that._container) : that._container != null) return false; - if (_query != null ? !_query.equals(that._query) : that._query != null) return false; - if (_type != null ? !_type.equals(that._type) : that._type != null) return false; - return !(_schema != null ? !_schema.equals(that._schema) : that._schema != null); - } - - @Override - public int hashCode() - { - int result = _schema != null ? _schema.hashCode() : 0; - result = 31 * result + (_query != null ? _query.hashCode() : 0); - result = 31 * result + (_container != null ? _container.hashCode() : 0); - result = 31 * result + (_type != null ? 
_type.hashCode() : 0); - return result; - } - } - - private static class LookupValues extends HashSet - { - final private Container _container; - - public LookupValues(ColumnInfo field, Container defaultContainer, List errors) - { - if (field.getFk().getLookupContainer() != null) - { - _container = field.getFk().getLookupContainer(); - } - else - { - _container = defaultContainer; - } - - processTableInfo(field.getFk().getLookupTableInfo(), field.getJdbcType(), field.getFk().getLookupTableName(), field.getNonBlankCaption(), errors); - } - - public LookupValues(PropertyDescriptor field, Container defaultContainer, User user, List errors) - { - if (field.getLookupContainer() != null) - { - _container = ContainerManager.getForId(field.getLookupContainer()); - } - else - { - _container = defaultContainer; - } - - if (user == null) - { - throw new IllegalArgumentException("Must supply a user"); - } - - if (_container == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target folder for field '" + field.getNonBlankCaption() + "'")); - } - else - { - QuerySchema userSchema = QueryService.get().getUserSchema(user, _container, field.getLookupSchema()); - if (userSchema == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target schema ('" + field.getLookupSchema() + "') for field '" + field.getNonBlankCaption() + "'")); - } - else - { - processTableInfo(userSchema.getTableForInsert(field.getLookupQuery()), field.getJdbcType(), field.getLookupQuery(), field.getNonBlankCaption(), errors); - } - } - } - - private void processTableInfo(TableInfo tableInfo, JdbcType jdbcType, String queryName, String label, List errors) - { - if (tableInfo == null) - { - errors.add(new SimpleValidationError("Could not find the lookup's target query ('" + queryName + "') for field '" + label + "'")); - } - else - { - List keyCols = tableInfo.getPkColumns(); - - if (keyCols.size() != 1) - { - errors.add(new SimpleValidationError("Could not 
validate target query ('" + queryName + "') because it has " + keyCols.size() + " columns instead of one for the field '" + label + "'")); - } - else - { - ColumnInfo lookupTargetCol = keyCols.get(0); - // Hack for sample types - see also revision 37612 - if (lookupTargetCol.getJdbcType() != jdbcType && jdbcType.isText() && tableInfo instanceof ExpMaterialTableImpl) - { - ColumnInfo nameCol = tableInfo.getColumn(ExpMaterialTableImpl.Column.Name.toString()); - assert nameCol != null : "Could not find Name column in SampleType table"; - if (nameCol != null) - { - lookupTargetCol = nameCol; - } - } - Collection keys = new TableSelector(lookupTargetCol).getCollection(lookupTargetCol.getJavaObjectClass()); - addAll(keys); - } - } - } - - public Container getContainer() - { - return _container; - } - } - - @Override - public boolean validate(IPropertyValidator validator, - ColumnRenderProperties crpField, - @NotNull Object value, - List errors, - ValidatorContext validatorCache, - @Nullable Object providedValue) - { - //noinspection ConstantConditions - assert value != null : "Shouldn't be validating a null value"; - - if (value != null) - value = ConvertHelper.convert(value, crpField.getJavaObjectClass()); - - if (crpField instanceof PropertyDescriptor field) - { - if (field.getLookupQuery() != null && field.getLookupSchema() != null) - { - LookupKey key = new LookupKey(field); - - LookupValues validValues = (LookupValues) validatorCache.get(LookupValidator.class, key); - if (validValues == null) - { - validValues = new LookupValues(field, validatorCache.getContainer(), validatorCache.getUser(), errors); - } - return isLookupValid(value, errors, validatorCache, key, field.getLookupSchema(), - field.getLookupQuery(), field.getNonBlankCaption(), validValues); - } - } - else if (crpField instanceof ColumnInfo field) - { - if (field.getFk() != null) - { - LookupKey key = new LookupKey(field.getFk(), field.getJdbcType()); - LookupValues validValues = (LookupValues) 
validatorCache.get(LookupValidator.class, key); - - if (validValues == null) - { - validValues = new LookupValues(field, validatorCache.getContainer(), errors); - } - return isLookupValid(value, errors, validatorCache, key, field.getFk().getLookupSchemaName(), - field.getFk().getLookupTableName(), field.getNonBlankCaption(), validValues); - } - } - else - { - throw new IllegalArgumentException("Unknown column type : '" + crpField.getClass() + "'"); - } - - return true; - } - - private boolean isLookupValid(@NotNull Object value, - List errors, - ValidatorContext validatorCache, - LookupKey key, - String schemaName, - String queryName, - String label, - LookupValues validValues) - { - validatorCache.put(LookupValidator.class, key, validValues); - - if (validValues.contains(value)) - { - return true; - } - - errors.add(new PropertyValidationError("Value '" + value + "' was not present in lookup target '" + - schemaName + "." + queryName + "' for field '" + label + "'",label)); - - return false; - } -} +/* + * Copyright (c) 2010-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.experiment.api.property; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.PropertyValidationError; +import org.labkey.api.query.QuerySchema; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.security.User; +import org.labkey.experiment.api.ExpMaterialTableImpl; + +import java.util.Collection; +import java.util.HashSet; +import java.util.List; + +/** + * User: jeckels + * Date: Jan 26, 2010 + */ +public class LookupValidator extends DefaultPropertyValidator implements ValidatorKind +{ + @Override + public String getName() + { + return "Lookup Property Validator"; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.Lookup).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + return 
true; + } + + private static class LookupKey + { + private final String _schema; + private final String _query; + private final String _container; + private final JdbcType _type; + + public LookupKey(PropertyDescriptor field) + { + _schema = field.getLookupSchema(); + _query = field.getLookupQuery(); + _container = field.getLookupContainer(); + _type = field.getJdbcType(); + } + + public LookupKey(ForeignKey fk, JdbcType jdbcType) + { + _schema = fk.getLookupSchemaName(); + _query = fk.getLookupTableName(); + _container = (null == fk.getLookupContainer() ? null : fk.getLookupContainer().getId()); + _type = jdbcType; + } + + @Override + public boolean equals(Object o) + { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + LookupKey that = (LookupKey) o; + + if (_container != null ? !_container.equals(that._container) : that._container != null) return false; + if (_query != null ? !_query.equals(that._query) : that._query != null) return false; + if (_type != null ? !_type.equals(that._type) : that._type != null) return false; + return !(_schema != null ? !_schema.equals(that._schema) : that._schema != null); + } + + @Override + public int hashCode() + { + int result = _schema != null ? _schema.hashCode() : 0; + result = 31 * result + (_query != null ? _query.hashCode() : 0); + result = 31 * result + (_container != null ? _container.hashCode() : 0); + result = 31 * result + (_type != null ? 
_type.hashCode() : 0); + return result; + } + } + + private static class LookupValues extends HashSet + { + final private Container _container; + + public LookupValues(ColumnInfo field, Container defaultContainer, List errors) + { + if (field.getFk().getLookupContainer() != null) + { + _container = field.getFk().getLookupContainer(); + } + else + { + _container = defaultContainer; + } + + processTableInfo(field.getFk().getLookupTableInfo(), field.getJdbcType(), field.getFk().getLookupTableName(), field.getNonBlankCaption(), errors); + } + + public LookupValues(PropertyDescriptor field, Container defaultContainer, User user, List errors) + { + if (field.getLookupContainer() != null) + { + _container = ContainerManager.getForId(field.getLookupContainer()); + } + else + { + _container = defaultContainer; + } + + if (user == null) + { + throw new IllegalArgumentException("Must supply a user"); + } + + if (_container == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target folder for field '" + field.getNonBlankCaption() + "'")); + } + else + { + QuerySchema userSchema = QueryService.get().getUserSchema(user, _container, field.getLookupSchema()); + if (userSchema == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target schema ('" + field.getLookupSchema() + "') for field '" + field.getNonBlankCaption() + "'")); + } + else + { + processTableInfo(userSchema.getTableForInsert(field.getLookupQuery()), field.getJdbcType(), field.getLookupQuery(), field.getNonBlankCaption(), errors); + } + } + } + + private void processTableInfo(TableInfo tableInfo, JdbcType jdbcType, String queryName, String label, List errors) + { + if (tableInfo == null) + { + errors.add(new SimpleValidationError("Could not find the lookup's target query ('" + queryName + "') for field '" + label + "'")); + } + else + { + List keyCols = tableInfo.getPkColumns(); + + if (keyCols.size() != 1) + { + errors.add(new SimpleValidationError("Could not 
validate target query ('" + queryName + "') because it has " + keyCols.size() + " columns instead of one for the field '" + label + "'")); + } + else + { + ColumnInfo lookupTargetCol = keyCols.get(0); + // Hack for sample types - see also revision 37612 + if (lookupTargetCol.getJdbcType() != jdbcType && jdbcType.isText() && tableInfo instanceof ExpMaterialTableImpl) + { + ColumnInfo nameCol = tableInfo.getColumn(ExpMaterialTableImpl.Column.Name.toString()); + assert nameCol != null : "Could not find Name column in SampleType table"; + if (nameCol != null) + { + lookupTargetCol = nameCol; + } + } + Collection keys = new TableSelector(lookupTargetCol).getCollection(lookupTargetCol.getJavaObjectClass()); + addAll(keys); + } + } + } + + public Container getContainer() + { + return _container; + } + } + + @Override + public boolean validate(IPropertyValidator validator, + ColumnRenderProperties crpField, + @NotNull Object value, + List errors, + ValidatorContext validatorCache, + @Nullable Object providedValue) + { + //noinspection ConstantConditions + assert value != null : "Shouldn't be validating a null value"; + + if (value != null) + value = ConvertHelper.convert(value, crpField.getJavaObjectClass()); + + if (crpField instanceof PropertyDescriptor field) + { + if (field.getLookupQuery() != null && field.getLookupSchema() != null) + { + LookupKey key = new LookupKey(field); + + LookupValues validValues = (LookupValues) validatorCache.get(LookupValidator.class, key); + if (validValues == null) + { + validValues = new LookupValues(field, validatorCache.getContainer(), validatorCache.getUser(), errors); + } + return isLookupValid(value, errors, validatorCache, key, field.getLookupSchema(), + field.getLookupQuery(), field.getNonBlankCaption(), validValues); + } + } + else if (crpField instanceof ColumnInfo field) + { + if (field.getFk() != null) + { + LookupKey key = new LookupKey(field.getFk(), field.getJdbcType()); + LookupValues validValues = (LookupValues) 
validatorCache.get(LookupValidator.class, key); + + if (validValues == null) + { + validValues = new LookupValues(field, validatorCache.getContainer(), errors); + } + return isLookupValid(value, errors, validatorCache, key, field.getFk().getLookupSchemaName(), + field.getFk().getLookupTableName(), field.getNonBlankCaption(), validValues); + } + } + else + { + throw new IllegalArgumentException("Unknown column type : '" + crpField.getClass() + "'"); + } + + return true; + } + + private boolean isLookupValid(@NotNull Object value, + List errors, + ValidatorContext validatorCache, + LookupKey key, + String schemaName, + String queryName, + String label, + LookupValues validValues) + { + validatorCache.put(LookupValidator.class, key, validValues); + + if (validValues.contains(value)) + { + return true; + } + + errors.add(new PropertyValidationError("Value '" + value + "' was not present in lookup target '" + + schemaName + "." + queryName + "' for field '" + label + "'",label)); + + return false; + } +} diff --git a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java index f5f21a331e2..fa701ab3eb6 100644 --- a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java +++ b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java @@ -1,271 +1,271 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.Strings; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.Table; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.User; -import org.labkey.api.util.PageFlowUtil; - -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -public class PropertyValidatorImpl implements IPropertyValidator -{ - private PropertyValidator _validator; - private PropertyValidator _validatorOld; - private boolean _deleted; - private String columnNameProvidedData; - - public PropertyValidatorImpl(PropertyValidator validator) - { - _validator = validator; - } - - @Override - public int getPropertyId() - { - return _validator.getPropertyId(); - } - - @Override - public void setPropertyId(int propertyId) - { - edit().setPropertyId(propertyId); - } - - @Override - public String getName() - { - return _validator.getName(); - } - - @Override - public void setName(String name) - { - if (Strings.CS.equals(name, getName())) - return; - - edit().setName(name); - } - - @Override - public String getDescription() - { - return _validator.getDescription(); - } - - @Override - public void setDescription(String description) - { - if (Strings.CS.equals(description, getDescription())) - return; - edit().setDescription(description); - } - - @Override - public String getTypeURI() - { - return 
_validator.getTypeURI(); - } - - public void setTypeURI(String typeURI) - { - edit().setTypeURI(typeURI); - } - - @Override - public String getExpressionValue() - { - return _validator.getExpression(); - } - - @Override - public void setExpressionValue(String expression) - { - if (Strings.CS.equals(getExpressionValue(), expression)) - return; - - edit().setExpression(expression); - } - - @Override - public Container getContainer() - { - return ContainerManager.getForId(_validator.getContainer()); - } - - public void setContainer(String container) - { - edit().setContainer(container); - } - - public void setRowId(long rowId) - { - edit().setRowId(rowId); - } - - @Override - public long getRowId() - { - return _validator.getRowId(); - } - - @Override - public String getErrorMessage() - { - return _validator.getErrorMessage(); - } - - @Override - public Map getProperties() - { - return PageFlowUtil.mapFromQueryString(_validator.getProperties()); - } - - @Override - public void setErrorMessage(String message) - { - if (Strings.CS.equals(getErrorMessage(), message)) - return; - - edit().setErrorMessage(message); - } - - @Override - public void setProperty(String key, String value) - { - Map props = getProperties(); - if (Strings.CS.equals(props.get(key), value)) - return; - - props.put(key, value); - edit().setProperties(PageFlowUtil.toQueryString(props.entrySet())); - } - - @Override - public ValidatorKind getType() - { - return PropertyService.get().getValidatorKind(getTypeURI()); - } - - public String getColumnNameProvidedData() - { - return columnNameProvidedData; - } - - public void setColumnNameProvidedData(String columnNameProvidedData) - { - this.columnNameProvidedData = columnNameProvidedData; - } - - @Override - public IPropertyValidator save(User user, Container container) throws ValidationException - { - ValidatorKind kind = getType(); - List errors = new ArrayList<>(); - - if (kind != null && !kind.isValid(this, errors)) - { - throw new 
ValidationException(errors); - } - - if (isNew()) - { - if (0 == _validator.getPropertyId()) - throw new IllegalStateException("Validator requires a valid propertyId"); - setContainer(container.getId()); - return new PropertyValidatorImpl(Table.insert(user, DomainPropertyManager.getTinfoValidator(), _validator)); - } - else - { - String cid = _validator.getContainer(); - int propid = _validator.getPropertyId(); - long rowid = _validator.getRowId(); - return new PropertyValidatorImpl(Table.update(user, DomainPropertyManager.getTinfoValidator(), _validator, new Object[] {cid, propid, rowid})); - } - } - - public void delete() - { - _deleted = true; - } - - public boolean isDeleted() - { - return _deleted; - } - - @Override - public boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache) - { - // Don't validate null values, #15683 - if (null == value) - return true; - - ValidatorKind kind = getType(); - - if (kind != null) - return kind.validate(this, prop, value, errors, validatorCache, null); - else - errors.add(new SimpleValidationError("Validator type : " + getTypeURI() + " does not exist.")); - - return false; - } - - public boolean isNew() - { - return _validator.getRowId() == 0; - } - - public boolean isDirty() - { - return _validatorOld != null || _deleted; - } - - private PropertyValidator edit() - { - if (getRowId() == 0) - return _validator; - if (_validatorOld == null) - { - _validatorOld = _validator; - _validator = _validatorOld.clone(); - } - return _validator; - } - - public String toString() - { - StringBuilder sb = new StringBuilder(_validator.getName()); - if (_validator.getDescription() != null) - sb.append(" (").append(_validator.getDescription()).append(")"); - - return sb.toString(); - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.Strings; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.Table; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.User; +import org.labkey.api.util.PageFlowUtil; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +public class PropertyValidatorImpl implements IPropertyValidator +{ + private PropertyValidator _validator; + private PropertyValidator _validatorOld; + private boolean _deleted; + private String columnNameProvidedData; + + public PropertyValidatorImpl(PropertyValidator validator) + { + _validator = validator; + } + + @Override + public int getPropertyId() + { + return _validator.getPropertyId(); + } + + @Override + public void setPropertyId(int propertyId) + { + edit().setPropertyId(propertyId); + } + + @Override + public String getName() + { + return _validator.getName(); + } + + @Override + public void setName(String name) + { + if (Strings.CS.equals(name, getName())) + return; + + edit().setName(name); + } + + @Override + public 
String getDescription() + { + return _validator.getDescription(); + } + + @Override + public void setDescription(String description) + { + if (Strings.CS.equals(description, getDescription())) + return; + edit().setDescription(description); + } + + @Override + public String getTypeURI() + { + return _validator.getTypeURI(); + } + + public void setTypeURI(String typeURI) + { + edit().setTypeURI(typeURI); + } + + @Override + public String getExpressionValue() + { + return _validator.getExpression(); + } + + @Override + public void setExpressionValue(String expression) + { + if (Strings.CS.equals(getExpressionValue(), expression)) + return; + + edit().setExpression(expression); + } + + @Override + public Container getContainer() + { + return ContainerManager.getForId(_validator.getContainer()); + } + + public void setContainer(String container) + { + edit().setContainer(container); + } + + public void setRowId(long rowId) + { + edit().setRowId(rowId); + } + + @Override + public long getRowId() + { + return _validator.getRowId(); + } + + @Override + public String getErrorMessage() + { + return _validator.getErrorMessage(); + } + + @Override + public Map getProperties() + { + return PageFlowUtil.mapFromQueryString(_validator.getProperties()); + } + + @Override + public void setErrorMessage(String message) + { + if (Strings.CS.equals(getErrorMessage(), message)) + return; + + edit().setErrorMessage(message); + } + + @Override + public void setProperty(String key, String value) + { + Map props = getProperties(); + if (Strings.CS.equals(props.get(key), value)) + return; + + props.put(key, value); + edit().setProperties(PageFlowUtil.toQueryString(props.entrySet())); + } + + @Override + public ValidatorKind getType() + { + return PropertyService.get().getValidatorKind(getTypeURI()); + } + + public String getColumnNameProvidedData() + { + return columnNameProvidedData; + } + + public void setColumnNameProvidedData(String columnNameProvidedData) + { + 
this.columnNameProvidedData = columnNameProvidedData; + } + + @Override + public IPropertyValidator save(User user, Container container) throws ValidationException + { + ValidatorKind kind = getType(); + List errors = new ArrayList<>(); + + if (kind != null && !kind.isValid(this, errors)) + { + throw new ValidationException(errors); + } + + if (isNew()) + { + if (0 == _validator.getPropertyId()) + throw new IllegalStateException("Validator requires a valid propertyId"); + setContainer(container.getId()); + return new PropertyValidatorImpl(Table.insert(user, DomainPropertyManager.getTinfoValidator(), _validator)); + } + else + { + String cid = _validator.getContainer(); + int propid = _validator.getPropertyId(); + long rowid = _validator.getRowId(); + return new PropertyValidatorImpl(Table.update(user, DomainPropertyManager.getTinfoValidator(), _validator, new Object[] {cid, propid, rowid})); + } + } + + public void delete() + { + _deleted = true; + } + + public boolean isDeleted() + { + return _deleted; + } + + @Override + public boolean validate(PropertyDescriptor prop, Object value, List errors, ValidatorContext validatorCache) + { + // Don't validate null values, #15683 + if (null == value) + return true; + + ValidatorKind kind = getType(); + + if (kind != null) + return kind.validate(this, prop, value, errors, validatorCache, null); + else + errors.add(new SimpleValidationError("Validator type : " + getTypeURI() + " does not exist.")); + + return false; + } + + public boolean isNew() + { + return _validator.getRowId() == 0; + } + + public boolean isDirty() + { + return _validatorOld != null || _deleted; + } + + private PropertyValidator edit() + { + if (getRowId() == 0) + return _validator; + if (_validatorOld == null) + { + _validatorOld = _validator; + _validator = _validatorOld.clone(); + } + return _validator; + } + + public String toString() + { + StringBuilder sb = new StringBuilder(_validator.getName()); + if (_validator.getDescription() != null) + 
sb.append(" (").append(_validator.getDescription()).append(")"); + + return sb.toString(); + } } \ No newline at end of file diff --git a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java index 1dbb8ad72a8..a052c31744b 100644 --- a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java @@ -1,138 +1,138 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.ValidationError; -import org.labkey.api.util.DateUtil; -import org.labkey.api.util.Pair; - -import java.util.ArrayList; -import java.util.Date; -import java.util.List; - -/* -* User: Karl Lum -* Date: Aug 17, 2008 -* Time: 12:43:30 PM -*/ -public class RangeValidator extends DefaultPropertyValidator implements ValidatorKind -{ - @Override - public String getName() - { - return "Range Property Validator"; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.Range).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - return true; - } - - @Override - public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache, @Nullable Object providedValue) - { - //noinspection ConstantConditions - assert value != null : "Shouldn't be validating a null value"; - - for (Pair constraint : parseExpression(validator.getExpressionValue())) - { - if (!isValid(value, constraint)) - { - createErrorMessage(validator, field, providedValue == null ? 
value : providedValue, errors); - return false; - } - } - return true; - } - - @SuppressWarnings("unchecked") - private Pair[] parseExpression(String expression) - { - List> constraints = new ArrayList<>(); - String[] parts = expression.split("&"); - for (String part : parts) - { - Pair constraint = parsePart(part); - if (constraint != null) - constraints.add(constraint); - } - return constraints.toArray(new Pair[0]); - } - - private Pair parsePart(String expression) - { - String[] parts = expression.split("="); - if (parts.length == 2) - { - return new Pair<>(parts[0], parts[1]); - } - return null; - } - - private boolean isValid(Object value, Pair constraint) - { - if (NumberUtils.isCreatable(String.valueOf(value))) - { - int comparison = Double.compare(NumberUtils.toDouble(String.valueOf(value)), NumberUtils.toDouble(constraint.getValue())); - return comparisonValid(comparison, constraint.getKey()); - } - else if (value instanceof Date) - { - Date dateConstraint = new Date(DateUtil.parseDateTime(constraint.getValue())); - int comparison = ((Date) value).compareTo(dateConstraint); - - // Issue 46094: handle "~date" based filter types (i.e. ~dateeq, ~dategt, etc.) - String type = constraint.getKey(); - if (type != null && type.startsWith("~date")) - type = type.replace("~date", "~"); - - return comparisonValid(comparison, type); - } - return false; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.ValidationError; +import org.labkey.api.util.DateUtil; +import org.labkey.api.util.Pair; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +/* +* User: Karl Lum +* Date: Aug 17, 2008 +* Time: 12:43:30 PM +*/ +public class RangeValidator extends DefaultPropertyValidator implements ValidatorKind +{ + @Override + public String getName() + { + return "Range Property Validator"; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.Range).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + return true; + } + + @Override + public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) + { + //noinspection ConstantConditions + assert value != null : "Shouldn't be validating a null value"; + + for (Pair constraint : parseExpression(validator.getExpressionValue())) + { + if (!isValid(value, constraint)) + { + createErrorMessage(validator, field, providedValue == null ? 
value : providedValue, errors); + return false; + } + } + return true; + } + + @SuppressWarnings("unchecked") + private Pair[] parseExpression(String expression) + { + List> constraints = new ArrayList<>(); + String[] parts = expression.split("&"); + for (String part : parts) + { + Pair constraint = parsePart(part); + if (constraint != null) + constraints.add(constraint); + } + return constraints.toArray(new Pair[0]); + } + + private Pair parsePart(String expression) + { + String[] parts = expression.split("="); + if (parts.length == 2) + { + return new Pair<>(parts[0], parts[1]); + } + return null; + } + + private boolean isValid(Object value, Pair constraint) + { + if (NumberUtils.isCreatable(String.valueOf(value))) + { + int comparison = Double.compare(NumberUtils.toDouble(String.valueOf(value)), NumberUtils.toDouble(constraint.getValue())); + return comparisonValid(comparison, constraint.getKey()); + } + else if (value instanceof Date) + { + Date dateConstraint = new Date(DateUtil.parseDateTime(constraint.getValue())); + int comparison = ((Date) value).compareTo(dateConstraint); + + // Issue 46094: handle "~date" based filter types (i.e. ~dateeq, ~dategt, etc.) + String type = constraint.getKey(); + if (type != null && type.startsWith("~date")) + type = type.replace("~date", "~"); + + return comparisonValid(comparison, type); + } + return false; + } } \ No newline at end of file diff --git a/experiment/src/org/labkey/experiment/api/property/RegExValidator.java b/experiment/src/org/labkey/experiment/api/property/RegExValidator.java index b3c953c1227..5c5f2e32ba7 100644 --- a/experiment/src/org/labkey/experiment/api/property/RegExValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/RegExValidator.java @@ -1,126 +1,126 @@ -/* - * Copyright (c) 2008-2018 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment.api.property; - -import org.apache.commons.lang3.BooleanUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.ColumnRenderProperties; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.exp.property.ValidatorKind; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.query.SimpleValidationError; -import org.labkey.api.query.ValidationError; - -import java.util.List; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; - -/* -* User: Karl Lum -* Date: Aug 11, 2008 -* Time: 10:52:22 AM -*/ -public class RegExValidator extends DefaultPropertyValidator implements ValidatorKind -{ - public static final String FAIL_ON_MATCH = "failOnMatch"; - - @Override - public String getName() - { - return "Regular Expression Property Validator"; - } - - @Override - public String getTypeURI() - { - return createValidatorURI(PropertyValidatorType.RegEx).toString(); - } - - @Override - public String getDescription() - { - return null; - } - - @Override - public IPropertyValidator createInstance() - { - PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); - validator.setTypeURI(getTypeURI()); - - return validator; - } - - @Override - public boolean isValid(IPropertyValidator validator, List errors) - { - 
try - { - //noinspection ResultOfMethodCallIgnored - Pattern.compile(validator.getExpressionValue()); - return true; - } - catch (PatternSyntaxException se) - { - String sb = "The " + - getName() + - ": '" + - validator.getName() + - "' has a syntax error : " + - se.getMessage(); - - errors.add(new SimpleValidationError(sb)); - } - return false; - } - - @Override - public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, - List errors, ValidatorContext validatorCache, @Nullable Object providedValue) - { - assert value != null : "Shouldn't be validating a null value"; - - try - { - Pattern expression = (Pattern)validatorCache.get(RegExValidator.class, validator.getExpressionValue()); - if (expression == null) - { - expression = Pattern.compile(validator.getExpressionValue()); - // Cache the pattern so that it can be reused - validatorCache.put(RegExValidator.class, validator.getExpressionValue(), expression); - } - Matcher matcher = expression.matcher(String.valueOf(value)); - boolean failOnMatch = BooleanUtils.toBoolean(validator.getProperties().get(FAIL_ON_MATCH)); - boolean matched = matcher.matches(); - - if ((matched && failOnMatch) || (!matched && !failOnMatch)) - { - createErrorMessage(validator, field, value, errors); - return false; - } - return true; - } - catch (PatternSyntaxException se) - { - errors.add(new SimpleValidationError(se.getMessage())); - } - return false; - } +/* + * Copyright (c) 2008-2018 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment.api.property; + +import org.apache.commons.lang3.BooleanUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.ColumnRenderProperties; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.exp.property.ValidatorKind; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.query.SimpleValidationError; +import org.labkey.api.query.ValidationError; + +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.regex.PatternSyntaxException; + +/* +* User: Karl Lum +* Date: Aug 11, 2008 +* Time: 10:52:22 AM +*/ +public class RegExValidator extends DefaultPropertyValidator implements ValidatorKind +{ + public static final String FAIL_ON_MATCH = "failOnMatch"; + + @Override + public String getName() + { + return "Regular Expression Property Validator"; + } + + @Override + public String getTypeURI() + { + return createValidatorURI(PropertyValidatorType.RegEx).toString(); + } + + @Override + public String getDescription() + { + return null; + } + + @Override + public IPropertyValidator createInstance() + { + PropertyValidatorImpl validator = new PropertyValidatorImpl(new PropertyValidator()); + validator.setTypeURI(getTypeURI()); + + return validator; + } + + @Override + public boolean isValid(IPropertyValidator validator, List errors) + { + try + { + //noinspection ResultOfMethodCallIgnored + Pattern.compile(validator.getExpressionValue()); + return true; + } + catch (PatternSyntaxException se) + { + String sb = "The " + + getName() + + ": '" + + validator.getName() + + "' has a syntax error : " + + se.getMessage(); + + errors.add(new 
SimpleValidationError(sb)); + } + return false; + } + + @Override + public boolean validate(IPropertyValidator validator, ColumnRenderProperties field, @NotNull Object value, + List errors, ValidatorContext validatorCache, @Nullable Object providedValue) + { + assert value != null : "Shouldn't be validating a null value"; + + try + { + Pattern expression = (Pattern)validatorCache.get(RegExValidator.class, validator.getExpressionValue()); + if (expression == null) + { + expression = Pattern.compile(validator.getExpressionValue()); + // Cache the pattern so that it can be reused + validatorCache.put(RegExValidator.class, validator.getExpressionValue(), expression); + } + Matcher matcher = expression.matcher(String.valueOf(value)); + boolean failOnMatch = BooleanUtils.toBoolean(validator.getProperties().get(FAIL_ON_MATCH)); + boolean matched = matcher.matches(); + + if ((matched && failOnMatch) || (!matched && !failOnMatch)) + { + createErrorMessage(validator, field, value, errors); + return false; + } + return true; + } + catch (PatternSyntaxException se) + { + errors.add(new SimpleValidationError(se.getMessage())); + } + return false; + } } \ No newline at end of file From 5fa67c49e065d98428e1f88d58f974bd031f1ce6 Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 17 Nov 2025 12:25:36 -0800 Subject: [PATCH 3/7] Handle no sample type display unit --- .../test/integration/SampleTypeCrud.ispec.ts | 16 ++++++++++++---- .../experiment/api/ExpMaterialTableImpl.java | 2 ++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts index 5c55f84a970..a600c34d2b4 100644 --- a/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts +++ b/experiment/src/client/test/integration/SampleTypeCrud.ispec.ts @@ -968,14 +968,17 @@ describe('Amount/Unit CRUD', () => { const dataName = "S-amountCrud"; - let errorMsg = await 
ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\nData1\t1\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + let errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\nData1\t1", dataType, "INSERT", topFolderOptions, editorUserOptions); expect(errorMsg.text).toContain(NO_UNIT_ERROR); - errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tUnits\nData1\tkg\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tUnits\nData1\tkg", dataType, "INSERT", topFolderOptions, editorUserOptions); expect(errorMsg.text).toContain(NO_AMOUNT_ERROR); - errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t1.1\tL\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t1.1\tL", dataType, "INSERT", topFolderOptions, editorUserOptions); expect(errorMsg.text).toContain(INCOMPATIBLE_ERROR); - errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t-1.1\tkg\n\tisBlank", dataType, "INSERT", topFolderOptions, editorUserOptions); + errorMsg = await ExperimentCRUDUtils.importSample(server, "Name\tStoredAmount\tUnits\nData1\t-1.1\tkg", dataType, "INSERT", topFolderOptions, editorUserOptions); expect(errorMsg.text).toContain(NEGATIVE_ERROR); + errorMsg = await ExperimentCRUDUtils.importCrossTypeData(server, "Name\tStoredAmount\tUnits\tSampleType\nData1\t-1.1\tkg\t" + dataType ,'IMPORT', topFolderOptions, adminOptions, true); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + await server.post('query', 'insertRows', { schemaName: 'samples', queryName: dataType, @@ -1075,6 +1078,11 @@ describe('Amount/Unit CRUD', () => { expect(errorResp['exception']).toContain("Value '-1000.0 (g)' for field 'Amount' is invalid. 
Amounts must be non-negative."); }); + errorMsg = await ExperimentCRUDUtils.importCrossTypeData(server, "Name\tStoredAmount\tUnits\tSampleType\nData1\t-1.1\tkg\t" + dataType ,'UPDATE', topFolderOptions, adminOptions, true); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + errorMsg = await ExperimentCRUDUtils.importCrossTypeData(server, "Name\tStoredAmount\tUnits\tSampleType\nData1\t-1.1\tkg\t" + dataType ,'MERGE', topFolderOptions, adminOptions, true); + expect(errorMsg.text).toContain(NEGATIVE_ERROR); + }); }); diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index a4c01c9786c..245f5924f76 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -358,6 +358,8 @@ public StringExpression getURL(ColumnInfo parent) columnInfo.setLabel(label); columnInfo.setImportAliasesSet(importAliases); columnInfo.setDescription("The amount of this sample currently on hand."); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); return columnInfo; } } From c9ca82bdcb03e68a60f5b5255d1d2b03836a6869 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 19 Nov 2025 11:55:09 -0800 Subject: [PATCH 4/7] fix name --- .../src/org/labkey/experiment/api/ExpMaterialTableImpl.java | 1 + 1 file changed, 1 insertion(+) diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index 245f5924f76..3843fc1b6c0 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -172,6 +172,7 @@ public class ExpMaterialTableImpl extends ExpRunItemTableImpl Date: Wed, 19 Nov 2025 16:55:23 -0800 Subject: [PATCH 5/7] -0.0 --- .../experiment/api/property/RangeValidator.java | 14 
+++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java index a052c31744b..3c93dcd3b06 100644 --- a/experiment/src/org/labkey/experiment/api/property/RangeValidator.java +++ b/experiment/src/org/labkey/experiment/api/property/RangeValidator.java @@ -114,11 +114,23 @@ private Pair parsePart(String expression) return null; } + /** + * Normalize value for comparison. + * For example -0.0 == 0.0, but: + * Double.doubleToLongBits(-0.0) // returns -9223372036854775808L + * Double.doubleToLongBits(0.0) // returns 0L + */ + private double normalizeForComparison(String strVal) + { + double d = NumberUtils.toDouble(strVal); + return d == 0.0d ? 0.0d : d; + } + private boolean isValid(Object value, Pair constraint) { if (NumberUtils.isCreatable(String.valueOf(value))) { - int comparison = Double.compare(NumberUtils.toDouble(String.valueOf(value)), NumberUtils.toDouble(constraint.getValue())); + int comparison = Double.compare(normalizeForComparison(String.valueOf(value)), normalizeForComparison(constraint.getValue())); return comparisonValid(comparison, constraint.getKey()); } else if (value instanceof Date) From 22f68eabb8a51a446bf800aa477533963eb38154 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 19 Nov 2025 18:10:26 -0800 Subject: [PATCH 6/7] fix error message for no display unit --- .../org/labkey/experiment/api/SampleTypeUpdateServiceDI.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index aaf440a7466..a9691bc3f55 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -846,7 +846,7 @@ protected void validateUpdateRow(Map row) throws 
ValidationExcep // if provided value is present, validate provided Object value = row.get(col.getColumnName()); Object providedValue = null; - if (_sampleType != null && value != null && (StoredAmount.name().equalsIgnoreCase(col.getColumnName()) || "Amount".equalsIgnoreCase(col.getColumnName()))) + if (_sampleType != null && _sampleType.getMetricUnit() != null && value != null && (StoredAmount.name().equalsIgnoreCase(col.getColumnName()) || "Amount".equalsIgnoreCase(col.getColumnName()))) { providedValue = value + " (" + _sampleType.getMetricUnit() + ")"; } From 8776f29ddee42ff47e2d52b410dfcfad5d699bbb Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 20 Nov 2025 10:00:23 -0800 Subject: [PATCH 7/7] code review changes --- .../api/dataiterator/ValidatorIterator.java | 18 +---------- .../api/exp/property/IPropertyValidator.java | 3 +- .../api/property/PropertyValidatorImpl.java | 30 +++++++++++++++---- 3 files changed, 28 insertions(+), 23 deletions(-) diff --git a/api/src/org/labkey/api/dataiterator/ValidatorIterator.java b/api/src/org/labkey/api/dataiterator/ValidatorIterator.java index ca63069bfd4..e03ea70bb39 100644 --- a/api/src/org/labkey/api/dataiterator/ValidatorIterator.java +++ b/api/src/org/labkey/api/dataiterator/ValidatorIterator.java @@ -172,23 +172,7 @@ public boolean next() throws BatchValidationException if (pv.getPropertyValidator() != null) { // Use :::provided:::Amount in non-negative validator message, instead of converted Amount - String providedDataColumn = pv.getPropertyValidator().getColumnNameProvidedData(); - if (providedDataColumn != null) - { - // Get the value from the provided data column - int providedDataColIndex = -1; - for (int colIndex = 0; colIndex < _data.getColumnCount(); colIndex++) - { - ColumnInfo colInfo = _data.getColumnInfo(colIndex); - if (colInfo != null && providedDataColumn.equalsIgnoreCase(colInfo.getName())) - { - providedDataColIndex = colIndex; - break; - } - } - if (providedDataColIndex != -1) - providedDataValue = 
_data.get(providedDataColIndex); - } + providedDataValue = pv.getPropertyValidator().getProvidedDataValue(_data); } } String msg = validate(v, rowNum, value, _data, providedDataValue); diff --git a/api/src/org/labkey/api/exp/property/IPropertyValidator.java b/api/src/org/labkey/api/exp/property/IPropertyValidator.java index e43dad5541e..11e2e4264f5 100644 --- a/api/src/org/labkey/api/exp/property/IPropertyValidator.java +++ b/api/src/org/labkey/api/exp/property/IPropertyValidator.java @@ -17,6 +17,7 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.data.Container; +import org.labkey.api.dataiterator.DataIterator; import org.labkey.api.exp.PropertyDescriptor; import org.labkey.api.query.ValidationError; import org.labkey.api.query.ValidationException; @@ -52,7 +53,7 @@ public interface IPropertyValidator void setErrorMessage(String message); void setProperty(String key, String value); void setColumnNameProvidedData(String columnNameProvidedData); - @Nullable String getColumnNameProvidedData(); + @Nullable Object getProvidedDataValue(DataIterator dataIterator); IPropertyValidator save(User user, Container container) throws ValidationException; diff --git a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java index fa701ab3eb6..ad006489653 100644 --- a/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java +++ b/experiment/src/org/labkey/experiment/api/property/PropertyValidatorImpl.java @@ -15,11 +15,12 @@ */ package org.labkey.experiment.api.property; -import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Strings; +import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.Table; +import org.labkey.api.dataiterator.DataIterator; import org.labkey.api.exp.PropertyDescriptor; import 
org.labkey.api.exp.property.IPropertyValidator; import org.labkey.api.exp.property.PropertyService; @@ -174,14 +175,33 @@ public ValidatorKind getType() return PropertyService.get().getValidatorKind(getTypeURI()); } - public String getColumnNameProvidedData() + @Override + public void setColumnNameProvidedData(String columnNameProvidedData) { - return columnNameProvidedData; + this.columnNameProvidedData = columnNameProvidedData; } - public void setColumnNameProvidedData(String columnNameProvidedData) + @Override + public Object getProvidedDataValue(DataIterator dataIterator) { - this.columnNameProvidedData = columnNameProvidedData; + if (columnNameProvidedData != null) + { + // Get the value from the provided data column + int providedDataColIndex = -1; + for (int colIndex = 0; colIndex < dataIterator.getColumnCount(); colIndex++) + { + ColumnInfo colInfo = dataIterator.getColumnInfo(colIndex); + if (colInfo != null && columnNameProvidedData.equalsIgnoreCase(colInfo.getName())) + { + providedDataColIndex = colIndex; + break; + } + } + if (providedDataColIndex != -1) + return dataIterator.get(providedDataColIndex); + } + + return null; } @Override