From e661698442f2f8411c52100f2c7ab57ed076b50c Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 19:23:53 -0700 Subject: [PATCH 01/17] Consolidate Dataclass data update methods - use DIB for update only --- .../labkey/api/exp/api/ExperimentService.java | 3 +- .../labkey/api/exp/query/ExpDataTable.java | 173 +- .../api/query/AbstractQueryUpdateService.java | 3117 +++++++++-------- .../api/query/DefaultQueryUpdateService.java | 1877 +++++----- .../test/integration/DataClassCrud.ispec.ts | 156 +- .../labkey/experiment/ExpDataIterators.java | 75 +- .../labkey/experiment/ExperimentModule.java | 2346 ++++++------- .../experiment/ExperimentUpgradeCode.java | 111 + .../experiment/api/DataClassDomainKind.java | 6 +- .../api/ExpDataClassDataTableImpl.java | 320 +- .../labkey/experiment/api/ExpDataImpl.java | 1963 +++++------ .../api/SampleTypeUpdateServiceDI.java | 104 +- .../samples/AbstractExpFolderImporter.java | 1 - 13 files changed, 5113 insertions(+), 5139 deletions(-) diff --git a/api/src/org/labkey/api/exp/api/ExperimentService.java b/api/src/org/labkey/api/exp/api/ExperimentService.java index be904efff7c..2575d10582e 100644 --- a/api/src/org/labkey/api/exp/api/ExperimentService.java +++ b/api/src/org/labkey/api/exp/api/ExperimentService.java @@ -130,6 +130,8 @@ public interface ExperimentService extends ExperimentRunTypeSource String EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS = "org.labkey.api.exp.api.ExperimentService#FROM_EXPANCESTORS"; + String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; + int SIMPLE_PROTOCOL_FIRST_STEP_SEQUENCE = 1; int SIMPLE_PROTOCOL_CORE_STEP_SEQUENCE = 10; int SIMPLE_PROTOCOL_EXTRA_STEP_SEQUENCE = 15; @@ -149,7 +151,6 @@ static void setInstance(ExperimentService impl) enum QueryOptions { - UseLsidForUpdate, GetSampleRecomputeCol, SkipBulkRemapCache, DeferRequiredLineageValidation, diff --git a/api/src/org/labkey/api/exp/query/ExpDataTable.java 
b/api/src/org/labkey/api/exp/query/ExpDataTable.java index e8ab68a2abb..99f42cb5b27 100644 --- a/api/src/org/labkey/api/exp/query/ExpDataTable.java +++ b/api/src/org/labkey/api/exp/query/ExpDataTable.java @@ -1,84 +1,89 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.api.exp.query; - -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.query.FieldKey; - -public interface ExpDataTable extends ExpTable -{ - enum Column - { - RowId, - LSID, - Name, - Description, - DataClass, - Protocol, - SourceProtocolApplication, - SourceApplicationInput, - DataFileUrl, - ReferenceCount, - Run, - RunApplication, - RunApplicationOutput, - Created, - CreatedBy, - Modified, - ModifiedBy, - Folder, - Flag, - Alias, - DownloadLink, - ContentLink, - ViewFileLink, - Thumbnail, - InlineThumbnail, - FileSize, - FileExists, - FileExtension, - ViewOrDownload, - WebDavUrl, - WebDavUrlRelative, - Generated, - LastIndexed, - Inputs, - Outputs, - Properties; - - public FieldKey fieldKey() - { - return FieldKey.fromParts(name()); - } - } - - void setExperiment(ExpExperiment experiment); - ExpExperiment getExperiment(); - void setRun(ExpRun run); - ExpRun getRun(); - - void setDataType(DataType type); - DataType 
getDataType(); - - MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); - MutableColumnInfo addDataInputColumn(String alias, String role); - MutableColumnInfo addInputRunCountColumn(String alias); -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.api.exp.query; + +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.query.FieldKey; + +public interface ExpDataTable extends ExpTable +{ + enum Column + { + Alias, + ContentLink, + ClassId, // database table only + CpasType, // database table only + Created, + CreatedBy, + DataClass, + DataFileUrl, + Description, + DownloadLink, + FileExtension, + FileExists, + FileSize, + Flag, + Folder, + Generated, + InlineThumbnail, + Inputs, + LastIndexed, + LSID, + Modified, + ModifiedBy, + Name, + ObjectId, // database table only + Outputs, + Properties, + Protocol, + ReferenceCount, + Run, + RunApplication, + RunApplicationOutput, + RunId, // database table only + RowId, + SourceApplicationId, // database table only + SourceApplicationInput, + SourceProtocolApplication, + Thumbnail, + ViewFileLink, + ViewOrDownload, + WebDavUrl, + WebDavUrlRelative; + + public FieldKey fieldKey() + { + 
return FieldKey.fromParts(name()); + } + } + + void setExperiment(ExpExperiment experiment); + ExpExperiment getExperiment(); + void setRun(ExpRun run); + ExpRun getRun(); + + void setDataType(DataType type); + DataType getDataType(); + + MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); + MutableColumnInfo addDataInputColumn(String alias, String role); + MutableColumnInfo addInputRunCountColumn(String alias); +} diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 6aaa7f8e4bb..4972b62bd17 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -1,1557 +1,1560 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.labkey.api.assay.AssayFileWriter; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.attachments.AttachmentParentFactory; -import org.labkey.api.attachments.SpringAttachmentFile; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.audit.provider.FileSystemAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.Sets; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DbSequenceManager; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.MultiValuedForeignKey; -import org.labkey.api.data.PropertyStorageSpec; -import org.labkey.api.data.RuntimeSQLException; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.dataiterator.AttachmentDataIterator; -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import 
org.labkey.api.dataiterator.DetailedAuditLogDataIterator; -import org.labkey.api.dataiterator.ExistingRecordDataIterator; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.dataiterator.Pump; -import org.labkey.api.dataiterator.StandardDataIteratorBuilder; -import org.labkey.api.dataiterator.TriggerDataBuilderHelper; -import org.labkey.api.dataiterator.WrapperDataIterator; -import org.labkey.api.exceptions.OptimisticConflictException; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.list.ListDefinition; -import org.labkey.api.exp.list.ListService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.files.FileContentService; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.reader.TabLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.AdminPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.test.TestWhen; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JunitUtil; -import org.labkey.api.util.TestContext; -import org.labkey.api.util.URIUtil; -import org.labkey.api.view.NotFoundException; -import org.labkey.api.view.UnauthorizedException; -import 
org.labkey.api.writer.VirtualFile; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.File; -import java.io.IOException; -import java.io.StringReader; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.function.Function; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; -import static org.labkey.api.files.FileContentService.UPLOADED_FILE; -import static org.labkey.api.util.FileUtil.toFileForRead; -import static org.labkey.api.util.FileUtil.toFileForWrite; - -public abstract class AbstractQueryUpdateService implements QueryUpdateService -{ - protected final TableInfo _queryTable; - - private boolean _bulkLoad = false; - private CaseInsensitiveHashMap _columnImportMap = null; - private VirtualFile _att = null; - - /* AbstractQueryUpdateService is generally responsible for some shared functionality - * - triggers - * - coercion/validation - * - detailed logging - * - attachments - * - * If a subclass wants to disable some of these features (w/o subclassing), put flags here... 
- */ - protected boolean _enableExistingRecordsDataIterator = true; - protected Set _previouslyUpdatedRows = new HashSet<>(); - - protected AbstractQueryUpdateService(TableInfo queryTable) - { - if (queryTable == null) - throw new IllegalArgumentException(); - _queryTable = queryTable; - } - - protected TableInfo getQueryTable() - { - return _queryTable; - } - - public @NotNull Set getPreviouslyUpdatedRows() - { - return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) - { - return getQueryTable().hasPermission(user, acl); - } - - protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - return getRow(user, container, keys); - } - - protected abstract Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException; - - @Override - public List> getRows(User user, Container container, List> keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - List> result = new ArrayList<>(); - for (Map rowKeys : keys) - { - Map row = getRow(user, container, rowKeys); - if (row != null) - result.add(row); - } - return result; - } - - @Override - public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - Map> result = new LinkedHashMap<>(); - for (Map.Entry> key : keys.entrySet()) - { - Map row = 
getRow(user, container, key.getValue(), verifyNoCrossFolderData); - if (row != null && !row.isEmpty()) - { - result.put(key.getKey(), row); - if (verifyNoCrossFolderData) - { - String dataContainer = (String) row.get("container"); - if (StringUtils.isEmpty(dataContainer)) - dataContainer = (String) row.get("folder"); - if (!container.getId().equals(dataContainer)) - throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); - } - } - else if (verifyExisting) - throw new InvalidKeyException("Data not found for " + key.getValue().values()); - } - return result; - } - - @Override - public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) - { - return false; - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) - { - return createTransactionAuditEvent(container, auditAction, null); - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) - { - long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); - TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); - if (details != null) - event.addDetails(details); - return event; - } - - public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) - { - UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); - - if (schema != null) - { - // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the - // pre-commit task is executed. 
Otherwise, since the creation of the table happens while within the commit for the - // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the - // table. - schema.getTable(auditEvent.getEventType(), false); - - transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); - - transaction.setAuditEvent(auditEvent); - } - } - - protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) - { - if (null == errors) - errors = new BatchValidationException(); - DataIteratorContext context = new DataIteratorContext(errors); - context.setInsertOption(forImport); - context.setConfigParameters(configParameters); - configureDataIteratorContext(context); - recordDataIteratorUsed(configParameters); - - return context; - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters == null) - return; - - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } - catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - - /** - * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. - * Used only for generating ExistingRecordDataIterator at the moment. - */ - protected Set getSelectKeys(DataIteratorContext context) - { - if (!context.getAlternateKeys().isEmpty()) - return context.getAlternateKeys(); - return null; - } - - /* - * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. - * does NOT handle triggers or the insert/update iterator. 
- */ - public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) - { - DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); - - if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) - { - // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) - dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); - } - - dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); - dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); - dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); - return dib; - } - - - /** - * Implementation to use insertRows() while we migrate to using DIB for all code paths - *

- * DataIterator should/must use the same error collection as passed in - */ - @Deprecated - protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) - { - MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); - List> list = new ArrayList<>(); - List> ret; - Exception rowException; - - try - { - while (mapIterator.next()) - list.add(mapIterator.getMap()); - ret = insertRows(user, container, list, errors, null, extraScriptContext); - if (errors.hasErrors()) - return 0; - return ret.size(); - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - return 0; - } - catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) - { - rowException = x; - } - finally - { - DataIteratorUtil.closeQuietly(mapIterator); - } - errors.addRowError(new ValidationException(rowException.getMessage())); - return 0; - } - - protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) - { - return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); - } - - protected boolean hasInsertRowsPermission(User user) - { - return hasPermission(user, InsertPermission.class); - } - - protected boolean hasDeleteRowsPermission(User user) - { - return hasPermission(user, DeletePermission.class); - } - - protected boolean hasUpdateRowsPermission(User user) - { - return hasPermission(user, UpdatePermission.class); - } - - // override this - protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) - { - } - - protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasImportRowsPermission(user, container, context)) - throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) - assert(getQueryTable().supportsInsertOption(context.getInsertOption())); - - context.getErrors().setExtraContext(extraScriptContext); - if (extraScriptContext != null) - { - context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); - } - - preImportDIBValidation(in, null); - - boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); - boolean hasTableScript = hasTableScript(container); - TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); - if (!skipTriggers) - { - in = preTriggerDataIterator(in, context); - if (hasTableScript) - in = helper.before(in); - } - DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); - DataIteratorBuilder out = importDIB; - - if 
(!skipTriggers) - { - if (hasTableScript) - out = helper.after(importDIB); - - out = postTriggerDataIterator(out, context); - } - - if (hasTableScript) - { - context.setFailFast(false); - context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); - } - int count = _pump(out, outputRows, context); - - if (context.getErrors().hasErrors()) - return 0; - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) - _addSummaryAuditEvent(container, user, context, count); - - return count; - } - - protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) - { - return in; - } - - protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) - { - return out; - } - - /** this is extracted so subclasses can add wrap */ - protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) - { - DataIterator it = etl.getDataIterator(context); - - try - { - if (null != rows) - { - MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); - it = new WrapperDataIterator(maps) - { - @Override - public boolean next() throws BatchValidationException - { - boolean ret = super.next(); - if (ret) - rows.add(((MapDataIterator)_delegate).getMap()); - return ret; - } - }; - } - - Pump pump = new Pump(it, context); - pump.run(); - - return pump.getRowCount(); - } - finally - { - DataIteratorUtil.closeQuietly(it); - } - } - - /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ - protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) - { - afterInsertUpdate(count, errors); - } - - protected void afterInsertUpdate(int count, BatchValidationException errors) - {} - - @Override - public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - return loadRows(user, container, 
rows, null, context, extraScriptContext); - } - - public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - configureDataIteratorContext(context); - int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - return count; - } - - @Override - public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) - { - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); - int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); - afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); - return count; - } - - @Override - public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - { - throw new UnsupportedOperationException("merge is not supported for all tables"); - } - - private boolean hasTableScript(Container container) - { - return getQueryTable().hasTriggers(container); - } - - - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); - } - - - protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, 
context, extraScriptContext); - } - - protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); - ArrayList> outputRows = new ArrayList<>(); - int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - - if (context.getErrors().hasErrors()) - return null; - - return outputRows; - } - - // not yet supported - protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - - protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) - { - // TODO probably can't assume all rows have all columns - // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) - // TODO optimize ArrayListMap? 
- Set colNames; - - if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) - { - colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); - } - else - { - // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet - colNames = Sets.newCaseInsensitiveHashSet(); - for (Map row : rows) - colNames.addAll(row.keySet()); - } - - preImportDIBValidation(null, colNames); - return MapDataIterator.of(colNames, rows, debugName); - } - - - /** @deprecated switch to using DIB based method */ - @Deprecated - protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) - throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - boolean hasTableScript = hasTableScript(container); - - errors.setExtraContext(extraScriptContext); - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - row = normalizeColumnNames(row); - try - { - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), false); - if (hasTableScript) - { - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); - } - row = insertRow(user, container, row); - if (row == null) - continue; - - if (hasTableScript) - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); - result.add(row); - } - catch (SQLException sqlx) - { - if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) - { - ValidationException vex = new ValidationException(sqlx.getMessage()); - vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); - errors.addRowError(vex); - } - else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) - { - // if we already have some errors, just break - break; - } - else - { - throw sqlx; - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - } - - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); - - return result; - } - - protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) - { - if (!isBulkLoad()) - { - AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; - String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); - getQueryTable().getAuditHandler(auditBehavior) - .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); - } - } - - private Map normalizeColumnNames(Map row) - { - if(_columnImportMap == null) - { - _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); - } - - Map newRow = new CaseInsensitiveHashMap<>(); - CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); - columns.addAll(row.keySet()); - - String newName; - for(String key : row.keySet()) - { - if(_columnImportMap.containsKey(key)) - { - //it is possible for a normalized name to conflict with an existing property. if so, defer to the original - newName = _columnImportMap.get(key).getName(); - if(!columns.contains(newName)){ - newRow.put(newName, row.get(key)); - continue; - } - } - newRow.put(key, row.get(key)); - } - - return newRow; - } - - @Override - public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws DuplicateKeyException, QueryUpdateServiceException, SQLException - { - try - { - List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); - afterInsertUpdate(null==ret?0:ret.size(), errors); - if (errors.hasErrors()) - return null; - return ret; - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - } - return null; - } - - protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) - { - if (col != null && value != null && - !col.getJavaObjectClass().isInstance(value) && - !(value instanceof AttachmentFile) && - !(value instanceof MultipartFile) && - !(value instanceof String[]) && - !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) - { - try - { - if 
(col.getKindOfQuantity() != null) - providedValues.put(key, value); - if (PropertyType.FILE_LINK.equals(col.getPropertyType())) - value = ExpDataFileConverter.convert(value); - else - value = col.convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw e; - } - catch (ConversionException e) - { - // That's OK, the transformation script may be able to fix up the value before it gets inserted - } - } - - return value; - } - - /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ - @Deprecated - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); - result.put(entry.getKey(), value); - } - - return result; - } - - protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - - protected boolean firstUpdateRow = true; - Function,Map> updateTransform = Function.identity(); - - /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ - final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - if (firstUpdateRow) - { - firstUpdateRow = false; - if (null != OntologyService.get()) - { - var t = OntologyService.get().getConceptUpdateHandler(_queryTable); - if (null != t) - updateTransform = t; - } - } - row = updateTransform.apply(row); - return updateRow(user, container, row, 
oldRow, configParameters); - } - - // used by updateRows to check if all rows have the same set of keys - // prepared statement can only be used to updateRows if all rows have the same set of keys - protected static boolean hasUniformKeys(List> rowsToUpdate) - { - if (rowsToUpdate == null || rowsToUpdate.isEmpty()) - return false; - - if (rowsToUpdate.size() == 1) - return true; - - Set keys = rowsToUpdate.get(0).keySet(); - int keySize = keys.size(); - - for (int i = 1 ; i < rowsToUpdate.size(); i ++) - { - Set otherKeys = rowsToUpdate.get(i).keySet(); - if (otherKeys.size() != keySize) - return false; - if (!otherKeys.containsAll(keys)) - return false; - } - - return true; - } - - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, - BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - if (oldKeys != null && rows.size() != oldKeys.size()) - throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); - - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> oldRows = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), true); - try - { - Map oldKey = oldKeys == null ? row : oldKeys.get(i); - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, oldKey); - if (oldRow == null) - throw new NotFoundException("The existing row was not found."); - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); - Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); - if (!streaming) - { - result.add(updatedRow); - oldRows.add(oldRow); - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (OptimisticConflictException e) - { - errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); - afterInsertUpdate(null==result?0:result.size(), errors, true); - - if (errors.hasErrors()) - throw errors; - - addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); - - return result; - } - - protected void checkDuplicateUpdate(Object pkVals) throws ValidationException - { - if (pkVals == null) - return; - - Set updatedRows = getPreviouslyUpdatedRows(); - - Object[] keysObj; - if (pkVals.getClass().isArray()) - keysObj = (Object[]) pkVals; - else if (pkVals instanceof Map map) - { - List orderedKeyVals = new ArrayList<>(); - SortedSet sortedKeys = new TreeSet<>(map.keySet()); - for (String key : sortedKeys) - orderedKeyVals.add(map.get(key)); - keysObj = orderedKeyVals.toArray(); - } - else - keysObj = new Object[]{pkVals}; - - if (keysObj.length == 1) - { - if (updatedRows.contains(keysObj[0])) - throw new ValidationException("Duplicate key provided: " + keysObj[0]); - updatedRows.add(keysObj[0]); - return; - } - - List keys = new ArrayList<>(); - for (Object key : keysObj) - keys.add(String.valueOf(key)); - if (updatedRows.contains(keys)) - throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); - updatedRows.add(keys); - } - - @Override - public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Move is not supported for this table type."); - } - - protected abstract Map deleteRow(User user, Container container, Map oldRow) - throws InvalidKeyException, ValidationException, 
QueryUpdateServiceException, SQLException; - - protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return deleteRow(user, container, oldRow); - } - - @Override - public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to delete data from this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); - - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - List> result = new ArrayList<>(keys.size()); - for (int i = 0; i < keys.size(); i++) - { - Map key = keys.get(i); - try - { - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, key); - // if row doesn't exist, bail early - if (oldRow == null) - continue; - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); - Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); - result.add(updatedRow); - } - catch 
(InvalidKeyException ex) - { - ValidationException vex = new ValidationException(ex.getMessage()); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); - - return result; - } - - protected int truncateRows(User user, Container container) - throws QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException(); - } - - @Override - public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to truncate this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); - - int result = truncateRows(user, container); - - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); - addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); - - return result; - 
} - - @Override - public void setBulkLoad(boolean bulkLoad) - { - _bulkLoad = bulkLoad; - } - - @Override - public boolean isBulkLoad() - { - return _bulkLoad; - } - - public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException - { - FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); - return saveFile(user, container, name, value, dirPath); - } - - /** - * Save uploaded file to dirName directory under file or pipeline root. - */ - public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException - { - if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) - throw new ValidationException("Invalid file value"); - - String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; - FileLike file = null; - try - { - FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); - - FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); - if (value instanceof MultipartFile multipartFile) - { - // Once we've found one, write it to disk and replace the row's value with just the File reference to it - if (multipartFile.isEmpty()) - { - throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); - } - file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); - checkFileUnderRoot(container, file); - multipartFile.transferTo(toFileForWrite(file)); - event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); - event.setProvidedFileName(multipartFile.getOriginalFilename()); - } - else - { - SpringAttachmentFile saf = (SpringAttachmentFile) value; - file = 
FileUtil.findUniqueFileName(saf.getFilename(), dir); - checkFileUnderRoot(container, file); - saf.saveTo(file); - event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); - event.setProvidedFileName(saf.getFilename()); - } - event.setFile(file.getName()); - event.setFieldName(name); - event.setDirectory(file.getParent().toURI().getPath()); - AuditLogService.get().addEvent(user, event); - } - catch (IOException | ExperimentException e) - { - throw new QueryUpdateServiceException(e); - } - - ensureExpData(user, container, file.toNioPathForRead().toFile()); - return file; - } - - public static ExpData ensureExpData(User user, Container container, File file) - { - ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); - // create exp.data record - if (existingData == null) - { - File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); - ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); - data.setName(file.getName()); - data.setDataFileURI(canonicalFile.toPath().toUri()); - if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) - { - // If the path is too long to store, bail out without creating an exp.data row - data.save(user); - } - - return data; - } - - return existingData; - } - - // For security reasons, make sure the user hasn't tried to reference a file that's not under - // the pipeline root or @assayfiles root. 
Otherwise, they could get access to any file on the server - static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException - { - Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); - if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) - return file; - - PipeRoot root = PipelineService.get().findPipelineRoot(container); - if (root == null) - throw new ExperimentException("Pipeline root not available in container " + container.getPath()); - - if (!root.isUnderRoot(toFileForRead(file))) - { - throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); - } - - return file; - } - - protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) - { - if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level - { - AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); - String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); - boolean skipAuditLevelCheck = false; - if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) - { - if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad - skipAuditLevelCheck = true; - } - getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); - } - } - - /** - * Is used by the AttachmentDataIterator to point to the location of the serialized - * attachment files. 
- */ - public void setAttachmentDirectory(VirtualFile att) - { - _att = att; - } - - @Nullable - protected VirtualFile getAttachmentDirectory() - { - return _att; - } - - /** - * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory - * implementation in order to resolve the attachment parent on incoming attachment files. - */ - @Nullable - protected AttachmentParentFactory getAttachmentParentFactory() - { - return null; - } - - /** Translate between the column name that query is exposing to the column name that actually lives in the database */ - protected static void aliasColumns(Map columnMapping, Map row) - { - for (Map.Entry entry : columnMapping.entrySet()) - { - if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) - { - row.put(entry.getKey(), row.get(entry.getValue())); - } - } - } - - /** - * The database table has underscores for MV column names, but we expose a column without the underscore. - * Therefore, we need to translate between the two sets of column names. 
- * @return database column name -> exposed TableInfo column name - */ - protected static Map createMVMapping(Domain domain) - { - Map result = new CaseInsensitiveHashMap<>(); - if (domain != null) - { - for (DomainProperty domainProperty : domain.getProperties()) - { - if (domainProperty.isMvEnabled()) - { - result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); - } - } - } - return result; - } - - @TestWhen(TestWhen.When.BVT) - public static class TestCase extends Assert - { - private boolean _useAlias = false; - - static TabLoader getTestData() throws IOException - { - TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); - testData.parseAsCSV(); - testData.getColumns()[0].clazz = Integer.class; - testData.getColumns()[1].clazz = Integer.class; - testData.getColumns()[2].clazz = String.class; - return testData; - } - - @BeforeClass - public static void createList() throws Exception - { - if (null == ListService.get()) - return; - deleteList(); - - TabLoader testData = getTestData(); - String hash = GUID.makeHash(); - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - assertNotNull(lists); - - ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); - R.setKeyName("pk"); - Domain d = requireNonNull(R.getDomain()); - for (int i=0 ; i> getRows() - { - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); - return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); - } - - @Before - public void resetList() throws 
Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - qus.truncateRows(user, c, null, null); - } - - @AfterClass - public static void deleteList() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - Map m = s.getLists(c); - if (m.containsKey("R")) - m.get("R").delete(user); - } - - void validateDefaultData(List> rows) - { - assertEquals(3, rows.size()); - - assertEquals(0, rows.get(0).get("pk")); - assertEquals(1, rows.get(1).get("pk")); - assertEquals(2, rows.get(2).get("pk")); - - assertEquals(0, rows.get(0).get("i")); - assertEquals(1, rows.get(1).get("i")); - assertEquals(2, rows.get(2).get("i")); - - assertEquals("zero", rows.get(0).get("s")); - assertEquals("one", rows.get(1).get("s")); - assertEquals("two", rows.get(2).get("s")); - } - - @Test - public void INSERT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertFalse(errors.hasErrors()); - validateDefaultData(rows); - validateDefaultData(getRows()); - - qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void UPSERT() throws Exception - { - if (null == 
ListService.get()) - return; - /* not sure how you use/test ImportOptions.UPSERT - * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? - */ - } - - @Test - public void IMPORT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var count = qus.importRows(user, c, getTestData(), errors, null, null); - assertFalse(errors.hasErrors()); - assert(count == 3); - validateDefaultData(getRows()); - - qus.importRows(user, c, getTestData(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void MERGE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? 
"pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - BatchValidationException errors = new BatchValidationException() - { - @Override - public void addRowError(ValidationException vex) - { - LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); - fail(vex.getMessage()); - } - }; - int count=0; - try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) - { - var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - if (!errors.hasErrors()) - { - tx.commit(); - count = ret; - } - } - assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is not updated - assertEquals(2, rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - - // merge should fail if duplicate keys are provided - errors = new BatchValidationException(); - mergeRows = new ArrayList<>(); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - } - - @Test - public void UPDATE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var updateRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - - // update using data iterator - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(1, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO-UP", rows.get(2).get("s")); - // test existing row value is not updated/erased - assertEquals(2, rows.get(2).get("i")); - - // update should fail if a new record is provided - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - - // Issue 52728: update should fail if duplicate key is provide - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - - // use DIB - context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); - - // use updateRows - if (!_useAlias) // _update using alias is not supported - { - BatchValidationException errors 
= new BatchValidationException(); - try - { - qus.updateRows(user, c, updateRows, null, errors, null, null); - } - catch (Exception e) - { - - } - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - - } - } - - @Test - public void REPLACE() throws Exception - { - if (null == ListService.get()) - return; - assert(getRows().isEmpty()); - INSERT(); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.REPLACE); - var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is updated - assertNull(rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - } - - @Test - public void IMPORT_IDENTITY() - { - if (null == ListService.get()) - return; - // TODO - } - - @Test - public void ALIAS_MERGE() throws Exception - { - _useAlias = true; - MERGE(); - } - - @Test - public void ALIAS_REPLACE() throws Exception - { - _useAlias = true; - REPLACE(); - } - - @Test - public void ALIAS_UPDATE() throws Exception - { - _useAlias = true; - UPDATE(); - } - } -} +/* + * Copyright (c) 
2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.labkey.api.assay.AssayFileWriter; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.attachments.AttachmentParentFactory; +import org.labkey.api.attachments.SpringAttachmentFile; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.audit.provider.FileSystemAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DbSequenceManager; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.MultiValuedForeignKey; +import 
org.labkey.api.data.PropertyStorageSpec; +import org.labkey.api.data.RuntimeSQLException; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; +import org.labkey.api.dataiterator.ExistingRecordDataIterator; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.dataiterator.Pump; +import org.labkey.api.dataiterator.StandardDataIteratorBuilder; +import org.labkey.api.dataiterator.TriggerDataBuilderHelper; +import org.labkey.api.dataiterator.WrapperDataIterator; +import org.labkey.api.exceptions.OptimisticConflictException; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.list.ListDefinition; +import org.labkey.api.exp.list.ListService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.files.FileContentService; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.reader.TabLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.AdminPermission; +import org.labkey.api.security.permissions.DeletePermission; +import 
org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.test.TestWhen; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JunitUtil; +import org.labkey.api.util.TestContext; +import org.labkey.api.util.URIUtil; +import org.labkey.api.view.NotFoundException; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.io.StringReader; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.files.FileContentService.UPLOADED_FILE; +import static org.labkey.api.util.FileUtil.toFileForRead; +import static org.labkey.api.util.FileUtil.toFileForWrite; + +public abstract class AbstractQueryUpdateService implements QueryUpdateService +{ + protected final TableInfo _queryTable; + + private boolean _bulkLoad = false; + private CaseInsensitiveHashMap _columnImportMap = null; + private VirtualFile _att = null; + + /* AbstractQueryUpdateService is generally responsible for some shared 
functionality + * - triggers + * - coercion/validation + * - detailed logging + * - attachments + * + * If a subclass wants to disable some of these features (w/o subclassing), put flags here... + */ + protected boolean _enableExistingRecordsDataIterator = true; + protected Set _previouslyUpdatedRows = new HashSet<>(); + + protected AbstractQueryUpdateService(TableInfo queryTable) + { + if (queryTable == null) + throw new IllegalArgumentException(); + _queryTable = queryTable; + } + + protected TableInfo getQueryTable() + { + return _queryTable; + } + + public @NotNull Set getPreviouslyUpdatedRows() + { + return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) + { + return getQueryTable().hasPermission(user, acl); + } + + protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + return getRow(user, container, keys); + } + + protected abstract Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException; + + @Override + public List> getRows(User user, Container container, List> keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + List> result = new ArrayList<>(); + for (Map rowKeys : keys) + { + Map row = getRow(user, container, rowKeys); + if (row != null) + result.add(row); + } + return result; + } + + @Override + public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw 
new UnauthorizedException("You do not have permission to read data from this table."); + + Map> result = new LinkedHashMap<>(); + for (Map.Entry> key : keys.entrySet()) + { + Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); + if (row != null && !row.isEmpty()) + { + result.put(key.getKey(), row); + if (verifyNoCrossFolderData) + { + String dataContainer = (String) row.get("container"); + if (StringUtils.isEmpty(dataContainer)) + dataContainer = (String) row.get("folder"); + if (!container.getId().equals(dataContainer)) + throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); + } + } + else if (verifyExisting) + throw new InvalidKeyException("Data not found for " + key.getValue().values()); + } + return result; + } + + @Override + public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) + { + return false; + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) + { + return createTransactionAuditEvent(container, auditAction, null); + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) + { + long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); + TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); + if (details != null) + event.addDetails(details); + return event; + } + + public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) + { + UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); + + if (schema != null) + { + // This is a little hack to ensure that the audit table has actually 
been created and gets put into the table cache by the time the + // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the + // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the + // table. + schema.getTable(auditEvent.getEventType(), false); + + transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); + + transaction.setAuditEvent(auditEvent); + } + } + + protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) + { + if (null == errors) + errors = new BatchValidationException(); + DataIteratorContext context = new DataIteratorContext(errors); + context.setInsertOption(forImport); + context.setConfigParameters(configParameters); + configureDataIteratorContext(context); + recordDataIteratorUsed(configParameters); + + return context; + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters == null) + return; + + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } + catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + + /** + * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. + * Used only for generating ExistingRecordDataIterator at the moment. + */ + protected Set getSelectKeys(DataIteratorContext context) + { + if (!context.getAlternateKeys().isEmpty()) + return context.getAlternateKeys(); + return null; + } + + /* + * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. + * does NOT handle triggers or the insert/update iterator. 
+ */ + public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) + { + DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); + + if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) + { + // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) + dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); + } + + dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); + dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); + dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); + return dib; + } + + + /** + * Implementation to use insertRows() while we migrate to using DIB for all code paths + *

+ * DataIterator should/must use the same error collection as passed in + */ + @Deprecated + protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) + { + MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); + List> list = new ArrayList<>(); + List> ret; + Exception rowException; + + try + { + while (mapIterator.next()) + list.add(mapIterator.getMap()); + ret = insertRows(user, container, list, errors, null, extraScriptContext); + if (errors.hasErrors()) + return 0; + return ret.size(); + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + return 0; + } + catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) + { + rowException = x; + } + finally + { + DataIteratorUtil.closeQuietly(mapIterator); + } + errors.addRowError(new ValidationException(rowException.getMessage())); + return 0; + } + + protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) + { + return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); + } + + protected boolean hasInsertRowsPermission(User user) + { + return hasPermission(user, InsertPermission.class); + } + + protected boolean hasDeleteRowsPermission(User user) + { + return hasPermission(user, DeletePermission.class); + } + + protected boolean hasUpdateRowsPermission(User user) + { + return hasPermission(user, UpdatePermission.class); + } + + // override this + protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) + { + } + + protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasImportRowsPermission(user, container, context)) + throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) + assert(getQueryTable().supportsInsertOption(context.getInsertOption())); + + context.getErrors().setExtraContext(extraScriptContext); + if (extraScriptContext != null) + { + context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); + } + + preImportDIBValidation(in, null); + + boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); + boolean hasTableScript = hasTableScript(container); + TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); + if (!skipTriggers) + { + in = preTriggerDataIterator(in, context); + if (hasTableScript) + in = helper.before(in); + } + DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); + DataIteratorBuilder out = importDIB; + + if 
(!skipTriggers) + { + if (hasTableScript) + out = helper.after(importDIB); + + out = postTriggerDataIterator(out, context); + } + + if (hasTableScript) + { + context.setFailFast(false); + context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); + } + int count = _pump(out, outputRows, context); + + if (context.getErrors().hasErrors()) + return 0; + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) + _addSummaryAuditEvent(container, user, context, count); + + return count; + } + + protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) + { + return in; + } + + protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) + { + return out; + } + + /** this is extracted so subclasses can add wrap */ + protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) + { + DataIterator it = etl.getDataIterator(context); + + if (null == it) + return 0; + + try + { + if (null != rows) + { + MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); + it = new WrapperDataIterator(maps) + { + @Override + public boolean next() throws BatchValidationException + { + boolean ret = super.next(); + if (ret) + rows.add(((MapDataIterator)_delegate).getMap()); + return ret; + } + }; + } + + Pump pump = new Pump(it, context); + pump.run(); + + return pump.getRowCount(); + } + finally + { + DataIteratorUtil.closeQuietly(it); + } + } + + /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ + protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) + { + afterInsertUpdate(count, errors); + } + + protected void afterInsertUpdate(int count, BatchValidationException errors) + {} + + @Override + public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + 
return loadRows(user, container, rows, null, context, extraScriptContext); + } + + public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + configureDataIteratorContext(context); + int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + return count; + } + + @Override + public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) + { + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); + int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); + afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); + return count; + } + + @Override + public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + { + throw new UnsupportedOperationException("merge is not supported for all tables"); + } + + private boolean hasTableScript(Container container) + { + return getQueryTable().hasTriggers(container); + } + + + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); + } + + + protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); + ArrayList> outputRows = new ArrayList<>(); + int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + + if (context.getErrors().hasErrors()) + return null; + + return outputRows; + } + + // not yet supported + protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + + protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) + { + // TODO probably can't assume all rows have all columns + // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) + // TODO optimize ArrayListMap? 
+ Set colNames; + + if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) + { + colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); + } + else + { + // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet + colNames = Sets.newCaseInsensitiveHashSet(); + for (Map row : rows) + colNames.addAll(row.keySet()); + } + + preImportDIBValidation(null, colNames); + return MapDataIterator.of(colNames, rows, debugName); + } + + + /** @deprecated switch to using DIB based method */ + @Deprecated + protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) + throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + boolean hasTableScript = hasTableScript(container); + + errors.setExtraContext(extraScriptContext); + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + row = normalizeColumnNames(row); + try + { + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), false); + if (hasTableScript) + { + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); + } + row = insertRow(user, container, row); + if (row == null) + continue; + + if (hasTableScript) + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); + result.add(row); + } + catch (SQLException sqlx) + { + if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) + { + ValidationException vex = new ValidationException(sqlx.getMessage()); + vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); + errors.addRowError(vex); + } + else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) + { + // if we already have some errors, just break + break; + } + else + { + throw sqlx; + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + } + + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); + + return result; + } + + protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) + { + if (!isBulkLoad()) + { + AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; + String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); + getQueryTable().getAuditHandler(auditBehavior) + .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); + } + } + + private Map normalizeColumnNames(Map row) + { + if(_columnImportMap == null) + { + _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); + } + + Map newRow = new CaseInsensitiveHashMap<>(); + CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); + columns.addAll(row.keySet()); + + String newName; + for(String key : row.keySet()) + { + if(_columnImportMap.containsKey(key)) + { + //it is possible for a normalized name to conflict with an existing property. if so, defer to the original + newName = _columnImportMap.get(key).getName(); + if(!columns.contains(newName)){ + newRow.put(newName, row.get(key)); + continue; + } + } + newRow.put(key, row.get(key)); + } + + return newRow; + } + + @Override + public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws DuplicateKeyException, QueryUpdateServiceException, SQLException + { + try + { + List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); + afterInsertUpdate(null==ret?0:ret.size(), errors); + if (errors.hasErrors()) + return null; + return ret; + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + } + return null; + } + + protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) + { + if (col != null && value != null && + !col.getJavaObjectClass().isInstance(value) && + !(value instanceof AttachmentFile) && + !(value instanceof MultipartFile) && + !(value instanceof String[]) && + !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) + { + try + { + if 
(col.getKindOfQuantity() != null) + providedValues.put(key, value); + if (PropertyType.FILE_LINK.equals(col.getPropertyType())) + value = ExpDataFileConverter.convert(value); + else + value = col.convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw e; + } + catch (ConversionException e) + { + // That's OK, the transformation script may be able to fix up the value before it gets inserted + } + } + + return value; + } + + /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ + @Deprecated + protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) + { + Map result = new CaseInsensitiveHashMap<>(row.size()); + Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); + for (Map.Entry entry : row.entrySet()) + { + ColumnInfo col = columnMap.get(entry.getKey()); + Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); + result.put(entry.getKey(), value); + } + + return result; + } + + protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + + protected boolean firstUpdateRow = true; + Function,Map> updateTransform = Function.identity(); + + /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ + final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + if (firstUpdateRow) + { + firstUpdateRow = false; + if (null != OntologyService.get()) + { + var t = OntologyService.get().getConceptUpdateHandler(_queryTable); + if (null != t) + updateTransform = t; + } + } + row = updateTransform.apply(row); + return updateRow(user, container, row, 
oldRow, configParameters); + } + + // used by updateRows to check if all rows have the same set of keys + // prepared statement can only be used to updateRows if all rows have the same set of keys + protected static boolean hasUniformKeys(List> rowsToUpdate) + { + if (rowsToUpdate == null || rowsToUpdate.isEmpty()) + return false; + + if (rowsToUpdate.size() == 1) + return true; + + Set keys = rowsToUpdate.get(0).keySet(); + int keySize = keys.size(); + + for (int i = 1 ; i < rowsToUpdate.size(); i ++) + { + Set otherKeys = rowsToUpdate.get(i).keySet(); + if (otherKeys.size() != keySize) + return false; + if (!otherKeys.containsAll(keys)) + return false; + } + + return true; + } + + @Override + public List> updateRows(User user, Container container, List> rows, List> oldKeys, + BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + if (oldKeys != null && rows.size() != oldKeys.size()) + throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); + + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> oldRows = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), true); + try + { + Map oldKey = oldKeys == null ? row : oldKeys.get(i); + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, oldKey); + if (oldRow == null) + throw new NotFoundException("The existing row was not found."); + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); + Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); + if (!streaming) + { + result.add(updatedRow); + oldRows.add(oldRow); + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (OptimisticConflictException e) + { + errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + afterInsertUpdate(null==result?0:result.size(), errors, true); + + if (errors.hasErrors()) + throw errors; + + addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); + + return result; + } + + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException + { + if (pkVals == null) + return; + + Set updatedRows = getPreviouslyUpdatedRows(); + + Object[] keysObj; + if (pkVals.getClass().isArray()) + keysObj = (Object[]) pkVals; + else if (pkVals instanceof Map map) + { + List orderedKeyVals = new ArrayList<>(); + SortedSet sortedKeys = new TreeSet<>(map.keySet()); + for (String key : sortedKeys) + orderedKeyVals.add(map.get(key)); + keysObj = orderedKeyVals.toArray(); + } + else + keysObj = new Object[]{pkVals}; + + if (keysObj.length == 1) + { + if (updatedRows.contains(keysObj[0])) + throw new ValidationException("Duplicate key provided: " + keysObj[0]); + updatedRows.add(keysObj[0]); + return; + } + + List keys = new ArrayList<>(); + for (Object key : keysObj) + keys.add(String.valueOf(key)); + if (updatedRows.contains(keys)) + throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); + updatedRows.add(keys); + } + + @Override + public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Move is not supported for this table type."); + } + + protected abstract Map deleteRow(User user, Container container, Map oldRow) + throws InvalidKeyException, ValidationException, 
QueryUpdateServiceException, SQLException; + + protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return deleteRow(user, container, oldRow); + } + + @Override + public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to delete data from this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + List> result = new ArrayList<>(keys.size()); + for (int i = 0; i < keys.size(); i++) + { + Map key = keys.get(i); + try + { + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, key); + // if row doesn't exist, bail early + if (oldRow == null) + continue; + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); + Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); + result.add(updatedRow); + } + catch 
(InvalidKeyException ex) + { + ValidationException vex = new ValidationException(ex.getMessage()); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); + + return result; + } + + protected int truncateRows(User user, Container container) + throws QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException(); + } + + @Override + public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to truncate this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); + + int result = truncateRows(user, container); + + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); + addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); + + return result; + 
} + + @Override + public void setBulkLoad(boolean bulkLoad) + { + _bulkLoad = bulkLoad; + } + + @Override + public boolean isBulkLoad() + { + return _bulkLoad; + } + + public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException + { + FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); + return saveFile(user, container, name, value, dirPath); + } + + /** + * Save uploaded file to dirName directory under file or pipeline root. + */ + public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException + { + if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) + throw new ValidationException("Invalid file value"); + + String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; + FileLike file = null; + try + { + FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); + + FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); + if (value instanceof MultipartFile multipartFile) + { + // Once we've found one, write it to disk and replace the row's value with just the File reference to it + if (multipartFile.isEmpty()) + { + throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); + } + file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); + checkFileUnderRoot(container, file); + multipartFile.transferTo(toFileForWrite(file)); + event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); + event.setProvidedFileName(multipartFile.getOriginalFilename()); + } + else + { + SpringAttachmentFile saf = (SpringAttachmentFile) value; + file = 
FileUtil.findUniqueFileName(saf.getFilename(), dir); + checkFileUnderRoot(container, file); + saf.saveTo(file); + event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); + event.setProvidedFileName(saf.getFilename()); + } + event.setFile(file.getName()); + event.setFieldName(name); + event.setDirectory(file.getParent().toURI().getPath()); + AuditLogService.get().addEvent(user, event); + } + catch (IOException | ExperimentException e) + { + throw new QueryUpdateServiceException(e); + } + + ensureExpData(user, container, file.toNioPathForRead().toFile()); + return file; + } + + public static ExpData ensureExpData(User user, Container container, File file) + { + ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); + // create exp.data record + if (existingData == null) + { + File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); + ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); + data.setName(file.getName()); + data.setDataFileURI(canonicalFile.toPath().toUri()); + if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) + { + // If the path is too long to store, bail out without creating an exp.data row + data.save(user); + } + + return data; + } + + return existingData; + } + + // For security reasons, make sure the user hasn't tried to reference a file that's not under + // the pipeline root or @assayfiles root. 
Otherwise, they could get access to any file on the server + static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException + { + Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); + if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) + return file; + + PipeRoot root = PipelineService.get().findPipelineRoot(container); + if (root == null) + throw new ExperimentException("Pipeline root not available in container " + container.getPath()); + + if (!root.isUnderRoot(toFileForRead(file))) + { + throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); + } + + return file; + } + + protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) + { + if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level + { + AuditBehaviorType auditType = (AuditBehaviorType) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); + String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); + boolean skipAuditLevelCheck = false; + if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) + { + if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad + skipAuditLevelCheck = true; + } + getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); + } + } + + /** + * Is used by the AttachmentDataIterator to point to the location of the serialized + * attachment files. 
+ */ + public void setAttachmentDirectory(VirtualFile att) + { + _att = att; + } + + @Nullable + protected VirtualFile getAttachmentDirectory() + { + return _att; + } + + /** + * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory + * implementation in order to resolve the attachment parent on incoming attachment files. + */ + @Nullable + protected AttachmentParentFactory getAttachmentParentFactory() + { + return null; + } + + /** Translate between the column name that query is exposing to the column name that actually lives in the database */ + protected static void aliasColumns(Map columnMapping, Map row) + { + for (Map.Entry entry : columnMapping.entrySet()) + { + if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) + { + row.put(entry.getKey(), row.get(entry.getValue())); + } + } + } + + /** + * The database table has underscores for MV column names, but we expose a column without the underscore. + * Therefore, we need to translate between the two sets of column names. 
+ * @return database column name -> exposed TableInfo column name + */ + protected static Map createMVMapping(Domain domain) + { + Map result = new CaseInsensitiveHashMap<>(); + if (domain != null) + { + for (DomainProperty domainProperty : domain.getProperties()) + { + if (domainProperty.isMvEnabled()) + { + result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); + } + } + } + return result; + } + + @TestWhen(TestWhen.When.BVT) + public static class TestCase extends Assert + { + private boolean _useAlias = false; + + static TabLoader getTestData() throws IOException + { + TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); + testData.parseAsCSV(); + testData.getColumns()[0].clazz = Integer.class; + testData.getColumns()[1].clazz = Integer.class; + testData.getColumns()[2].clazz = String.class; + return testData; + } + + @BeforeClass + public static void createList() throws Exception + { + if (null == ListService.get()) + return; + deleteList(); + + TabLoader testData = getTestData(); + String hash = GUID.makeHash(); + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + assertNotNull(lists); + + ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); + R.setKeyName("pk"); + Domain d = requireNonNull(R.getDomain()); + for (int i=0 ; i> getRows() + { + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); + return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); + } + + @Before + public void resetList() throws 
Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + qus.truncateRows(user, c, null, null); + } + + @AfterClass + public static void deleteList() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + Map m = s.getLists(c); + if (m.containsKey("R")) + m.get("R").delete(user); + } + + void validateDefaultData(List> rows) + { + assertEquals(3, rows.size()); + + assertEquals(0, rows.get(0).get("pk")); + assertEquals(1, rows.get(1).get("pk")); + assertEquals(2, rows.get(2).get("pk")); + + assertEquals(0, rows.get(0).get("i")); + assertEquals(1, rows.get(1).get("i")); + assertEquals(2, rows.get(2).get("i")); + + assertEquals("zero", rows.get(0).get("s")); + assertEquals("one", rows.get(1).get("s")); + assertEquals("two", rows.get(2).get("s")); + } + + @Test + public void INSERT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertFalse(errors.hasErrors()); + validateDefaultData(rows); + validateDefaultData(getRows()); + + qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void UPSERT() throws Exception + { + if (null == 
ListService.get()) + return; + /* not sure how you use/test ImportOptions.UPSERT + * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? + */ + } + + @Test + public void IMPORT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var count = qus.importRows(user, c, getTestData(), errors, null, null); + assertFalse(errors.hasErrors()); + assert(count == 3); + validateDefaultData(getRows()); + + qus.importRows(user, c, getTestData(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void MERGE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? 
"pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + BatchValidationException errors = new BatchValidationException() + { + @Override + public void addRowError(ValidationException vex) + { + LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); + fail(vex.getMessage()); + } + }; + int count=0; + try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) + { + var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + if (!errors.hasErrors()) + { + tx.commit(); + count = ret; + } + } + assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is not updated + assertEquals(2, rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + + // merge should fail if duplicate keys are provided + errors = new BatchValidationException(); + mergeRows = new ArrayList<>(); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + } + + @Test + public void UPDATE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var updateRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + + // update using data iterator + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(1, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO-UP", rows.get(2).get("s")); + // test existing row value is not updated/erased + assertEquals(2, rows.get(2).get("i")); + + // update should fail if a new record is provided + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + + // Issue 52728: update should fail if duplicate key is provide + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + + // use DIB + context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); + + // use updateRows + if (!_useAlias) // _update using alias is not supported + { + BatchValidationException errors 
= new BatchValidationException(); + try + { + qus.updateRows(user, c, updateRows, null, errors, null, null); + } + catch (Exception e) + { + + } + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + + } + } + + @Test + public void REPLACE() throws Exception + { + if (null == ListService.get()) + return; + assert(getRows().isEmpty()); + INSERT(); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.REPLACE); + var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is updated + assertNull(rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + } + + @Test + public void IMPORT_IDENTITY() + { + if (null == ListService.get()) + return; + // TODO + } + + @Test + public void ALIAS_MERGE() throws Exception + { + _useAlias = true; + MERGE(); + } + + @Test + public void ALIAS_REPLACE() throws Exception + { + _useAlias = true; + REPLACE(); + } + + @Test + public void ALIAS_UPDATE() throws Exception + { + _useAlias = true; + UPDATE(); + } + } +} diff --git 
a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index 1299cea69d9..f62a69ca510 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,941 +1,936 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import 
org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext, providedValue); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value, null); - } - } - } - - protected void validateUpdateRow(Map row) throws ValidationException - { - for 
(ColumnInfo col : getQueryTable().getColumns()) - { - // Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value, null); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have 
not existed in the row before the update. - lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = pk.convert(pkValue); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - protected TableInfo getTableInfoForConversion() - { - return getDbTable(); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getTableInfoForConversion(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import 
org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = pk.convert(pkValue); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } +} diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index 6f0d615f647..afc9cfef66d 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -422,9 +422,7 @@ describe('Duplicate IDs', () => { }], 'exp.data', dataType, topFolderOptions, editorUserOptions); const data1RowId = caseInsensitive(dataRows[0], 'rowId'); - const data1Lsid = caseInsensitive(dataRows[0], 'lsid'); const data2RowId = caseInsensitive(dataRows[1], 'rowId'); - const data2Lsid = caseInsensitive(dataRows[1], 'lsid'); // update data2 twice using updateRows, using rowId await server.post('query', 'updateRows', { @@ -445,23 +443,43 @@ describe('Duplicate IDs', () => { expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2RowId); }); - // update data2 twice using updateRows, using lsid (data iterator) + // update date twice specifying the name across multiple partitions await server.post('query', 'updateRows', { schemaName: 'exp.data', queryName: dataType, rows: [{ description: 'update', - lsid: data1Lsid + name: dataName1 },{ description: 'update', - lsid: data2Lsid + rowId: data2RowId + },{ + description: 'update', + name: dataName1 + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + errorResp = JSON.parse(result.text); + expect(errorResp).toBe('a'); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); + }); + + // update date twice specifying the rowId across multiple partitions + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [{ + description: 'update', + rowId: data1RowId + },{ + description: 'update', + name: data2RowId },{ description: 
'update', - lsid: data2Lsid + rowId: data1RowId }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { errorResp = JSON.parse(result.text); - expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2Lsid); + expect(errorResp['exception']).toBe('Duplicate key provided: ' + data1RowId); }); errorResp = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n" + dataName1 + "\tupdate\n" + dataName2 + "\tupdate\n" + dataName2 + "\tupdate", dataType, "UPDATE", topFolderOptions, editorUserOptions); @@ -821,6 +839,10 @@ describe('Multi Value Text Choice', () => { }); +const LSID_UPDATE_ERROR = 'LSID is no longer accepted as a key for data update. Specify a RowId or Name instead.'; +const LSID_MERGE_ERROR = 'LSID is no longer accepted as a key for data merge. Specify a RowId or Name instead.'; +const ROWID_MERGE_ERROR = 'RowId is not accepted when merging data. Specify only the data name instead.'; + describe('Data CRUD', () => { it ("Update using different key fields", async () => { @@ -838,20 +860,25 @@ describe('Data CRUD', () => { // insert 2 rows data, provide explicit names and a rowId = -1 const dataName1 = 'KeyData1'; const dataName2 = 'KeyData2'; + const dataName3 = 'KeyData3'; const inserted = await insertDataClassData([ { name: dataName1, description: 'original1', [fieldName]: 'val1', rowId: -1 }, { name: dataName2, description: 'original2', [fieldName]: 'val2', rowId: -1 }, + { name: dataName3, description: 'original3', [fieldName]: 'val3', rowId: -1 }, ], dataType, topFolderOptions); // verify both rows are inserted with correct name and rowId is not -1 for both rows, record the rowId and lsid for both rows expect(inserted[0].name).toBe(dataName1); expect(inserted[1].name).toBe(dataName2); + expect(inserted[2].name).toBe(dataName3); expect(inserted[0].rowId).not.toBe(-1); expect(inserted[1].rowId).not.toBe(-1); + expect(inserted[2].rowId).not.toBe(-1); const row1RowId = inserted[0].rowId; const row1Lsid = 
inserted[0].lsid; const row2RowId = inserted[1].rowId; const row2Lsid = inserted[1].lsid; + const row3RowId = inserted[2].rowId; const findRow = (rows: any[], rowId: number) => rows.find(r => caseInsensitive(r, 'RowId') === rowId); @@ -869,10 +896,32 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('updByRowId2'); expect(caseInsensitive(row2, fieldName)).toBe('rowIdVal2'); - // update description and fieldName value for both rows using lsid as key, verify update is successful and data are updated correctly + // Error when supplying LSID without RowId or Name + // query api + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: dataType, + rows: [ + { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row2Lsid, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + ] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain(LSID_UPDATE_ERROR); + }); + // update from import + let importUpdateText = 'LSID\tDescription\t' + fieldName + '\n' + row1Lsid + '\timportUpd1\timportLsidVal1\n' + row2Lsid + '\timportUpd2\timportLsidVal2'; + let errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_UPDATE_ERROR) > -1).toBeTruthy(); + + // merge from import + errorResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text.indexOf(LSID_MERGE_ERROR) > -1).toBeTruthy(); + + // update using lsid (correct and incorrect, should both be ignored), as well as rowId, as key, should succeed, verify update is successful and data are updated correctly await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, - { lsid: row2Lsid, 
description: 'updByLsid2', [fieldName]: 'lsidVal2' }, + { lsid: row1Lsid, rowId: row1RowId, description: 'updByLsid1', [fieldName]: 'lsidVal1' }, + { lsid: row1Lsid /*wrong lsid, should be ignored anyways*/, rowId: row2RowId, description: 'updByLsid2', [fieldName]: 'lsidVal2' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); @@ -882,27 +931,34 @@ describe('Data CRUD', () => { expect(caseInsensitive(row1, fieldName)).toBe('lsidVal1'); expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('lsidVal2'); + expect(caseInsensitive(row2, 'lsid')).toBe(row2Lsid); // lsid should not be updated - // update description and fieldName value, one of the row use lsid as key, the other use rowId, verify update is successful and data are updated correctly + // update with different set of columns + // should use partitioned data iterator await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, description: 'updMixed1', [fieldName]: 'mixedVal1' }, - { rowId: row2RowId, description: 'updMixed2', [fieldName]: 'mixedVal2' }, + { rowId: row1RowId, description: 'updMixed1', [fieldName]: 'mixedVal1' }, + { rowId: row2RowId, name: 'mixed_rename2', [fieldName]: 'mixedVal2' }, + { rowId: row3RowId, description: 'mixedVal3 desc' }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); - rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId, row3RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); row1 = findRow(rows, row1RowId); row2 = findRow(rows, row2RowId); + var row3 = findRow(rows, row3RowId); expect(caseInsensitive(row1, 'description')).toBe('updMixed1'); expect(caseInsensitive(row1, 
fieldName)).toBe('mixedVal1'); - expect(caseInsensitive(row2, 'description')).toBe('updMixed2'); + expect(caseInsensitive(row2, 'description')).toBe('updByLsid2'); expect(caseInsensitive(row2, fieldName)).toBe('mixedVal2'); + expect(caseInsensitive(row2, 'name')).toBe('mixed_rename2'); + expect(caseInsensitive(row3, 'description')).toBe('mixedVal3 desc'); + expect(caseInsensitive(row3, fieldName)).toBe('val3'); // fieldName value should not be updated for row3 - // update names of both rows using lsid as key, verify update is successful and names are updated correctly + // update names of both rows using lsid (ignored) an rowId as key, verify update is successful and names are updated correctly const newName1 = 'RenamedByLsid1'; const newName2 = 'RenamedByLsid2'; await ExperimentCRUDUtils.updateRows(server, [ - { lsid: row1Lsid, name: newName1 }, - { lsid: row2Lsid, name: newName2 }, + { lsid: "BAD", rowId: row1RowId, name: newName1 }, + { lsid: row1Lsid /*wrong*/, rowId: row2RowId, name: newName2 }, ], 'exp.data', dataType, topFolderOptions, editorUserOptions); rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, 'RowId,Name', topFolderOptions, adminOptions); @@ -911,7 +967,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row1, 'Name')).toBe(newName1); expect(caseInsensitive(row2, 'Name')).toBe(newName2); - // update names of both rows using rowId as key, verify update is successful and names are updated correctly + // update names of both rows using just rowId as key, verify update is successful and names are updated correctly const newName3 = 'RenamedByRowId1'; const newName4 = 'RenamedByRowId2'; await ExperimentCRUDUtils.updateRows(server, [ @@ -926,7 +982,7 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'Name')).toBe(newName4); // update description and fieldName value from Import with update, the import columns contains name field, verify update is successful and data are updated correctly - 
const importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; + importUpdateText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\timportUpd1\timportVal1\n' + newName4 + '\timportUpd2\timportVal2'; const updateResp = await ExperimentCRUDUtils.importData(server, importUpdateText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); expect(updateResp.body.success).toBe(true); @@ -938,6 +994,12 @@ describe('Data CRUD', () => { expect(caseInsensitive(row2, 'description')).toBe('importUpd2'); expect(caseInsensitive(row2, fieldName)).toBe('importVal2'); + // Error when supplying RowId during MERGE, verify import fails + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tName\tDescription\n" + row3RowId + "\t" + dataName3 + "\tupdate\n", dataType, "MERGE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain(ROWID_MERGE_ERROR); + // update description and fieldName value from Import with merge. at the same time create a new data. 
the import columns contain name field, verify update and insert is successful const newDataName = 'MergedNewData'; const importMergeText = 'Name\tDescription\t' + fieldName + '\n' + newName3 + '\tmergeUpd1\tmergeVal1\n' + newName4 + '\tmergeUpd2\tmergeVal2\n' + newDataName + '\tmergeNew\tmergeNewVal'; @@ -957,6 +1019,60 @@ describe('Data CRUD', () => { expect(caseInsensitive(newDataRow, 'Name')).toBe(newDataName); expect(caseInsensitive(newDataRow, 'description')).toBe('mergeNew'); expect(caseInsensitive(newDataRow, fieldName)).toBe('mergeNewVal'); + + // Update from file, using rowId as key, verify update should be successful and data are updated correctly + const importUpdateRowIdText = 'RowId\tDescription\t' + fieldName + '\n' + row1RowId + '\timportUpdByRowId1\timportValByRowId1\n' + row2RowId + '\timportUpdByRowId2\timportValByRowId2'; + const updateByRowIdResp = await ExperimentCRUDUtils.importData(server, importUpdateRowIdText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1'); + expect(caseInsensitive(row1, fieldName)).toBe('importValByRowId1'); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2'); + expect(caseInsensitive(row2, fieldName)).toBe('importValByRowId2'); + + // update from file, provide rowId and an updated name, verify name is successfully updated + const newNameByRowId1 = 'RenamedByRowId1Import'; + const newNameByRowId2 = 'RenamedByRowId2Import'; + const importUpdateRowIdNameText = 'RowId\tName\tDescription\n' + row1RowId + '\t' + newNameByRowId1 + '\timportUpdByRowId1-2\n' + row2RowId + '\t' + newNameByRowId2 + '\timportUpdByRowId2-2\n'; + const updateByRowIdNameResp = await 
ExperimentCRUDUtils.importData(server, importUpdateRowIdNameText, dataType, 'UPDATE', topFolderOptions, editorUserOptions); + expect(updateByRowIdNameResp.body.success).toBe(true); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'Name')).toBe(newNameByRowId1); + expect(caseInsensitive(row1, 'description')).toBe('importUpdByRowId1-2'); + expect(caseInsensitive(row2, 'Name')).toBe(newNameByRowId2); + expect(caseInsensitive(row2, 'description')).toBe('importUpdByRowId2-2'); + + // verify data rowId needs to match provided dataclass type + const emptyDataClass = dataType + "Empty"; + await server.post('property', 'createDomain', { + kind: 'DataClass', + domainDesign: { name: emptyDataClass, fields: [{ name: fieldName }] }, + options: { name: dataType } + }, { ...topFolderOptions, ...designerReaderOptions }).expect(successfulResponse); + + // using query api, update using rowId for data that doesn't exist on the new dataclass should fail. + await server.post('query', 'updateRows', { + schemaName: 'exp.data', + queryName: emptyDataClass, + rows: [{ + description: 'update', + rowId: row3RowId + }] + }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { + const errorResp = JSON.parse(result.text); + expect(errorResp['exception']).toContain('Data not found for [' + row3RowId + ']'); + }); + + // using update from file, verify update using rowId for data that doesn't exist on this dataclass should fail. 
+ errorResp = await ExperimentCRUDUtils.importData(server, "RowId\tDescription\n" + row3RowId + "\tupdate\n", emptyDataClass, "UPDATE", topFolderOptions, editorUserOptions); + expect(errorResp.text).toContain('Data not found for [' + row3RowId + ']'); + }); }); diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index b14dacc468e..68045b61013 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -870,9 +870,6 @@ public static void derive(User user, Container container, DataIterator di, boole ExpDataIterators.DerivationDataIteratorBuilder ddib = new ExpDataIterators.DerivationDataIteratorBuilder(di, container, user, isSample, dataType, skipAliquot, true); DataIteratorContext context = new DataIteratorContext(); context.setInsertOption(QueryUpdateService.InsertOption.UPDATE); - Map configParameters = new HashMap<>(); - configParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - context.setConfigParameters(configParameters); DataIterator derive = ddib.getDataIterator(context); new Pump(derive, context).run(); if (context.getErrors().hasErrors()) @@ -915,7 +912,7 @@ public DataIterator getDataIterator(DataIteratorContext context) else if (_isSample) di = new SampleUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); else - di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); + di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents);// return LoggingDataIterator.wrap(di); } @@ -1186,7 +1183,7 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String lsid = (String) get(_lsidCol); + String lsid = (String) get(_lsidCol); // why lsid?, insert or 
merge String name = null; if (_nameCol != null) name = (String) get(_nameCol); @@ -1449,9 +1446,10 @@ else if (o instanceof Number) private static class DataUpdateDerivationDataIterator extends DerivationDataIteratorBase { - // Map from Data name to Set of (parentColName, parentName) - final Map>> _parentNames; - final boolean _useLsid; + // Map from Data key (RowId or name) to Set of (parentColName, parentName) + final Map>> _parentNames; + final Integer _rowIdCol; + final boolean _useRowId; protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext context, Container container, User user, ExpObject currentDataType, boolean checkRequiredParent) { @@ -1459,7 +1457,8 @@ protected DataUpdateDerivationDataIterator(DataIterator di, DataIteratorContext Map map = DataIteratorUtil.createColumnNameMap(di); _parentNames = new LinkedHashMap<>(); - _useLsid = map.containsKey("lsid") && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); + _rowIdCol = map.getOrDefault(ExpDataTable.Column.RowId.name(), -1); + _useRowId = map.containsKey(ExpDataTable.Column.RowId.name()); } @Override @@ -1474,9 +1473,15 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String key = null; - if (_useLsid && _lsidCol != null) - key = (String) get(_lsidCol); + Object key = null; + if (_useRowId && _rowIdCol != null) + { + key = get(_rowIdCol); + if (key instanceof String k) + key = Long.parseLong(k); + else + key = asLong(key); + } else if (_nameCol != null) key = (String) get(_nameCol); @@ -1504,9 +1509,11 @@ else if (_nameCol != null) Map dataCache = new LongHashMap<>(); List runRecords = new ArrayList<>(); - for (String key : _parentNames.keySet()) + for (Object key : _parentNames.keySet()) { - ExpData expData = _useLsid ? ExperimentService.get().getExpData(key) : getDataClass().getData(_container, key); + ExpData expData = _useRowId + ? 
ExperimentService.get().getExpData((Long) key) + : getDataClass().getData(_container, (String) key); if (expData == null) continue; @@ -2428,15 +2435,8 @@ public DataIterator getDataIterator(DataIteratorContext context) } else { - if (isMergeOrUpdate) - { - boolean isUpdateUsingLsid = isUpdateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid && !canUpdateNames) - dontUpdate.add(ExpDataTable.Column.Name.name()); - } + if (isUpdateOnly && !canUpdateNames) + dontUpdate.add(ExpDataTable.Column.Name.name()); } // Since we support detailed audit logging, add the ExistingRecordDataIterator here just before TableInsertDataIterator. @@ -2700,29 +2700,20 @@ private MultiDataTypeCrossProjectDataIterator(DataIterator di, DataIteratorConte FieldKey dataKey; boolean isNumeric; - if (_isSamples) - { - var foundId = RowId.namesAndLabels().stream() - .filter(map::containsKey) - .findFirst(); + var foundId = RowId.namesAndLabels().stream() + .filter(map::containsKey) + .findFirst(); - if (foundId.isPresent()) - { - index = map.get(foundId.get()); - dataKey = RowId.fieldKey(); - isNumeric = true; - } - else - { - index = map.getOrDefault(Name.name(), -1); - dataKey = Name.fieldKey(); - isNumeric = false; - } + if (foundId.isPresent()) + { + index = map.get(foundId.get()); + dataKey = RowId.fieldKey(); + isNumeric = true; } else { - index = map.getOrDefault(ExpDataTable.Column.Name.name(), -1); - dataKey = ExpDataTable.Column.Name.fieldKey(); + index = map.getOrDefault(Name.name(), -1); + dataKey = Name.fieldKey(); isNumeric = false; } diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 3808f929df0..90d22e0091a 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -1,1173 +1,1173 @@ -/* 
- * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.experiment; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.admin.FolderSerializationRegistry; -import org.labkey.api.assay.AssayProvider; -import org.labkey.api.assay.AssayService; -import org.labkey.api.attachments.AttachmentService; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.SampleTimelineAuditEvent; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbSchemaType; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.NameGenerator; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SimpleFilter.FilterClause; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpgradeCode; -import org.labkey.api.defaults.DefaultValueService; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.ExperimentRunType; -import org.labkey.api.exp.Lsid; -import 
org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.DefaultExperimentDataHandler; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpLineageService; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolAttachmentType; -import org.labkey.api.exp.api.ExpRunAttachmentType; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentJSONConverter; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.FilterProtocolInputCriteria; -import org.labkey.api.exp.api.SampleTypeDomainKind; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DomainAuditProvider; -import org.labkey.api.exp.property.DomainPropertyAuditProvider; -import org.labkey.api.exp.property.ExperimentProperty; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.SystemProperty; -import org.labkey.api.exp.query.ExpDataClassTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.exp.xar.LSIDRelativizer; -import org.labkey.api.exp.xar.LsidUtils; -import org.labkey.api.files.FileContentService; -import org.labkey.api.files.TableUpdaterFileListener; -import org.labkey.api.migration.DatabaseMigrationService; -import org.labkey.api.migration.ExperimentDeleteService; -import org.labkey.api.migration.MigrationTableHandler; -import org.labkey.api.module.ModuleContext; -import org.labkey.api.module.ModuleLoader; -import org.labkey.api.module.SpringModule; -import org.labkey.api.module.Summary; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import 
org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.FilteredTable; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.UserSchema; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.roles.RoleManager; -import org.labkey.api.settings.AppProps; -import org.labkey.api.settings.OptionalFeatureService; -import org.labkey.api.usageMetrics.UsageMetricsService; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JspTestCase; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.SystemMaintenance; -import org.labkey.api.view.AlwaysAvailableWebPartFactory; -import org.labkey.api.view.BaseWebPartFactory; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.JspView; -import org.labkey.api.view.Portal; -import org.labkey.api.view.ViewContext; -import org.labkey.api.view.WebPartFactory; -import org.labkey.api.view.WebPartView; -import org.labkey.api.view.template.WarningService; -import org.labkey.api.vocabulary.security.DesignVocabularyPermission; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.api.webdav.WebdavService; -import org.labkey.api.writer.ContainerUser; -import org.labkey.experiment.api.DataClassDomainKind; -import org.labkey.experiment.api.ExpDataClassImpl; -import org.labkey.experiment.api.ExpDataClassTableImpl; -import org.labkey.experiment.api.ExpDataClassType; -import org.labkey.experiment.api.ExpDataImpl; -import org.labkey.experiment.api.ExpDataTableImpl; -import org.labkey.experiment.api.ExpMaterialImpl; -import org.labkey.experiment.api.ExpProtocolImpl; -import org.labkey.experiment.api.ExpSampleTypeImpl; -import org.labkey.experiment.api.ExpSampleTypeTableImpl; -import org.labkey.experiment.api.ExperimentServiceImpl; -import org.labkey.experiment.api.ExperimentStressTest; -import 
org.labkey.experiment.api.GraphAlgorithms; -import org.labkey.experiment.api.LineageTest; -import org.labkey.experiment.api.LogDataType; -import org.labkey.experiment.api.Protocol; -import org.labkey.experiment.api.SampleTypeServiceImpl; -import org.labkey.experiment.api.SampleTypeUpdateServiceDI; -import org.labkey.experiment.api.UniqueValueCounterTestCase; -import org.labkey.experiment.api.VocabularyDomainKind; -import org.labkey.experiment.api.data.ChildOfCompareType; -import org.labkey.experiment.api.data.ChildOfMethod; -import org.labkey.experiment.api.data.LineageCompareType; -import org.labkey.experiment.api.data.ParentOfCompareType; -import org.labkey.experiment.api.data.ParentOfMethod; -import org.labkey.experiment.api.property.DomainImpl; -import org.labkey.experiment.api.property.DomainPropertyImpl; -import org.labkey.experiment.api.property.LengthValidator; -import org.labkey.experiment.api.property.LookupValidator; -import org.labkey.experiment.api.property.PropertyServiceImpl; -import org.labkey.experiment.api.property.RangeValidator; -import org.labkey.experiment.api.property.RegExValidator; -import org.labkey.experiment.api.property.StorageNameGenerator; -import org.labkey.experiment.api.property.StorageProvisionerImpl; -import org.labkey.experiment.api.property.TextChoiceValidator; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.controllers.property.PropertyController; -import org.labkey.experiment.defaults.DefaultValueServiceImpl; -import org.labkey.experiment.lineage.ExpLineageServiceImpl; -import org.labkey.experiment.lineage.LineagePerfTest; -import org.labkey.experiment.pipeline.ExperimentPipelineProvider; -import org.labkey.experiment.pipeline.XarTestPipelineJob; -import org.labkey.experiment.samples.DataClassFolderImporter; -import org.labkey.experiment.samples.DataClassFolderWriter; -import org.labkey.experiment.samples.SampleStatusFolderImporter; -import 
org.labkey.experiment.samples.SampleTimelineAuditProvider; -import org.labkey.experiment.samples.SampleTypeFolderImporter; -import org.labkey.experiment.samples.SampleTypeFolderWriter; -import org.labkey.experiment.security.DataClassDesignerRole; -import org.labkey.experiment.security.SampleTypeDesignerRole; -import org.labkey.experiment.types.TypesController; -import org.labkey.experiment.xar.FolderXarImporterFactory; -import org.labkey.experiment.xar.FolderXarWriterFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; -import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; -import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; - -public class ExperimentModule extends SpringModule -{ - private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; - private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; - - public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; - public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; - public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; - public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; - - @Override - public String getName() - { - return MODULE_NAME; - } - - @Override - public Double getSchemaVersion() - { - return 26.004; - } - - @Nullable - @Override - public UpgradeCode getUpgradeCode() - { - return new ExperimentUpgradeCode(); - } - - @Override - protected void init() - { - addController("experiment", 
ExperimentController.class); - addController("experiment-types", TypesController.class); - addController("property", PropertyController.class); - ExperimentService.setInstance(new ExperimentServiceImpl()); - SampleTypeService.setInstance(new SampleTypeServiceImpl()); - DefaultValueService.setInstance(new DefaultValueServiceImpl()); - StorageProvisioner.setInstance(StorageProvisionerImpl.get()); - ExpLineageService.setInstance(new ExpLineageServiceImpl()); - - PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); - PropertyService.setInstance(propertyServiceImpl); - UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); - - UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); - - ExperimentProperty.register(); - SamplesSchema.register(this); - ExpSchema.register(this); - - PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); - PropertyService.get().registerDomainKind(new DataClassDomainKind()); - PropertyService.get().registerDomainKind(new VocabularyDomainKind()); - - QueryService.get().addCompareType(new ChildOfCompareType()); - QueryService.get().addCompareType(new ParentOfCompareType()); - QueryService.get().addCompareType(new LineageCompareType()); - QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); - QueryService.get().addQueryListener(new PropertyQueryChangeListener()); - - PropertyService.get().registerValidatorKind(new RegExValidator()); - PropertyService.get().registerValidatorKind(new RangeValidator()); - PropertyService.get().registerValidatorKind(new LookupValidator()); - PropertyService.get().registerValidatorKind(new LengthValidator()); - PropertyService.get().registerValidatorKind(new TextChoiceValidator()); - - 
ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); - ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); - ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); - ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); - ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", - "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); - if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) - { - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", - "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); - } - else - { - OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", - "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); - - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", - "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, "Allow multi-value Text Choice properties", - "Support selecting more than one value for text choice fields", false); - } - 
OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", - "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", - "Support for querying lineage of experiment objects", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(SampleTypeUpdateServiceDI.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE, "Allow RowId to be accepted when merging samples", - "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); - - RoleManager.registerPermission(new DesignVocabularyPermission(), true); - RoleManager.registerRole(new SampleTypeDesignerRole()); - RoleManager.registerRole(new DataClassDesignerRole()); - - AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); - AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); - - WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); - ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); - - addModuleProperty(new LineageMaximumDepthModuleProperty(this)); - WarningService.get().register(new ExperimentWarningProvider()); - } - - @Override - public boolean hasScripts() - { - return true; - } - - @Override - @NotNull - protected Collection createWebPartFactories() - { - List result = new ArrayList<>(); - - BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunGroupWebPart(portalCtx, 
WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); - } - }; - runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); - result.add(runGroupsFactory); - - BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunTypeWebPart(); - } - }; - result.add(runTypesFactory); - - result.add(new ExperimentRunWebPartFactory()); - BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); - result.add(sampleTypeFactory); - result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); - view.setTitle("Samples"); - return view; - } - }); - - result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); - } - }); - - BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, 
WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - narrowProtocolFactory.addLegacyNames("Narrow Protocols"); - result.add(narrowProtocolFactory); - - return result; - } - - private void addDataResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() - { - @Override - public WebdavResource resolve(@NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return data.createIndexDocument(null); - } - - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); - if (idDataMap == null) - return null; - - Map> searchJsonMap = new HashMap<>(); - for (String resourceIdentifier : idDataMap.keySet()) - searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); - return searchJsonMap; - } - }); - } - - private void addDataClassResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - 
return null; - - ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); - if (dataClass == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); - - return properties; - } - }); - } - - private void addSampleTypeResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); - if (sampleType == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "sampleSet"); - - return properties; - } - }); - } - - private void addSampleResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); - if (material == null) - return null; - - return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Set rowIds = new HashSet<>(); - Map rowIdIdentifierMap = new LongHashMap<>(); - for 
(String resourceIdentifier : resourceIdentifiers) - { - long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId != 0) - { - rowIds.add(rowId); - rowIdIdentifierMap.put(rowId, resourceIdentifier); - } - } - - Map> searchJsonMap = new HashMap<>(); - for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) - { - searchJsonMap.put( - rowIdIdentifierMap.get(material.getRowId()), - ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() - ); - } - - return searchJsonMap; - } - }); - } - - @Override - protected void startupAfterSpringConfig(ModuleContext moduleContext) - { - SearchService ss = SearchService.get(); -// ss.addSearchCategory(OntologyManager.conceptCategory); - ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); - ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); - ss.addSearchCategory(ExpMaterialImpl.searchCategory); - ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); - ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataImpl.expDataCategory); - ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); - ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); - addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); - addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); - addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); - addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); - ss.addDocumentProvider(ExperimentServiceImpl.get()); - - 
PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); - ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); - ExperimentService.get().registerDataType(new LogDataType()); - - AuditLogService.get().registerAuditType(new DomainAuditProvider()); - AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); - AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); - - FileContentService fileContentService = FileContentService.get(); - if (null != fileContentService) - { - fileContentService.addFileListener(new ExpDataFileListener()); - fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); - fileContentService.addFileListener(new FileLinkFileListener()); - } - ContainerManager.addContainerListener(new ContainerManager.ContainerListener() - { - @Override - public void containerDeleted(Container c, User user) - { - try - { - ExperimentService.get().deleteAllExpObjInContainer(c, user); - } - catch (ExperimentException ee) - { - throw new RuntimeException(ee); - } - } - }, - // This is in the Last group because when a container is deleted, - // the Experiment listener needs to be called after the Study listener, - // because Study needs the metadata held by Experiment to delete properly. 
- // but it should be before the CoreContainerListener - ContainerManager.ContainerListener.Order.Last); - - if (ModuleLoader.getInstance().shouldInsertData()) - SystemProperty.registerProperties(); - - FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); - if (null != folderRegistry) - { - folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); - folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); - folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); - folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); - } - - AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); - - WebdavService.get().addProvider(new ScriptsResourceProvider()); - - SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); - - UsageMetricsService svc = UsageMetricsService.get(); - if (null != svc) - { - svc.registerUsageMetrics(getName(), () -> { - Map results = new HashMap<>(); - - DbSchema schema = ExperimentService.get().getSchema(); - if (AssayService.get() != null) - { - Map assayMetrics = new HashMap<>(); - SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); - SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); - for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) - { - Map protocolMetrics = new HashMap<>(); - - // Run count across all assay designs of this type - SQLFragment runSQL = new SQLFragment(baseRunSQL); - runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); - protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); - - // Number of assay designs of this type - SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); - protocolSQL.add(assayProvider.getProtocolPattern()); - protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); - List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); - protocolMetrics.put("protocolCount", protocols.size()); - - List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); - - protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); - - // Primary implementation class - protocolMetrics.put("implementingClass", assayProvider.getClass()); - - assayMetrics.put(assayProvider.getName(), protocolMetrics); - } - assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - - assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); - SQLFragment runsWithPlateSQL = new SQLFragment(""" - SELECT COUNT(*) FROM exp.experimentrun r - INNER JOIN exp.object o ON o.objectUri = r.lsid - INNER JOIN exp.objectproperty op ON op.objectId = o.objectId - WHERE op.propertyid IN ( - SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? - )"""); - assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); - assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); - - assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file - assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ - SELECT COUNT(*) FROM ( - SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data - WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( - SELECT rowid FROM exp.protocolapplication - WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') - ) - GROUP BY sourceapplicationid - ) x WHERE count > 1""").getObject(Long.class)); - - Map sampleLookupCountMetrics = new HashMap<>(); - SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); - - SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); - sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); - sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( - """ - SELECT COUNT(*) FROM ( - SELECT PD.domainid, COUNT(*) AS PropCount - FROM exp.propertydescriptor D - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) - AND propertyuri LIKE ? - GROUP BY PD.domainid - ) X WHERE X.PropCount > 1""" - ); - resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); - - assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); - - - // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) - results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assay", assayMetrics); - } - - results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); - results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); - - if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries - { - Collection> numSampleCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); - results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( - map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") - ).count()); - } - UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); - FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); - - SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + - " FROM (\n" + - " -- updates that are marked as lineage updates\n" + - " (SELECT DISTINCT transactionId\n" + - " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + - " AND comment = 'Sample was updated.'\n" + - " ) a1\n" + - " JOIN\n" + - " -- but have associated entries that are not lineage updates\n" + - " (SELECT DISTINCT transactionid\n" + - " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + - " ON a1.transactionid = a2.transactionid\n" + - " )"); - - results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); - - results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); - results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); - results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); - results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); - results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); - results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); - results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); - results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); - results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); - - results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - String duplicateCaseInsensitiveSampleNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.material - WHERE materialsourceid IS NOT NULL - GROUP BY LOWER(name), materialsourceid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - String duplicateCaseInsensitiveDataNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.data - WHERE classid IS NOT NULL - GROUP BY LOWER(name), classid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); - results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); - - results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); - results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); - results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + - "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); - if (schema.getSqlDialect().isPostgreSQL()) - { - Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); - results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> - (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); - } - - results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); - results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); - results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); - - results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); - results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - - results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); - - results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); - results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); - - results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); - results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); - - results.putAll(ExperimentService.get().getDomainMetrics()); - - return results; - }); - } - } - - @Override - public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) - { - ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); - service.registerSchemaHandler(handler); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); - DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); - service.registerSchemaHandler(dcHandler); - ExperimentDeleteService.setInstance(dcHandler); - } - - @Override - @NotNull - public Collection getSummary(Container c) - { - Collection list = new LinkedList<>(); - int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); - if (runGroupCount > 0) - list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); - - User user = HttpView.currentContext().getUser(); - - Set runTypes = ExperimentService.get().getExperimentRunTypes(c); - for (ExperimentRunType runType : runTypes) - { - if (runType == ExperimentRunType.ALL_RUNS_TYPE) - continue; - - long runCount = runType.getRunCount(user, c); - if (runCount > 0) - list.add(runCount + " runs of type " + runType.getDescription()); - } - - int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); - if (dataClassCount > 0) - list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); - - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if (sampleTypeCount > 0) - list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); - - return list; - } - - @Override - public @NotNull ArrayList getDetailedSummary(Container c, User user) - { - ArrayList summaries = new ArrayList<>(); - - // Assay types - long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); - if (assayTypeCount > 0) - summaries.add(new Summary(assayTypeCount, "Assay Type")); - - // Run count - int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); - if (runGroupCount > 0) - summaries.add(new Summary(runGroupCount, "Assay run")); - - // Number of Data Classes - List dataClasses = ExperimentService.get().getDataClasses(c, false); - int dataClassCount = dataClasses.size(); - if (dataClassCount > 0) - summaries.add(new Summary(dataClassCount, "Data Class")); - - ExpSchema expSchema = new ExpSchema(user, c); - - // Individual Data Class row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 47919: The "DataCount" column is filtered to only count data in the target container - if (table instanceof ExpDataClassTableImpl tableImpl) - tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpDataClassTable.Column.Name.name()); - columns.add(ExpDataClassTable.Column.DataCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - summaries.add(new Summary(count, entry.getKey())); - } - } - - // Sample Types - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if 
(sampleTypeCount > 0) - summaries.add(new Summary(sampleTypeCount, "Sample Type")); - - // Individual Sample Type row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 51557: The "SampleCount" column is filtered to only count data in the target container - if (table instanceof ExpSampleTypeTableImpl tableImpl) - tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpSampleTypeTable.Column.Name.name()); - columns.add(ExpSampleTypeTable.Column.SampleCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - { - String name = entry.getKey(); - Summary s = name.equals("MixtureBatches") - ? 
new Summary(count, "Batch") - : new Summary(count, name); - summaries.add(s); - } - } - } - - return summaries; - } - - @Override - public @NotNull Set> getIntegrationTests() - { - return Set.of( - DomainImpl.TestCase.class, - DomainPropertyImpl.TestCase.class, - ExpDataTableImpl.TestCase.class, - ExperimentServiceImpl.AuditDomainUriTest.class, - ExperimentServiceImpl.LineageQueryTestCase.class, - ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, - ExperimentServiceImpl.TestCase.class, - ExperimentStressTest.class, - LineagePerfTest.class, - LineageTest.class, - OntologyManager.TestCase.class, - PropertyServiceImpl.TestCase.class, - SampleTypeServiceImpl.TestCase.class, - StorageNameGenerator.TestCase.class, - StorageProvisionerImpl.TestCase.class, - UniqueValueCounterTestCase.class, - XarTestPipelineJob.TestCase.class - ); - } - - @Override - public @NotNull Collection>> getIntegrationTestFactories() - { - List>> list = new ArrayList<>(super.getIntegrationTestFactories()); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); - return list; - } - - @Override - public @NotNull Set> getUnitTests() - { - return Set.of( - GraphAlgorithms.TestCase.class, - LSIDRelativizer.TestCase.class, - Lsid.TestCase.class, - LsidUtils.TestCase.class, - PropertyController.TestCase.class, - Quantity.TestCase.class, - Unit.TestCase.class - ); - } - - @Override - @NotNull - public Collection getSchemaNames() - { - return List.of( - ExpSchema.SCHEMA_NAME, - DataClassDomainKind.PROVISIONED_SCHEMA_NAME, - SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME - ); - } - - @NotNull - @Override - public Collection getProvisionedSchemaNames() - { - return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); - } - - @Override - public JSONObject getPageContextJson(ContainerUser context) - { - JSONObject json = 
super.getPageContextJson(context); - json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); - return json; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.admin.FolderSerializationRegistry; +import org.labkey.api.assay.AssayProvider; +import org.labkey.api.assay.AssayService; +import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.SampleTimelineAuditEvent; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.NameGenerator; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpgradeCode; +import 
org.labkey.api.defaults.DefaultValueService; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.ExperimentRunType; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.DefaultExperimentDataHandler; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpLineageService; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolAttachmentType; +import org.labkey.api.exp.api.ExpRunAttachmentType; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentJSONConverter; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.FilterProtocolInputCriteria; +import org.labkey.api.exp.api.SampleTypeDomainKind; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DomainAuditProvider; +import org.labkey.api.exp.property.DomainPropertyAuditProvider; +import org.labkey.api.exp.property.ExperimentProperty; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.SystemProperty; +import org.labkey.api.exp.query.ExpDataClassTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.exp.xar.LSIDRelativizer; +import org.labkey.api.exp.xar.LsidUtils; +import org.labkey.api.files.FileContentService; +import org.labkey.api.files.TableUpdaterFileListener; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.ExperimentDeleteService; +import org.labkey.api.migration.MigrationTableHandler; +import org.labkey.api.module.ModuleContext; +import org.labkey.api.module.ModuleLoader; +import org.labkey.api.module.SpringModule; 
+import org.labkey.api.module.Summary; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.FilteredTable; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.UserSchema; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.roles.RoleManager; +import org.labkey.api.settings.AppProps; +import org.labkey.api.settings.OptionalFeatureService; +import org.labkey.api.usageMetrics.UsageMetricsService; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JspTestCase; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.SystemMaintenance; +import org.labkey.api.view.AlwaysAvailableWebPartFactory; +import org.labkey.api.view.BaseWebPartFactory; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.JspView; +import org.labkey.api.view.Portal; +import org.labkey.api.view.ViewContext; +import org.labkey.api.view.WebPartFactory; +import org.labkey.api.view.WebPartView; +import org.labkey.api.view.template.WarningService; +import org.labkey.api.vocabulary.security.DesignVocabularyPermission; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.api.webdav.WebdavService; +import org.labkey.api.writer.ContainerUser; +import org.labkey.experiment.api.DataClassDomainKind; +import org.labkey.experiment.api.ExpDataClassImpl; +import org.labkey.experiment.api.ExpDataClassTableImpl; +import org.labkey.experiment.api.ExpDataClassType; +import org.labkey.experiment.api.ExpDataImpl; +import org.labkey.experiment.api.ExpDataTableImpl; +import org.labkey.experiment.api.ExpMaterialImpl; +import org.labkey.experiment.api.ExpProtocolImpl; +import org.labkey.experiment.api.ExpSampleTypeImpl; +import 
org.labkey.experiment.api.ExpSampleTypeTableImpl; +import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.experiment.api.ExperimentStressTest; +import org.labkey.experiment.api.GraphAlgorithms; +import org.labkey.experiment.api.LineageTest; +import org.labkey.experiment.api.LogDataType; +import org.labkey.experiment.api.Protocol; +import org.labkey.experiment.api.SampleTypeServiceImpl; +import org.labkey.experiment.api.SampleTypeUpdateServiceDI; +import org.labkey.experiment.api.UniqueValueCounterTestCase; +import org.labkey.experiment.api.VocabularyDomainKind; +import org.labkey.experiment.api.data.ChildOfCompareType; +import org.labkey.experiment.api.data.ChildOfMethod; +import org.labkey.experiment.api.data.LineageCompareType; +import org.labkey.experiment.api.data.ParentOfCompareType; +import org.labkey.experiment.api.data.ParentOfMethod; +import org.labkey.experiment.api.property.DomainImpl; +import org.labkey.experiment.api.property.DomainPropertyImpl; +import org.labkey.experiment.api.property.LengthValidator; +import org.labkey.experiment.api.property.LookupValidator; +import org.labkey.experiment.api.property.PropertyServiceImpl; +import org.labkey.experiment.api.property.RangeValidator; +import org.labkey.experiment.api.property.RegExValidator; +import org.labkey.experiment.api.property.StorageNameGenerator; +import org.labkey.experiment.api.property.StorageProvisionerImpl; +import org.labkey.experiment.api.property.TextChoiceValidator; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.controllers.property.PropertyController; +import org.labkey.experiment.defaults.DefaultValueServiceImpl; +import org.labkey.experiment.lineage.ExpLineageServiceImpl; +import org.labkey.experiment.lineage.LineagePerfTest; +import org.labkey.experiment.pipeline.ExperimentPipelineProvider; +import org.labkey.experiment.pipeline.XarTestPipelineJob; +import org.labkey.experiment.samples.DataClassFolderImporter; 
+import org.labkey.experiment.samples.DataClassFolderWriter; +import org.labkey.experiment.samples.SampleStatusFolderImporter; +import org.labkey.experiment.samples.SampleTimelineAuditProvider; +import org.labkey.experiment.samples.SampleTypeFolderImporter; +import org.labkey.experiment.samples.SampleTypeFolderWriter; +import org.labkey.experiment.security.DataClassDesignerRole; +import org.labkey.experiment.security.SampleTypeDesignerRole; +import org.labkey.experiment.types.TypesController; +import org.labkey.experiment.xar.FolderXarImporterFactory; +import org.labkey.experiment.xar.FolderXarWriterFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; +import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; +import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; + +public class ExperimentModule extends SpringModule +{ + private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; + private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; + + public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; + public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; + public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; + public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; + + @Override + public String getName() + { + return MODULE_NAME; + } + + @Override + public Double getSchemaVersion() + { + return 26.004; + } + + @Nullable + @Override + public UpgradeCode 
getUpgradeCode() + { + return new ExperimentUpgradeCode(); + } + + @Override + protected void init() + { + addController("experiment", ExperimentController.class); + addController("experiment-types", TypesController.class); + addController("property", PropertyController.class); + ExperimentService.setInstance(new ExperimentServiceImpl()); + SampleTypeService.setInstance(new SampleTypeServiceImpl()); + DefaultValueService.setInstance(new DefaultValueServiceImpl()); + StorageProvisioner.setInstance(StorageProvisionerImpl.get()); + ExpLineageService.setInstance(new ExpLineageServiceImpl()); + + PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); + PropertyService.setInstance(propertyServiceImpl); + UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); + + UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); + + ExperimentProperty.register(); + SamplesSchema.register(this); + ExpSchema.register(this); + + PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); + PropertyService.get().registerDomainKind(new DataClassDomainKind()); + PropertyService.get().registerDomainKind(new VocabularyDomainKind()); + + QueryService.get().addCompareType(new ChildOfCompareType()); + QueryService.get().addCompareType(new ParentOfCompareType()); + QueryService.get().addCompareType(new LineageCompareType()); + QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); + QueryService.get().addQueryListener(new PropertyQueryChangeListener()); + + PropertyService.get().registerValidatorKind(new RegExValidator()); + PropertyService.get().registerValidatorKind(new RangeValidator()); + PropertyService.get().registerValidatorKind(new LookupValidator()); + 
PropertyService.get().registerValidatorKind(new LengthValidator()); + PropertyService.get().registerValidatorKind(new TextChoiceValidator()); + + ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); + ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); + ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); + ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); + ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", + "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); + if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) + { + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", + "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); + } + else + { + OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", + "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); + + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", + "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, 
"Allow multi-value Text Choice properties", + "Support selecting more than one value for text choice fields", false); + } + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", + "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", + "Support for querying lineage of experiment objects", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", + "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); + + RoleManager.registerPermission(new DesignVocabularyPermission(), true); + RoleManager.registerRole(new SampleTypeDesignerRole()); + RoleManager.registerRole(new DataClassDesignerRole()); + + AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); + AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); + + WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); + ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); + + addModuleProperty(new LineageMaximumDepthModuleProperty(this)); + WarningService.get().register(new ExperimentWarningProvider()); + } + + @Override + public boolean hasScripts() + { + return true; + } + + @Override + @NotNull + protected Collection createWebPartFactories() + { + List result = new ArrayList<>(); + + BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull 
ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); + } + }; + runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); + result.add(runGroupsFactory); + + BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunTypeWebPart(); + } + }; + result.add(runTypesFactory); + + result.add(new ExperimentRunWebPartFactory()); + BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); + result.add(sampleTypeFactory); + result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); + view.setTitle("Samples"); + return view; + } + }); + + result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); + } + }); 
+ + BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + narrowProtocolFactory.addLegacyNames("Narrow Protocols"); + result.add(narrowProtocolFactory); + + return result; + } + + private void addDataResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() + { + @Override + public WebdavResource resolve(@NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return data.createIndexDocument(null); + } + + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) + { + Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); + if (idDataMap == null) + return null; + + Map> searchJsonMap = new HashMap<>(); + for (String resourceIdentifier : idDataMap.keySet()) + searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); + return searchJsonMap; + } + }); + } + + private void addDataClassResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = 
NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); + if (dataClass == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); + + return properties; + } + }); + } + + private void addSampleTypeResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); + if (sampleType == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "sampleSet"); + + return properties; + } + }); + } + + private void addSampleResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); + if (material == null) + return null; + + return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) 
+ { + Set rowIds = new HashSet<>(); + Map rowIdIdentifierMap = new LongHashMap<>(); + for (String resourceIdentifier : resourceIdentifiers) + { + long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId != 0) + { + rowIds.add(rowId); + rowIdIdentifierMap.put(rowId, resourceIdentifier); + } + } + + Map> searchJsonMap = new HashMap<>(); + for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) + { + searchJsonMap.put( + rowIdIdentifierMap.get(material.getRowId()), + ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() + ); + } + + return searchJsonMap; + } + }); + } + + @Override + protected void startupAfterSpringConfig(ModuleContext moduleContext) + { + SearchService ss = SearchService.get(); +// ss.addSearchCategory(OntologyManager.conceptCategory); + ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); + ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); + ss.addSearchCategory(ExpMaterialImpl.searchCategory); + ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); + ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataImpl.expDataCategory); + ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); + ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); + addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); + addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); + addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); + addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); + addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); + 
addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); + ss.addDocumentProvider(ExperimentServiceImpl.get()); + + PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); + ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); + ExperimentService.get().registerDataType(new LogDataType()); + + AuditLogService.get().registerAuditType(new DomainAuditProvider()); + AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); + AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); + + FileContentService fileContentService = FileContentService.get(); + if (null != fileContentService) + { + fileContentService.addFileListener(new ExpDataFileListener()); + fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); + fileContentService.addFileListener(new FileLinkFileListener()); + } + ContainerManager.addContainerListener(new ContainerManager.ContainerListener() + { + @Override + public void containerDeleted(Container c, User user) + { + try + { + ExperimentService.get().deleteAllExpObjInContainer(c, user); + } + catch (ExperimentException ee) + { + throw new RuntimeException(ee); + } + } + }, + // This is in the Last group because when a container is deleted, + // the Experiment listener needs to be called after the Study listener, + // because Study needs the metadata held by Experiment to delete properly. 
+ // but it should be before the CoreContainerListener + ContainerManager.ContainerListener.Order.Last); + + if (ModuleLoader.getInstance().shouldInsertData()) + SystemProperty.registerProperties(); + + FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); + if (null != folderRegistry) + { + folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); + folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); + folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); + folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); + } + + AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); + + WebdavService.get().addProvider(new ScriptsResourceProvider()); + + SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); + + UsageMetricsService svc = UsageMetricsService.get(); + if (null != svc) + { + svc.registerUsageMetrics(getName(), () -> { + Map results = new HashMap<>(); + + DbSchema schema = ExperimentService.get().getSchema(); + if (AssayService.get() != null) + { + Map assayMetrics = new HashMap<>(); + SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); + SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); + for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) + { + Map protocolMetrics = new HashMap<>(); + + // Run count across all assay designs of this type + SQLFragment runSQL = new SQLFragment(baseRunSQL); + runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); + protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); + + // Number of assay designs of this type + SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); + protocolSQL.add(assayProvider.getProtocolPattern()); + protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); + List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); + protocolMetrics.put("protocolCount", protocols.size()); + + List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); + + protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); + + // Primary implementation class + protocolMetrics.put("implementingClass", assayProvider.getClass()); + + assayMetrics.put(assayProvider.getName(), protocolMetrics); + } + assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + + assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); + SQLFragment runsWithPlateSQL = new SQLFragment(""" + SELECT COUNT(*) FROM exp.experimentrun r + INNER JOIN exp.object o ON o.objectUri = r.lsid + INNER JOIN exp.objectproperty op ON op.objectId = o.objectId + WHERE op.propertyid IN ( + SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? + )"""); + assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); + assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); + + assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file + assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ + SELECT COUNT(*) FROM ( + SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data + WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( + SELECT rowid FROM exp.protocolapplication + WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') + ) + GROUP BY sourceapplicationid + ) x WHERE count > 1""").getObject(Long.class)); + + Map sampleLookupCountMetrics = new HashMap<>(); + SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); + + SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); + sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); + sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( + """ + SELECT COUNT(*) FROM ( + SELECT PD.domainid, COUNT(*) AS PropCount + FROM exp.propertydescriptor D + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) + AND propertyuri LIKE ? + GROUP BY PD.domainid + ) X WHERE X.PropCount > 1""" + ); + resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); + + assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); + + + // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) + results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assay", assayMetrics); + } + + results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); + results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); + + if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries + { + Collection> numSampleCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); + results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( + map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") + ).count()); + } + UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); + FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); + + SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + + " FROM (\n" + + " -- updates that are marked as lineage updates\n" + + " (SELECT DISTINCT transactionId\n" + + " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + + " AND comment = 'Sample was updated.'\n" + + " ) a1\n" + + " JOIN\n" + + " -- but have associated entries that are not lineage updates\n" + + " (SELECT DISTINCT transactionid\n" + + " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + + " ON a1.transactionid = a2.transactionid\n" + + " )"); + + results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); + + results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); + results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); + results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); + results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); + results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); + results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); + results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); + results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); + results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); + + results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + String duplicateCaseInsensitiveSampleNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.material + WHERE materialsourceid IS NOT NULL + GROUP BY LOWER(name), materialsourceid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + String duplicateCaseInsensitiveDataNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.data + WHERE classid IS NOT NULL + GROUP BY LOWER(name), classid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); + results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); + + results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); + results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); + results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + + "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); + if (schema.getSqlDialect().isPostgreSQL()) + { + Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); + results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> + (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); + } + + results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); + results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); + results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); + + results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); + results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + + results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); + + results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); + results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); + + results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); + results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); + + results.putAll(ExperimentService.get().getDomainMetrics()); + + return results; + }); + } + } + + @Override + public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) + { + ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); + service.registerSchemaHandler(handler); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); + DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); + service.registerSchemaHandler(dcHandler); + ExperimentDeleteService.setInstance(dcHandler); + } + + @Override + @NotNull + public Collection getSummary(Container c) + { + Collection list = new LinkedList<>(); + int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); + if (runGroupCount > 0) + list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); + + User user = HttpView.currentContext().getUser(); + + Set runTypes = ExperimentService.get().getExperimentRunTypes(c); + for (ExperimentRunType runType : runTypes) + { + if (runType == ExperimentRunType.ALL_RUNS_TYPE) + continue; + + long runCount = runType.getRunCount(user, c); + if (runCount > 0) + list.add(runCount + " runs of type " + runType.getDescription()); + } + + int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); + if (dataClassCount > 0) + list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); + + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if (sampleTypeCount > 0) + list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); + + return list; + } + + @Override + public @NotNull ArrayList getDetailedSummary(Container c, User user) + { + ArrayList summaries = new ArrayList<>(); + + // Assay types + long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); + if (assayTypeCount > 0) + summaries.add(new Summary(assayTypeCount, "Assay Type")); + + // Run count + int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); + if (runGroupCount > 0) + summaries.add(new Summary(runGroupCount, "Assay run")); + + // Number of Data Classes + List dataClasses = ExperimentService.get().getDataClasses(c, false); + int dataClassCount = dataClasses.size(); + if (dataClassCount > 0) + summaries.add(new Summary(dataClassCount, "Data Class")); + + ExpSchema expSchema = new ExpSchema(user, c); + + // Individual Data Class row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 47919: The "DataCount" column is filtered to only count data in the target container + if (table instanceof ExpDataClassTableImpl tableImpl) + tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpDataClassTable.Column.Name.name()); + columns.add(ExpDataClassTable.Column.DataCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + summaries.add(new Summary(count, entry.getKey())); + } + } + + // Sample Types + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if 
(sampleTypeCount > 0) + summaries.add(new Summary(sampleTypeCount, "Sample Type")); + + // Individual Sample Type row counts + { + // The table-level container filter is set to ensure sample types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 51557: The "SampleCount" column is filtered to only count data in the target container + if (table instanceof ExpSampleTypeTableImpl tableImpl) + tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpSampleTypeTable.Column.Name.name()); + columns.add(ExpSampleTypeTable.Column.SampleCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + { + String name = entry.getKey(); + Summary s = name.equals("MixtureBatches") + ? 
new Summary(count, "Batch") + : new Summary(count, name); + summaries.add(s); + } + } + } + + return summaries; + } + + @Override + public @NotNull Set> getIntegrationTests() + { + return Set.of( + DomainImpl.TestCase.class, + DomainPropertyImpl.TestCase.class, + ExpDataTableImpl.TestCase.class, + ExperimentServiceImpl.AuditDomainUriTest.class, + ExperimentServiceImpl.LineageQueryTestCase.class, + ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, + ExperimentServiceImpl.TestCase.class, + ExperimentStressTest.class, + LineagePerfTest.class, + LineageTest.class, + OntologyManager.TestCase.class, + PropertyServiceImpl.TestCase.class, + SampleTypeServiceImpl.TestCase.class, + StorageNameGenerator.TestCase.class, + StorageProvisionerImpl.TestCase.class, + UniqueValueCounterTestCase.class, + XarTestPipelineJob.TestCase.class + ); + } + + @Override + public @NotNull Collection>> getIntegrationTestFactories() + { + List>> list = new ArrayList<>(super.getIntegrationTestFactories()); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); + return list; + } + + @Override + public @NotNull Set> getUnitTests() + { + return Set.of( + GraphAlgorithms.TestCase.class, + LSIDRelativizer.TestCase.class, + Lsid.TestCase.class, + LsidUtils.TestCase.class, + PropertyController.TestCase.class, + Quantity.TestCase.class, + Unit.TestCase.class + ); + } + + @Override + @NotNull + public Collection getSchemaNames() + { + return List.of( + ExpSchema.SCHEMA_NAME, + DataClassDomainKind.PROVISIONED_SCHEMA_NAME, + SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME + ); + } + + @NotNull + @Override + public Collection getProvisionedSchemaNames() + { + return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); + } + + @Override + public JSONObject getPageContextJson(ContainerUser context) + { + JSONObject json = 
super.getPageContextJson(context); + json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); + return json; + } +} diff --git a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java index 8c4a644d3cb..42774eefd05 100644 --- a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java +++ b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java @@ -657,4 +657,115 @@ private static void fillRowId(ExpDataClassImpl dc, Domain domain, DbScope scope) } + /** + * Called from exp-26.004-26.005.sql + * Drop the lsid column from existing provisioned DataClass tables. + */ + @SuppressWarnings("unused") + @DeferredUpgrade + public static void dropProvisionedDataClassLsidColumn(ModuleContext context) + { + if (context.isNewInstall()) + return; + + try (DbScope.Transaction tx = ExperimentService.get().ensureTransaction()) + { + TableInfo source = ExperimentServiceImpl.get().getTinfoDataClass(); + List dataClasses = new TableSelector(source, null, null) + .stream(DataClass.class) + .map(ExpDataClassImpl::new) + .toList(); + + LOG.info("Dropping the lsid column from {} data classes", dataClasses.size()); + + int successCount = 0; + for (ExpDataClassImpl dc : dataClasses) + { + boolean success = dropDataClassLsid(dc); + if (success) + successCount++; + } + + LOG.info("Dropped lsid column from {} of {} data classes successfully.", successCount, dataClasses.size()); + + tx.commit(); + } + } + + private static boolean dropDataClassLsid(ExpDataClassImpl dc) + { + Domain domain = dc.getDomain(); + DataClassDomainKind kind = null; + try + { + kind = (DataClassDomainKind) domain.getDomainKind(); + } + catch (IllegalArgumentException e) + { + // pass + } + if (null == kind || null == kind.getStorageSchemaName()) + return false; + + DbSchema schema = DataClassDomainKind.getSchema(); + + StorageProvisioner.get().ensureStorageTable(domain, kind, 
schema.getScope()); + domain = PropertyService.get().getDomain(domain.getTypeId()); + assert (null != domain && null != domain.getStorageTableName()); + + SchemaTableInfo provisionedTable = schema.getTable(domain.getStorageTableName()); + if (provisionedTable == null) + { + LOG.error("DataClass '" + dc.getName() + "' (" + dc.getRowId() + ") has no provisioned table."); + return false; + } + + String lsidColumnName = "lsid"; + ColumnInfo lsidColumn = provisionedTable.getColumn(FieldKey.fromParts(lsidColumnName)); + if (lsidColumn == null) + { + LOG.info("No lsid column found on table '{}'. Skipping drop.", provisionedTable.getName()); + return false; + } + + Set indicesToRemove = new HashSet<>(); + for (var index : provisionedTable.getAllIndices()) + { + var indexColumns = index.columns(); + if (indexColumns.contains(lsidColumn)) + { + if (indexColumns.size() > 1) + LOG.info("Dropping index '{}' on table '{}' because it contains the lsid column.", index.name(), provisionedTable.getName()); + + indicesToRemove.add(index.name()); + } + } + + if (!indicesToRemove.isEmpty()) + StorageProvisionerImpl.get().dropTableIndices(domain, indicesToRemove); + else + LOG.info("No indices found on table '{}' that contain the lsid column.", provisionedTable.getName()); + + // Remanufacture a property descriptor that matches the original LSID property descriptor. 
+ var spec = new PropertyStorageSpec(lsidColumnName, JdbcType.VARCHAR, 300).setNullable(false); + PropertyDescriptor pd = new PropertyDescriptor(); + pd.setContainer(dc.getContainer()); + pd.setDatabaseDefaultValue(spec.getDefaultValue()); + pd.setName(spec.getName()); + pd.setJdbcType(spec.getJdbcType(), spec.getSize()); + pd.setNullable(spec.isNullable()); + pd.setMvEnabled(spec.isMvEnabled()); + pd.setPropertyURI(DomainUtil.createUniquePropertyURI(domain.getTypeURI(), null, new CaseInsensitiveHashSet())); + pd.setDescription(spec.getDescription()); + pd.setImportAliases(spec.getImportAliases()); + pd.setScale(spec.getSize()); + DomainPropertyImpl dp = new DomainPropertyImpl((DomainImpl) domain, pd); + + LOG.debug("Dropping lsid column from table '{}' for data class '{}' in folder {}.", provisionedTable.getName(), dc.getName(), dc.getContainer().getPath()); + StorageProvisionerImpl.get().dropProperties(domain, Set.of(dp)); + + return true; + } + + } diff --git a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java index 63155e503b2..c83453b6618 100644 --- a/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java +++ b/experiment/src/org/labkey/experiment/api/DataClassDomainKind.java @@ -100,7 +100,6 @@ public class DataClassDomainKind extends AbstractDomainKind implements ExpDataClassDataTable @@ -167,8 +170,8 @@ public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl ALLOWED_IMPORT_HEADERS; static { DATA_CLASS_ALT_MERGE_KEYS = new HashSet<>(Arrays.asList(Column.ClassId.name(), Name.name())); - DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.LSID.name(), Column.RowId.name())); - ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("name", "description", "flag", "comment", "alias", "datafileurl")); + DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.RowId.name())); + ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("description", 
"flag", "comment", "alias", "datafileurl")); } private Map _vocabularyDomainProviders; @@ -689,7 +692,7 @@ public SQLFragment getFromSQLExpanded(String alias, Set selectedColumn // all columns from dataclass property table except key columns Set pCols = new CaseInsensitiveHashSet(provisioned.getColumnNameSet()); pCols.remove("name"); - pCols.remove("lsid"); + pCols.remove("lsid"); // TODO remove pCols.remove("rowId"); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, pCols); @@ -837,12 +840,6 @@ protected SimpleFilter.FilterClause getContainerFilterClause(ContainerFilter fil // UpdatableTableInfo // - @Override - public @Nullable CaseInsensitiveHashSet skipProperties() - { - return super.skipProperties(); - } - @Nullable @Override public CaseInsensitiveHashMap remapSchemaColumns() @@ -859,8 +856,6 @@ public CaseInsensitiveHashMap remapSchemaColumns() @Override public @Nullable Set getAltMergeKeys(DataIteratorContext context) { - if (context.getInsertOption().updateOnly && context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - return getAltKeysForUpdate(); return DATA_CLASS_ALT_MERGE_KEYS; } @@ -878,19 +873,23 @@ public Set getAltKeysForUpdate() if (context.getInsertOption().allowUpdate) { - boolean isUpdateUsingLsid = context.getInsertOption().updateOnly && - colNameMap.containsKey(ExpDataTable.Column.LSID.name()) && - context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - - if (isUpdateUsingLsid) + if (context.getInsertOption().updateOnly) { - keyColumnNames.add(Column.LSID.name()); + // For UPDATE: prefer RowId, else require Name (with ClassId) + if (colNameMap.containsKey(Column.RowId.name())) + keyColumnNames.add(Column.RowId.name()); + else if (colNameMap.containsKey(Name.name())) + { + keyColumnNames.add(Column.ClassId.name()); + keyColumnNames.add(Name.name()); + } + else + throw new IllegalArgumentException("Either RowId or Name is required to update DataClass Data."); } 
else { - Set altMergeKeys = getAltMergeKeys(context); - if (altMergeKeys != null) - keyColumnNames.addAll(altMergeKeys); + // For MERGE: use merge keys (ClassId + Name) + keyColumnNames.addAll(DATA_CLASS_ALT_MERGE_KEYS); } } @@ -995,7 +994,11 @@ public DataIterator getDataIterator(DataIteratorContext context) if (null == input) return null; // Can happen if context has errors + boolean isMerge = context.getInsertOption() == QueryUpdateService.InsertOption.MERGE; + boolean isUpdate = context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE; + var drop = new CaseInsensitiveHashSet(); + var keysCheck = new CaseInsensitiveHashSet(); for (int i = 1; i <= input.getColumnCount(); i++) { String name = input.getColumnInfo(i).getName(); @@ -1003,18 +1006,44 @@ public DataIterator getDataIterator(DataIteratorContext context) boolean isContainerField = name.equalsIgnoreCase("Container") || name.equalsIgnoreCase("Folder"); if (isContainerField) { - if (context.getInsertOption().updateOnly || !context.isCrossFolderImport()) + if (isUpdate || !context.isCrossFolderImport()) drop.add(name); } + else if (ExpDataTable.Column.Name.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.Name.name()); + } else if (isReservedHeader(name)) + { + if (ExpDataTable.Column.RowId.name().equalsIgnoreCase(name)) + { + keysCheck.add(ExpDataTable.Column.RowId.name()); + if (isUpdate) + continue; + + // While accepting RowId during merge is not our preferred behavior, we want to give users a way + // to opt-in to the old behavior where RowId is accepted and ignored. + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) + { + context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging data. 
Specify only the data name instead.", ExpMaterialTable.Column.RowId.name())); + return null; + } + } + + if (ExpDataTable.Column.LSID.name().equalsIgnoreCase(name)) + keysCheck.add(ExpDataTable.Column.LSID.name()); drop.add(name); + } + else if (Column.ClassId.name().equalsIgnoreCase(name)) drop.add(name); } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) + + if ((isMerge || isUpdate) && keysCheck.size() == 1 && keysCheck.contains(LSID.name())) { - drop.remove("lsid"); - drop.remove("rowid");// keep rowid for audit log + String message = String.format("LSID is no longer accepted as a key for data %s. Specify a RowId or Name instead.", isMerge ? "merge" : "update"); + context.getErrors().addRowError(new ValidationException(message, LSID.name())); + return null; } if (!drop.isEmpty()) @@ -1269,7 +1298,7 @@ public Map> getExistingRows(User user, Container co Set lsids = new HashSet<>(); for (Map dataRow : dataRows.values()) - lsids.add((String) dataRow.get("lsid")); + lsids.add((String) dataRow.get("lsid")); // ? 
List seeds = ExperimentServiceImpl.get().getExpDatasByLSID(lsids); ExperimentServiceImpl.get().addRowsParentsFields(new HashSet<>(seeds), dataRows, user, container); @@ -1381,197 +1410,54 @@ else if (name != null) @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _dataClass.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } - - return result; - - } - - // DataClassDataUpdateService needs to skip Attachment column convert before _update - // TODO: move override when implementing consolidating dataclass update methods - @Override - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException { - if (PropertyType.ATTACHMENT == col.getPropertyType()) - return value; - - if (ALIAS_CONCEPT_URI.equals(col.getConceptURI())) - return value; - - return super.convertColumnValue(col, value, user, c, fileLinkDirPath); + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override - protected TableInfo getTableInfoForConversion() + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException // TODO remove { - // getDBTable() returns exp.data table, which lacks properties fields. 
- // TODO: this method can be removed when implementing consolidating dataclass update methods - return getQueryTable(); + throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @Override - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException + public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException { - // LSID was stripped by super.updateRows() and is needed to insert into the dataclass provisioned table - String lsid = (String)oldRow.get("lsid"); - if (lsid == null) - throw new ValidationException("lsid required to update row"); - - String newName = (String) row.get(Name.name()); - String oldName = (String) oldRow.get(Name.name()); - boolean hasNameChange = !StringUtils.isEmpty(newName) && !newName.equals(oldName); - - // Replace attachment columns with filename and keep AttachmentFiles - Map rowStripped = new CaseInsensitiveHashMap<>(); - Map attachments = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : row.entrySet()) - { - String name = entry.getKey(); - Object value = entry.getValue(); - if (isAttachmentProperty(name)) - { - if (value instanceof AttachmentFile file) - { - if (null != file.getFilename()) - { - rowStripped.put(name, file.getFilename()); - attachments.put(name, value); - } - } - else if (value != null && !StringUtils.isEmpty(String.valueOf(value))) - { - // Issue 53498: string value for attachment field is not allowed - throw new ValidationException("Cannot upload '" + value + "' to Attachment type field '" + name + "'."); - } - else - rowStripped.put(name, value); // if null or empty, remove attachment - } - else - { - rowStripped.put(name, value); - } - } - - for (String vocabularyDomainName : 
getVocabularyDomainProviders().keySet()) - { - DataClassVocabularyProviderProperties fieldVocabularyDomainProvider = getVocabularyDomainProviders().get(vocabularyDomainName); - if (fieldVocabularyDomainProvider != null) - rowStripped.putAll(fieldVocabularyDomainProvider.conceptURIVocabularyDomainProvider().getUpdateRowProperties(user, c, rowStripped, oldRow, getAttachmentParentFactory(), fieldVocabularyDomainProvider.sourceColumnName(), fieldVocabularyDomainProvider.vocabularyDomainName(), getVocabularyDomainProviders().size() > 1)); - } + if (rows == null || rows.isEmpty()) + return Collections.emptyList(); - // update exp.data - Map ret = new CaseInsensitiveHashMap<>(super._update(user, c, rowStripped, oldRow, keys)); + Map finalConfigParameters = configParameters == null ? new HashMap<>() : configParameters; + recordDataIteratorUsed(configParameters); - Integer rowId = (Integer) oldRow.get("RowId"); - if (rowId == null) - throw new ValidationException("RowId required to update row"); - keys = new Object[] {rowId}; - TableInfo t = _dataClassDataTableSupplier.get(); - if (t.getColumnNameSet().stream().anyMatch(rowStripped::containsKey)) - { - ret.putAll(Table.update(user, t, rowStripped, t.getColumn("rowId"), keys, null, Level.DEBUG)); - } + List> results = new ArrayList<>(); + int index = 0; - ExpDataImpl data = null; - if (hasNameChange) + while (index < rows.size()) { - data = ExperimentServiceImpl.get().getExpData(lsid); - ExperimentService.get().addObjectLegacyName(data.getObjectId(), ExperimentServiceImpl.getNamespacePrefix(ExpData.class), oldName, user); - } + // TODO: check for duplicates - // update comment - if (row.containsKey("flag") || row.containsKey("comment")) - { - Object o = row.containsKey("flag") ? 
row.get("flag") : row.get("comment"); - String flag = Objects.toString(o, null); + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - if (data == null) - data = ExperimentServiceImpl.get().getExpData(lsid); - if (data != null) - data.setComment(user, flag); - } + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; - // update aliases - if (row.containsKey("Alias")) - AliasInsertHelper.handleInsertUpdate(getContainer(), user, lsid, ExperimentService.get().getTinfoDataAliasMap(), row.get("Alias")); + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; - // handle attachments - removePreviousAttachments(user, c, row, oldRow); - ret.putAll(attachments); - addAttachments(user, c, ret, lsid); + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); + List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - // search index done in postcommit + if (context.getErrors().hasErrors()) + throw context.getErrors(); - ret.put("RowId", oldRow.get("RowId")); // return rowId for SearchService - ret.put("lsid", lsid); - return ret; - } + if (subRet != null) + results.addAll(subRet); - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - boolean useDib = false; - if (rows != null && !rows.isEmpty() && oldKeys == null) - useDib = rows.get(0).containsKey("lsid"); - - useDib = useDib && hasUniformKeys(rows); - - List> results; - if (useDib) - { - Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; - finalConfigParameters.put(ExperimentService.QueryOptions.UseLsidForUpdate, true); - - recordDataIteratorUsed(configParameters); - results = super._updateRowsUsingDIB(user, container, rows, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), extraScriptContext); + // TODO: record partitions } - else - { - results = super.updateRows(user, container, rows, oldKeys, errors, configParameters, extraScriptContext); - DbScope scope = getUserSchema().getDbSchema().getScope(); - scope.addCommitTask(() -> - { - List orderedRowIds = new ArrayList<>(); - for (Map result : results) - { - Long rowId = MapUtils.getLong(result, RowId.name()); - if (rowId != null) - orderedRowIds.add(rowId); - } - Collections.sort(orderedRowIds); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - SearchService.get().defaultTask().getQueue(_dataClass.getContainer(), SearchService.PRIORITY.modified).addRunnable((q) -> - { - for (ExpDataImpl expData : ExperimentServiceImpl.get().getExpDatas(sublist)) - expData.index(q, null); - }) - ); - }, DbScope.CommitTaskOption.POSTCOMMIT); - - /* setup mini dataiterator pipeline to process lineage */ - DataIterator di = _toDataIteratorBuilder("updateRows.lineage", results).getDataIterator(new DataIteratorContext()); - ExpDataIterators.derive(user, container, di, false, _dataClass, true); - } + // summary audit? 
return results; } @@ -1597,58 +1483,12 @@ protected int truncateRows(User user, Container container) return ExperimentServiceImpl.get().truncateDataClass(_dataClass, user, container); } - private void removePreviousAttachments(User user, Container c, Map newRow, Map oldRow) - { - Lsid lsid = new Lsid((String)oldRow.get("LSID")); - - for (Map.Entry entry : newRow.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && oldRow.get(entry.getKey()) != null) - { - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - AttachmentService.get().deleteAttachment(parent, (String) oldRow.get(entry.getKey()), user); - } - } - } - @Override protected Domain getDomain() { return _dataClass.getDomain(); } - private void addAttachments(User user, Container c, Map row, String lsidStr) - { - if (row != null && lsidStr != null) - { - ArrayList attachmentFiles = new ArrayList<>(); - for (Map.Entry entry : row.entrySet()) - { - if (isAttachmentProperty(entry.getKey()) && entry.getValue() instanceof AttachmentFile file) - { - if (null != file.getFilename()) - attachmentFiles.add(file); - } - } - - if (!attachmentFiles.isEmpty()) - { - Lsid lsid = new Lsid(lsidStr); - AttachmentParent parent = new ExpDataClassAttachmentParent(c, lsid); - - try - { - AttachmentService.get().addAttachments(parent, attachmentFiles, user); - } - catch (IOException e) - { - throw UnexpectedException.wrap(e); - } - } - } - } - @Override public void configureDataIteratorContext(DataIteratorContext context) { diff --git a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java index c54d96aeec5..7214de841e8 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java @@ -1,978 +1,985 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.experiment.api; - -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Handler; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.XarFormatException; -import org.labkey.api.exp.XarSource; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.query.ExpDataClassDataTable; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.files.FileContentService; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineJob; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryRowReference; -import org.labkey.api.query.QueryService; -import 
org.labkey.api.query.ValidationException; -import org.labkey.api.search.SearchResultTemplate; -import org.labkey.api.search.SearchScope; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DataClassReadPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HtmlString; -import org.labkey.api.util.LinkBuilder; -import org.labkey.api.util.MimeMap; -import org.labkey.api.util.NetworkDrive; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.URLHelper; -import org.labkey.api.util.InputBuilder; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.NavTree; -import org.labkey.api.view.ViewContext; -import org.labkey.api.webdav.SimpleDocumentResource; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; - -public class ExpDataImpl extends AbstractRunItemImpl implements ExpData -{ - public enum 
DataOperations - { - Edit("editing", UpdatePermission.class), - EditLineage("editing lineage", UpdatePermission.class), - Delete("deleting", DeletePermission.class), - Move("moving", MoveEntitiesPermission.class); - - private final String _description; // used as a suffix in messaging users about what is not allowed - private final Class _permissionClass; - - DataOperations(String description, Class permissionClass) - { - _description = description; - _permissionClass = permissionClass; - } - - public String getDescription() - { - return _description; - } - - public Class getPermissionClass() - { - return _permissionClass; - } - } - - public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, DataClassReadPermission.class); - } - }; - public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, MediaReadPermission.class); - } - }; - - /** Cache this because it can be expensive to recompute */ - private Boolean _finalRunOutput; - - /** - * Temporary mapping until experiment.xml contains the mime type - */ - private static final MimeMap MIME_MAP = new MimeMap(); - - static public List fromDatas(List datas) - { - List ret = new ArrayList<>(datas.size()); - for (Data data : datas) - { - ret.add(new ExpDataImpl(data)); - } - return ret; - } - - // For serialization - protected ExpDataImpl() {} - - public ExpDataImpl(Data data) - { - super(data); - } - - @Override - public void setComment(User user, String comment) throws ValidationException - { - setComment(user, comment, true); - } - - @Override - public void setComment(User user, String comment, 
boolean index) throws ValidationException - { - super.setComment(user, comment); - - if (index) - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - @Nullable - public ActionURL detailsURL() - { - DataType dataType = getDataType(); - if (dataType != null) - { - ActionURL url = dataType.getDetailsURL(this); - if (url != null) - return url; - } - - return _object.detailsURL(); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference() - { - return getQueryRowReference(null); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) - { - ExpDataClassImpl dc = getDataClass(user); - if (dc != null) - return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - - // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace - DataType type = getDataType(); - if (type != null) - { - QueryRowReference queryRowReference = type.getQueryRowReference(this); - if (queryRowReference != null) - return queryRowReference; - } - - return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - } - - @Override - public List getTargetApplications() - { - return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); - } - - @Override - public List getTargetRuns() - { - return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); - } - - @Override - public DataType getDataType() - { - return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); - } - - @Override - public void setDataFileURI(URI uri) - { - ensureUnlocked(); - _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); - } - - @Override - public void save(User user) - { - // Replace 
the default "Data" cpastype if the Data belongs to a DataClass - ExpDataClassImpl dataClass = getDataClass(); - if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) - setCpasType(dataClass.getLSID()); - - boolean isNew = getRowId() == 0; - save(user, ExperimentServiceImpl.get().getTinfoData(), true); - - if (isNew) - { - if (dataClass != null) - { - Map map = new HashMap<>(); - map.put("lsid", getLSID()); - Table.insert(user, dataClass.getTinfo(), map); - } - } - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - protected void save(User user, TableInfo table, boolean ensureObject) - { - assert ensureObject; - super.save(user, table, true); - } - - @Override - public URI getDataFileURI() - { - String url = _object.getDataFileUrl(); - if (url == null) - return null; - try - { - return new URI(_object.getDataFileUrl()); - } - catch (URISyntaxException use) - { - return null; - } - } - - @Override - public ExperimentDataHandler findDataHandler() - { - return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); - } - - @Override - public String getDataFileUrl() - { - return _object.getDataFileUrl(); - } - - @Override - public boolean hasFileScheme() - { - return !FileUtil.hasCloudScheme(getDataFileUrl()); - } - - @Override - @Nullable - public File getFile() - { - return _object.getFile(); - } - - @Override - public @Nullable FileLike getFileLike() - { - return _object.getFileLike(); - } - - @Override - @Nullable - public java.nio.file.Path getFilePath() - { - return _object.getFilePath(); - } - - @Override - public boolean isInlineImage() - { - return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); - } - - @Override - public void delete(User user) - { - delete(user, true); - } - - @Override - public void delete(User user, boolean deleteRunsUsingData) - { - ExperimentServiceImpl.get().deleteDataByRowIds(user, 
getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); - } - - public String getMimeType() - { - if (null != getDataFileUrl()) - return MIME_MAP.getContentTypeFor(getDataFileUrl()); - else - return null; - } - - @Override - public boolean isFileOnDisk() - { - java.nio.file.Path f = getFilePath(); - if (f != null) - if (!FileUtil.hasCloudScheme(f)) - return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); - else - return Files.exists(f); - else - return false; - } - - public boolean isPathAccessible() - { - java.nio.file.Path path = getFilePath(); - return (null != path && Files.exists(path)); - } - - @Override - public String getCpasType() - { - String result = _object.getCpasType(); - if (result != null) - return result; - - ExpDataClass dataClass = getDataClass(); - if (dataClass != null) - return dataClass.getLSID(); - - return ExpData.DEFAULT_CPAS_TYPE; - } - - public void setGenerated(boolean generated) - { - ensureUnlocked(); - _object.setGenerated(generated); - } - - @Override - public boolean isGenerated() - { - return _object.isGenerated(); - } - - @Override - public boolean isFinalRunOutput() - { - if (_finalRunOutput == null) - { - ExpRun run = getRun(); - _finalRunOutput = run != null && run.isFinalOutput(this); - } - return _finalRunOutput.booleanValue(); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass() - { - return getDataClass(null); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass(@Nullable User user) - { - if (_object.getClassId() != null && getContainer() != null) - { - if (user == null) - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); - else - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); - } - - return null; - } - - @Override - public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException - { - String dataFileURL = getDataFileUrl(); - if (dataFileURL == null) - 
return; - - if (xarSource.shouldIgnoreDataFiles()) - { - job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); - return; - } - - job.debug("Trying to load data file " + dataFileURL + " into the system"); - - java.nio.file.Path path = FileUtil.stringToPath(getContainer(), dataFileURL); - - if (!Files.exists(path)) - { - job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); - return; - } - - // Check that the file is under the pipeline root to prevent users from referencing a file that they - // don't have permission to import - PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); - if (!xarSource.allowImport(pr, job.getContainer(), path)) - { - if (pr == null) - { - job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); - return; - } - job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". 
It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); - return; - } - - ExperimentDataHandler handler = findDataHandler(); - try - { - handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); - } - catch (ExperimentException e) - { - throw new XarFormatException(e); - } - - job.debug("Finished trying to load data file " + dataFileURL + " into the system"); - } - - // Get all text and int strings from the data class for indexing - private void getIndexValues( - Map props, - @NotNull ExpDataClassDataTableImpl table, - Set identifiersHi, - Set identifiersMed, - Set identifiersLo, - Set keywordHi, - Set keywordMed, - Set keywordsLo, - JSONObject jsonData - ) - { - CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); - for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) - skipColumns.add(column.name()); - skipColumns.add("Ancestors"); - skipColumns.add("Container"); - - processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); - } - - @Override - @NotNull - public Collection getAliases() - { - TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); - TableInfo ti = ExperimentService.get().getTinfoAlias(); - SQLFragment sql = new SQLFragment() - .append("SELECT a.name FROM ").append(mapTi, "m") - .append(" JOIN ").append(ti, "a") - .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); - sql.add(getLSID()); - ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); - return Collections.unmodifiableList(aliases); - } - - @Override - public String getDocumentId() - { - String dataClassName = "-"; - ExpDataClass dc = getDataClass(); - if (dc != null) - dataClassName = dc.getName(); - // why not just data:rowId? 
- return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); - } - - @Override - public Map getObjectProperties() - { - return getObjectProperties(getDataClass()); - } - - @Override - public Map getObjectProperties(@Nullable User user) - { - return getObjectProperties(getDataClass(user)); - } - - private Map getObjectProperties(ExpDataClassImpl dataClass) - { - HashMap ret = new HashMap<>(super.getObjectProperties()); - var ti = null == dataClass ? null : dataClass.getTinfo(); - if (null != ti) - { - ret.putAll(getObjectProperties(ti)); - } - return ret; - } - - private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) - { - if (resourceIdentifier.startsWith("data:")) - resourceIdentifier = resourceIdentifier.substring("data:".length()); - - Path path = Path.parse(resourceIdentifier); - if (path.size() != 3) - return null; - String containerId = path.get(0); - String dataClassName = path.get(1); - String rowIdString = path.get(2); - - long rowId; - try - { - rowId = Long.parseLong(rowIdString); - if (rowId == 0) - return null; - } - catch (NumberFormatException ex) - { - return null; - } - - Container c = ContainerManager.getForId(containerId); - if (c == null) - return null; - - ExpDataClass dc = null; - if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) - { - String dcKey = containerId + '-' + dataClassName; - dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); - } - - return new Pair<>(rowId, dc); - } - - @Nullable - public static ExpDataImpl fromDocumentId(String resourceIdentifier) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); - if (rowIdDataClass == null) - return null; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - if (dc != null) - return ExperimentServiceImpl.get().getExpData(dc, rowId); - else - return 
ExperimentServiceImpl.get().getExpData(rowId); - } - - @Nullable - public static Map fromDocumentIds(Collection resourceIdentifiers) - { - Map rowIdIdentifierMap = new LongHashMap<>(); - Map dcCache = new HashMap<>(); - Map dcMap = new LongHashMap<>(); - Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass - List rowIds = new ArrayList<>(); // data rowIds without dataClass - for (String resourceIdentifier : resourceIdentifiers) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); - if (rowIdDataClass == null) - continue; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - rowIdIdentifierMap.put(rowId, resourceIdentifier); - - if (dc != null) - { - dcMap.put(dc.getRowId(), dc); - dcRowIdMap - .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) - .add(rowId); - } - else - rowIds.add(rowId); - } - - List expDatas = new ArrayList<>(); - if (!rowIds.isEmpty()) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); - - if (!dcRowIdMap.isEmpty()) - { - for (Long dataClassId : dcRowIdMap.keySet()) - { - ExpDataClass dc = dcMap.get(dataClassId); - if (dc != null) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); - } - } - - Map identifierDatas = new HashMap<>(); - for (ExpData data : expDatas) - { - identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); - } - - return identifierDatas; - } - - @Override - public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) - { - java.nio.file.Path path = getFilePath(); - if (path == null) - { - return null; - } - - Container c = getContainer(); - if (c == null) - { - return null; - } - - return FileContentService.get().getWebDavUrl(path, c, type); - } - - @Override - public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) - { - Container container = getContainer(); - if (container == null) - return null; - - Map props = 
new HashMap<>(); - JSONObject jsonData = new JSONObject(); - Set keywordsHi = new HashSet<>(); - Set keywordsMed = new HashSet<>(); - Set keywordsLo = new HashSet<>(); - - Set identifiersHi = new HashSet<>(); - Set identifiersMed = new HashSet<>(); - Set identifiersLo = new HashSet<>(); - - StringBuilder body = new StringBuilder(); - - // Name is an identifier with the highest weight - identifiersHi.add(getName()); - keywordsMed.add(getName()); // also add to keywords since those are stemmed - - // Description is added as a keywordsLo -- in Biologics it is common for the description to - // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as - // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. - // CONSIDER: tokenize the description and extract identifiers - if (null != getDescription()) - keywordsLo.add(getDescription()); - - String comment = getComment(); - if (comment != null) - keywordsMed.add(comment); - - // Add aliases in parentheses in the title - StringBuilder title = new StringBuilder(getName()); - Collection aliases = getAliases(); - if (!aliases.isEmpty()) - { - title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); - identifiersHi.addAll(aliases); - } - - ExpDataClassImpl dc = getDataClass(User.getSearchUser()); - if (dc != null) - { - ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); - NavTree t = new NavTree(dc.getName(), show); - String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); - props.put(SearchService.PROPERTY.navtrail.toString(), nav); - - props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); - body.append(dc.getName()); - - if (tableInfo == null) - tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); - - if (!(tableInfo instanceof ExpDataClassDataTableImpl 
expDataClassDataTable)) - throw new IllegalArgumentException(String.format("Unable to index data class item in %s. Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); - - if (!expDataClassDataTable.getDataClass().equals(dc)) - throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); - - // Collect other text columns and lookup display columns - getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); - } - - // === Stored, not indexed - if (dc != null && dc.isMedia()) - props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); - else - props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); - props.put(SearchService.PROPERTY.title.toString(), title.toString()); - props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); - - ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); - view.setExtraPath(container.getId()); - String docId = getDocumentId(); - - // Generate a summary explicitly instead of relying on a summary to be extracted - // from the document body. Placing lookup values and the description in the body - // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
- StringBuilder summary = new StringBuilder(); - if (StringUtils.isNotEmpty(getDescription())) - summary.append(getDescription()).append("\n"); - - appendTokens(summary, keywordsMed); - appendTokens(summary, identifiersMed); - appendTokens(summary, identifiersLo); - - props.put(SearchService.PROPERTY.summary.toString(), summary); - - return new ExpDataResource( - getRowId(), - new Path(docId), - docId, - container.getEntityId(), - "text/plain", - body.toString(), - view, - props, - getCreatedBy(), - getCreated(), - getModifiedBy(), - getModified() - ); - } - - private static void appendTokens(StringBuilder sb, Collection toks) - { - if (toks.isEmpty()) - return; - - sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); - } - - private static class ExpDataResource extends SimpleDocumentResource - { - final long _rowId; - - public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) - { - super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); - _rowId = rowId; - } - - @Override - public void setLastIndexed(long ms, long modified) - { - ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); - } - } - - public static class DataSearchResultTemplate implements SearchResultTemplate - { - public static final String NAME = "data"; - public static final String PROPERTY = "dataclass"; - - @Nullable - @Override - public String getName() - { - return NAME; - } - - private ExpDataClass getDataClass() - { - if (HttpView.hasCurrentView()) - { - ViewContext ctx = HttpView.currentContext(); - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); - } - return null; - } - - @Nullable - @Override - public String getCategories() - { - ExpDataClass dataClass = getDataClass(); - - if (dataClass != null && dataClass.isMedia()) - return expMediaDataCategory.getName(); - - return expDataCategory.getName(); - } - - @Nullable - @Override - public SearchScope getSearchScope() - { - return SearchScope.FolderAndSubfolders; - } - - @NotNull - @Override - public String getResultNameSingular() - { - ExpDataClass dc = getDataClass(); - if (dc != null) - return dc.getName(); - return "data"; - } - - @NotNull - @Override - public String getResultNamePlural() - { - return getResultNameSingular(); - } - - @Override - public boolean includeNavigationLinks() - { - return true; - } - - @Override - public boolean includeAdvanceUI() - { - return false; - } - - @Nullable - @Override - public HtmlString getExtraHtml(ViewContext ctx) - { - String q = ctx.getActionURL().getParameter("q"); - - if (StringUtils.isNotBlank(q)) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); - url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); - - StringBuilder html = new StringBuilder(); - html.append("
"); - - appendParam(html, null, dataclass, "All", false, url); - for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) - { - appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); - } - - html.append("
"); - return HtmlString.unsafe(html.toString()); - } - else - { - return null; - } - } - - private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) - { - sb.append(""); - - if (!Objects.equals(dataclass, current)) - { - if (addParam) - url = url.clone().addParameter(PROPERTY, dataclass); - - sb.append(LinkBuilder.simpleLink(label, url)); - } - else - { - sb.append(label); - } - - sb.append(" "); - } - - @Override - public HtmlString getHiddenInputsHtml(ViewContext ctx) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); - } - - return null; - } - - - @Override - public String reviseQuery(ViewContext ctx, String q) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - - if (null != dataclass) - return "+(" + q + ") +" + PROPERTY + ":" + dataclass; - else - return q; - } - - @Override - public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) - { - SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); - - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - String text = root.getText(); - root.setText(text + " - " + dataclass); - } - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.experiment.api; + +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Handler; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.XarFormatException; +import org.labkey.api.exp.XarSource; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataClassDataTable; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.files.FileContentService; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineJob; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryRowReference; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.ValidationException; +import org.labkey.api.search.SearchResultTemplate; +import org.labkey.api.search.SearchScope; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DataClassReadPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HtmlString; +import org.labkey.api.util.LinkBuilder; +import org.labkey.api.util.MimeMap; +import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.URLHelper; +import org.labkey.api.util.InputBuilder; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.NavTree; +import org.labkey.api.view.ViewContext; +import org.labkey.api.webdav.SimpleDocumentResource; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; + +public class ExpDataImpl extends AbstractRunItemImpl implements ExpData +{ + public enum DataOperations + { + Edit("editing", UpdatePermission.class), + EditLineage("editing lineage", UpdatePermission.class), + Delete("deleting", DeletePermission.class), + Move("moving", MoveEntitiesPermission.class); + + private final String _description; // used as a suffix in messaging users 
about what is not allowed + private final Class _permissionClass; + + DataOperations(String description, Class permissionClass) + { + _description = description; + _permissionClass = permissionClass; + } + + public String getDescription() + { + return _description; + } + + public Class getPermissionClass() + { + return _permissionClass; + } + } + + public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, DataClassReadPermission.class); + } + }; + public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, MediaReadPermission.class); + } + }; + + /** Cache this because it can be expensive to recompute */ + private Boolean _finalRunOutput; + + /** + * Temporary mapping until experiment.xml contains the mime type + */ + private static final MimeMap MIME_MAP = new MimeMap(); + + static public List fromDatas(List datas) + { + List ret = new ArrayList<>(datas.size()); + for (Data data : datas) + { + ret.add(new ExpDataImpl(data)); + } + return ret; + } + + // For serialization + protected ExpDataImpl() {} + + public ExpDataImpl(Data data) + { + super(data); + } + + @Override + public void setComment(User user, String comment) throws ValidationException + { + setComment(user, comment, true); + } + + @Override + public void setComment(User user, String comment, boolean index) throws ValidationException + { + super.setComment(user, comment); + + if (index) + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + @Nullable + public ActionURL detailsURL() + { + DataType dataType = 
getDataType(); + if (dataType != null) + { + ActionURL url = dataType.getDetailsURL(this); + if (url != null) + return url; + } + + return _object.detailsURL(); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference() + { + return getQueryRowReference(null); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) + { + ExpDataClassImpl dc = getDataClass(user); + if (dc != null) + return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + + // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace + DataType type = getDataType(); + if (type != null) + { + QueryRowReference queryRowReference = type.getQueryRowReference(this); + if (queryRowReference != null) + return queryRowReference; + } + + return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + } + + @Override + public List getTargetApplications() + { + return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); + } + + @Override + public List getTargetRuns() + { + return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); + } + + @Override + public DataType getDataType() + { + return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); + } + + @Override + public void setDataFileURI(URI uri) + { + ensureUnlocked(); + _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); + } + + @Override + public void save(User user) + { + // Replace the default "Data" cpastype if the Data belongs to a DataClass + ExpDataClassImpl dataClass = getDataClass(); + if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) + setCpasType(dataClass.getLSID()); + + boolean isNew = getRowId() == 0; + save(user, 
ExperimentServiceImpl.get().getTinfoData(), true); + + if (isNew) + { + if (dataClass != null) + { + Map map = new HashMap<>(); + map.put("lsid", getLSID()); + Table.insert(user, dataClass.getTinfo(), map); + } + } + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + protected void save(User user, TableInfo table, boolean ensureObject) + { + assert ensureObject; + super.save(user, table, true); + } + + @Override + public URI getDataFileURI() + { + String url = _object.getDataFileUrl(); + if (url == null) + return null; + try + { + return new URI(_object.getDataFileUrl()); + } + catch (URISyntaxException use) + { + return null; + } + } + + @Override + public ExperimentDataHandler findDataHandler() + { + return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); + } + + @Override + public String getDataFileUrl() + { + return _object.getDataFileUrl(); + } + + @Override + public boolean hasFileScheme() + { + return !FileUtil.hasCloudScheme(getDataFileUrl()); + } + + @Override + @Nullable + public File getFile() + { + return _object.getFile(); + } + + @Override + public @Nullable FileLike getFileLike() + { + return _object.getFileLike(); + } + + @Override + @Nullable + public java.nio.file.Path getFilePath() + { + return _object.getFilePath(); + } + + @Override + public boolean isInlineImage() + { + return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); + } + + @Override + public void delete(User user) + { + delete(user, true); + } + + @Override + public void delete(User user, boolean deleteRunsUsingData) + { + ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); + } + + public String getMimeType() + { + if (null != getDataFileUrl()) + return MIME_MAP.getContentTypeFor(getDataFileUrl()); + else + return null; + } + + @Override + public boolean isFileOnDisk() + { + 
java.nio.file.Path f = getFilePath(); + if (f != null) + if (!FileUtil.hasCloudScheme(f)) + return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); + else + return Files.exists(f); + else + return false; + } + + public boolean isPathAccessible() + { + java.nio.file.Path path = getFilePath(); + return (null != path && Files.exists(path)); + } + + @Override + public String getCpasType() + { + String result = _object.getCpasType(); + if (result != null) + return result; + + ExpDataClass dataClass = getDataClass(); + if (dataClass != null) + return dataClass.getLSID(); + + return ExpData.DEFAULT_CPAS_TYPE; + } + + public void setGenerated(boolean generated) + { + ensureUnlocked(); + _object.setGenerated(generated); + } + + @Override + public boolean isGenerated() + { + return _object.isGenerated(); + } + + @Override + public boolean isFinalRunOutput() + { + if (_finalRunOutput == null) + { + ExpRun run = getRun(); + _finalRunOutput = run != null && run.isFinalOutput(this); + } + return _finalRunOutput.booleanValue(); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass() + { + return getDataClass(null); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass(@Nullable User user) + { + if (_object.getClassId() != null && getContainer() != null) + { + if (user == null) + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); + else + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); + } + + return null; + } + + @Override + public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException + { + String dataFileURL = getDataFileUrl(); + if (dataFileURL == null) + return; + + if (xarSource.shouldIgnoreDataFiles()) + { + job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); + return; + } + + job.debug("Trying to load data file " + dataFileURL + " into the system"); + + java.nio.file.Path path = 
FileUtil.stringToPath(getContainer(), dataFileURL); + + if (!Files.exists(path)) + { + job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); + return; + } + + // Check that the file is under the pipeline root to prevent users from referencing a file that they + // don't have permission to import + PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); + if (!xarSource.allowImport(pr, job.getContainer(), path)) + { + if (pr == null) + { + job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); + return; + } + job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); + return; + } + + ExperimentDataHandler handler = findDataHandler(); + try + { + handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); + } + catch (ExperimentException e) + { + throw new XarFormatException(e); + } + + job.debug("Finished trying to load data file " + dataFileURL + " into the system"); + } + + // Get all text and int strings from the data class for indexing + private void getIndexValues( + Map props, + @NotNull ExpDataClassDataTableImpl table, + Set identifiersHi, + Set identifiersMed, + Set identifiersLo, + Set keywordHi, + Set keywordMed, + Set keywordsLo, + JSONObject jsonData + ) + { + CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); + for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) + skipColumns.add(column.name()); + skipColumns.add("Ancestors"); + skipColumns.add("Container"); + + processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); + } + + @Override + @NotNull + public 
Collection getAliases() + { + TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); + TableInfo ti = ExperimentService.get().getTinfoAlias(); + SQLFragment sql = new SQLFragment() + .append("SELECT a.name FROM ").append(mapTi, "m") + .append(" JOIN ").append(ti, "a") + .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); + sql.add(getLSID()); + ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); + return Collections.unmodifiableList(aliases); + } + + @Override + public String getDocumentId() + { + String dataClassName = "-"; + ExpDataClass dc = getDataClass(); + if (dc != null) + dataClassName = dc.getName(); + // why not just data:rowId? + return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); + } + + @Override + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); + } + + @Override + public Map getObjectProperties() + { + return getObjectProperties(getDataClass()); + } + + @Override + public Map getObjectProperties(@Nullable User user) + { + return getObjectProperties(getDataClass(user)); + } + + private Map getObjectProperties(ExpDataClassImpl dataClass) + { + HashMap ret = new HashMap<>(super.getObjectProperties()); + var ti = null == dataClass ? 
null : dataClass.getTinfo(); + if (null != ti) + { + ret.putAll(getObjectProperties(ti)); + } + return ret; + } + + private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) + { + if (resourceIdentifier.startsWith("data:")) + resourceIdentifier = resourceIdentifier.substring("data:".length()); + + Path path = Path.parse(resourceIdentifier); + if (path.size() != 3) + return null; + String containerId = path.get(0); + String dataClassName = path.get(1); + String rowIdString = path.get(2); + + long rowId; + try + { + rowId = Long.parseLong(rowIdString); + if (rowId == 0) + return null; + } + catch (NumberFormatException ex) + { + return null; + } + + Container c = ContainerManager.getForId(containerId); + if (c == null) + return null; + + ExpDataClass dc = null; + if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) + { + String dcKey = containerId + '-' + dataClassName; + dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); + } + + return new Pair<>(rowId, dc); + } + + @Nullable + public static ExpDataImpl fromDocumentId(String resourceIdentifier) + { + Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); + if (rowIdDataClass == null) + return null; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + if (dc != null) + return ExperimentServiceImpl.get().getExpData(dc, rowId); + else + return ExperimentServiceImpl.get().getExpData(rowId); + } + + @Nullable + public static Map fromDocumentIds(Collection resourceIdentifiers) + { + Map rowIdIdentifierMap = new LongHashMap<>(); + Map dcCache = new HashMap<>(); + Map dcMap = new LongHashMap<>(); + Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass + List rowIds = new ArrayList<>(); // data rowIds without dataClass + for (String resourceIdentifier : resourceIdentifiers) + { + Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); + if (rowIdDataClass == null) + continue; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + rowIdIdentifierMap.put(rowId, resourceIdentifier); + + if (dc != null) + { + dcMap.put(dc.getRowId(), dc); + dcRowIdMap + .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) + .add(rowId); + } + else + rowIds.add(rowId); + } + + List expDatas = new ArrayList<>(); + if (!rowIds.isEmpty()) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); + + if (!dcRowIdMap.isEmpty()) + { + for (Long dataClassId : dcRowIdMap.keySet()) + { + ExpDataClass dc = dcMap.get(dataClassId); + if (dc != null) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); + } + } + + Map identifierDatas = new HashMap<>(); + for (ExpData data : expDatas) + { + identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); + } + + return identifierDatas; + } + + @Override + public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) + { + java.nio.file.Path path = getFilePath(); + if (path == null) + { + return null; + } + + Container c = getContainer(); + if (c == null) + { + return null; + } + + return FileContentService.get().getWebDavUrl(path, c, type); + } + + @Override + public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) + { + Container container = getContainer(); + if (container == null) + return null; + + Map props = new HashMap<>(); + JSONObject jsonData = new JSONObject(); + Set keywordsHi = new HashSet<>(); + Set keywordsMed = new HashSet<>(); + Set keywordsLo = new HashSet<>(); + + Set identifiersHi = new HashSet<>(); + Set identifiersMed = new HashSet<>(); + Set identifiersLo = new HashSet<>(); + + StringBuilder body = new StringBuilder(); + + // Name is an identifier with the highest weight + identifiersHi.add(getName()); + keywordsMed.add(getName()); // also add to keywords since 
those are stemmed + + // Description is added as a keywordsLo -- in Biologics it is common for the description to + // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as + // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. + // CONSIDER: tokenize the description and extract identifiers + if (null != getDescription()) + keywordsLo.add(getDescription()); + + String comment = getComment(); + if (comment != null) + keywordsMed.add(comment); + + // Add aliases in parentheses in the title + StringBuilder title = new StringBuilder(getName()); + Collection aliases = getAliases(); + if (!aliases.isEmpty()) + { + title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); + identifiersHi.addAll(aliases); + } + + ExpDataClassImpl dc = getDataClass(User.getSearchUser()); + if (dc != null) + { + ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); + NavTree t = new NavTree(dc.getName(), show); + String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); + props.put(SearchService.PROPERTY.navtrail.toString(), nav); + + props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); + body.append(dc.getName()); + + if (tableInfo == null) + tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); + + if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) + throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); + + if (!expDataClassDataTable.getDataClass().equals(dc)) + throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); + + // Collect other text columns and lookup display columns + getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); + } + + // === Stored, not indexed + if (dc != null && dc.isMedia()) + props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); + else + props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); + props.put(SearchService.PROPERTY.title.toString(), title.toString()); + props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); + + ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); + view.setExtraPath(container.getId()); + String docId = getDocumentId(); + + // Generate a summary explicitly instead of relying on a summary to be extracted + // from the document body. Placing lookup values and the description in the body + // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
+ StringBuilder summary = new StringBuilder(); + if (StringUtils.isNotEmpty(getDescription())) + summary.append(getDescription()).append("\n"); + + appendTokens(summary, keywordsMed); + appendTokens(summary, identifiersMed); + appendTokens(summary, identifiersLo); + + props.put(SearchService.PROPERTY.summary.toString(), summary); + + return new ExpDataResource( + getRowId(), + new Path(docId), + docId, + container.getEntityId(), + "text/plain", + body.toString(), + view, + props, + getCreatedBy(), + getCreated(), + getModifiedBy(), + getModified() + ); + } + + private static void appendTokens(StringBuilder sb, Collection toks) + { + if (toks.isEmpty()) + return; + + sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); + } + + private static class ExpDataResource extends SimpleDocumentResource + { + final long _rowId; + + public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) + { + super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); + _rowId = rowId; + } + + @Override + public void setLastIndexed(long ms, long modified) + { + ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); + } + } + + public static class DataSearchResultTemplate implements SearchResultTemplate + { + public static final String NAME = "data"; + public static final String PROPERTY = "dataclass"; + + @Nullable + @Override + public String getName() + { + return NAME; + } + + private ExpDataClass getDataClass() + { + if (HttpView.hasCurrentView()) + { + ViewContext ctx = HttpView.currentContext(); + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); + } + return null; + } + + @Nullable + @Override + public String getCategories() + { + ExpDataClass dataClass = getDataClass(); + + if (dataClass != null && dataClass.isMedia()) + return expMediaDataCategory.getName(); + + return expDataCategory.getName(); + } + + @Nullable + @Override + public SearchScope getSearchScope() + { + return SearchScope.FolderAndSubfolders; + } + + @NotNull + @Override + public String getResultNameSingular() + { + ExpDataClass dc = getDataClass(); + if (dc != null) + return dc.getName(); + return "data"; + } + + @NotNull + @Override + public String getResultNamePlural() + { + return getResultNameSingular(); + } + + @Override + public boolean includeNavigationLinks() + { + return true; + } + + @Override + public boolean includeAdvanceUI() + { + return false; + } + + @Nullable + @Override + public HtmlString getExtraHtml(ViewContext ctx) + { + String q = ctx.getActionURL().getParameter("q"); + + if (StringUtils.isNotBlank(q)) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); + url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); + + StringBuilder html = new StringBuilder(); + html.append("
"); + + appendParam(html, null, dataclass, "All", false, url); + for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) + { + appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); + } + + html.append("
"); + return HtmlString.unsafe(html.toString()); + } + else + { + return null; + } + } + + private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) + { + sb.append(""); + + if (!Objects.equals(dataclass, current)) + { + if (addParam) + url = url.clone().addParameter(PROPERTY, dataclass); + + sb.append(LinkBuilder.simpleLink(label, url)); + } + else + { + sb.append(label); + } + + sb.append(" "); + } + + @Override + public HtmlString getHiddenInputsHtml(ViewContext ctx) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); + } + + return null; + } + + + @Override + public String reviseQuery(ViewContext ctx, String q) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + + if (null != dataclass) + return "+(" + q + ") +" + PROPERTY + ":" + dataclass; + else + return q; + } + + @Override + public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) + { + SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); + + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + String text = root.getText(); + root.setText(text + " - " + dataclass); + } + } + } +} diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index bd8273f8d99..9a54d4d0246 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -161,8 +161,6 @@ public class SampleTypeUpdateServiceDI extends DefaultQueryUpdateService public static final String ROOT_RECOMPUTE_ROWID_SET = "RootIdToRecomputeSet"; public static final String PARENT_RECOMPUTE_NAME_SET = 
"ParentNameToRecomputeSet"; - public static final String EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE = "org.labkey.experiment.api.SampleTypeUpdateServiceDI#ALLOW_ROW_ID_SAMPLE_MERGE"; - public static final Map SAMPLE_ALT_IMPORT_NAME_COLS; private static final Map ALIQUOT_ROLLUP_FIELDS = Map.of( @@ -629,67 +627,6 @@ private void checkPartitionForDuplicates(List> partitionRows } } - /** - * Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion - */ - @Deprecated - @Override - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - Object unitsVal = null; - ColumnInfo unitsCol = null; - Object amountVal = null; - ColumnInfo amountCol = null; - if (row.containsKey(Units.name())) - { - unitsVal = row.get(Units.name()); - unitsCol = columnMap.get(Units.name()); - } - for (String colName : StoredAmount.namesAndLabels()) - { - if (row.containsKey(colName)) - { - amountVal = row.get(colName); - amountCol = columnMap.get(colName); - break; - } - } - if (amountVal != null) - { - String unitsStr = ""; - if (unitsVal != null) - unitsStr = " " + unitsVal; - - providedValues.put(PROVIDED_DATA_PREFIX + StoredAmount.label(), amountVal + unitsStr); - } - - Unit baseUnit = _sampleType != null ? _sampleType.getBaseUnit() : null; - - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - - Object value = entry.getValue(); - if (col != null && col == unitsCol) - { - value = _SamplesCoerceDataIterator.SampleUnitsConvertColumn.getValue(unitsVal, amountVal, amountCol != null, baseUnit, _sampleType == null ? 
null : _sampleType.getName()); - } - else if (col != null && col == amountCol) - { - value = _SamplesCoerceDataIterator.SampleAmountConvertColumn.getValue(amountVal, unitsCol != null, unitsVal, baseUnit, _sampleType == null ? null : _sampleType.getName()); - } - else - { - value = coerceTypesValue(col, providedValues, entry.getKey(), value); - } - - result.put(entry.getKey(), value); - } - return result; - } - @Override public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws BatchValidationException @@ -835,29 +772,9 @@ public static boolean isAliquotStatusChangeNeedRecalc(Collection available @Override protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException { - if (row.containsKey(LSID.name()) && !(row.containsKey(RowId.name()) || row.containsKey(Name.name()))) - throw new ValidationException("Either RowId or Name is required to update a sample."); - - Map result = super.updateRow(user, container, row, oldRow, allowOwner, retainCreation); - - // add MaterialInput/DataInputs field from parent alias - try - { - Map parentAliases = _sampleType.getImportAliases(); - for (String alias : parentAliases.keySet()) - { - if (row.containsKey(alias)) - result.put(parentAliases.get(alias), result.get(alias)); - } - } - catch (IOException e) - { - throw new RuntimeException(e); - } + throw new UnsupportedOperationException("_update() is no longer supported for samples"); - return result; } @Override @@ -1447,7 +1364,7 @@ public DataIterator getDataIterator(DataIteratorContext context) // While accepting RowId during merge is not our preferred behavior, we want to give users a way // to opt-in to the old behavior where RowId is accepted and ignored. 
- if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_SAMPLE_MERGE)) + if (isMerge && !OptionalFeatureService.get().isFeatureEnabled(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE)) { context.getErrors().addRowError(new ValidationException("RowId is not accepted when merging samples. Specify only the sample name instead.", RowId.name())); return null; @@ -1466,8 +1383,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return null; } - if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate)) - drop.remove(LSID.name()); if (!drop.isEmpty()) di = new DropColumnsDataIterator(di, drop); @@ -1652,18 +1567,13 @@ static class _GenerateNamesDataIterator extends SimpleTranslator _nameState = sampleType.getNameGenState(skipDuplicateCheck, true, _container, user); lsidBuilder = sampleType.generateSampleLSID(); - boolean useLsidForUpdate = context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseLsidForUpdate); - if (useLsidForUpdate) - selectAll(CaseInsensitiveHashSet.of(Name.name(), RootMaterialRowId.name())); - else - selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); + selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); addColumn(new BaseColumnInfo(Name.fieldKey(), JdbcType.VARCHAR), (Supplier)() -> generatedName); - if (!useLsidForUpdate) - { - DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); - addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); - } + + DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); + addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + addColumn(new BaseColumnInfo(CpasType.fieldKey(), 
JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); } diff --git a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java index 0b446196ae0..d089d6c2bc9 100644 --- a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java @@ -309,7 +309,6 @@ protected void importTsvData(FolderImportContext ctx, XarContext xarContext, Str options.put(SampleTypeService.ConfigParameters.DeferAliquotRuns, true); if (isUpdate) options.put(QueryUpdateService.ConfigParameters.SkipRequiredFieldValidation, true); - options.put(ExperimentService.QueryOptions.UseLsidForUpdate, !isUpdate); options.put(ExperimentService.QueryOptions.DeferRequiredLineageValidation, true); context.setConfigParameters(options); From 09dd6b06f3a9a15522644f9fa26cb462e3e53e76 Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 20:03:52 -0700 Subject: [PATCH 02/17] Enable upgrade script --- .../api/query/AbstractQueryUpdateService.java | 99 +++++++++++++++++++ .../postgresql/exp-26.004-26.005.sql | 1 + .../dbscripts/sqlserver/exp-26.004-26.005.sql | 1 + .../test/integration/DataClassCrud.ispec.ts | 17 +++- .../labkey/experiment/ExperimentModule.java | 2 +- .../api/ExpDataClassDataTableImpl.java | 39 ++------ .../api/SampleTypeUpdateServiceDI.java | 81 +-------------- 7 files changed, 131 insertions(+), 109 deletions(-) create mode 100644 experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql create mode 100644 experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 
4972b62bd17..07c52e39623 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -16,6 +16,7 @@ package org.labkey.api.query; import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.jetbrains.annotations.NotNull; @@ -122,6 +123,8 @@ import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; import static org.labkey.api.files.FileContentService.UPLOADED_FILE; import static org.labkey.api.util.FileUtil.toFileForRead; import static org.labkey.api.util.FileUtil.toFileForWrite; @@ -903,6 +906,102 @@ public List> updateRows(User user, Container container, List return result; } + protected void validatePartitionedRowKeys(Collection columns) + { + // do nothing + } + + public List> updateRowsUsingPartitionedDIB( + DbScope.Transaction tx, + User user, + Container container, + List> rows, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) + { + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) + { + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + + validatePartitionedRowKeys(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> 
rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + + // we need to throw if we don't want executeWithRetry() attempt commit() + if (context.getErrors().hasErrors()) + throw new DbScope.RetryPassthroughException(context.getErrors()); + + if (subRet != null) + { + ret.addAll(subRet); + + // Check if duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? 
ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) + { + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); + } + + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); + + return ret; + } + + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException { if (pkVals == null) diff --git a/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql b/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql new file mode 100644 index 00000000000..dd609550c06 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/postgresql/exp-26.004-26.005.sql @@ -0,0 +1 @@ +SELECT core.executeJavaUpgradeCode('dropProvisionedDataClassLsidColumn'); diff --git a/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql new file mode 100644 index 00000000000..5f8b8c8ae76 --- /dev/null +++ b/experiment/resources/schemas/dbscripts/sqlserver/exp-26.004-26.005.sql @@ -0,0 +1 @@ +EXEC core.executeJavaUpgradeCode 'dropProvisionedDataClassLsidColumn'; diff --git 
a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index afc9cfef66d..f84394137b7 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -459,7 +459,6 @@ describe('Duplicate IDs', () => { }] }, { ...topFolderOptions, ...editorUserOptions }).expect((result) => { errorResp = JSON.parse(result.text); - expect(errorResp).toBe('a'); expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); }); @@ -472,7 +471,7 @@ describe('Duplicate IDs', () => { rowId: data1RowId },{ description: 'update', - name: data2RowId + name: dataName2 },{ description: 'update', rowId: data1RowId @@ -953,6 +952,20 @@ describe('Data CRUD', () => { expect(caseInsensitive(row3, 'description')).toBe('mixedVal3 desc'); expect(caseInsensitive(row3, fieldName)).toBe('val3'); // fieldName value should not be updated for row3 + // update using name as key, should succeed, verify update is successful and data are updated correctly + await ExperimentCRUDUtils.updateRows(server, [ + { name: dataName1, description: 'updByName1', [fieldName]: 'nameVal1' }, + { name: 'mixed_rename2', description: 'updByName2', [fieldName]: 'nameVal2' }, + ], 'exp.data', dataType, topFolderOptions, editorUserOptions); + + rows = await ExperimentCRUDUtils.getRows(server, [row1RowId, row2RowId], 'exp.data', dataType, '*', topFolderOptions, adminOptions); + row1 = findRow(rows, row1RowId); + row2 = findRow(rows, row2RowId); + expect(caseInsensitive(row1, 'description')).toBe('updByName1'); + expect(caseInsensitive(row1, fieldName)).toBe('nameVal1'); + expect(caseInsensitive(row2, 'description')).toBe('updByName2'); + expect(caseInsensitive(row2, fieldName)).toBe('nameVal2'); + // update names of both rows using lsid (ignored) an rowId as key, verify update is successful and names are updated correctly const newName1 = 'RenamedByLsid1'; const 
newName2 = 'RenamedByLsid2'; diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 90d22e0091a..9eb5e1808e3 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -205,7 +205,7 @@ public String getName() @Override public Double getSchemaVersion() { - return 26.004; + return 26.005; } @Nullable diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index f1bb75b8fed..2bffcbdb2b9 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -692,7 +692,6 @@ public SQLFragment getFromSQLExpanded(String alias, Set selectedColumn // all columns from dataclass property table except key columns Set pCols = new CaseInsensitiveHashSet(provisioned.getColumnNameSet()); pCols.remove("name"); - pCols.remove("lsid"); // TODO remove pCols.remove("rowId"); boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, pCols); @@ -1415,7 +1414,7 @@ protected Map updateRow(User user, Container container, Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) throws SQLException, ValidationException // TODO remove + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) { throw new UnsupportedOperationException("_update() is no longer supported for dataclass"); } @@ -1426,38 +1425,20 @@ public List> updateRows(User user, Container container, List if (rows == null || rows.isEmpty()) return Collections.emptyList(); + List> results; Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; recordDataIteratorUsed(configParameters); - List> results = new ArrayList<>(); - int index = 0; - - while (index < rows.size()) + try { - // TODO: check for duplicates - - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - if (context.getErrors().hasErrors()) - throw context.getErrors(); - - if (subRet != null) - results.addAll(subRet); - - // TODO: record partitions + results = getSchema().getScope().executeWithRetry(tx -> + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); + } + catch (DbScope.RetryPassthroughException retryException) + { + retryException.rethrow(BatchValidationException.class); + throw retryException.throwRuntimeException(); } - - // summary audit? 
return results; } diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 9a54d4d0246..9deaccfce62 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -531,66 +531,7 @@ public List> updateRows( try { results = getSchema().getDbSchema().getScope().executeWithRetry(tx -> - { - int index = 0; - int numPartitions = 0; - List> ret = new ArrayList<>(); - - Set observedRowIds = new HashSet<>(); - Set observedNames = new CaseInsensitiveHashSet(); - - while (index < rows.size()) - { - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - confirmAmountAndUnitsColumns(rowKeys); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - numPartitions++; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters); - - // skip audit summary for the partitions, we will perform it once at the end - context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); - - List> subRet = super._updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - // we need to throw if we don't want executeWithRetry() attempt commit() - if (context.getErrors().hasErrors()) - throw new DbScope.RetryPassthroughException(context.getErrors()); - - if (subRet != null) - { - ret.addAll(subRet); - - // Check if duplicate rows have been processed across the partitions - // Only start checking for duplicates after the first partition has been processed. 
- if (numPartitions > 1) - { - // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition - checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); - } - - if (errors.hasErrors()) - throw new DbScope.RetryPassthroughException(errors); - } - } - - if (numPartitions > 1) - { - var auditEvent = tx.getAuditEvent(); - if (auditEvent != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); - } - - _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, finalConfigParameters), ret.size()); - - return ret; - }); + updateRowsUsingPartitionedDIB(tx, user, container, rows, errors, finalConfigParameters, extraScriptContext)); } catch (DbScope.RetryPassthroughException retryException) { @@ -607,24 +548,10 @@ public List> updateRows( return results; } - private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + @Override + protected void validatePartitionedRowKeys(Collection columns) { - for (Map row : partitionRows) - { - Long rowId = MapUtils.getLong(row, RowId.name()); - if (rowId != null && !globalRowIds.add(rowId)) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); - return; - } - - Object nameObj = row.get(Name.name()); - if (nameObj != null && !globalNames.add(nameObj.toString())) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); - return; - } - } + confirmAmountAndUnitsColumns(columns); } @Override From b5e2ac2002fdbc694f9a7512777e838e1469633a Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 9 Mar 2026 20:05:58 -0700 Subject: [PATCH 03/17] CRLF --- .../labkey/api/exp/query/ExpDataTable.java | 178 +- .../api/query/AbstractQueryUpdateService.java | 3318 ++++++++--------- .../api/query/DefaultQueryUpdateService.java | 1872 +++++----- 
.../labkey/experiment/ExperimentModule.java | 2346 ++++++------ .../labkey/experiment/api/ExpDataImpl.java | 1970 +++++----- 5 files changed, 4842 insertions(+), 4842 deletions(-) diff --git a/api/src/org/labkey/api/exp/query/ExpDataTable.java b/api/src/org/labkey/api/exp/query/ExpDataTable.java index 99f42cb5b27..c18e3a8e7bd 100644 --- a/api/src/org/labkey/api/exp/query/ExpDataTable.java +++ b/api/src/org/labkey/api/exp/query/ExpDataTable.java @@ -1,89 +1,89 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.api.exp.query; - -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpExperiment; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.query.FieldKey; - -public interface ExpDataTable extends ExpTable -{ - enum Column - { - Alias, - ContentLink, - ClassId, // database table only - CpasType, // database table only - Created, - CreatedBy, - DataClass, - DataFileUrl, - Description, - DownloadLink, - FileExtension, - FileExists, - FileSize, - Flag, - Folder, - Generated, - InlineThumbnail, - Inputs, - LastIndexed, - LSID, - Modified, - ModifiedBy, - Name, - ObjectId, // database table only - Outputs, - Properties, - Protocol, - ReferenceCount, - Run, - RunApplication, - RunApplicationOutput, - RunId, // database table only - RowId, - SourceApplicationId, // database table only - SourceApplicationInput, - SourceProtocolApplication, - Thumbnail, - ViewFileLink, - ViewOrDownload, - WebDavUrl, - WebDavUrlRelative; - - public FieldKey fieldKey() - { - return FieldKey.fromParts(name()); - } - } - - void setExperiment(ExpExperiment experiment); - ExpExperiment getExperiment(); - void setRun(ExpRun run); - ExpRun getRun(); - - void setDataType(DataType type); - DataType getDataType(); - - MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); - MutableColumnInfo addDataInputColumn(String alias, String role); - MutableColumnInfo addInputRunCountColumn(String alias); -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.api.exp.query; + +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpExperiment; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.query.FieldKey; + +public interface ExpDataTable extends ExpTable +{ + enum Column + { + Alias, + ContentLink, + ClassId, // database table only + CpasType, // database table only + Created, + CreatedBy, + DataClass, + DataFileUrl, + Description, + DownloadLink, + FileExtension, + FileExists, + FileSize, + Flag, + Folder, + Generated, + InlineThumbnail, + Inputs, + LastIndexed, + LSID, + Modified, + ModifiedBy, + Name, + ObjectId, // database table only + Outputs, + Properties, + Protocol, + ReferenceCount, + Run, + RunApplication, + RunApplicationOutput, + RunId, // database table only + RowId, + SourceApplicationId, // database table only + SourceApplicationInput, + SourceProtocolApplication, + Thumbnail, + ViewFileLink, + ViewOrDownload, + WebDavUrl, + WebDavUrlRelative; + + public FieldKey fieldKey() + { + return FieldKey.fromParts(name()); + } + } + + void setExperiment(ExpExperiment experiment); + ExpExperiment getExperiment(); + void setRun(ExpRun run); + ExpRun getRun(); + + void setDataType(DataType type); + DataType getDataType(); + + MutableColumnInfo addMaterialInputColumn(String alias, SamplesSchema schema, String inputRole, ExpSampleType sampleType); + MutableColumnInfo addDataInputColumn(String alias, String role); + MutableColumnInfo 
addInputRunCountColumn(String alias); +} diff --git a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java index 07c52e39623..77bb1032fd1 100644 --- a/api/src/org/labkey/api/query/AbstractQueryUpdateService.java +++ b/api/src/org/labkey/api/query/AbstractQueryUpdateService.java @@ -1,1659 +1,1659 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.logging.log4j.LogManager; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Test; -import org.labkey.api.assay.AssayFileWriter; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.attachments.AttachmentParentFactory; -import org.labkey.api.attachments.SpringAttachmentFile; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.audit.provider.FileSystemAuditProvider; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; 
-import org.labkey.api.collections.Sets; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.DbSequenceManager; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.MultiValuedForeignKey; -import org.labkey.api.data.PropertyStorageSpec; -import org.labkey.api.data.RuntimeSQLException; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.dataiterator.AttachmentDataIterator; -import org.labkey.api.dataiterator.DataIterator; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; -import org.labkey.api.dataiterator.ExistingRecordDataIterator; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.dataiterator.Pump; -import org.labkey.api.dataiterator.StandardDataIteratorBuilder; -import org.labkey.api.dataiterator.TriggerDataBuilderHelper; -import org.labkey.api.dataiterator.WrapperDataIterator; -import org.labkey.api.exceptions.OptimisticConflictException; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.list.ListDefinition; -import org.labkey.api.exp.list.ListService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.files.FileContentService; -import 
org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.reader.TabLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.AdminPermission; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.test.TestWhen; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JunitUtil; -import org.labkey.api.util.TestContext; -import org.labkey.api.util.URIUtil; -import org.labkey.api.view.NotFoundException; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.api.writer.VirtualFile; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.File; -import java.io.IOException; -import java.io.StringReader; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.SortedSet; -import java.util.TreeSet; -import java.util.function.Function; - -import static java.util.Objects.requireNonNull; -import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; -import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; -import static 
org.labkey.api.exp.query.ExpMaterialTable.Column.Name; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; -import static org.labkey.api.files.FileContentService.UPLOADED_FILE; -import static org.labkey.api.util.FileUtil.toFileForRead; -import static org.labkey.api.util.FileUtil.toFileForWrite; - -public abstract class AbstractQueryUpdateService implements QueryUpdateService -{ - protected final TableInfo _queryTable; - - private boolean _bulkLoad = false; - private CaseInsensitiveHashMap _columnImportMap = null; - private VirtualFile _att = null; - - /* AbstractQueryUpdateService is generally responsible for some shared functionality - * - triggers - * - coercion/validation - * - detailed logging - * - attachments - * - * If a subclass wants to disable some of these features (w/o subclassing), put flags here... - */ - protected boolean _enableExistingRecordsDataIterator = true; - protected Set _previouslyUpdatedRows = new HashSet<>(); - - protected AbstractQueryUpdateService(TableInfo queryTable) - { - if (queryTable == null) - throw new IllegalArgumentException(); - _queryTable = queryTable; - } - - protected TableInfo getQueryTable() - { - return _queryTable; - } - - public @NotNull Set getPreviouslyUpdatedRows() - { - return _previouslyUpdatedRows == null ? 
new HashSet<>() : _previouslyUpdatedRows; - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) - { - return getQueryTable().hasPermission(user, acl); - } - - protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - return getRow(user, container, keys); - } - - protected abstract Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException; - - @Override - public List> getRows(User user, Container container, List> keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - List> result = new ArrayList<>(); - for (Map rowKeys : keys) - { - Map row = getRow(user, container, rowKeys); - if (row != null) - result.add(row); - } - return result; - } - - @Override - public Map> getExistingRows(User user, Container container, Map> keys, boolean verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - if (!hasPermission(user, ReadPermission.class)) - throw new UnauthorizedException("You do not have permission to read data from this table."); - - Map> result = new LinkedHashMap<>(); - for (Map.Entry> key : keys.entrySet()) - { - Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); - if (row != null && !row.isEmpty()) - { - result.put(key.getKey(), row); - if (verifyNoCrossFolderData) - { - String dataContainer = (String) row.get("container"); - if (StringUtils.isEmpty(dataContainer)) - dataContainer = (String) row.get("folder"); - if (!container.getId().equals(dataContainer)) - throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + 
key.getValue().values()); - } - } - else if (verifyExisting) - throw new InvalidKeyException("Data not found for " + key.getValue().values()); - } - return result; - } - - @Override - public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) - { - return false; - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) - { - return createTransactionAuditEvent(container, auditAction, null); - } - - public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) - { - long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); - TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); - if (details != null) - event.addDetails(details); - return event; - } - - public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent auditEvent) - { - UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); - - if (schema != null) - { - // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the - // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the - // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the - // table. 
- schema.getTable(auditEvent.getEventType(), false); - - transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); - - transaction.setAuditEvent(auditEvent); - } - } - - protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) - { - if (null == errors) - errors = new BatchValidationException(); - DataIteratorContext context = new DataIteratorContext(errors); - context.setInsertOption(forImport); - context.setConfigParameters(configParameters); - configureDataIteratorContext(context); - recordDataIteratorUsed(configParameters); - - return context; - } - - protected void recordDataIteratorUsed(@Nullable Map configParameters) - { - if (configParameters == null) - return; - - try - { - configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); - } - catch (UnsupportedOperationException ignore) - { - // configParameters is immutable, likely originated from a junit test - } - } - - /** - * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. - * Used only for generating ExistingRecordDataIterator at the moment. - */ - protected Set getSelectKeys(DataIteratorContext context) - { - if (!context.getAlternateKeys().isEmpty()) - return context.getAlternateKeys(); - return null; - } - - /* - * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. - * does NOT handle triggers or the insert/update iterator. 
- */ - public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) - { - DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); - - if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) - { - // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) - dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); - } - - dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); - dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); - dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); - return dib; - } - - - /** - * Implementation to use insertRows() while we migrate to using DIB for all code paths - *

- * DataIterator should/must use the same error collection as passed in - */ - @Deprecated - protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) - { - MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); - List> list = new ArrayList<>(); - List> ret; - Exception rowException; - - try - { - while (mapIterator.next()) - list.add(mapIterator.getMap()); - ret = insertRows(user, container, list, errors, null, extraScriptContext); - if (errors.hasErrors()) - return 0; - return ret.size(); - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - return 0; - } - catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) - { - rowException = x; - } - finally - { - DataIteratorUtil.closeQuietly(mapIterator); - } - errors.addRowError(new ValidationException(rowException.getMessage())); - return 0; - } - - protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) - { - return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); - } - - protected boolean hasInsertRowsPermission(User user) - { - return hasPermission(user, InsertPermission.class); - } - - protected boolean hasDeleteRowsPermission(User user) - { - return hasPermission(user, DeletePermission.class); - } - - protected boolean hasUpdateRowsPermission(User user) - { - return hasPermission(user, UpdatePermission.class); - } - - // override this - protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) - { - } - - protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasImportRowsPermission(user, container, context)) - throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) - assert(getQueryTable().supportsInsertOption(context.getInsertOption())); - - context.getErrors().setExtraContext(extraScriptContext); - if (extraScriptContext != null) - { - context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); - } - - preImportDIBValidation(in, null); - - boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); - boolean hasTableScript = hasTableScript(container); - TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); - if (!skipTriggers) - { - in = preTriggerDataIterator(in, context); - if (hasTableScript) - in = helper.before(in); - } - DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); - DataIteratorBuilder out = importDIB; - - if 
(!skipTriggers) - { - if (hasTableScript) - out = helper.after(importDIB); - - out = postTriggerDataIterator(out, context); - } - - if (hasTableScript) - { - context.setFailFast(false); - context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); - } - int count = _pump(out, outputRows, context); - - if (context.getErrors().hasErrors()) - return 0; - - if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) - _addSummaryAuditEvent(container, user, context, count); - - return count; - } - - protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) - { - return in; - } - - protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) - { - return out; - } - - /** this is extracted so subclasses can add wrap */ - protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) - { - DataIterator it = etl.getDataIterator(context); - - if (null == it) - return 0; - - try - { - if (null != rows) - { - MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); - it = new WrapperDataIterator(maps) - { - @Override - public boolean next() throws BatchValidationException - { - boolean ret = super.next(); - if (ret) - rows.add(((MapDataIterator)_delegate).getMap()); - return ret; - } - }; - } - - Pump pump = new Pump(it, context); - pump.run(); - - return pump.getRowCount(); - } - finally - { - DataIteratorUtil.closeQuietly(it); - } - } - - /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ - protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) - { - afterInsertUpdate(count, errors); - } - - protected void afterInsertUpdate(int count, BatchValidationException errors) - {} - - @Override - public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - 
return loadRows(user, container, rows, null, context, extraScriptContext); - } - - public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) - { - configureDataIteratorContext(context); - int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - return count; - } - - @Override - public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) - { - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); - int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); - afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); - return count; - } - - @Override - public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - { - throw new UnsupportedOperationException("merge is not supported for all tables"); - } - - private boolean hasTableScript(Container container) - { - return getQueryTable().hasTriggers(container); - } - - - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); - } - - - protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); - ArrayList> outputRows = new ArrayList<>(); - int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); - afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); - - if (context.getErrors().hasErrors()) - return null; - - return outputRows; - } - - // not yet supported - protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, - DataIteratorContext context, @Nullable Map extraScriptContext) - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); - } - - - protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) - { - // TODO probably can't assume all rows have all columns - // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) - // TODO optimize ArrayListMap? 
- Set colNames; - - if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) - { - colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); - } - else - { - // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet - colNames = Sets.newCaseInsensitiveHashSet(); - for (Map row : rows) - colNames.addAll(row.keySet()); - } - - preImportDIBValidation(null, colNames); - return MapDataIterator.of(colNames, rows, debugName); - } - - - /** @deprecated switch to using DIB based method */ - @Deprecated - protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) - throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasInsertRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to insert data into this table."); - - assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - boolean hasTableScript = hasTableScript(container); - - errors.setExtraContext(extraScriptContext); - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - row = normalizeColumnNames(row); - try - { - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), false); - if (hasTableScript) - { - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); - } - row = insertRow(user, container, row); - if (row == null) - continue; - - if (hasTableScript) - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); - result.add(row); - } - catch (SQLException sqlx) - { - if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) - { - ValidationException vex = new ValidationException(sqlx.getMessage()); - vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); - errors.addRowError(vex); - } - else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) - { - // if we already have some errors, just break - break; - } - else - { - throw sqlx; - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - } - - if (hasTableScript) - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); - - return result; - } - - protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) - { - if (!isBulkLoad()) - { - AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; - String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); - getQueryTable().getAuditHandler(auditBehavior) - .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); - } - } - - private Map normalizeColumnNames(Map row) - { - if(_columnImportMap == null) - { - _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); - } - - Map newRow = new CaseInsensitiveHashMap<>(); - CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); - columns.addAll(row.keySet()); - - String newName; - for(String key : row.keySet()) - { - if(_columnImportMap.containsKey(key)) - { - //it is possible for a normalized name to conflict with an existing property. if so, defer to the original - newName = _columnImportMap.get(key).getName(); - if(!columns.contains(newName)){ - newRow.put(newName, row.get(key)); - continue; - } - } - newRow.put(key, row.get(key)); - } - - return newRow; - } - - @Override - public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws DuplicateKeyException, QueryUpdateServiceException, SQLException - { - try - { - List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); - afterInsertUpdate(null==ret?0:ret.size(), errors); - if (errors.hasErrors()) - return null; - return ret; - } - catch (BatchValidationException x) - { - assert x == errors; - assert x.hasErrors(); - } - return null; - } - - protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) - { - if (col != null && value != null && - !col.getJavaObjectClass().isInstance(value) && - !(value instanceof AttachmentFile) && - !(value instanceof MultipartFile) && - !(value instanceof String[]) && - !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) - { - try - { - if 
(col.getKindOfQuantity() != null) - providedValues.put(key, value); - if (PropertyType.FILE_LINK.equals(col.getPropertyType())) - value = ExpDataFileConverter.convert(value); - else - value = col.convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw e; - } - catch (ConversionException e) - { - // That's OK, the transformation script may be able to fix up the value before it gets inserted - } - } - - return value; - } - - /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ - @Deprecated - protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) - { - Map result = new CaseInsensitiveHashMap<>(row.size()); - Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); - for (Map.Entry entry : row.entrySet()) - { - ColumnInfo col = columnMap.get(entry.getKey()); - Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); - result.put(entry.getKey(), value); - } - - return result; - } - - protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - - protected boolean firstUpdateRow = true; - Function,Map> updateTransform = Function.identity(); - - /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ - final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - if (firstUpdateRow) - { - firstUpdateRow = false; - if (null != OntologyService.get()) - { - var t = OntologyService.get().getConceptUpdateHandler(_queryTable); - if (null != t) - updateTransform = t; - } - } - row = updateTransform.apply(row); - return updateRow(user, container, row, 
oldRow, configParameters); - } - - // used by updateRows to check if all rows have the same set of keys - // prepared statement can only be used to updateRows if all rows have the same set of keys - protected static boolean hasUniformKeys(List> rowsToUpdate) - { - if (rowsToUpdate == null || rowsToUpdate.isEmpty()) - return false; - - if (rowsToUpdate.size() == 1) - return true; - - Set keys = rowsToUpdate.get(0).keySet(); - int keySize = keys.size(); - - for (int i = 1 ; i < rowsToUpdate.size(); i ++) - { - Set otherKeys = rowsToUpdate.get(i).keySet(); - if (otherKeys.size() != keySize) - return false; - if (!otherKeys.containsAll(keys)) - return false; - } - - return true; - } - - @Override - public List> updateRows(User user, Container container, List> rows, List> oldKeys, - BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasUpdateRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to update data in this table."); - - if (oldKeys != null && rows.size() != oldKeys.size()) - throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); - - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); - - List> result = new ArrayList<>(rows.size()); - List> oldRows = new ArrayList<>(rows.size()); - List> providedValues = new ArrayList<>(rows.size()); - // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - for (int i = 0; i < rows.size(); i++) - { - Map row = rows.get(i); - providedValues.add(new CaseInsensitiveHashMap<>()); - row = coerceTypes(row, providedValues.get(i), true); - try - { - Map oldKey = oldKeys == null ? row : oldKeys.get(i); - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, oldKey); - if (oldRow == null) - throw new NotFoundException("The existing row was not found."); - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); - Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); - if (!streaming) - { - result.add(updatedRow); - oldRows.add(oldRow); - } - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); - } - catch (OptimisticConflictException e) - { - errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); - afterInsertUpdate(null==result?0:result.size(), errors, true); - - if (errors.hasErrors()) - throw errors; - - addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); - - return result; - } - - protected void validatePartitionedRowKeys(Collection columns) - { - // do nothing - } - - public List> updateRowsUsingPartitionedDIB( - DbScope.Transaction tx, - User user, - Container container, - List> rows, - BatchValidationException errors, - @Nullable Map configParameters, - Map extraScriptContext - ) - { - int index = 0; - int numPartitions = 0; - List> ret = new ArrayList<>(); - - Set observedRowIds = new HashSet<>(); - Set observedNames = new CaseInsensitiveHashSet(); - - while (index < rows.size()) - { - CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); - - validatePartitionedRowKeys(rowKeys); - - int nextIndex = index + 1; - while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) - nextIndex++; - - List> rowsToProcess = rows.subList(index, nextIndex); - index = nextIndex; - numPartitions++; - - DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); - - // skip audit summary for the partitions, we will perform it once at the end - context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); - - List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); - - // we need to throw if we don't want executeWithRetry() attempt commit() - if (context.getErrors().hasErrors()) - throw new DbScope.RetryPassthroughException(context.getErrors()); - - if (subRet != null) - { - ret.addAll(subRet); - - // Check if 
duplicate rows have been processed across the partitions - // Only start checking for duplicates after the first partition has been processed. - if (numPartitions > 1) - { - // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition - checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); - } - - if (errors.hasErrors()) - throw new DbScope.RetryPassthroughException(errors); - } - } - - if (numPartitions > 1) - { - var auditEvent = tx.getAuditEvent(); - if (auditEvent != null) - auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); - } - - _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); - - return ret; - } - - private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) - { - for (Map row : partitionRows) - { - Long rowId = MapUtils.getLong(row, RowId.name()); - if (rowId != null && !globalRowIds.add(rowId)) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); - return; - } - - Object nameObj = row.get(Name.name()); - if (nameObj != null && !globalNames.add(nameObj.toString())) - { - errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); - return; - } - } - } - - protected void checkDuplicateUpdate(Object pkVals) throws ValidationException - { - if (pkVals == null) - return; - - Set updatedRows = getPreviouslyUpdatedRows(); - - Object[] keysObj; - if (pkVals.getClass().isArray()) - keysObj = (Object[]) pkVals; - else if (pkVals instanceof Map map) - { - List orderedKeyVals = new ArrayList<>(); - SortedSet sortedKeys = new TreeSet<>(map.keySet()); - for (String key : sortedKeys) - orderedKeyVals.add(map.get(key)); - keysObj = orderedKeyVals.toArray(); - } - else - keysObj = new Object[]{pkVals}; - - if 
(keysObj.length == 1) - { - if (updatedRows.contains(keysObj[0])) - throw new ValidationException("Duplicate key provided: " + keysObj[0]); - updatedRows.add(keysObj[0]); - return; - } - - List keys = new ArrayList<>(); - for (Object key : keysObj) - keys.add(String.valueOf(key)); - if (updatedRows.contains(keys)) - throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); - updatedRows.add(keys); - } - - @Override - public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - throw new UnsupportedOperationException("Move is not supported for this table type."); - } - - protected abstract Map deleteRow(User user, Container container, Map oldRow) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; - - protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return deleteRow(user, container, oldRow); - } - - @Override - public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to delete data from this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); - - // TODO: Support update/delete without selecting the existing row -- 
unfortunately, we currently get the existing row to check its container matches the incoming container - boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; - - List> result = new ArrayList<>(keys.size()); - for (int i = 0; i < keys.size(); i++) - { - Map key = keys.get(i); - try - { - Map oldRow = null; - if (!streaming) - { - oldRow = getRow(user, container, key); - // if row doesn't exist, bail early - if (oldRow == null) - continue; - } - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); - Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); - if (!streaming && updatedRow == null) - continue; - - getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); - result.add(updatedRow); - } - catch (InvalidKeyException ex) - { - ValidationException vex = new ValidationException(ex.getMessage()); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (ValidationException vex) - { - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - catch (RuntimeValidationException rvex) - { - ValidationException vex = rvex.getValidationException(); - errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); - } - } - - // Fire triggers, if any, and also throw if there are any errors - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); - - addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); - - return result; - } - - protected int truncateRows(User user, Container container) - throws QueryUpdateServiceException, SQLException - { - throw new 
UnsupportedOperationException(); - } - - @Override - public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) - throws BatchValidationException, QueryUpdateServiceException, SQLException - { - if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) - throw new UnauthorizedException("You do not have permission to truncate this table."); - - BatchValidationException errors = new BatchValidationException(); - errors.setExtraContext(extraScriptContext); - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); - - int result = truncateRows(user, container); - - getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); - addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); - - return result; - } - - @Override - public void setBulkLoad(boolean bulkLoad) - { - _bulkLoad = bulkLoad; - } - - @Override - public boolean isBulkLoad() - { - return _bulkLoad; - } - - public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException - { - FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); - return saveFile(user, container, name, value, dirPath); - } - - /** - * Save uploaded file to dirName directory under file or pipeline root. 
- */ - public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException - { - if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) - throw new ValidationException("Invalid file value"); - - String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; - FileLike file = null; - try - { - FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); - - FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); - if (value instanceof MultipartFile multipartFile) - { - // Once we've found one, write it to disk and replace the row's value with just the File reference to it - if (multipartFile.isEmpty()) - { - throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); - } - file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); - checkFileUnderRoot(container, file); - multipartFile.transferTo(toFileForWrite(file)); - event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); - event.setProvidedFileName(multipartFile.getOriginalFilename()); - } - else - { - SpringAttachmentFile saf = (SpringAttachmentFile) value; - file = FileUtil.findUniqueFileName(saf.getFilename(), dir); - checkFileUnderRoot(container, file); - saf.saveTo(file); - event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); - event.setProvidedFileName(saf.getFilename()); - } - event.setFile(file.getName()); - event.setFieldName(name); - event.setDirectory(file.getParent().toURI().getPath()); - AuditLogService.get().addEvent(user, event); - } - catch (IOException | ExperimentException e) - { - throw new QueryUpdateServiceException(e); - } - - ensureExpData(user, container, file.toNioPathForRead().toFile()); - 
return file; - } - - public static ExpData ensureExpData(User user, Container container, File file) - { - ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); - // create exp.data record - if (existingData == null) - { - File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); - ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); - data.setName(file.getName()); - data.setDataFileURI(canonicalFile.toPath().toUri()); - if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) - { - // If the path is too long to store, bail out without creating an exp.data row - data.save(user); - } - - return data; - } - - return existingData; - } - - // For security reasons, make sure the user hasn't tried to reference a file that's not under - // the pipeline root or @assayfiles root. Otherwise, they could get access to any file on the server - static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException - { - Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); - if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) - return file; - - PipeRoot root = PipelineService.get().findPipelineRoot(container); - if (root == null) - throw new ExperimentException("Pipeline root not available in container " + container.getPath()); - - if (!root.isUnderRoot(toFileForRead(file))) - { - throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); - } - - return file; - } - - protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) - { - if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level - { - AuditBehaviorType auditType = (AuditBehaviorType) 
context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); - String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); - boolean skipAuditLevelCheck = false; - if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) - { - if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad - skipAuditLevelCheck = true; - } - getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); - } - } - - /** - * Is used by the AttachmentDataIterator to point to the location of the serialized - * attachment files. - */ - public void setAttachmentDirectory(VirtualFile att) - { - _att = att; - } - - @Nullable - protected VirtualFile getAttachmentDirectory() - { - return _att; - } - - /** - * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory - * implementation in order to resolve the attachment parent on incoming attachment files. - */ - @Nullable - protected AttachmentParentFactory getAttachmentParentFactory() - { - return null; - } - - /** Translate between the column name that query is exposing to the column name that actually lives in the database */ - protected static void aliasColumns(Map columnMapping, Map row) - { - for (Map.Entry entry : columnMapping.entrySet()) - { - if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) - { - row.put(entry.getKey(), row.get(entry.getValue())); - } - } - } - - /** - * The database table has underscores for MV column names, but we expose a column without the underscore. - * Therefore, we need to translate between the two sets of column names. 
- * @return database column name -> exposed TableInfo column name - */ - protected static Map createMVMapping(Domain domain) - { - Map result = new CaseInsensitiveHashMap<>(); - if (domain != null) - { - for (DomainProperty domainProperty : domain.getProperties()) - { - if (domainProperty.isMvEnabled()) - { - result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); - } - } - } - return result; - } - - @TestWhen(TestWhen.When.BVT) - public static class TestCase extends Assert - { - private boolean _useAlias = false; - - static TabLoader getTestData() throws IOException - { - TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); - testData.parseAsCSV(); - testData.getColumns()[0].clazz = Integer.class; - testData.getColumns()[1].clazz = Integer.class; - testData.getColumns()[2].clazz = String.class; - return testData; - } - - @BeforeClass - public static void createList() throws Exception - { - if (null == ListService.get()) - return; - deleteList(); - - TabLoader testData = getTestData(); - String hash = GUID.makeHash(); - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - assertNotNull(lists); - - ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); - R.setKeyName("pk"); - Domain d = requireNonNull(R.getDomain()); - for (int i=0 ; i> getRows() - { - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); - TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); - return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); - } - - @Before - public void resetList() throws 
Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - qus.truncateRows(user, c, null, null); - } - - @AfterClass - public static void deleteList() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - ListService s = ListService.get(); - Map m = s.getLists(c); - if (m.containsKey("R")) - m.get("R").delete(user); - } - - void validateDefaultData(List> rows) - { - assertEquals(3, rows.size()); - - assertEquals(0, rows.get(0).get("pk")); - assertEquals(1, rows.get(1).get("pk")); - assertEquals(2, rows.get(2).get("pk")); - - assertEquals(0, rows.get(0).get("i")); - assertEquals(1, rows.get(1).get("i")); - assertEquals(2, rows.get(2).get("i")); - - assertEquals("zero", rows.get(0).get("s")); - assertEquals("one", rows.get(1).get("s")); - assertEquals("two", rows.get(2).get("s")); - } - - @Test - public void INSERT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertFalse(errors.hasErrors()); - validateDefaultData(rows); - validateDefaultData(getRows()); - - qus.insertRows(user, c, getTestData().load(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void UPSERT() throws Exception - { - if (null == 
ListService.get()) - return; - /* not sure how you use/test ImportOptions.UPSERT - * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? - */ - } - - @Test - public void IMPORT() throws Exception - { - if (null == ListService.get()) - return; - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - assert(getRows().isEmpty()); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - BatchValidationException errors = new BatchValidationException(); - var count = qus.importRows(user, c, getTestData(), errors, null, null); - assertFalse(errors.hasErrors()); - assert(count == 3); - validateDefaultData(getRows()); - - qus.importRows(user, c, getTestData(), errors, null, null); - assertTrue(errors.hasErrors()); - } - - @Test - public void MERGE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? 
"pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - BatchValidationException errors = new BatchValidationException() - { - @Override - public void addRowError(ValidationException vex) - { - LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); - fail(vex.getMessage()); - } - }; - int count=0; - try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) - { - var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - if (!errors.hasErrors()) - { - tx.commit(); - count = ret; - } - } - assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is not updated - assertEquals(2, rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - - // merge should fail if duplicate keys are provided - errors = new BatchValidationException(); - mergeRows = new ArrayList<>(); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - } - - @Test - public void UPDATE() throws Exception - { - if (null == ListService.get()) - return; - INSERT(); - assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var updateRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - - // update using data iterator - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(1, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO-UP", rows.get(2).get("s")); - // test existing row value is not updated/erased - assertEquals(2, rows.get(2).get("i")); - - // update should fail if a new record is provided - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - - // Issue 52728: update should fail if duplicate key is provide - updateRows = new ArrayList<>(); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); - updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); - - // use DIB - context = new DataIteratorContext(); - context.setInsertOption(InsertOption.UPDATE); - qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); - assertTrue(context.getErrors().hasErrors()); - assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); - - // use updateRows - if (!_useAlias) // _update using alias is not supported - { - BatchValidationException errors 
= new BatchValidationException(); - try - { - qus.updateRows(user, c, updateRows, null, errors, null, null); - } - catch (Exception e) - { - - } - assertTrue(errors.hasErrors()); - assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); - - } - } - - @Test - public void REPLACE() throws Exception - { - if (null == ListService.get()) - return; - assert(getRows().isEmpty()); - INSERT(); - - User user = TestContext.get().getUser(); - Container c = JunitUtil.getTestContainer(); - TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); - QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); - var mergeRows = new ArrayList>(); - String colName = _useAlias ? "s_alias" : "s"; - String pkName = _useAlias ? "pk_alias" : "pk"; - mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); - mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); - DataIteratorContext context = new DataIteratorContext(); - context.setInsertOption(InsertOption.REPLACE); - var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); - assertFalse(context.getErrors().hasErrors()); - assertEquals(2, count); - var rows = getRows(); - // test existing row value is updated - assertEquals("TWO", rows.get(2).get("s")); - // test existing row value is updated - assertNull(rows.get(2).get("i")); - // test new row - assertEquals("THREE", rows.get(3).get("s")); - assertNull(rows.get(3).get("i")); - } - - @Test - public void IMPORT_IDENTITY() - { - if (null == ListService.get()) - return; - // TODO - } - - @Test - public void ALIAS_MERGE() throws Exception - { - _useAlias = true; - MERGE(); - } - - @Test - public void ALIAS_REPLACE() throws Exception - { - _useAlias = true; - REPLACE(); - } - - @Test - public void ALIAS_UPDATE() throws Exception - { - _useAlias = true; - UPDATE(); - } - } -} +/* + * Copyright (c) 
2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.collections4.MapUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.labkey.api.assay.AssayFileWriter; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.attachments.AttachmentParentFactory; +import org.labkey.api.attachments.SpringAttachmentFile; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.TransactionAuditProvider; +import org.labkey.api.audit.provider.FileSystemAuditProvider; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.DbSequenceManager; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; +import 
org.labkey.api.data.MultiValuedForeignKey; +import org.labkey.api.data.PropertyStorageSpec; +import org.labkey.api.data.RuntimeSQLException; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.DataIterator; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.DetailedAuditLogDataIterator; +import org.labkey.api.dataiterator.ExistingRecordDataIterator; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.dataiterator.Pump; +import org.labkey.api.dataiterator.StandardDataIteratorBuilder; +import org.labkey.api.dataiterator.TriggerDataBuilderHelper; +import org.labkey.api.dataiterator.WrapperDataIterator; +import org.labkey.api.exceptions.OptimisticConflictException; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.list.ListDefinition; +import org.labkey.api.exp.list.ListService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.files.FileContentService; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.reader.TabLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.AdminPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.test.TestWhen; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JunitUtil; +import org.labkey.api.util.TestContext; +import org.labkey.api.util.URIUtil; +import org.labkey.api.view.NotFoundException; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.writer.VirtualFile; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.File; +import java.io.IOException; +import java.io.StringReader; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Function; + +import static java.util.Objects.requireNonNull; +import static org.labkey.api.audit.TransactionAuditProvider.DB_SEQUENCE_NAME; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior; +import static org.labkey.api.dataiterator.DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.Name; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.RowId; +import static org.labkey.api.files.FileContentService.UPLOADED_FILE; +import static org.labkey.api.util.FileUtil.toFileForRead; +import static org.labkey.api.util.FileUtil.toFileForWrite; + +public abstract class AbstractQueryUpdateService implements QueryUpdateService +{ + protected final TableInfo _queryTable; + + 
private boolean _bulkLoad = false; + private CaseInsensitiveHashMap _columnImportMap = null; + private VirtualFile _att = null; + + /* AbstractQueryUpdateService is generally responsible for some shared functionality + * - triggers + * - coercion/validation + * - detailed logging + * - attachments + * + * If a subclass wants to disable some of these features (w/o subclassing), put flags here... + */ + protected boolean _enableExistingRecordsDataIterator = true; + protected Set _previouslyUpdatedRows = new HashSet<>(); + + protected AbstractQueryUpdateService(TableInfo queryTable) + { + if (queryTable == null) + throw new IllegalArgumentException(); + _queryTable = queryTable; + } + + protected TableInfo getQueryTable() + { + return _queryTable; + } + + public @NotNull Set getPreviouslyUpdatedRows() + { + return _previouslyUpdatedRows == null ? new HashSet<>() : _previouslyUpdatedRows; + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class acl) + { + return getQueryTable().hasPermission(user, acl); + } + + protected Map getRow(User user, Container container, Map keys, boolean allowCrossContainer) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + return getRow(user, container, keys); + } + + protected abstract Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException; + + @Override + public List> getRows(User user, Container container, List> keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + List> result = new ArrayList<>(); + for (Map rowKeys : keys) + { + Map row = getRow(user, container, rowKeys); + if (row != null) + result.add(row); + } + return result; + } + + @Override + public Map> getExistingRows(User user, Container container, Map> keys, boolean 
verifyNoCrossFolderData, boolean verifyExisting, @Nullable Set columns) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + if (!hasPermission(user, ReadPermission.class)) + throw new UnauthorizedException("You do not have permission to read data from this table."); + + Map> result = new LinkedHashMap<>(); + for (Map.Entry> key : keys.entrySet()) + { + Map row = getRow(user, container, key.getValue(), verifyNoCrossFolderData); + if (row != null && !row.isEmpty()) + { + result.put(key.getKey(), row); + if (verifyNoCrossFolderData) + { + String dataContainer = (String) row.get("container"); + if (StringUtils.isEmpty(dataContainer)) + dataContainer = (String) row.get("folder"); + if (!container.getId().equals(dataContainer)) + throw new InvalidKeyException("Data does not belong to folder '" + container.getName() + "': " + key.getValue().values()); + } + } + else if (verifyExisting) + throw new InvalidKeyException("Data not found for " + key.getValue().values()); + } + return result; + } + + @Override + public boolean hasExistingRowsInOtherContainers(Container container, Map> keys) + { + return false; + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction) + { + return createTransactionAuditEvent(container, auditAction, null); + } + + public static TransactionAuditProvider.TransactionAuditEvent createTransactionAuditEvent(Container container, QueryService.AuditAction auditAction, @Nullable Map details) + { + long auditId = DbSequenceManager.get(ContainerManager.getRoot(), DB_SEQUENCE_NAME).next(); + TransactionAuditProvider.TransactionAuditEvent event = new TransactionAuditProvider.TransactionAuditEvent(container, auditAction, auditId); + if (details != null) + event.addDetails(details); + return event; + } + + public static void addTransactionAuditEvent(DbScope.Transaction transaction, User user, TransactionAuditProvider.TransactionAuditEvent 
auditEvent) + { + UserSchema schema = AuditLogService.getAuditLogSchema(user, ContainerManager.getRoot()); + + if (schema != null) + { + // This is a little hack to ensure that the audit table has actually been created and gets put into the table cache by the time the + // pre-commit task is executed. Otherwise, since the creation of the table happens while within the commit for the + // outermost transaction, it looks like there is a close that hasn't happened when trying to commit the transaction for creating the + // table. + schema.getTable(auditEvent.getEventType(), false); + + transaction.addCommitTask(() -> AuditLogService.get().addEvent(user, auditEvent), DbScope.CommitTaskOption.PRECOMMIT); + + transaction.setAuditEvent(auditEvent); + } + } + + protected final DataIteratorContext getDataIteratorContext(BatchValidationException errors, InsertOption forImport, Map configParameters) + { + if (null == errors) + errors = new BatchValidationException(); + DataIteratorContext context = new DataIteratorContext(errors); + context.setInsertOption(forImport); + context.setConfigParameters(configParameters); + configureDataIteratorContext(context); + recordDataIteratorUsed(configParameters); + + return context; + } + + protected void recordDataIteratorUsed(@Nullable Map configParameters) + { + if (configParameters == null) + return; + + try + { + configParameters.put(TransactionAuditProvider.TransactionDetail.DataIteratorUsed, true); + } + catch (UnsupportedOperationException ignore) + { + // configParameters is immutable, likely originated from a junit test + } + } + + /** + * If QUS wants to use something other than PKs to select existing rows for merge, it can override this method. + * Used only for generating ExistingRecordDataIterator at the moment. 
+ */ + protected Set getSelectKeys(DataIteratorContext context) + { + if (!context.getAlternateKeys().isEmpty()) + return context.getAlternateKeys(); + return null; + } + + /* + * construct the core DataIterator transformation pipeline for this table, may be just StandardDataIteratorBuilder. + * does NOT handle triggers or the insert/update iterator. + */ + public DataIteratorBuilder createImportDIB(User user, Container container, DataIteratorBuilder data, DataIteratorContext context) + { + DataIteratorBuilder dib = StandardDataIteratorBuilder.forInsert(getQueryTable(), data, container, user); + + if (_enableExistingRecordsDataIterator || context.getInsertOption().updateOnly) + { + // some tables need to generate PKs, so they need to add ExistingRecordDataIterator in persistRows() (after generating PK, before inserting) + dib = ExistingRecordDataIterator.createBuilder(dib, getQueryTable(), getSelectKeys(context)); + } + + dib = ((UpdateableTableInfo) getQueryTable()).persistRows(dib, context); + dib = AttachmentDataIterator.getAttachmentDataIteratorBuilder(getQueryTable(), dib, user, context.getInsertOption().batch ? getAttachmentDirectory() : null, container, getAttachmentParentFactory()); + dib = DetailedAuditLogDataIterator.getDataIteratorBuilder(getQueryTable(), dib, context.getInsertOption(), user, container, null); + return dib; + } + + + /** + * Implementation to use insertRows() while we migrate to using DIB for all code paths + *

+ * DataIterator should/must use the same error collection as passed in + */ + @Deprecated + protected int _importRowsUsingInsertRows(User user, Container container, DataIterator rows, BatchValidationException errors, Map extraScriptContext) + { + MapDataIterator mapIterator = DataIteratorUtil.wrapMap(rows, true); + List> list = new ArrayList<>(); + List> ret; + Exception rowException; + + try + { + while (mapIterator.next()) + list.add(mapIterator.getMap()); + ret = insertRows(user, container, list, errors, null, extraScriptContext); + if (errors.hasErrors()) + return 0; + return ret.size(); + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + return 0; + } + catch (QueryUpdateServiceException | DuplicateKeyException | SQLException x) + { + rowException = x; + } + finally + { + DataIteratorUtil.closeQuietly(mapIterator); + } + errors.addRowError(new ValidationException(rowException.getMessage())); + return 0; + } + + protected boolean hasImportRowsPermission(User user, Container container, DataIteratorContext context) + { + return hasPermission(user, context.getInsertOption().updateOnly ? 
UpdatePermission.class : InsertPermission.class); + } + + protected boolean hasInsertRowsPermission(User user) + { + return hasPermission(user, InsertPermission.class); + } + + protected boolean hasDeleteRowsPermission(User user) + { + return hasPermission(user, DeletePermission.class); + } + + protected boolean hasUpdateRowsPermission(User user) + { + return hasPermission(user, UpdatePermission.class); + } + + // override this + protected void preImportDIBValidation(@Nullable DataIteratorBuilder in, @Nullable Collection inputColumns) + { + } + + protected int _importRowsUsingDIB(User user, Container container, DataIteratorBuilder in, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasImportRowsPermission(user, container, context)) + throw new UnauthorizedException("You do not have permission to " + (context.getInsertOption().updateOnly ? "update data in this table." : "insert data into this table.")); + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipInsertOptionValidation)) + assert(getQueryTable().supportsInsertOption(context.getInsertOption())); + + context.getErrors().setExtraContext(extraScriptContext); + if (extraScriptContext != null) + { + context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE)); + } + + preImportDIBValidation(in, null); + + boolean skipTriggers = context.getConfigParameterBoolean(ConfigParameters.SkipTriggers) || context.isCrossTypeImport() || context.isCrossFolderImport(); + boolean hasTableScript = hasTableScript(container); + TriggerDataBuilderHelper helper = new TriggerDataBuilderHelper(getQueryTable(), container, user, extraScriptContext, context.getInsertOption().useImportAliases); + if (!skipTriggers) + { + in = preTriggerDataIterator(in, context); + if (hasTableScript) + in = helper.before(in); + } + DataIteratorBuilder importDIB = createImportDIB(user, container, in, context); + DataIteratorBuilder out = importDIB; + + if 
(!skipTriggers) + { + if (hasTableScript) + out = helper.after(importDIB); + + out = postTriggerDataIterator(out, context); + } + + if (hasTableScript) + { + context.setFailFast(false); + context.setMaxRowErrors(Math.max(context.getMaxRowErrors(),1000)); + } + int count = _pump(out, outputRows, context); + + if (context.getErrors().hasErrors()) + return 0; + + if (!context.getConfigParameterBoolean(ConfigParameters.SkipAuditSummary)) + _addSummaryAuditEvent(container, user, context, count); + + return count; + } + + protected DataIteratorBuilder preTriggerDataIterator(DataIteratorBuilder in, DataIteratorContext context) + { + return in; + } + + protected DataIteratorBuilder postTriggerDataIterator(DataIteratorBuilder out, DataIteratorContext context) + { + return out; + } + + /** this is extracted so subclasses can add wrap */ + protected int _pump(DataIteratorBuilder etl, final @Nullable ArrayList> rows, DataIteratorContext context) + { + DataIterator it = etl.getDataIterator(context); + + if (null == it) + return 0; + + try + { + if (null != rows) + { + MapDataIterator maps = DataIteratorUtil.wrapMap(it, false); + it = new WrapperDataIterator(maps) + { + @Override + public boolean next() throws BatchValidationException + { + boolean ret = super.next(); + if (ret) + rows.add(((MapDataIterator)_delegate).getMap()); + return ret; + } + }; + } + + Pump pump = new Pump(it, context); + pump.run(); + + return pump.getRowCount(); + } + finally + { + DataIteratorUtil.closeQuietly(it); + } + } + + /* can be used for simple bookkeeping tasks, per row processing belongs in a data iterator */ + protected void afterInsertUpdate(int count, BatchValidationException errors, boolean isUpdate) + { + afterInsertUpdate(count, errors); + } + + protected void afterInsertUpdate(int count, BatchValidationException errors) + {} + + @Override + public int loadRows(User user, Container container, DataIteratorBuilder rows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + 
return loadRows(user, container, rows, null, context, extraScriptContext); + } + + public int loadRows(User user, Container container, DataIteratorBuilder rows, @Nullable final ArrayList> outputRows, DataIteratorContext context, @Nullable Map extraScriptContext) + { + configureDataIteratorContext(context); + int count = _importRowsUsingDIB(user, container, rows, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + return count; + } + + @Override + public int importRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, Map configParameters, @Nullable Map extraScriptContext) + { + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.IMPORT, configParameters); + int count = _importRowsUsingInsertRows(user, container, rows.getDataIterator(context), errors, extraScriptContext); + afterInsertUpdate(count, errors, context.getInsertOption().updateOnly); + return count; + } + + @Override + public int mergeRows(User user, Container container, DataIteratorBuilder rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + { + throw new UnsupportedOperationException("merge is not supported for all tables"); + } + + private boolean hasTableScript(Container container) + { + return getQueryTable().hasTriggers(container); + } + + + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Not implemented by this QueryUpdateService"); + } + + + protected @Nullable List> _insertRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + return 
_insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + protected @Nullable List> _insertUpdateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + DataIteratorBuilder dib = _toDataIteratorBuilder(getClass().getSimpleName() + (context.getInsertOption().updateOnly ? ".updateRows" : ".insertRows()"), rows); + ArrayList> outputRows = new ArrayList<>(); + int count = _importRowsUsingDIB(user, container, dib, outputRows, context, extraScriptContext); + afterInsertUpdate(count, context.getErrors(), context.getInsertOption().updateOnly); + + if (context.getErrors().hasErrors()) + return null; + + return outputRows; + } + + // not yet supported + protected @Nullable List> _updateRowsUsingDIB(User user, Container container, List> rows, + DataIteratorContext context, @Nullable Map extraScriptContext) + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + return _insertUpdateRowsUsingDIB(user, container, rows, context, extraScriptContext); + } + + + protected DataIteratorBuilder _toDataIteratorBuilder(String debugName, List> rows) + { + // TODO probably can't assume all rows have all columns + // TODO can we assume that all rows refer to columns consistently? (not PTID and MouseId for the same column) + // TODO optimize ArrayListMap? 
+ Set colNames; + + if (!rows.isEmpty() && rows.get(0) instanceof ArrayListMap) + { + colNames = ((ArrayListMap)rows.get(0)).getFindMap().keySet(); + } + else + { + // Preserve casing by using wrapped CaseInsensitiveHashMap instead of CaseInsensitiveHashSet + colNames = Sets.newCaseInsensitiveHashSet(); + for (Map row : rows) + colNames.addAll(row.keySet()); + } + + preImportDIBValidation(null, colNames); + return MapDataIterator.of(colNames, rows, debugName); + } + + + /** @deprecated switch to using DIB based method */ + @Deprecated + protected List> _insertRowsUsingInsertRow(User user, Container container, List> rows, BatchValidationException errors, Map extraScriptContext) + throws DuplicateKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasInsertRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to insert data into this table."); + + assert(getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + boolean hasTableScript = hasTableScript(container); + + errors.setExtraContext(extraScriptContext); + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + row = normalizeColumnNames(row); + try + { + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), false); + if (hasTableScript) + { + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, true, i, row, null, extraScriptContext); + } + row = insertRow(user, container, row); + if (row == null) + continue; + + if (hasTableScript) + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.INSERT, false, i, row, null, extraScriptContext); + result.add(row); + } + catch (SQLException sqlx) + { + if 
(StringUtils.startsWith(sqlx.getSQLState(), "22") || RuntimeSQLException.isConstraintException(sqlx)) + { + ValidationException vex = new ValidationException(sqlx.getMessage()); + vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i+1); + errors.addRowError(vex); + } + else if (SqlDialect.isTransactionException(sqlx) && errors.hasErrors()) + { + // if we already have some errors, just break + break; + } + else + { + throw sqlx; + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + } + + if (hasTableScript) + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.INSERT, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.INSERT, null, result, null, providedValues); + + return result; + } + + protected void addAuditEvent(User user, Container container, QueryService.AuditAction auditAction, @Nullable Map configParameters, @Nullable List> rows, @Nullable List> existingRows, @Nullable List> providedValues) + { + if (!isBulkLoad()) + { + AuditBehaviorType auditBehavior = configParameters != null ? (AuditBehaviorType) configParameters.get(AuditBehavior) : null; + String userComment = configParameters == null ? 
null : (String) configParameters.get(AuditUserComment); + getQueryTable().getAuditHandler(auditBehavior) + .addAuditEvent(user, container, getQueryTable(), auditBehavior, userComment, auditAction, rows, existingRows, providedValues); + } + } + + private Map normalizeColumnNames(Map row) + { + if(_columnImportMap == null) + { + _columnImportMap = (CaseInsensitiveHashMap)ImportAliasable.Helper.createImportMap(getQueryTable().getColumns(), false); + } + + Map newRow = new CaseInsensitiveHashMap<>(); + CaseInsensitiveHashSet columns = new CaseInsensitiveHashSet(); + columns.addAll(row.keySet()); + + String newName; + for(String key : row.keySet()) + { + if(_columnImportMap.containsKey(key)) + { + //it is possible for a normalized name to conflict with an existing property. if so, defer to the original + newName = _columnImportMap.get(key).getName(); + if(!columns.contains(newName)){ + newRow.put(newName, row.get(key)); + continue; + } + } + newRow.put(key, row.get(key)); + } + + return newRow; + } + + @Override + public List> insertRows(User user, Container container, List> rows, BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws DuplicateKeyException, QueryUpdateServiceException, SQLException + { + try + { + List> ret = _insertRowsUsingInsertRow(user, container, rows, errors, extraScriptContext); + afterInsertUpdate(null==ret?0:ret.size(), errors); + if (errors.hasErrors()) + return null; + return ret; + } + catch (BatchValidationException x) + { + assert x == errors; + assert x.hasErrors(); + } + return null; + } + + protected Object coerceTypesValue(ColumnInfo col, Map providedValues, String key, Object value) + { + if (col != null && value != null && + !col.getJavaObjectClass().isInstance(value) && + !(value instanceof AttachmentFile) && + !(value instanceof MultipartFile) && + !(value instanceof String[]) && + !(col.isMultiValued() || col.getFk() instanceof MultiValuedForeignKey)) + { + try + { + if 
(col.getKindOfQuantity() != null) + providedValues.put(key, value); + if (PropertyType.FILE_LINK.equals(col.getPropertyType())) + value = ExpDataFileConverter.convert(value); + else + value = col.convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw e; + } + catch (ConversionException e) + { + // That's OK, the transformation script may be able to fix up the value before it gets inserted + } + } + + return value; + } + + /** Attempt to make the passed in types match the expected types so the script doesn't have to do the conversion */ + @Deprecated + protected Map coerceTypes(Map row, Map providedValues, boolean isUpdate) + { + Map result = new CaseInsensitiveHashMap<>(row.size()); + Map columnMap = ImportAliasable.Helper.createImportMap(_queryTable.getColumns(), true); + for (Map.Entry entry : row.entrySet()) + { + ColumnInfo col = columnMap.get(entry.getKey()); + Object value = coerceTypesValue(col, providedValues, entry.getKey(), entry.getValue()); + result.put(entry.getKey(), value); + } + + return result; + } + + protected abstract Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + + protected boolean firstUpdateRow = true; + Function,Map> updateTransform = Function.identity(); + + /* Do standard AQUS stuff here, then call the subclass specific implementation of updateRow() */ + final protected Map updateOneRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + if (firstUpdateRow) + { + firstUpdateRow = false; + if (null != OntologyService.get()) + { + var t = OntologyService.get().getConceptUpdateHandler(_queryTable); + if (null != t) + updateTransform = t; + } + } + row = updateTransform.apply(row); + return updateRow(user, container, row, 
oldRow, configParameters); + } + + // used by updateRows to check if all rows have the same set of keys + // prepared statement can only be used to updateRows if all rows have the same set of keys + protected static boolean hasUniformKeys(List> rowsToUpdate) + { + if (rowsToUpdate == null || rowsToUpdate.isEmpty()) + return false; + + if (rowsToUpdate.size() == 1) + return true; + + Set keys = rowsToUpdate.get(0).keySet(); + int keySize = keys.size(); + + for (int i = 1 ; i < rowsToUpdate.size(); i ++) + { + Set otherKeys = rowsToUpdate.get(i).keySet(); + if (otherKeys.size() != keySize) + return false; + if (!otherKeys.containsAll(keys)) + return false; + } + + return true; + } + + @Override + public List> updateRows(User user, Container container, List> rows, List> oldKeys, + BatchValidationException errors, @Nullable Map configParameters, Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasUpdateRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to update data in this table."); + + if (oldKeys != null && rows.size() != oldKeys.size()) + throw new IllegalArgumentException("rows and oldKeys are required to be the same length, but were " + rows.size() + " and " + oldKeys + " in length, respectively"); + + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, true, errors, extraScriptContext); + + List> result = new ArrayList<>(rows.size()); + List> oldRows = new ArrayList<>(rows.size()); + List> providedValues = new ArrayList<>(rows.size()); + // TODO: Support update/delete without selecting the existing row -- unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && 
_queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + for (int i = 0; i < rows.size(); i++) + { + Map row = rows.get(i); + providedValues.add(new CaseInsensitiveHashMap<>()); + row = coerceTypes(row, providedValues.get(i), true); + try + { + Map oldKey = oldKeys == null ? row : oldKeys.get(i); + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, oldKey); + if (oldRow == null) + throw new NotFoundException("The existing row was not found."); + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, true, i, row, oldRow, extraScriptContext); + Map updatedRow = updateOneRow(user, container, row, oldRow, configParameters); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow, extraScriptContext); + if (!streaming) + { + result.add(updatedRow); + oldRows.add(oldRow); + } + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), row, i)); + } + catch (OptimisticConflictException e) + { + errors.addRowError(new ValidationException("Unable to update. 
Row may have been deleted.")); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.UPDATE, false, errors, extraScriptContext); + afterInsertUpdate(null==result?0:result.size(), errors, true); + + if (errors.hasErrors()) + throw errors; + + addAuditEvent(user, container, QueryService.AuditAction.UPDATE, configParameters, result, oldRows, providedValues); + + return result; + } + + protected void validatePartitionedRowKeys(Collection columns) + { + // do nothing + } + + public List> updateRowsUsingPartitionedDIB( + DbScope.Transaction tx, + User user, + Container container, + List> rows, + BatchValidationException errors, + @Nullable Map configParameters, + Map extraScriptContext + ) + { + int index = 0; + int numPartitions = 0; + List> ret = new ArrayList<>(); + + Set observedRowIds = new HashSet<>(); + Set observedNames = new CaseInsensitiveHashSet(); + + while (index < rows.size()) + { + CaseInsensitiveHashSet rowKeys = new CaseInsensitiveHashSet(rows.get(index).keySet()); + + validatePartitionedRowKeys(rowKeys); + + int nextIndex = index + 1; + while (nextIndex < rows.size() && rowKeys.equals(new CaseInsensitiveHashSet(rows.get(nextIndex).keySet()))) + nextIndex++; + + List> rowsToProcess = rows.subList(index, nextIndex); + index = nextIndex; + numPartitions++; + + DataIteratorContext context = getDataIteratorContext(errors, InsertOption.UPDATE, configParameters); + + // skip audit summary for the partitions, we will perform it once at the end + context.putConfigParameter(ConfigParameters.SkipAuditSummary, true); + + List> subRet = _updateRowsUsingDIB(user, container, rowsToProcess, context, extraScriptContext); + + // we need to throw if we don't want executeWithRetry() attempt commit() + if (context.getErrors().hasErrors()) + throw new DbScope.RetryPassthroughException(context.getErrors()); + + if (subRet != null) + { + ret.addAll(subRet); + + // Check if 
duplicate rows have been processed across the partitions + // Only start checking for duplicates after the first partition has been processed. + if (numPartitions > 1) + { + // If we are on the second partition, then lazily check all previous rows, otherwise check only the current partition + checkPartitionForDuplicates(numPartitions == 2 ? ret : subRet, observedRowIds, observedNames, errors); + } + + if (errors.hasErrors()) + throw new DbScope.RetryPassthroughException(errors); + } + } + + if (numPartitions > 1) + { + var auditEvent = tx.getAuditEvent(); + if (auditEvent != null) + auditEvent.addDetail(TransactionAuditProvider.TransactionDetail.DataIteratorPartitions, numPartitions); + } + + _addSummaryAuditEvent(container, user, getDataIteratorContext(errors, InsertOption.UPDATE, configParameters), ret.size()); + + return ret; + } + + private void checkPartitionForDuplicates(List> partitionRows, Set globalRowIds, Set globalNames, BatchValidationException errors) + { + for (Map row : partitionRows) + { + Long rowId = MapUtils.getLong(row, RowId.name()); + if (rowId != null && !globalRowIds.add(rowId)) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + rowId)); + return; + } + + Object nameObj = row.get(Name.name()); + if (nameObj != null && !globalNames.add(nameObj.toString())) + { + errors.addRowError(new ValidationException("Duplicate key provided: " + nameObj)); + return; + } + } + } + + protected void checkDuplicateUpdate(Object pkVals) throws ValidationException + { + if (pkVals == null) + return; + + Set updatedRows = getPreviouslyUpdatedRows(); + + Object[] keysObj; + if (pkVals.getClass().isArray()) + keysObj = (Object[]) pkVals; + else if (pkVals instanceof Map map) + { + List orderedKeyVals = new ArrayList<>(); + SortedSet sortedKeys = new TreeSet<>(map.keySet()); + for (String key : sortedKeys) + orderedKeyVals.add(map.get(key)); + keysObj = orderedKeyVals.toArray(); + } + else + keysObj = new Object[]{pkVals}; + + if 
(keysObj.length == 1) + { + if (updatedRows.contains(keysObj[0])) + throw new ValidationException("Duplicate key provided: " + keysObj[0]); + updatedRows.add(keysObj[0]); + return; + } + + List keys = new ArrayList<>(); + for (Object key : keysObj) + keys.add(String.valueOf(key)); + if (updatedRows.contains(keys)) + throw new ValidationException("Duplicate key provided: " + StringUtils.join(keys, ", ")); + updatedRows.add(keys); + } + + @Override + public Map moveRows(User user, Container container, Container targetContainer, List> rows, BatchValidationException errors, @Nullable Map configParameters, @Nullable Map extraScriptContext) throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + throw new UnsupportedOperationException("Move is not supported for this table type."); + } + + protected abstract Map deleteRow(User user, Container container, Map oldRow) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException; + + protected Map deleteRow(User user, Container container, Map oldRow, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return deleteRow(user, container, oldRow); + } + + @Override + public List> deleteRows(User user, Container container, List> keys, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws InvalidKeyException, BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to delete data from this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, true, errors, extraScriptContext); + + // TODO: Support update/delete without selecting the existing row -- 
unfortunately, we currently get the existing row to check its container matches the incoming container + boolean streaming = false; //_queryTable.canStreamTriggers(container) && _queryTable.getAuditBehavior() != AuditBehaviorType.NONE; + + List> result = new ArrayList<>(keys.size()); + for (int i = 0; i < keys.size(); i++) + { + Map key = keys.get(i); + try + { + Map oldRow = null; + if (!streaming) + { + oldRow = getRow(user, container, key); + // if row doesn't exist, bail early + if (oldRow == null) + continue; + } + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, true, i, null, oldRow, extraScriptContext); + Map updatedRow = deleteRow(user, container, oldRow, configParameters, extraScriptContext); + if (!streaming && updatedRow == null) + continue; + + getQueryTable().fireRowTrigger(container, user, TableInfo.TriggerType.DELETE, false, i, null, updatedRow, extraScriptContext); + result.add(updatedRow); + } + catch (InvalidKeyException ex) + { + ValidationException vex = new ValidationException(ex.getMessage()); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (ValidationException vex) + { + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + catch (RuntimeValidationException rvex) + { + ValidationException vex = rvex.getValidationException(); + errors.addRowError(vex.fillIn(getQueryTable().getPublicSchemaName(), getQueryTable().getName(), key, i)); + } + } + + // Fire triggers, if any, and also throw if there are any errors + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.DELETE, false, errors, extraScriptContext); + + addAuditEvent(user, container, QueryService.AuditAction.DELETE, configParameters, result, null, null); + + return result; + } + + protected int truncateRows(User user, Container container) + throws QueryUpdateServiceException, SQLException + { + throw new 
UnsupportedOperationException(); + } + + @Override + public int truncateRows(User user, Container container, @Nullable Map configParameters, @Nullable Map extraScriptContext) + throws BatchValidationException, QueryUpdateServiceException, SQLException + { + if (!container.hasPermission(user, AdminPermission.class) && !hasDeleteRowsPermission(user)) + throw new UnauthorizedException("You do not have permission to truncate this table."); + + BatchValidationException errors = new BatchValidationException(); + errors.setExtraContext(extraScriptContext); + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, true, errors, extraScriptContext); + + int result = truncateRows(user, container); + + getQueryTable().fireBatchTrigger(container, user, TableInfo.TriggerType.TRUNCATE, false, errors, extraScriptContext); + addAuditEvent(user, container, QueryService.AuditAction.TRUNCATE, configParameters, null, null, null); + + return result; + } + + @Override + public void setBulkLoad(boolean bulkLoad) + { + _bulkLoad = bulkLoad; + } + + @Override + public boolean isBulkLoad() + { + return _bulkLoad; + } + + public static Object saveFile(User user, Container container, String name, Object value, @Nullable String dirName) throws ValidationException, QueryUpdateServiceException + { + FileLike dirPath = AssayFileWriter.getUploadDirectoryPath(container, dirName); + return saveFile(user, container, name, value, dirPath); + } + + /** + * Save uploaded file to dirName directory under file or pipeline root. 
+ */ + public static Object saveFile(User user, Container container, String name, Object value, @Nullable FileLike dirPath) throws ValidationException, QueryUpdateServiceException + { + if (!(value instanceof MultipartFile) && !(value instanceof SpringAttachmentFile)) + throw new ValidationException("Invalid file value"); + + String auditMessageFormat = "Saved file '%s' for field '%s' in folder %s."; + FileLike file = null; + try + { + FileLike dir = AssayFileWriter.ensureUploadDirectory(dirPath); + + FileSystemAuditProvider.FileSystemAuditEvent event = new FileSystemAuditProvider.FileSystemAuditEvent(container, null); + if (value instanceof MultipartFile multipartFile) + { + // Once we've found one, write it to disk and replace the row's value with just the File reference to it + if (multipartFile.isEmpty()) + { + throw new ValidationException("File " + multipartFile.getOriginalFilename() + " for field " + name + " has no content"); + } + file = FileUtil.findUniqueFileName(multipartFile.getOriginalFilename(), dir); + checkFileUnderRoot(container, file); + multipartFile.transferTo(toFileForWrite(file)); + event.setComment(String.format(auditMessageFormat, multipartFile.getOriginalFilename(), name, container.getPath())); + event.setProvidedFileName(multipartFile.getOriginalFilename()); + } + else + { + SpringAttachmentFile saf = (SpringAttachmentFile) value; + file = FileUtil.findUniqueFileName(saf.getFilename(), dir); + checkFileUnderRoot(container, file); + saf.saveTo(file); + event.setComment(String.format(auditMessageFormat, saf.getFilename(), name, container.getPath())); + event.setProvidedFileName(saf.getFilename()); + } + event.setFile(file.getName()); + event.setFieldName(name); + event.setDirectory(file.getParent().toURI().getPath()); + AuditLogService.get().addEvent(user, event); + } + catch (IOException | ExperimentException e) + { + throw new QueryUpdateServiceException(e); + } + + ensureExpData(user, container, file.toNioPathForRead().toFile()); + 
return file; + } + + public static ExpData ensureExpData(User user, Container container, File file) + { + ExpData existingData = ExperimentService.get().getExpDataByURL(file, container); + // create exp.data record + if (existingData == null) + { + File canonicalFile = FileUtil.getAbsoluteCaseSensitiveFile(file); + ExpData data = ExperimentService.get().createData(container, UPLOADED_FILE); + data.setName(file.getName()); + data.setDataFileURI(canonicalFile.toPath().toUri()); + if (data.getDataFileUrl() != null && data.getDataFileUrl().length() <= ExperimentService.get().getTinfoData().getColumn("DataFileURL").getScale()) + { + // If the path is too long to store, bail out without creating an exp.data row + data.save(user); + } + + return data; + } + + return existingData; + } + + // For security reasons, make sure the user hasn't tried to reference a file that's not under + // the pipeline root or @assayfiles root. Otherwise, they could get access to any file on the server + static FileLike checkFileUnderRoot(Container container, FileLike file) throws ExperimentException + { + Path assayFilesRoot = FileContentService.get().getFileRootPath(container, FileContentService.ContentType.assayfiles); + if (assayFilesRoot != null && URIUtil.isDescendant(assayFilesRoot.toUri(), file.toURI())) + return file; + + PipeRoot root = PipelineService.get().findPipelineRoot(container); + if (root == null) + throw new ExperimentException("Pipeline root not available in container " + container.getPath()); + + if (!root.isUnderRoot(toFileForRead(file))) + { + throw new ExperimentException("Cannot reference file '" + file + "' from " + container.getPath()); + } + + return file; + } + + protected void _addSummaryAuditEvent(Container container, User user, DataIteratorContext context, int count) + { + if (!context.isCrossTypeImport() && !context.isCrossFolderImport()) // audit handled at table level + { + AuditBehaviorType auditType = (AuditBehaviorType) 
context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditBehavior); + String auditUserComment = (String) context.getConfigParameter(DetailedAuditLogDataIterator.AuditConfigs.AuditUserComment); + boolean skipAuditLevelCheck = false; + if (context.getConfigParameterBoolean(QueryUpdateService.ConfigParameters.BulkLoad)) + { + if (getQueryTable().getEffectiveAuditBehavior(auditType) == AuditBehaviorType.DETAILED) // allow ETL to demote audit level for bulkLoad + skipAuditLevelCheck = true; + } + getQueryTable().getAuditHandler(auditType).addSummaryAuditEvent(user, container, getQueryTable(), context.getInsertOption().auditAction, count, auditType, auditUserComment, skipAuditLevelCheck); + } + } + + /** + * Is used by the AttachmentDataIterator to point to the location of the serialized + * attachment files. + */ + public void setAttachmentDirectory(VirtualFile att) + { + _att = att; + } + + @Nullable + protected VirtualFile getAttachmentDirectory() + { + return _att; + } + + /** + * QUS instances that allow import of attachments through the AttachmentDataIterator should furnish a factory + * implementation in order to resolve the attachment parent on incoming attachment files. + */ + @Nullable + protected AttachmentParentFactory getAttachmentParentFactory() + { + return null; + } + + /** Translate between the column name that query is exposing to the column name that actually lives in the database */ + protected static void aliasColumns(Map columnMapping, Map row) + { + for (Map.Entry entry : columnMapping.entrySet()) + { + if (row.containsKey(entry.getValue()) && !row.containsKey(entry.getKey())) + { + row.put(entry.getKey(), row.get(entry.getValue())); + } + } + } + + /** + * The database table has underscores for MV column names, but we expose a column without the underscore. + * Therefore, we need to translate between the two sets of column names. 
+ * @return database column name -> exposed TableInfo column name + */ + protected static Map createMVMapping(Domain domain) + { + Map result = new CaseInsensitiveHashMap<>(); + if (domain != null) + { + for (DomainProperty domainProperty : domain.getProperties()) + { + if (domainProperty.isMvEnabled()) + { + result.put(PropertyStorageSpec.getMvIndicatorStorageColumnName(domainProperty.getPropertyDescriptor()), domainProperty.getName() + MvColumn.MV_INDICATOR_SUFFIX); + } + } + } + return result; + } + + @TestWhen(TestWhen.When.BVT) + public static class TestCase extends Assert + { + private boolean _useAlias = false; + + static TabLoader getTestData() throws IOException + { + TabLoader testData = new TabLoader(new StringReader("pk,i,s\n0,0,zero\n1,1,one\n2,2,two"),true); + testData.parseAsCSV(); + testData.getColumns()[0].clazz = Integer.class; + testData.getColumns()[1].clazz = Integer.class; + testData.getColumns()[2].clazz = String.class; + return testData; + } + + @BeforeClass + public static void createList() throws Exception + { + if (null == ListService.get()) + return; + deleteList(); + + TabLoader testData = getTestData(); + String hash = GUID.makeHash(); + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + assertNotNull(lists); + + ListDefinition R = s.createList(c, "R", ListDefinition.KeyType.Integer); + R.setKeyName("pk"); + Domain d = requireNonNull(R.getDomain()); + for (int i=0 ; i> getRows() + { + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + UserSchema lists = (UserSchema)DefaultSchema.get(user, c).getSchema("lists"); + TableInfo rTableInfo = requireNonNull(lists.getTable("R", null)); + return Arrays.asList(new TableSelector(rTableInfo, TableSelector.ALL_COLUMNS, null, new Sort("PK")).getMapArray()); + } + + @Before + public void resetList() throws 
Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + qus.truncateRows(user, c, null, null); + } + + @AfterClass + public static void deleteList() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + ListService s = ListService.get(); + Map m = s.getLists(c); + if (m.containsKey("R")) + m.get("R").delete(user); + } + + void validateDefaultData(List> rows) + { + assertEquals(3, rows.size()); + + assertEquals(0, rows.get(0).get("pk")); + assertEquals(1, rows.get(1).get("pk")); + assertEquals(2, rows.get(2).get("pk")); + + assertEquals(0, rows.get(0).get("i")); + assertEquals(1, rows.get(1).get("i")); + assertEquals(2, rows.get(2).get("i")); + + assertEquals("zero", rows.get(0).get("s")); + assertEquals("one", rows.get(1).get("s")); + assertEquals("two", rows.get(2).get("s")); + } + + @Test + public void INSERT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var rows = qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertFalse(errors.hasErrors()); + validateDefaultData(rows); + validateDefaultData(getRows()); + + qus.insertRows(user, c, getTestData().load(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void UPSERT() throws Exception + { + if (null == 
ListService.get()) + return; + /* not sure how you use/test ImportOptions.UPSERT + * the only row returning QUS method is insertRows(), which doesn't let you specify the InsertOption? + */ + } + + @Test + public void IMPORT() throws Exception + { + if (null == ListService.get()) + return; + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + assert(getRows().isEmpty()); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + BatchValidationException errors = new BatchValidationException(); + var count = qus.importRows(user, c, getTestData(), errors, null, null); + assertFalse(errors.hasErrors()); + assert(count == 3); + validateDefaultData(getRows()); + + qus.importRows(user, c, getTestData(), errors, null, null); + assertTrue(errors.hasErrors()); + } + + @Test + public void MERGE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? 
"pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + BatchValidationException errors = new BatchValidationException() + { + @Override + public void addRowError(ValidationException vex) + { + LogManager.getLogger(AbstractQueryUpdateService.class).error("test error", vex); + fail(vex.getMessage()); + } + }; + int count=0; + try (var tx = rTableInfo.getSchema().getScope().ensureTransaction()) + { + var ret = qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + if (!errors.hasErrors()) + { + tx.commit(); + count = ret; + } + } + assertFalse("mergeRows error(s): " + errors.getMessage(), errors.hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is not updated + assertEquals(2, rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + + // merge should fail if duplicate keys are provided + errors = new BatchValidationException(); + mergeRows = new ArrayList<>(); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + qus.mergeRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), errors, null, null); + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + } + + @Test + public void UPDATE() throws Exception + { + if (null == ListService.get()) + return; + INSERT(); + assertEquals("Wrong number of rows after INSERT", 3, getRows().size()); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, 
c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var updateRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + + // update using data iterator + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + var count = qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(1, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO-UP", rows.get(2).get("s")); + // test existing row value is not updated/erased + assertEquals(2, rows.get(2).get("i")); + + // update should fail if a new record is provided + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,123,colName,"NEW")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + + // Issue 52728: update should fail if duplicate key is provide + updateRows = new ArrayList<>(); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-2")); + updateRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO-UP-UP-2")); + + // use DIB + context = new DataIteratorContext(); + context.setInsertOption(InsertOption.UPDATE); + qus.loadRows(user, c, MapDataIterator.of(updateRows.get(0).keySet(), updateRows), context, null); + assertTrue(context.getErrors().hasErrors()); + assertTrue("Duplicate key error: " + context.getErrors().getMessage(), context.getErrors().getMessage().contains("Duplicate key provided: 2")); + + // use updateRows + if (!_useAlias) // _update using alias is not supported + { + BatchValidationException errors 
= new BatchValidationException(); + try + { + qus.updateRows(user, c, updateRows, null, errors, null, null); + } + catch (Exception e) + { + + } + assertTrue(errors.hasErrors()); + assertTrue("Duplicate key error: " + errors.getMessage(), errors.getMessage().contains("Duplicate key provided: 2")); + + } + } + + @Test + public void REPLACE() throws Exception + { + if (null == ListService.get()) + return; + assert(getRows().isEmpty()); + INSERT(); + + User user = TestContext.get().getUser(); + Container c = JunitUtil.getTestContainer(); + TableInfo rTableInfo = ((UserSchema)DefaultSchema.get(user, c).getSchema("lists")).getTable("R", null); + QueryUpdateService qus = requireNonNull(rTableInfo.getUpdateService()); + var mergeRows = new ArrayList>(); + String colName = _useAlias ? "s_alias" : "s"; + String pkName = _useAlias ? "pk_alias" : "pk"; + mergeRows.add(CaseInsensitiveHashMap.of(pkName,2,colName,"TWO")); + mergeRows.add(CaseInsensitiveHashMap.of(pkName,3,colName,"THREE")); + DataIteratorContext context = new DataIteratorContext(); + context.setInsertOption(InsertOption.REPLACE); + var count = qus.loadRows(user, c, MapDataIterator.of(mergeRows.get(0).keySet(), mergeRows), context, null); + assertFalse(context.getErrors().hasErrors()); + assertEquals(2, count); + var rows = getRows(); + // test existing row value is updated + assertEquals("TWO", rows.get(2).get("s")); + // test existing row value is updated + assertNull(rows.get(2).get("i")); + // test new row + assertEquals("THREE", rows.get(3).get("s")); + assertNull(rows.get(3).get("i")); + } + + @Test + public void IMPORT_IDENTITY() + { + if (null == ListService.get()) + return; + // TODO + } + + @Test + public void ALIAS_MERGE() throws Exception + { + _useAlias = true; + MERGE(); + } + + @Test + public void ALIAS_REPLACE() throws Exception + { + _useAlias = true; + REPLACE(); + } + + @Test + public void ALIAS_UPDATE() throws Exception + { + _useAlias = true; + UPDATE(); + } + } +} diff --git 
a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index f62a69ca510..d04ea279c65 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -1,936 +1,936 @@ -/* - * Copyright (c) 2009-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.api.query; - -import org.apache.commons.beanutils.ConversionException; -import org.apache.commons.beanutils.ConvertUtils; -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.collections.ArrayListMap; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveMapWrapper; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ConvertHelper; -import org.labkey.api.data.ExpDataFileConverter; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MvUtil; -import org.labkey.api.data.Parameter; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpdateableTableInfo; -import 
org.labkey.api.data.validator.ColumnValidator; -import org.labkey.api.data.validator.ColumnValidators; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.DataIteratorUtil; -import org.labkey.api.dataiterator.MapDataIterator; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.OntologyObject; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.PropertyDescriptor; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.ValidatorContext; -import org.labkey.api.reader.ColumnDescriptor; -import org.labkey.api.reader.DataLoader; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.CachingSupplier; -import org.labkey.api.util.Pair; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.vfs.FileLike; -import org.springframework.web.multipart.MultipartFile; - -import java.io.IOException; -import java.nio.file.Path; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.function.Supplier; - -/** - * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. - * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the - * hard table columns. 
- */ -public class DefaultQueryUpdateService extends AbstractQueryUpdateService -{ - private final TableInfo _dbTable; - private DomainUpdateHelper _helper = null; - /** - * Map from DbTable column names to QueryTable column names, if they have been aliased - */ - protected Map _columnMapping = Collections.emptyMap(); - /** - * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process - */ - private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); - private final ValidatorContext _validatorContext; - private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); - - public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) - { - super(queryTable); - _dbTable = dbTable; - - if (queryTable.getUserSchema() == null) - throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); - - _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); - } - - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) - { - this(queryTable, dbTable); - _helper = helper; - } - - /** - * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased - */ - public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) - { - this(queryTable, dbTable); - _columnMapping = columnMapping; - } - - protected TableInfo getDbTable() - { - return _dbTable; - } - - protected Domain getDomain() - { - return _helper == null ? null : _helper.getDomain(); - } - - protected ColumnInfo getObjectUriColumn() - { - return _helper == null ? null : _helper.getObjectUriColumn(); - } - - protected String createObjectURI() - { - return _helper == null ? 
null : _helper.createObjectURI(); - } - - protected Iterable getPropertyColumns() - { - return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); - } - - protected Map getColumnMapping() - { - return _columnMapping; - } - - /** - * Returns the container that the domain is defined - */ - protected Container getDomainContainer(Container c) - { - return _helper == null ? c : _helper.getDomainContainer(c); - } - - /** - * Returns the container to insert/update values into - */ - protected Container getDomainObjContainer(Container c) - { - return _helper == null ? c : _helper.getDomainObjContainer(c); - } - - protected Set getAutoPopulatedColumns() - { - return Table.AUTOPOPULATED_COLUMN_NAMES; - } - - public interface DomainUpdateHelper - { - Domain getDomain(); - - ColumnInfo getObjectUriColumn(); - - String createObjectURI(); - - // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. - Iterable getPropertyColumns(); - - Container getDomainContainer(Container c); - - Container getDomainObjContainer(Container c); - } - - public class ImportHelper implements OntologyManager.ImportHelper - { - ImportHelper() - { - } - - @Override - public String beforeImportObject(Map map) - { - ColumnInfo objectUriCol = getObjectUriColumn(); - - // Get existing Lsid - String lsid = (String) map.get(objectUriCol.getName()); - if (lsid != null) - return lsid; - - // Generate a new Lsid - lsid = createObjectURI(); - map.put(objectUriCol.getName(), lsid); - return lsid; - } - - @Override - public void afterBatchInsert(int currentRow) - { - } - - @Override - public void updateStatistics(int currentRow) - { - } - } - - @Override - protected Map getRow(User user, Container container, Map keys) - throws InvalidKeyException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, keys); - Map row = _select(container, getKeys(keys, container)); - - //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by - //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it - if (null != row) - { - if (row instanceof ArrayListMap) - ((ArrayListMap) row).getFindMap().remove("_row"); - else - row.remove("_row"); - } - - return row; - } - - protected Map _select(Container container, Object[] keys) throws ConversionException - { - TableInfo table = getDbTable(); - Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); - - Map row = new TableSelector(table).getMap(typedParameters); - - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) - { - String lsid = (String) row.get(objectUriCol.getName()); - if (lsid != null) - { - Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); - if (!propertyValues.isEmpty()) - { - // convert PropertyURI->value map into "Property name"->value map - Map propertyMap = domain.createImportMap(false); - for (Map.Entry entry : propertyValues.entrySet()) - { - String propertyURI = entry.getKey(); - DomainProperty dp = propertyMap.get(propertyURI); - PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; - if (pd != null) - row.put(pd.getName(), entry.getValue()); - } - } - } - // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been - // inserted before the table was made extensible), but make sure that we got an LSID field - // when fetching the row - else if (!row.containsKey(objectUriCol.getName())) - { - throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); - } - } - - return row; - } - - - private Object[] convertToTypedValues(Object[] keys, List cols) - { - Object[] typedParameters = new Object[keys.length]; - int t = 0; - for (int i = 0; i < keys.length; i++) - { - if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) - { - typedParameters[t++] = keys[i]; - continue; - } - Object v = keys[i]; - JdbcType type = cols.get(i).getJdbcType(); - if (v instanceof String) - v = type.convert(v); - Parameter.TypedValue tv = new Parameter.TypedValue(v, type); - typedParameters[t++] = tv; - } - return typedParameters; - } - - - @Override - protected Map insertRow(User user, Container container, Map row) - throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - aliasColumns(_columnMapping, row); - convertTypes(user, container, row); - setSpecialColumns(container, row, user, InsertPermission.class); - validateInsertRow(row); - return _insert(user, container, row); - } - - protected Map _insert(User user, Container c, Map row) - throws SQLException, ValidationException - { - assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - List pds = new ArrayList<>(); - Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - for (PropertyColumn pc : 
getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - pds.add(pd); - Object value = getPropertyValue(row, pd); - values.put(pd.getPropertyURI(), value); - } - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); - String lsid = collector.getLsid(); - - // Add the new lsid to the row map. - row.put(objectUriCol.getName(), lsid); - } - - return Table.insert(user, getDbTable(), row); - } - catch (RuntimeValidationException e) - { - throw e.getValidationException(); - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - } - - @Override - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - return updateRow(user, container, row, oldRow, false, false); - } - - protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) - throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException - { - Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); - - // Flip the key/value pairs around for easy lookup - Map queryToDb = new CaseInsensitiveHashMap<>(); - for (Map.Entry entry : _columnMapping.entrySet()) - { - queryToDb.put(entry.getValue(), entry.getKey()); - } - - setSpecialColumns(container, row, user, UpdatePermission.class); - - Map tableAliasesMap = _tableMapSupplier.get(); - Map> colFrequency = new HashMap<>(); - - //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table - for (Map.Entry entry: row.entrySet()) - { - if (!rowStripped.containsKey(entry.getKey())) - { - ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); - - if (null == col) - { - col = tableAliasesMap.get(entry.getKey()); - } - - if (null != col) - { - final String name = col.getName(); - - // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. - if (col.isReadOnly() || col.isCalculated()) - continue; - - //when updating a row, we should strip the following fields, as they are - //automagically maintained by the table layer, and should not be allowed - //to change once the record exists. - //unfortunately, the Table.update() method doesn't strip these, so we'll - //do that here. - // Owner, CreatedBy, Created, EntityId - if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) - || (!allowOwner && name.equalsIgnoreCase("Owner")) - || name.equalsIgnoreCase("EntityId")) - continue; - - // Throw error if more than one row properties having different values match up to the same column. - if (!colFrequency.containsKey(col)) - { - colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); - } - else - { - if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) - { - throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); - } - } - - // We want a map using the DbTable column names as keys, so figure out the right name to use - String dbName = queryToDb.getOrDefault(name, name); - rowStripped.put(dbName, entry.getValue()); - } - } - } - - convertTypes(user, container, rowStripped); - validateUpdateRow(rowStripped); - - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (rowContainer == null) - { - throw new ValidationException("Unknown container: " + row.get("container")); - } - else - { - Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); - if (null != oldContainer && !rowContainer.equals(oldContainer)) - throw new UnauthorizedException("The row is from the wrong container."); - } - } - - Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); - - //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes - //the primary key columns as well as those marked read-only. So we can't simply return the map returned - //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. - row.putAll(updatedRow); - return row; - } - - protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException - { - DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); - List validators = ColumnValidators.create(column, dp); - for (ColumnValidator v : validators) - { - String msg = v.validate(-1, value, _validatorContext, providedValue); - if (msg != null) - throw new ValidationException(msg, column.getName()); - } - } - - protected void validateInsertRow(Map row) throws ValidationException - { - for (ColumnInfo col : getQueryTable().getColumns()) - { - Object value = row.get(col.getColumnName()); - - // Check required values aren't null or empty - if (null == value || value instanceof String s && s.isEmpty()) - { - if (!col.isAutoIncrement() && col.isRequired() && - !getAutoPopulatedColumns().contains(col.getName()) && - col.getJdbcDefaultValue() == null) - { - throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); - } - } - else - { - validateValue(col, value, null); - } - } - } - - protected void validateUpdateRow(Map row) throws ValidationException - { - for 
(ColumnInfo col : getQueryTable().getColumns()) - { - // Only validate incoming values - if (row.containsKey(col.getColumnName())) - { - Object value = row.get(col.getColumnName()); - validateValue(col, value, null); - } - } - } - - protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) - throws SQLException, ValidationException - { - assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); - - try - { - ColumnInfo objectUriCol = getObjectUriColumn(); - Domain domain = getDomain(); - - // The lsid may be null for the row until a property has been inserted - String lsid = null; - if (objectUriCol != null) - lsid = (String) oldRow.get(objectUriCol.getName()); - - List tableProperties = new ArrayList<>(); - if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) - { - // convert "Property name"->value map into PropertyURI->value map - Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); - - for (PropertyColumn pc : getPropertyColumns()) - { - PropertyDescriptor pd = pc.getPropertyDescriptor(); - tableProperties.add(pd); - - // clear out the old value if it exists and is contained in the new row (it may be incoming as null) - if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) - OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); - - Object value = getPropertyValue(row, pd); - if (value != null) - newValues.put(pd.getPropertyURI(), value); - } - - // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() - newValues.put(objectUriCol.getName(), lsid); - - LsidCollector collector = new LsidCollector(); - OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); - - // Update the lsid in the row: the lsid may have 
not existed in the row before the update. - lsid = collector.getLsid(); - row.put(objectUriCol.getName(), lsid); - } - - // Get lsid value if it hasn't been set. - // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) - if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) - { - String objectUriColName = updateableTableInfo.getObjectURIColumnName(); - if (objectUriColName != null) - lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); - } - - // handle vocabulary properties - if (lsid != null) - { - for (Map.Entry rowEntry : row.entrySet()) - { - String colName = rowEntry.getKey(); - Object value = rowEntry.getValue(); - - ColumnInfo col = getQueryTable().getColumn(colName); - if (col instanceof PropertyColumn propCol) - { - PropertyDescriptor pd = propCol.getPropertyDescriptor(); - if (pd.isVocabulary() && !tableProperties.contains(pd)) - { - OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); - } - } - } - } - } - catch (BatchValidationException e) - { - throw e.getLastRowError(); - } - - checkDuplicateUpdate(keys); - - return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) - } - - private static class LsidCollector implements OntologyManager.RowCallback - { - private String _lsid; - - @Override - public void rowProcessed(Map row, String lsid) - { - if (_lsid != null) - { - throw new IllegalStateException("Only expected a single LSID"); - } - _lsid = lsid; - } - - public String getLsid() - { - if (_lsid == null) - { - throw new IllegalStateException("No LSID returned"); - } - return _lsid; - } - } - - // Get value from row map where the keys are column names. 
- private Object getPropertyValue(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return row.get(pd.getName()); - - if (row.containsKey(pd.getLabel())) - return row.get(pd.getLabel()); - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return row.get(alias); - } - - return null; - } - - // Checks a value exists in the row map (value may be null) - private boolean hasProperty(Map row, PropertyDescriptor pd) - { - if (row.containsKey(pd.getName())) - return true; - - if (row.containsKey(pd.getLabel())) - return true; - - for (String alias : pd.getImportAliasSet()) - { - if (row.containsKey(alias)) - return true; - } - - return false; - } - - @Override - protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException - { - if (oldRowMap == null) - return null; - - aliasColumns(_columnMapping, oldRowMap); - - if (container != null && getDbTable().getColumn("container") != null) - { - // UNDONE: 9077: check container permission on each row before delete - Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); - if (null != rowContainer && !container.equals(rowContainer)) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (container.allowRowMutationForContainer(rowContainer)) - container = rowContainer; - else - throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); - } - } - - _delete(container, oldRowMap); - return oldRowMap; - } - - protected void _delete(Container c, Map row) throws InvalidKeyException - { - ColumnInfo objectUriCol = getObjectUriColumn(); - if (objectUriCol != null) - { - String lsid = (String)row.get(objectUriCol.getName()); - 
if (lsid != null) - { - OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); - if (oo != null) - OntologyManager.deleteProperties(c, oo.getObjectId()); - } - } - Table.delete(getDbTable(), getKeys(row, c)); - } - - // classes should override this method if they need to do more work than delete all the rows from the table - // this implementation will delete all rows from the table for the given container as well as delete - // any properties associated with the table - @Override - protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException - { - // get rid of the properties for this table - if (null != getObjectUriColumn()) - { - SQLFragment lsids = new SQLFragment() - .append("SELECT t.").append(getObjectUriColumn().getColumnName()) - .append(" FROM ").append(getDbTable(), "t") - .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); - if (null != getDbTable().getColumn("container")) - { - lsids.append(" AND t.Container = ?"); - lsids.add(container.getId()); - } - - OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); - } - - // delete all the rows in this table, scoping to the container if the column - // is available - if (null != getDbTable().getColumn("container")) - return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); - - return Table.delete(getDbTable()); - } - - protected Object[] getKeys(Map map, Container container) throws InvalidKeyException - { - //build an array of pk values based on the table info - TableInfo table = getDbTable(); - List pks = table.getPkColumns(); - Object[] pkVals = new Object[pks.size()]; - - if (map == null || map.isEmpty()) - return pkVals; - - for (int idx = 0; idx < pks.size(); ++idx) - { - ColumnInfo pk = pks.get(idx); - Object pkValue = map.get(pk.getName()); - // Check the type and coerce if needed - if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) - { - try - { - pkValue = pk.convert(pkValue); - } - catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } - } - pkVals[idx] = pkValue; - if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) - { - pkVals[idx] = container; - } - if(null == pkVals[idx]) - { - throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); - } - } - return pkVals; - } - - private Map _missingValues = null; - private Container _missingValuesContainer; - - protected boolean validMissingValue(Container c, String mv) - { - if (null == c) - return false; - if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) - { - _missingValues = MvUtil.getIndicatorsAndLabels(c); - _missingValuesContainer = c; - } - return _missingValues.containsKey(mv); - } - - final protected void convertTypes(User user, Container c, Map row) throws ValidationException - { - convertTypes(user, c, row, getDbTable(), null); - } - - // TODO Path->FileObject - // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? - protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException - { - for (ColumnInfo col : t.getColumns()) - { - if (col.isMvIndicatorColumn()) - continue; - boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); - if (!isColumnPresent) - continue; - - Object value = row.get(col.getName()); - - /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. - * If you update this code, check that code as well. 
*/ - if (col.isMvEnabled()) - { - if (value instanceof String s && StringUtils.isEmpty(s)) - value = null; - - Object mvObj = row.get(col.getMvColumnName().getName()); - String mv = Objects.toString(mvObj, null); - if (StringUtils.isEmpty(mv)) - mv = null; - - if (null != mv) - { - if (!validMissingValue(c, mv)) - throw new ValidationException("Value is not a valid missing value indicator: " + mv); - } - else if (null != value) - { - String s = Objects.toString(value, null); - if (validMissingValue(c, s)) - { - mv = s; - value = null; - } - } - row.put(col.getMvColumnName().getName(), mv); - } - - value = convertColumnValue(col, value, user, c, fileLinkDirPath); - row.put(col.getName(), value); - } - } - - protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException - { - // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() - // improve handling of conversion errors - try - { - if (PropertyType.FILE_LINK == col.getPropertyType()) - { - if ((value instanceof MultipartFile || value instanceof AttachmentFile)) - { - FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); - value = fl.toNioPathForRead().toString(); - } - return ExpDataFileConverter.convert(value); - } - return col.getConvertFn().convert(value); - } - catch (ConvertHelper.FileConversionException e) - { - throw new ValidationException(e.getMessage()); - } - catch (ConversionException e) - { - String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); - throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); - } - catch (QueryUpdateServiceException e) - { - throw new ValidationException("Save file link failed: " + col.getName()); - } - } - - /** - * Override this method to alter the row before insert or update. 
- * For example, you can automatically adjust certain column values based on context. - * @param container The current container - * @param row The row data - * @param user The current user - * @param clazz A permission class to test - */ - protected void setSpecialColumns(Container container, Map row, User user, Class clazz) - { - if (null != container) - { - //Issue 15301: allow workbooks records to be deleted/updated from the parent container - if (row.get("container") != null) - { - Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); - if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) - { - row.put("container", rowContainer.getId()); //normalize to container ID - return; //accept the row-provided value - } - } - row.put("container", container.getId()); - } - } - - protected boolean hasAttachmentProperties() - { - Domain domain = getDomain(); - if (null != domain) - { - for (DomainProperty dp : domain.getProperties()) - if (null != dp && isAttachmentProperty(dp)) - return true; - } - return false; - } - - protected boolean isAttachmentProperty(@NotNull DomainProperty dp) - { - PropertyDescriptor pd = dp.getPropertyDescriptor(); - return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); - } - - protected boolean isAttachmentProperty(String name) - { - DomainProperty dp = getDomain().getPropertyByName(name); - if (dp != null) - return isAttachmentProperty(dp); - return false; - } - - protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException - { - if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) - { - boolean hasContainerField = false; - for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) - { - String fieldName = columnDescriptor.getColumnName(); - if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) - { - hasContainerField = true; - break; - } - } - if (!hasContainerField) - context.setCrossFolderImport(false); - } - } -} +/* + * Copyright (c) 2009-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.api.query; + +import org.apache.commons.beanutils.ConversionException; +import org.apache.commons.beanutils.ConvertUtils; +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentFile; +import org.labkey.api.collections.ArrayListMap; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveMapWrapper; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ConvertHelper; +import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MvUtil; +import org.labkey.api.data.Parameter; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpdateableTableInfo; +import org.labkey.api.data.validator.ColumnValidator; +import org.labkey.api.data.validator.ColumnValidators; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import 
org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.DataIteratorUtil; +import org.labkey.api.dataiterator.MapDataIterator; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.OntologyObject; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.PropertyDescriptor; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.ValidatorContext; +import org.labkey.api.reader.ColumnDescriptor; +import org.labkey.api.reader.DataLoader; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.CachingSupplier; +import org.labkey.api.util.Pair; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.vfs.FileLike; +import org.springframework.web.multipart.MultipartFile; + +import java.io.IOException; +import java.nio.file.Path; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.function.Supplier; + +/** + * QueryUpdateService implementation that supports Query TableInfos that are backed by both a hard table and a Domain. + * To update the Domain, a DomainUpdateHelper is required, otherwise the DefaultQueryUpdateService will only update the + * hard table columns. 
+ */ +public class DefaultQueryUpdateService extends AbstractQueryUpdateService +{ + private final TableInfo _dbTable; + private DomainUpdateHelper _helper = null; + /** + * Map from DbTable column names to QueryTable column names, if they have been aliased + */ + protected Map _columnMapping = Collections.emptyMap(); + /** + * Hold onto the ColumnInfos, so we don't have to regenerate them for every row we process + */ + private final Supplier> _tableMapSupplier = new CachingSupplier<>(() -> DataIteratorUtil.createTableMap(getQueryTable(), true)); + private final ValidatorContext _validatorContext; + private final FileColumnValueMapper _fileColumnValueMapping = new FileColumnValueMapper(); + + public DefaultQueryUpdateService(@NotNull TableInfo queryTable, TableInfo dbTable) + { + super(queryTable); + _dbTable = dbTable; + + if (queryTable.getUserSchema() == null) + throw new RuntimeValidationException("User schema not defined for " + queryTable.getName()); + + _validatorContext = new ValidatorContext(queryTable.getUserSchema().getContainer(), queryTable.getUserSchema().getUser()); + } + + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, DomainUpdateHelper helper) + { + this(queryTable, dbTable); + _helper = helper; + } + + /** + * @param columnMapping Map from DbTable column names to QueryTable column names, if they have been aliased + */ + public DefaultQueryUpdateService(TableInfo queryTable, TableInfo dbTable, Map columnMapping) + { + this(queryTable, dbTable); + _columnMapping = columnMapping; + } + + protected TableInfo getDbTable() + { + return _dbTable; + } + + protected Domain getDomain() + { + return _helper == null ? null : _helper.getDomain(); + } + + protected ColumnInfo getObjectUriColumn() + { + return _helper == null ? null : _helper.getObjectUriColumn(); + } + + protected String createObjectURI() + { + return _helper == null ? 
null : _helper.createObjectURI(); + } + + protected Iterable getPropertyColumns() + { + return _helper == null ? Collections.emptyList() : _helper.getPropertyColumns(); + } + + protected Map getColumnMapping() + { + return _columnMapping; + } + + /** + * Returns the container that the domain is defined + */ + protected Container getDomainContainer(Container c) + { + return _helper == null ? c : _helper.getDomainContainer(c); + } + + /** + * Returns the container to insert/update values into + */ + protected Container getDomainObjContainer(Container c) + { + return _helper == null ? c : _helper.getDomainObjContainer(c); + } + + protected Set getAutoPopulatedColumns() + { + return Table.AUTOPOPULATED_COLUMN_NAMES; + } + + public interface DomainUpdateHelper + { + Domain getDomain(); + + ColumnInfo getObjectUriColumn(); + + String createObjectURI(); + + // Could probably be just Iterable or be removed and just get all PropertyDescriptors in the Domain. + Iterable getPropertyColumns(); + + Container getDomainContainer(Container c); + + Container getDomainObjContainer(Container c); + } + + public class ImportHelper implements OntologyManager.ImportHelper + { + ImportHelper() + { + } + + @Override + public String beforeImportObject(Map map) + { + ColumnInfo objectUriCol = getObjectUriColumn(); + + // Get existing Lsid + String lsid = (String) map.get(objectUriCol.getName()); + if (lsid != null) + return lsid; + + // Generate a new Lsid + lsid = createObjectURI(); + map.put(objectUriCol.getName(), lsid); + return lsid; + } + + @Override + public void afterBatchInsert(int currentRow) + { + } + + @Override + public void updateStatistics(int currentRow) + { + } + } + + @Override + protected Map getRow(User user, Container container, Map keys) + throws InvalidKeyException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, keys); + Map row = _select(container, getKeys(keys, container)); + + //PostgreSQL includes a column named _row for the row index, 
but since this is selecting by + //primary key, it will always be 1, which is not only unnecessary, but confusing, so strip it + if (null != row) + { + if (row instanceof ArrayListMap) + ((ArrayListMap) row).getFindMap().remove("_row"); + else + row.remove("_row"); + } + + return row; + } + + protected Map _select(Container container, Object[] keys) throws ConversionException + { + TableInfo table = getDbTable(); + Object[] typedParameters = convertToTypedValues(keys, table.getPkColumns()); + + Map row = new TableSelector(table).getMap(typedParameters); + + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty() && row != null) + { + String lsid = (String) row.get(objectUriCol.getName()); + if (lsid != null) + { + Map propertyValues = OntologyManager.getProperties(getDomainObjContainer(container), lsid); + if (!propertyValues.isEmpty()) + { + // convert PropertyURI->value map into "Property name"->value map + Map propertyMap = domain.createImportMap(false); + for (Map.Entry entry : propertyValues.entrySet()) + { + String propertyURI = entry.getKey(); + DomainProperty dp = propertyMap.get(propertyURI); + PropertyDescriptor pd = dp != null ? 
dp.getPropertyDescriptor() : null; + if (pd != null) + row.put(pd.getName(), entry.getValue()); + } + } + } + // Issue 46985: Be tolerant of a row not having an LSID value (as the row may have been + // inserted before the table was made extensible), but make sure that we got an LSID field + // when fetching the row + else if (!row.containsKey(objectUriCol.getName())) + { + throw new IllegalStateException("LSID value not returned when querying table - " + table.getName()); + } + } + + return row; + } + + + private Object[] convertToTypedValues(Object[] keys, List cols) + { + Object[] typedParameters = new Object[keys.length]; + int t = 0; + for (int i = 0; i < keys.length; i++) + { + if (i >= cols.size() || keys[i] instanceof Parameter.TypedValue) + { + typedParameters[t++] = keys[i]; + continue; + } + Object v = keys[i]; + JdbcType type = cols.get(i).getJdbcType(); + if (v instanceof String) + v = type.convert(v); + Parameter.TypedValue tv = new Parameter.TypedValue(v, type); + typedParameters[t++] = tv; + } + return typedParameters; + } + + + @Override + protected Map insertRow(User user, Container container, Map row) + throws DuplicateKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + aliasColumns(_columnMapping, row); + convertTypes(user, container, row); + setSpecialColumns(container, row, user, InsertPermission.class); + validateInsertRow(row); + return _insert(user, container, row); + } + + protected Map _insert(User user, Container c, Map row) + throws SQLException, ValidationException + { + assert (getQueryTable().supportsInsertOption(InsertOption.INSERT)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + List pds = new ArrayList<>(); + Map values = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + for (PropertyColumn pc : 
getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + pds.add(pd); + Object value = getPropertyValue(row, pd); + values.put(pd.getPropertyURI(), value); + } + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), pds, MapDataIterator.of(Collections.singletonList(values)).getDataIterator(new DataIteratorContext()), true, collector); + String lsid = collector.getLsid(); + + // Add the new lsid to the row map. + row.put(objectUriCol.getName(), lsid); + } + + return Table.insert(user, getDbTable(), row); + } + catch (RuntimeValidationException e) + { + throw e.getValidationException(); + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + } + + @Override + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, @Nullable Map configParameters) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + return updateRow(user, container, row, oldRow, false, false); + } + + protected Map updateRow(User user, Container container, Map row, @NotNull Map oldRow, boolean allowOwner, boolean retainCreation) + throws InvalidKeyException, ValidationException, QueryUpdateServiceException, SQLException + { + Map rowStripped = new CaseInsensitiveHashMap<>(row.size()); + + // Flip the key/value pairs around for easy lookup + Map queryToDb = new CaseInsensitiveHashMap<>(); + for (Map.Entry entry : _columnMapping.entrySet()) + { + queryToDb.put(entry.getValue(), entry.getKey()); + } + + setSpecialColumns(container, row, user, UpdatePermission.class); + + Map tableAliasesMap = _tableMapSupplier.get(); + Map> colFrequency = new HashMap<>(); + + //resolve passed in row including columns in the table and other properties (vocabulary properties) not in the Domain/table + for (Map.Entry entry: row.entrySet()) + { + if (!rowStripped.containsKey(entry.getKey())) + { + ColumnInfo col = 
getQueryTable().getColumn(entry.getKey()); + + if (null == col) + { + col = tableAliasesMap.get(entry.getKey()); + } + + if (null != col) + { + final String name = col.getName(); + + // Skip readonly and wrapped columns. The wrapped column is usually a pk column and can't be updated. + if (col.isReadOnly() || col.isCalculated()) + continue; + + //when updating a row, we should strip the following fields, as they are + //automagically maintained by the table layer, and should not be allowed + //to change once the record exists. + //unfortunately, the Table.update() method doesn't strip these, so we'll + //do that here. + // Owner, CreatedBy, Created, EntityId + if ((!retainCreation && (name.equalsIgnoreCase("CreatedBy") || name.equalsIgnoreCase("Created"))) + || (!allowOwner && name.equalsIgnoreCase("Owner")) + || name.equalsIgnoreCase("EntityId")) + continue; + + // Throw error if more than one row properties having different values match up to the same column. + if (!colFrequency.containsKey(col)) + { + colFrequency.put(col, Pair.of(entry.getKey(),entry.getValue())); + } + else + { + if (!Objects.equals(colFrequency.get(col).second, entry.getValue())) + { + throw new ValidationException("Property key - " + colFrequency.get(col).first + " and " + entry.getKey() + " matched for the same column."); + } + } + + // We want a map using the DbTable column names as keys, so figure out the right name to use + String dbName = queryToDb.getOrDefault(name, name); + rowStripped.put(dbName, entry.getValue()); + } + } + } + + convertTypes(user, container, rowStripped); + validateUpdateRow(rowStripped); + + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (rowContainer == null) + { + throw new ValidationException("Unknown container: " + row.get("container")); + } + else + { + Container oldContainer = 
UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRow).get("container"), container, user, getQueryTable(), UpdatePermission.class, null); + if (null != oldContainer && !rowContainer.equals(oldContainer)) + throw new UnauthorizedException("The row is from the wrong container."); + } + } + + Map updatedRow = _update(user, container, rowStripped, oldRow, oldRow == null ? getKeys(row, container) : getKeys(oldRow, container)); + + //when passing a map for the row, the Table layer returns the map of fields it updated, which excludes + //the primary key columns as well as those marked read-only. So we can't simply return the map returned + //from Table.update(). Instead, we need to copy values from updatedRow into row and return that. + row.putAll(updatedRow); + return row; + } + + protected void validateValue(ColumnInfo column, Object value, Object providedValue) throws ValidationException + { + DomainProperty dp = getDomain() == null ? null : getDomain().getPropertyByName(column.getColumnName()); + List validators = ColumnValidators.create(column, dp); + for (ColumnValidator v : validators) + { + String msg = v.validate(-1, value, _validatorContext, providedValue); + if (msg != null) + throw new ValidationException(msg, column.getName()); + } + } + + protected void validateInsertRow(Map row) throws ValidationException + { + for (ColumnInfo col : getQueryTable().getColumns()) + { + Object value = row.get(col.getColumnName()); + + // Check required values aren't null or empty + if (null == value || value instanceof String s && s.isEmpty()) + { + if (!col.isAutoIncrement() && col.isRequired() && + !getAutoPopulatedColumns().contains(col.getName()) && + col.getJdbcDefaultValue() == null) + { + throw new ValidationException("A value is required for field '" + col.getName() + "'", col.getName()); + } + } + else + { + validateValue(col, value, null); + } + } + } + + protected void validateUpdateRow(Map row) throws ValidationException + { + for 
(ColumnInfo col : getQueryTable().getColumns()) + { + // Only validate incoming values + if (row.containsKey(col.getColumnName())) + { + Object value = row.get(col.getColumnName()); + validateValue(col, value, null); + } + } + } + + protected Map _update(User user, Container c, Map row, Map oldRow, Object[] keys) + throws SQLException, ValidationException + { + assert(getQueryTable().supportsInsertOption(InsertOption.UPDATE)); + + try + { + ColumnInfo objectUriCol = getObjectUriColumn(); + Domain domain = getDomain(); + + // The lsid may be null for the row until a property has been inserted + String lsid = null; + if (objectUriCol != null) + lsid = (String) oldRow.get(objectUriCol.getName()); + + List tableProperties = new ArrayList<>(); + if (objectUriCol != null && domain != null && !domain.getProperties().isEmpty()) + { + // convert "Property name"->value map into PropertyURI->value map + Map newValues = new CaseInsensitiveMapWrapper<>(new HashMap<>()); + + for (PropertyColumn pc : getPropertyColumns()) + { + PropertyDescriptor pd = pc.getPropertyDescriptor(); + tableProperties.add(pd); + + // clear out the old value if it exists and is contained in the new row (it may be incoming as null) + if (lsid != null && (hasProperty(row, pd) && hasProperty(oldRow, pd))) + OntologyManager.deleteProperty(lsid, pd.getPropertyURI(), getDomainObjContainer(c), getDomainContainer(c)); + + Object value = getPropertyValue(row, pd); + if (value != null) + newValues.put(pd.getPropertyURI(), value); + } + + // Note: copy lsid into newValues map so it will be found by the ImportHelper.beforeImportObject() + newValues.put(objectUriCol.getName(), lsid); + + LsidCollector collector = new LsidCollector(); + OntologyManager.insertTabDelimited(getDomainObjContainer(c), user, null, new ImportHelper(), tableProperties, MapDataIterator.of(Collections.singletonList(newValues)).getDataIterator(new DataIteratorContext()), true, collector); + + // Update the lsid in the row: the lsid may have 
not existed in the row before the update. + lsid = collector.getLsid(); + row.put(objectUriCol.getName(), lsid); + } + + // Get lsid value if it hasn't been set. + // This should only happen if the QueryUpdateService doesn't have a DomainUpdateHelper (DataClass and SampleType) + if (lsid == null && getQueryTable() instanceof UpdateableTableInfo updateableTableInfo) + { + String objectUriColName = updateableTableInfo.getObjectURIColumnName(); + if (objectUriColName != null) + lsid = (String) row.getOrDefault(objectUriColName, oldRow.get(objectUriColName)); + } + + // handle vocabulary properties + if (lsid != null) + { + for (Map.Entry rowEntry : row.entrySet()) + { + String colName = rowEntry.getKey(); + Object value = rowEntry.getValue(); + + ColumnInfo col = getQueryTable().getColumn(colName); + if (col instanceof PropertyColumn propCol) + { + PropertyDescriptor pd = propCol.getPropertyDescriptor(); + if (pd.isVocabulary() && !tableProperties.contains(pd)) + { + OntologyManager.updateObjectProperty(user, c, pd, lsid, value, null, false); + } + } + } + } + } + catch (BatchValidationException e) + { + throw e.getLastRowError(); + } + + checkDuplicateUpdate(keys); + + return Table.update(user, getDbTable(), row, keys); // Cache-invalidation handled in caller (TreatmentManager.saveAssaySpecimen()) + } + + private static class LsidCollector implements OntologyManager.RowCallback + { + private String _lsid; + + @Override + public void rowProcessed(Map row, String lsid) + { + if (_lsid != null) + { + throw new IllegalStateException("Only expected a single LSID"); + } + _lsid = lsid; + } + + public String getLsid() + { + if (_lsid == null) + { + throw new IllegalStateException("No LSID returned"); + } + return _lsid; + } + } + + // Get value from row map where the keys are column names. 
+ private Object getPropertyValue(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return row.get(pd.getName()); + + if (row.containsKey(pd.getLabel())) + return row.get(pd.getLabel()); + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return row.get(alias); + } + + return null; + } + + // Checks a value exists in the row map (value may be null) + private boolean hasProperty(Map row, PropertyDescriptor pd) + { + if (row.containsKey(pd.getName())) + return true; + + if (row.containsKey(pd.getLabel())) + return true; + + for (String alias : pd.getImportAliasSet()) + { + if (row.containsKey(alias)) + return true; + } + + return false; + } + + @Override + protected Map deleteRow(User user, Container container, Map oldRowMap) throws QueryUpdateServiceException, SQLException, InvalidKeyException + { + if (oldRowMap == null) + return null; + + aliasColumns(_columnMapping, oldRowMap); + + if (container != null && getDbTable().getColumn("container") != null) + { + // UNDONE: 9077: check container permission on each row before delete + Container rowContainer = UserSchema.translateRowSuppliedContainer(new CaseInsensitiveHashMap<>(oldRowMap).get("container"), container, user, getQueryTable(), DeletePermission.class, null); + if (null != rowContainer && !container.equals(rowContainer)) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (container.allowRowMutationForContainer(rowContainer)) + container = rowContainer; + else + throw new UnauthorizedException("The row is from the container: " + rowContainer.getId() + " which does not allow deletes from the container: " + container.getPath()); + } + } + + _delete(container, oldRowMap); + return oldRowMap; + } + + protected void _delete(Container c, Map row) throws InvalidKeyException + { + ColumnInfo objectUriCol = getObjectUriColumn(); + if (objectUriCol != null) + { + String lsid = (String)row.get(objectUriCol.getName()); + 
if (lsid != null) + { + OntologyObject oo = OntologyManager.getOntologyObject(c, lsid); + if (oo != null) + OntologyManager.deleteProperties(c, oo.getObjectId()); + } + } + Table.delete(getDbTable(), getKeys(row, c)); + } + + // classes should override this method if they need to do more work than delete all the rows from the table + // this implementation will delete all rows from the table for the given container as well as delete + // any properties associated with the table + @Override + protected int truncateRows(User user, Container container) throws QueryUpdateServiceException, SQLException + { + // get rid of the properties for this table + if (null != getObjectUriColumn()) + { + SQLFragment lsids = new SQLFragment() + .append("SELECT t.").append(getObjectUriColumn().getColumnName()) + .append(" FROM ").append(getDbTable(), "t") + .append(" WHERE t.").append(getObjectUriColumn().getColumnName()).append(" IS NOT NULL"); + if (null != getDbTable().getColumn("container")) + { + lsids.append(" AND t.Container = ?"); + lsids.add(container.getId()); + } + + OntologyManager.deleteOntologyObjects(ExperimentService.get().getSchema(), lsids, container); + } + + // delete all the rows in this table, scoping to the container if the column + // is available + if (null != getDbTable().getColumn("container")) + return Table.delete(getDbTable(), SimpleFilter.createContainerFilter(container)); + + return Table.delete(getDbTable()); + } + + protected Object[] getKeys(Map map, Container container) throws InvalidKeyException + { + //build an array of pk values based on the table info + TableInfo table = getDbTable(); + List pks = table.getPkColumns(); + Object[] pkVals = new Object[pks.size()]; + + if (map == null || map.isEmpty()) + return pkVals; + + for (int idx = 0; idx < pks.size(); ++idx) + { + ColumnInfo pk = pks.get(idx); + Object pkValue = map.get(pk.getName()); + // Check the type and coerce if needed + if (pkValue != null && 
!pk.getJavaObjectClass().isInstance(pkValue)) + { + try + { + pkValue = pk.convert(pkValue); + } + catch (ConversionException ignored) { /* Maybe the database can do the conversion */ } + } + pkVals[idx] = pkValue; + if (null == pkVals[idx] && pk.getColumnName().equalsIgnoreCase("Container")) + { + pkVals[idx] = container; + } + if(null == pkVals[idx]) + { + throw new InvalidKeyException("Value for key field '" + pk.getName() + "' was null or not supplied!", map); + } + } + return pkVals; + } + + private Map _missingValues = null; + private Container _missingValuesContainer; + + protected boolean validMissingValue(Container c, String mv) + { + if (null == c) + return false; + if (null == _missingValues || !c.getId().equals(_missingValuesContainer.getId())) + { + _missingValues = MvUtil.getIndicatorsAndLabels(c); + _missingValuesContainer = c; + } + return _missingValues.containsKey(mv); + } + + final protected void convertTypes(User user, Container c, Map row) throws ValidationException + { + convertTypes(user, c, row, getDbTable(), null); + } + + // TODO Path->FileObject + // why is coerceTypes() in AbstractQueryUpdateService and convertTypes() in DefaultQueryUpdateService? + protected void convertTypes(User user, Container c, Map row, TableInfo t, @Nullable Path fileLinkDirPath) throws ValidationException + { + for (ColumnInfo col : t.getColumns()) + { + if (col.isMvIndicatorColumn()) + continue; + boolean isColumnPresent = row.containsKey(col.getName()) || col.isMvEnabled() && row.containsKey(col.getMvColumnName().getName()); + if (!isColumnPresent) + continue; + + Object value = row.get(col.getName()); + + /* NOTE: see MissingValueConvertColumn.convert() these methods should have similar behavior. + * If you update this code, check that code as well. 
*/ + if (col.isMvEnabled()) + { + if (value instanceof String s && StringUtils.isEmpty(s)) + value = null; + + Object mvObj = row.get(col.getMvColumnName().getName()); + String mv = Objects.toString(mvObj, null); + if (StringUtils.isEmpty(mv)) + mv = null; + + if (null != mv) + { + if (!validMissingValue(c, mv)) + throw new ValidationException("Value is not a valid missing value indicator: " + mv); + } + else if (null != value) + { + String s = Objects.toString(value, null); + if (validMissingValue(c, s)) + { + mv = s; + value = null; + } + } + row.put(col.getMvColumnName().getName(), mv); + } + + value = convertColumnValue(col, value, user, c, fileLinkDirPath); + row.put(col.getName(), value); + } + } + + protected Object convertColumnValue(ColumnInfo col, Object value, User user, Container c, @Nullable Path fileLinkDirPath) throws ValidationException + { + // Issue 13951: PSQLException from org.labkey.api.query.DefaultQueryUpdateService._update() + // improve handling of conversion errors + try + { + if (PropertyType.FILE_LINK == col.getPropertyType()) + { + if ((value instanceof MultipartFile || value instanceof AttachmentFile)) + { + FileLike fl = (FileLike)_fileColumnValueMapping.saveFileColumnValue(user, c, fileLinkDirPath, col.getName(), value); + value = fl.toNioPathForRead().toString(); + } + return ExpDataFileConverter.convert(value); + } + return col.getConvertFn().convert(value); + } + catch (ConvertHelper.FileConversionException e) + { + throw new ValidationException(e.getMessage()); + } + catch (ConversionException e) + { + String type = ColumnInfo.getFriendlyTypeName(col.getJdbcType().getJavaClass()); + throw new ValidationException("Unable to convert value '" + value.toString() + "' to " + type, col.getName()); + } + catch (QueryUpdateServiceException e) + { + throw new ValidationException("Save file link failed: " + col.getName()); + } + } + + /** + * Override this method to alter the row before insert or update. 
+ * For example, you can automatically adjust certain column values based on context. + * @param container The current container + * @param row The row data + * @param user The current user + * @param clazz A permission class to test + */ + protected void setSpecialColumns(Container container, Map row, User user, Class clazz) + { + if (null != container) + { + //Issue 15301: allow workbooks records to be deleted/updated from the parent container + if (row.get("container") != null) + { + Container rowContainer = UserSchema.translateRowSuppliedContainer(row.get("container"), container, user, getQueryTable(), clazz, null); + if (rowContainer != null && container.allowRowMutationForContainer(rowContainer)) + { + row.put("container", rowContainer.getId()); //normalize to container ID + return; //accept the row-provided value + } + } + row.put("container", container.getId()); + } + } + + protected boolean hasAttachmentProperties() + { + Domain domain = getDomain(); + if (null != domain) + { + for (DomainProperty dp : domain.getProperties()) + if (null != dp && isAttachmentProperty(dp)) + return true; + } + return false; + } + + protected boolean isAttachmentProperty(@NotNull DomainProperty dp) + { + PropertyDescriptor pd = dp.getPropertyDescriptor(); + return PropertyType.ATTACHMENT.equals(pd.getPropertyType()); + } + + protected boolean isAttachmentProperty(String name) + { + DomainProperty dp = getDomain().getPropertyByName(name); + if (dp != null) + return isAttachmentProperty(dp); + return false; + } + + protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIteratorContext context) throws IOException + { + if (!context.getInsertOption().updateOnly && context.isCrossFolderImport() && rows instanceof DataLoader dataLoader) + { + boolean hasContainerField = false; + for (ColumnDescriptor columnDescriptor : dataLoader.getColumns()) + { + String fieldName = columnDescriptor.getColumnName(); + if (fieldName.equalsIgnoreCase("Container") || 
fieldName.equalsIgnoreCase("Folder")) + { + hasContainerField = true; + break; + } + } + if (!hasContainerField) + context.setCrossFolderImport(false); + } + } +} diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index 9eb5e1808e3..4f1dae671a6 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -1,1173 +1,1173 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.experiment; - -import org.apache.commons.lang3.math.NumberUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.admin.FolderSerializationRegistry; -import org.labkey.api.assay.AssayProvider; -import org.labkey.api.assay.AssayService; -import org.labkey.api.attachments.AttachmentService; -import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.SampleTimelineAuditEvent; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbSchemaType; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.NameGenerator; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SimpleFilter.FilterClause; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.data.UpgradeCode; -import org.labkey.api.defaults.DefaultValueService; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.ExperimentRunType; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyType; -import org.labkey.api.exp.api.DefaultExperimentDataHandler; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpLineageService; -import org.labkey.api.exp.api.ExpMaterial; -import org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpProtocolAttachmentType; -import org.labkey.api.exp.api.ExpRunAttachmentType; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentJSONConverter; -import org.labkey.api.exp.api.ExperimentService; 
-import org.labkey.api.exp.api.FilterProtocolInputCriteria; -import org.labkey.api.exp.api.SampleTypeDomainKind; -import org.labkey.api.exp.api.SampleTypeService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DomainAuditProvider; -import org.labkey.api.exp.property.DomainPropertyAuditProvider; -import org.labkey.api.exp.property.ExperimentProperty; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.property.SystemProperty; -import org.labkey.api.exp.query.ExpDataClassTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.exp.xar.LSIDRelativizer; -import org.labkey.api.exp.xar.LsidUtils; -import org.labkey.api.files.FileContentService; -import org.labkey.api.files.TableUpdaterFileListener; -import org.labkey.api.migration.DatabaseMigrationService; -import org.labkey.api.migration.ExperimentDeleteService; -import org.labkey.api.migration.MigrationTableHandler; -import org.labkey.api.module.ModuleContext; -import org.labkey.api.module.ModuleLoader; -import org.labkey.api.module.SpringModule; -import org.labkey.api.module.Summary; -import org.labkey.api.ontology.OntologyService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.FilteredTable; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.UserSchema; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.roles.RoleManager; -import org.labkey.api.settings.AppProps; -import org.labkey.api.settings.OptionalFeatureService; -import org.labkey.api.usageMetrics.UsageMetricsService; -import org.labkey.api.util.GUID; -import org.labkey.api.util.JspTestCase; -import org.labkey.api.util.PageFlowUtil; -import 
org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.SystemMaintenance; -import org.labkey.api.view.AlwaysAvailableWebPartFactory; -import org.labkey.api.view.BaseWebPartFactory; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.JspView; -import org.labkey.api.view.Portal; -import org.labkey.api.view.ViewContext; -import org.labkey.api.view.WebPartFactory; -import org.labkey.api.view.WebPartView; -import org.labkey.api.view.template.WarningService; -import org.labkey.api.vocabulary.security.DesignVocabularyPermission; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.api.webdav.WebdavService; -import org.labkey.api.writer.ContainerUser; -import org.labkey.experiment.api.DataClassDomainKind; -import org.labkey.experiment.api.ExpDataClassImpl; -import org.labkey.experiment.api.ExpDataClassTableImpl; -import org.labkey.experiment.api.ExpDataClassType; -import org.labkey.experiment.api.ExpDataImpl; -import org.labkey.experiment.api.ExpDataTableImpl; -import org.labkey.experiment.api.ExpMaterialImpl; -import org.labkey.experiment.api.ExpProtocolImpl; -import org.labkey.experiment.api.ExpSampleTypeImpl; -import org.labkey.experiment.api.ExpSampleTypeTableImpl; -import org.labkey.experiment.api.ExperimentServiceImpl; -import org.labkey.experiment.api.ExperimentStressTest; -import org.labkey.experiment.api.GraphAlgorithms; -import org.labkey.experiment.api.LineageTest; -import org.labkey.experiment.api.LogDataType; -import org.labkey.experiment.api.Protocol; -import org.labkey.experiment.api.SampleTypeServiceImpl; -import org.labkey.experiment.api.SampleTypeUpdateServiceDI; -import org.labkey.experiment.api.UniqueValueCounterTestCase; -import org.labkey.experiment.api.VocabularyDomainKind; -import org.labkey.experiment.api.data.ChildOfCompareType; -import org.labkey.experiment.api.data.ChildOfMethod; -import org.labkey.experiment.api.data.LineageCompareType; -import org.labkey.experiment.api.data.ParentOfCompareType; -import 
org.labkey.experiment.api.data.ParentOfMethod; -import org.labkey.experiment.api.property.DomainImpl; -import org.labkey.experiment.api.property.DomainPropertyImpl; -import org.labkey.experiment.api.property.LengthValidator; -import org.labkey.experiment.api.property.LookupValidator; -import org.labkey.experiment.api.property.PropertyServiceImpl; -import org.labkey.experiment.api.property.RangeValidator; -import org.labkey.experiment.api.property.RegExValidator; -import org.labkey.experiment.api.property.StorageNameGenerator; -import org.labkey.experiment.api.property.StorageProvisionerImpl; -import org.labkey.experiment.api.property.TextChoiceValidator; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.controllers.property.PropertyController; -import org.labkey.experiment.defaults.DefaultValueServiceImpl; -import org.labkey.experiment.lineage.ExpLineageServiceImpl; -import org.labkey.experiment.lineage.LineagePerfTest; -import org.labkey.experiment.pipeline.ExperimentPipelineProvider; -import org.labkey.experiment.pipeline.XarTestPipelineJob; -import org.labkey.experiment.samples.DataClassFolderImporter; -import org.labkey.experiment.samples.DataClassFolderWriter; -import org.labkey.experiment.samples.SampleStatusFolderImporter; -import org.labkey.experiment.samples.SampleTimelineAuditProvider; -import org.labkey.experiment.samples.SampleTypeFolderImporter; -import org.labkey.experiment.samples.SampleTypeFolderWriter; -import org.labkey.experiment.security.DataClassDesignerRole; -import org.labkey.experiment.security.SampleTypeDesignerRole; -import org.labkey.experiment.types.TypesController; -import org.labkey.experiment.xar.FolderXarImporterFactory; -import org.labkey.experiment.xar.FolderXarWriterFactory; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.LinkedList; 
-import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; -import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; -import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; - -public class ExperimentModule extends SpringModule -{ - private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; - private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; - - public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; - public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; - public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; - public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; - - @Override - public String getName() - { - return MODULE_NAME; - } - - @Override - public Double getSchemaVersion() - { - return 26.005; - } - - @Nullable - @Override - public UpgradeCode getUpgradeCode() - { - return new ExperimentUpgradeCode(); - } - - @Override - protected void init() - { - addController("experiment", ExperimentController.class); - addController("experiment-types", TypesController.class); - addController("property", PropertyController.class); - ExperimentService.setInstance(new ExperimentServiceImpl()); - SampleTypeService.setInstance(new SampleTypeServiceImpl()); - DefaultValueService.setInstance(new DefaultValueServiceImpl()); - StorageProvisioner.setInstance(StorageProvisionerImpl.get()); - ExpLineageService.setInstance(new ExpLineageServiceImpl()); - - PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); - PropertyService.setInstance(propertyServiceImpl); - UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); - - 
UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); - - ExperimentProperty.register(); - SamplesSchema.register(this); - ExpSchema.register(this); - - PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); - PropertyService.get().registerDomainKind(new DataClassDomainKind()); - PropertyService.get().registerDomainKind(new VocabularyDomainKind()); - - QueryService.get().addCompareType(new ChildOfCompareType()); - QueryService.get().addCompareType(new ParentOfCompareType()); - QueryService.get().addCompareType(new LineageCompareType()); - QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); - QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); - QueryService.get().addQueryListener(new PropertyQueryChangeListener()); - - PropertyService.get().registerValidatorKind(new RegExValidator()); - PropertyService.get().registerValidatorKind(new RangeValidator()); - PropertyService.get().registerValidatorKind(new LookupValidator()); - PropertyService.get().registerValidatorKind(new LengthValidator()); - PropertyService.get().registerValidatorKind(new TextChoiceValidator()); - - ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); - ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); - ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); - ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); - ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment 
tables", - "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); - if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) - { - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", - "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); - } - else - { - OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", - "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); - - OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", - "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); - - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, "Allow multi-value Text Choice properties", - "Support selecting more than one value for text choice fields", false); - } - OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", - "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", - "Support for querying lineage of experiment objects", false); - OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", - "If the incoming data includes a RowId column we 
will allow the column but ignore it's values.", false); - - RoleManager.registerPermission(new DesignVocabularyPermission(), true); - RoleManager.registerRole(new SampleTypeDesignerRole()); - RoleManager.registerRole(new DataClassDesignerRole()); - - AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); - AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); - - WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); - ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); - - addModuleProperty(new LineageMaximumDepthModuleProperty(this)); - WarningService.get().register(new ExperimentWarningProvider()); - } - - @Override - public boolean hasScripts() - { - return true; - } - - @Override - @NotNull - protected Collection createWebPartFactories() - { - List result = new ArrayList<>(); - - BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); - } - }; - runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); - result.add(runGroupsFactory); - - BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new RunTypeWebPart(); - } - }; - result.add(runTypesFactory); - - result.add(new ExperimentRunWebPartFactory()); - BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, 
WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); - result.add(sampleTypeFactory); - result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); - view.setTitle("Samples"); - return view; - } - }); - - result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); - } - }); - - BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) - { - @Override - public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) - { - return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); - } - }; - narrowProtocolFactory.addLegacyNames("Narrow Protocols"); - result.add(narrowProtocolFactory); - - return result; - } - - private void addDataResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() - { - @Override - public WebdavResource resolve(@NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == 
null) - return null; - - return data.createIndexDocument(null); - } - - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); - if (data == null) - return null; - - return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); - if (idDataMap == null) - return null; - - Map> searchJsonMap = new HashMap<>(); - for (String resourceIdentifier : idDataMap.keySet()) - searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); - return searchJsonMap; - } - }); - } - - private void addDataClassResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); - if (dataClass == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "dataClass" + (dataClass.getCategory() != null ? 
":" + dataClass.getCategory() : "")); - - return properties; - } - }); - } - - private void addSampleTypeResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); - if (sampleType == null) - return null; - - Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); - - //Need to map to proper Icon - properties.put("type", "sampleSet"); - - return properties; - } - }); - } - - private void addSampleResourceResolver(String categoryName) - { - SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ - @Override - public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) - { - int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId == 0) - return null; - - ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); - if (material == null) - return null; - - return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); - } - - @Override - public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) - { - Set rowIds = new HashSet<>(); - Map rowIdIdentifierMap = new LongHashMap<>(); - for (String resourceIdentifier : resourceIdentifiers) - { - long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); - if (rowId != 0) - { - rowIds.add(rowId); - rowIdIdentifierMap.put(rowId, resourceIdentifier); - } - } - - Map> searchJsonMap = new HashMap<>(); - for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) - { - 
searchJsonMap.put( - rowIdIdentifierMap.get(material.getRowId()), - ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() - ); - } - - return searchJsonMap; - } - }); - } - - @Override - protected void startupAfterSpringConfig(ModuleContext moduleContext) - { - SearchService ss = SearchService.get(); -// ss.addSearchCategory(OntologyManager.conceptCategory); - ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); - ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); - ss.addSearchCategory(ExpMaterialImpl.searchCategory); - ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); - ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); - ss.addSearchCategory(ExpDataImpl.expDataCategory); - ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); - ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); - addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); - addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); - addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); - addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); - addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); - addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); - ss.addDocumentProvider(ExperimentServiceImpl.get()); - - PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); - ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); - ExperimentService.get().registerDataType(new LogDataType()); - - AuditLogService.get().registerAuditType(new DomainAuditProvider()); - 
AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); - AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); - AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); - - FileContentService fileContentService = FileContentService.get(); - if (null != fileContentService) - { - fileContentService.addFileListener(new ExpDataFileListener()); - fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); - fileContentService.addFileListener(new FileLinkFileListener()); - } - ContainerManager.addContainerListener(new ContainerManager.ContainerListener() - { - @Override - public void containerDeleted(Container c, User user) - { - try - { - ExperimentService.get().deleteAllExpObjInContainer(c, user); - } - catch (ExperimentException ee) - { - throw new RuntimeException(ee); - } - } - }, - // This is in the Last group because when a container is deleted, - // the Experiment listener needs to be called after the Study listener, - // because Study needs the metadata held by Experiment to delete properly. 
- // but it should be before the CoreContainerListener - ContainerManager.ContainerListener.Order.Last); - - if (ModuleLoader.getInstance().shouldInsertData()) - SystemProperty.registerProperties(); - - FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); - if (null != folderRegistry) - { - folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); - folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); - folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); - folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); - folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); - folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); - } - - AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); - - WebdavService.get().addProvider(new ScriptsResourceProvider()); - - SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); - - UsageMetricsService svc = UsageMetricsService.get(); - if (null != svc) - { - svc.registerUsageMetrics(getName(), () -> { - Map results = new HashMap<>(); - - DbSchema schema = ExperimentService.get().getSchema(); - if (AssayService.get() != null) - { - Map assayMetrics = new HashMap<>(); - SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); - SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); - for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) - { - Map protocolMetrics = new HashMap<>(); - - // Run count across all assay designs of this type - SQLFragment runSQL = new SQLFragment(baseRunSQL); - runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); - protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); - - // Number of assay designs of this type - SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); - protocolSQL.add(assayProvider.getProtocolPattern()); - protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); - List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); - protocolMetrics.put("protocolCount", protocols.size()); - - List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); - - protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); - - // Primary implementation class - protocolMetrics.put("implementingClass", assayProvider.getClass()); - - assayMetrics.put(assayProvider.getName(), protocolMetrics); - } - assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); - assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); - - assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); - SQLFragment runsWithPlateSQL = new SQLFragment(""" - SELECT COUNT(*) FROM exp.experimentrun r - INNER JOIN exp.object o ON o.objectUri = r.lsid - INNER JOIN exp.objectproperty op ON op.objectId = o.objectId - WHERE op.propertyid IN ( - SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? - )"""); - assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); - assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); - - assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - - // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file - assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ - SELECT COUNT(*) FROM ( - SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data - WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( - SELECT rowid FROM exp.protocolapplication - WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') - ) - GROUP BY sourceapplicationid - ) x WHERE count > 1""").getObject(Long.class)); - - Map sampleLookupCountMetrics = new HashMap<>(); - SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); - - SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); - sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); - sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); - resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); - - SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( - """ - SELECT COUNT(*) FROM ( - SELECT PD.domainid, COUNT(*) AS PropCount - FROM exp.propertydescriptor D - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) - AND propertyuri LIKE ? - GROUP BY PD.domainid - ) X WHERE X.PropCount > 1""" - ); - resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); - sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); - - assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); - - - // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) - results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - - results.put("assay", assayMetrics); - } - - results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); - results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); - - if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries - { - Collection> numSampleCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); - results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( - map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") - ).count()); - } - UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); - FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); - - SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + - " FROM (\n" + - " -- updates that are marked as lineage updates\n" + - " (SELECT DISTINCT transactionId\n" + - " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + - " AND comment = 'Sample was updated.'\n" + - " ) a1\n" + - " JOIN\n" + - " -- but have associated entries that are not lineage updates\n" + - " (SELECT DISTINCT transactionid\n" + - " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + - " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + - " ON a1.transactionid = a2.transactionid\n" + - " )"); - - results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); - - results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); - results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); - results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); - results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); - results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); - results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); - results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); - results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); - results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); - - results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + - "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); - String duplicateCaseInsensitiveSampleNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.material - WHERE materialsourceid IS NOT NULL - GROUP BY LOWER(name), materialsourceid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - String duplicateCaseInsensitiveDataNameCountSql = """ - SELECT COUNT(*) FROM - ( - SELECT 1 AS found - FROM exp.data - WHERE classid IS NOT NULL - GROUP BY LOWER(name), classid - HAVING COUNT(*) > 1 - ) AS duplicates - """; - results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); - results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); - - results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); - results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); - results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + - "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); - if (schema.getSqlDialect().isPostgreSQL()) - { - Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ - SELECT totalCount, numberNameCount FROM - (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t - JOIN - (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns - ON t.cpastype = ns.cpastype""").getMapCollection(); - results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); - results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> - (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); - } - - results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); - results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); - results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); - results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); - - results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); - results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); - - results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); - results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT D.PropertyURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); - - results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); - results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); - results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId - WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); - - results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ - SELECT COUNT(DISTINCT DD.DomainURI) FROM - exp.PropertyDescriptor D\s - JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid - JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId - WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); - - results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); - results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); - - results.putAll(ExperimentService.get().getDomainMetrics()); - - return results; - }); - } - } - - @Override - public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) - { - ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); - service.registerSchemaHandler(handler); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerTableHandler(new MigrationTableHandler() - { - @Override - public TableInfo getTableInfo() - { - return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); - } - - @Override - public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) - { - // Include experiment runs that were copied - FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); - if (includedClause != null) - filter.addClause(includedClause); - } - }); - service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); - DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); - service.registerSchemaHandler(dcHandler); - ExperimentDeleteService.setInstance(dcHandler); - } - - @Override - @NotNull - public Collection getSummary(Container c) - { - Collection list = new LinkedList<>(); - int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); - if (runGroupCount > 0) - list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); - - User user = HttpView.currentContext().getUser(); - - Set runTypes = ExperimentService.get().getExperimentRunTypes(c); - for (ExperimentRunType runType : runTypes) - { - if (runType == ExperimentRunType.ALL_RUNS_TYPE) - continue; - - long runCount = runType.getRunCount(user, c); - if (runCount > 0) - list.add(runCount + " runs of type " + runType.getDescription()); - } - - int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); - if (dataClassCount > 0) - list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); - - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if (sampleTypeCount > 0) - list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); - - return list; - } - - @Override - public @NotNull ArrayList getDetailedSummary(Container c, User user) - { - ArrayList summaries = new ArrayList<>(); - - // Assay types - long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); - if (assayTypeCount > 0) - summaries.add(new Summary(assayTypeCount, "Assay Type")); - - // Run count - int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); - if (runGroupCount > 0) - summaries.add(new Summary(runGroupCount, "Assay run")); - - // Number of Data Classes - List dataClasses = ExperimentService.get().getDataClasses(c, false); - int dataClassCount = dataClasses.size(); - if (dataClassCount > 0) - summaries.add(new Summary(dataClassCount, "Data Class")); - - ExpSchema expSchema = new ExpSchema(user, c); - - // Individual Data Class row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 47919: The "DataCount" column is filtered to only count data in the target container - if (table instanceof ExpDataClassTableImpl tableImpl) - tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpDataClassTable.Column.Name.name()); - columns.add(ExpDataClassTable.Column.DataCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - summaries.add(new Summary(count, entry.getKey())); - } - } - - // Sample Types - int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); - if 
(sampleTypeCount > 0) - summaries.add(new Summary(sampleTypeCount, "Sample Type")); - - // Individual Sample Type row counts - { - // The table-level container filter is set to ensure data class types are included - // that may not be defined in the target container but may have rows of data in the target container - TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); - - // Issue 51557: The "SampleCount" column is filtered to only count data in the target container - if (table instanceof ExpSampleTypeTableImpl tableImpl) - tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); - - Set columns = new LinkedHashSet<>(); - columns.add(ExpSampleTypeTable.Column.Name.name()); - columns.add(ExpSampleTypeTable.Column.SampleCount.name()); - - Map results = new TableSelector(table, columns).getValueMap(String.class); - for (var entry : results.entrySet()) - { - long count = entry.getValue().longValue(); - if (count > 0) - { - String name = entry.getKey(); - Summary s = name.equals("MixtureBatches") - ? 
new Summary(count, "Batch") - : new Summary(count, name); - summaries.add(s); - } - } - } - - return summaries; - } - - @Override - public @NotNull Set> getIntegrationTests() - { - return Set.of( - DomainImpl.TestCase.class, - DomainPropertyImpl.TestCase.class, - ExpDataTableImpl.TestCase.class, - ExperimentServiceImpl.AuditDomainUriTest.class, - ExperimentServiceImpl.LineageQueryTestCase.class, - ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, - ExperimentServiceImpl.TestCase.class, - ExperimentStressTest.class, - LineagePerfTest.class, - LineageTest.class, - OntologyManager.TestCase.class, - PropertyServiceImpl.TestCase.class, - SampleTypeServiceImpl.TestCase.class, - StorageNameGenerator.TestCase.class, - StorageProvisionerImpl.TestCase.class, - UniqueValueCounterTestCase.class, - XarTestPipelineJob.TestCase.class - ); - } - - @Override - public @NotNull Collection>> getIntegrationTestFactories() - { - List>> list = new ArrayList<>(super.getIntegrationTestFactories()); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); - list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); - return list; - } - - @Override - public @NotNull Set> getUnitTests() - { - return Set.of( - GraphAlgorithms.TestCase.class, - LSIDRelativizer.TestCase.class, - Lsid.TestCase.class, - LsidUtils.TestCase.class, - PropertyController.TestCase.class, - Quantity.TestCase.class, - Unit.TestCase.class - ); - } - - @Override - @NotNull - public Collection getSchemaNames() - { - return List.of( - ExpSchema.SCHEMA_NAME, - DataClassDomainKind.PROVISIONED_SCHEMA_NAME, - SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME - ); - } - - @NotNull - @Override - public Collection getProvisionedSchemaNames() - { - return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); - } - - @Override - public JSONObject getPageContextJson(ContainerUser context) - { - JSONObject json = 
super.getPageContextJson(context); - json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); - return json; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.labkey.experiment; + +import org.apache.commons.lang3.math.NumberUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.admin.FolderSerializationRegistry; +import org.labkey.api.assay.AssayProvider; +import org.labkey.api.assay.AssayService; +import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.audit.AuditLogService; +import org.labkey.api.audit.SampleTimelineAuditEvent; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.NameGenerator; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.data.UpgradeCode; +import 
org.labkey.api.defaults.DefaultValueService; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.ExperimentRunType; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.DefaultExperimentDataHandler; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpLineageService; +import org.labkey.api.exp.api.ExpMaterial; +import org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpProtocolAttachmentType; +import org.labkey.api.exp.api.ExpRunAttachmentType; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentJSONConverter; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.FilterProtocolInputCriteria; +import org.labkey.api.exp.api.SampleTypeDomainKind; +import org.labkey.api.exp.api.SampleTypeService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DomainAuditProvider; +import org.labkey.api.exp.property.DomainPropertyAuditProvider; +import org.labkey.api.exp.property.ExperimentProperty; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.property.SystemProperty; +import org.labkey.api.exp.query.ExpDataClassTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.exp.xar.LSIDRelativizer; +import org.labkey.api.exp.xar.LsidUtils; +import org.labkey.api.files.FileContentService; +import org.labkey.api.files.TableUpdaterFileListener; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.ExperimentDeleteService; +import org.labkey.api.migration.MigrationTableHandler; +import org.labkey.api.module.ModuleContext; +import org.labkey.api.module.ModuleLoader; +import org.labkey.api.module.SpringModule; 
+import org.labkey.api.module.Summary; +import org.labkey.api.ontology.OntologyService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.FilteredTable; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.UserSchema; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.roles.RoleManager; +import org.labkey.api.settings.AppProps; +import org.labkey.api.settings.OptionalFeatureService; +import org.labkey.api.usageMetrics.UsageMetricsService; +import org.labkey.api.util.GUID; +import org.labkey.api.util.JspTestCase; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.SystemMaintenance; +import org.labkey.api.view.AlwaysAvailableWebPartFactory; +import org.labkey.api.view.BaseWebPartFactory; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.JspView; +import org.labkey.api.view.Portal; +import org.labkey.api.view.ViewContext; +import org.labkey.api.view.WebPartFactory; +import org.labkey.api.view.WebPartView; +import org.labkey.api.view.template.WarningService; +import org.labkey.api.vocabulary.security.DesignVocabularyPermission; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.api.webdav.WebdavService; +import org.labkey.api.writer.ContainerUser; +import org.labkey.experiment.api.DataClassDomainKind; +import org.labkey.experiment.api.ExpDataClassImpl; +import org.labkey.experiment.api.ExpDataClassTableImpl; +import org.labkey.experiment.api.ExpDataClassType; +import org.labkey.experiment.api.ExpDataImpl; +import org.labkey.experiment.api.ExpDataTableImpl; +import org.labkey.experiment.api.ExpMaterialImpl; +import org.labkey.experiment.api.ExpProtocolImpl; +import org.labkey.experiment.api.ExpSampleTypeImpl; +import 
org.labkey.experiment.api.ExpSampleTypeTableImpl; +import org.labkey.experiment.api.ExperimentServiceImpl; +import org.labkey.experiment.api.ExperimentStressTest; +import org.labkey.experiment.api.GraphAlgorithms; +import org.labkey.experiment.api.LineageTest; +import org.labkey.experiment.api.LogDataType; +import org.labkey.experiment.api.Protocol; +import org.labkey.experiment.api.SampleTypeServiceImpl; +import org.labkey.experiment.api.SampleTypeUpdateServiceDI; +import org.labkey.experiment.api.UniqueValueCounterTestCase; +import org.labkey.experiment.api.VocabularyDomainKind; +import org.labkey.experiment.api.data.ChildOfCompareType; +import org.labkey.experiment.api.data.ChildOfMethod; +import org.labkey.experiment.api.data.LineageCompareType; +import org.labkey.experiment.api.data.ParentOfCompareType; +import org.labkey.experiment.api.data.ParentOfMethod; +import org.labkey.experiment.api.property.DomainImpl; +import org.labkey.experiment.api.property.DomainPropertyImpl; +import org.labkey.experiment.api.property.LengthValidator; +import org.labkey.experiment.api.property.LookupValidator; +import org.labkey.experiment.api.property.PropertyServiceImpl; +import org.labkey.experiment.api.property.RangeValidator; +import org.labkey.experiment.api.property.RegExValidator; +import org.labkey.experiment.api.property.StorageNameGenerator; +import org.labkey.experiment.api.property.StorageProvisionerImpl; +import org.labkey.experiment.api.property.TextChoiceValidator; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.controllers.property.PropertyController; +import org.labkey.experiment.defaults.DefaultValueServiceImpl; +import org.labkey.experiment.lineage.ExpLineageServiceImpl; +import org.labkey.experiment.lineage.LineagePerfTest; +import org.labkey.experiment.pipeline.ExperimentPipelineProvider; +import org.labkey.experiment.pipeline.XarTestPipelineJob; +import org.labkey.experiment.samples.DataClassFolderImporter; 
+import org.labkey.experiment.samples.DataClassFolderWriter; +import org.labkey.experiment.samples.SampleStatusFolderImporter; +import org.labkey.experiment.samples.SampleTimelineAuditProvider; +import org.labkey.experiment.samples.SampleTypeFolderImporter; +import org.labkey.experiment.samples.SampleTypeFolderWriter; +import org.labkey.experiment.security.DataClassDesignerRole; +import org.labkey.experiment.security.SampleTypeDesignerRole; +import org.labkey.experiment.types.TypesController; +import org.labkey.experiment.xar.FolderXarImporterFactory; +import org.labkey.experiment.xar.FolderXarWriterFactory; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.data.ColumnRenderPropertiesImpl.STORAGE_UNIQUE_ID_CONCEPT_URI; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.TEXT_CHOICE_CONCEPT_URI; +import static org.labkey.api.exp.api.ExperimentService.MODULE_NAME; +import static org.labkey.api.exp.query.ExpSchema.SAMPLE_FILES_TABLE; + +public class ExperimentModule extends SpringModule +{ + private static final String SAMPLE_TYPE_WEB_PART_NAME = "Sample Types"; + private static final String PROTOCOL_WEB_PART_NAME = "Protocols"; + + public static final String AMOUNT_AND_UNIT_UPGRADE_PROP = "AmountAndUnitAudit"; + public static final String TRANSACTION_ID_PROP = "AuditTransactionId"; + public static final String AUDIT_COUNT_PROP = "AuditRecordCount"; + public static final String EXPERIMENT_RUN_WEB_PART_NAME = "Experiment Runs"; + + @Override + public String getName() + { + return MODULE_NAME; + } + + @Override + public Double getSchemaVersion() + { + return 26.005; + } + + @Nullable + @Override + public UpgradeCode 
getUpgradeCode() + { + return new ExperimentUpgradeCode(); + } + + @Override + protected void init() + { + addController("experiment", ExperimentController.class); + addController("experiment-types", TypesController.class); + addController("property", PropertyController.class); + ExperimentService.setInstance(new ExperimentServiceImpl()); + SampleTypeService.setInstance(new SampleTypeServiceImpl()); + DefaultValueService.setInstance(new DefaultValueServiceImpl()); + StorageProvisioner.setInstance(StorageProvisionerImpl.get()); + ExpLineageService.setInstance(new ExpLineageServiceImpl()); + + PropertyServiceImpl propertyServiceImpl = new PropertyServiceImpl(); + PropertyService.setInstance(propertyServiceImpl); + UsageMetricsService.get().registerUsageMetrics(getName(), propertyServiceImpl); + + UsageMetricsService.get().registerUsageMetrics(getName(), FileLinkMetricsProvider.getInstance()); + + ExperimentProperty.register(); + SamplesSchema.register(this); + ExpSchema.register(this); + + PropertyService.get().registerDomainKind(new SampleTypeDomainKind()); + PropertyService.get().registerDomainKind(new DataClassDomainKind()); + PropertyService.get().registerDomainKind(new VocabularyDomainKind()); + + QueryService.get().addCompareType(new ChildOfCompareType()); + QueryService.get().addCompareType(new ParentOfCompareType()); + QueryService.get().addCompareType(new LineageCompareType()); + QueryService.get().registerMethod(ChildOfMethod.NAME, new ChildOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().registerMethod(ParentOfMethod.NAME, new ParentOfMethod(), JdbcType.BOOLEAN, 2, 3); + QueryService.get().addQueryListener(new ExperimentQueryChangeListener()); + QueryService.get().addQueryListener(new PropertyQueryChangeListener()); + + PropertyService.get().registerValidatorKind(new RegExValidator()); + PropertyService.get().registerValidatorKind(new RangeValidator()); + PropertyService.get().registerValidatorKind(new LookupValidator()); + 
PropertyService.get().registerValidatorKind(new LengthValidator()); + PropertyService.get().registerValidatorKind(new TextChoiceValidator()); + + ExperimentService.get().registerExperimentDataHandler(new DefaultExperimentDataHandler()); + ExperimentService.get().registerProtocolInputCriteria(new FilterProtocolInputCriteria.Factory()); + ExperimentService.get().registerNameExpressionType("sampletype", "exp", "MaterialSource", "nameexpression"); + ExperimentService.get().registerNameExpressionType("aliquots", "exp", "MaterialSource", "aliquotnameexpression"); + ExperimentService.get().registerNameExpressionType("dataclass", "exp", "DataClass", "nameexpression"); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.EXPERIMENTAL_RESOLVE_PROPERTY_URI_COLUMNS, "Resolve property URIs as columns on experiment tables", + "If a column is not found on an experiment table, attempt to resolve the column name as a Property URI and add it as a property column", false); + if (CoreSchema.getInstance().getSqlDialect().isSqlServer()) + { + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_WITH_COUNTER, "Use strict incremental withCounter and rootSampleCount expression", + "When withCounter or rootSampleCount is used in name expression, make sure the count increments one-by-one and does not jump.", true); + } + else + { + OptionalFeatureService.get().addExperimentalFeatureFlag(SAMPLE_FILES_TABLE, "Manage Unreferenced Sample Files", + "Enable 'Unreferenced Sample Files' table to view and delete sample files that are no longer referenced by samples", false); + + OptionalFeatureService.get().addExperimentalFeatureFlag(NameGenerator.EXPERIMENTAL_ALLOW_GAP_COUNTER, "Allow gap with withCounter and rootSampleCount expression", + "Check this option if gaps in the count generated by withCounter or rootSampleCount name expression are allowed.", true); + + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.MULTI_VALUE_TEXT_CHOICE, 
"Allow multi-value Text Choice properties", + "Support selecting more than one value for text choice fields", false); + } + OptionalFeatureService.get().addExperimentalFeatureFlag(AppProps.QUANTITY_COLUMN_SUFFIX_TESTING, "Quantity column suffix testing", + "If a column name contains a \"__\" suffix, this feature allows for testing it as a Quantity display column", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_FROM_EXPANCESTORS, "SQL syntax: 'FROM EXPANCESTORS()'", + "Support for querying lineage of experiment objects", false); + OptionalFeatureService.get().addExperimentalFeatureFlag(ExperimentService.EXPERIMENTAL_FEATURE_ALLOW_ROW_ID_MERGE, "Allow RowId to be accepted when merging samples or dataclass data", + "If the incoming data includes a RowId column we will allow the column but ignore it's values.", false); + + RoleManager.registerPermission(new DesignVocabularyPermission(), true); + RoleManager.registerRole(new SampleTypeDesignerRole()); + RoleManager.registerRole(new DataClassDesignerRole()); + + AttachmentService.get().registerAttachmentParentType(ExpRunAttachmentType.get()); + AttachmentService.get().registerAttachmentParentType(ExpProtocolAttachmentType.get()); + + WebdavService.get().addExpDataProvider((path, container) -> ExperimentService.get().getAllExpDataByURL(path, container)); + ExperimentService.get().registerObjectReferencer(ExperimentServiceImpl.get()); + + addModuleProperty(new LineageMaximumDepthModuleProperty(this)); + WarningService.get().register(new ExperimentWarningProvider()); + } + + @Override + public boolean hasScripts() + { + return true; + } + + @Override + @NotNull + protected Collection createWebPartFactories() + { + List result = new ArrayList<>(); + + BaseWebPartFactory runGroupsFactory = new BaseWebPartFactory(RunGroupWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull 
ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunGroupWebPart(portalCtx, WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), webPart); + } + }; + runGroupsFactory.addLegacyNames("Experiments", "Experiment", "Experiment Navigator", "Narrow Experiments"); + result.add(runGroupsFactory); + + BaseWebPartFactory runTypesFactory = new BaseWebPartFactory(RunTypeWebPart.WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new RunTypeWebPart(); + } + }; + result.add(runTypesFactory); + + result.add(new ExperimentRunWebPartFactory()); + BaseWebPartFactory sampleTypeFactory = new BaseWebPartFactory(SAMPLE_TYPE_WEB_PART_NAME, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new SampleTypeWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + sampleTypeFactory.addLegacyNames("Narrow Sample Sets", "Sample Sets"); + result.add(sampleTypeFactory); + result.add(new AlwaysAvailableWebPartFactory("Samples Menu", false, false, WebPartFactory.LOCATION_MENUBAR) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + WebPartView view = new JspView<>("/org/labkey/experiment/samplesAndAnalytes.jsp", webPart); + view.setTitle("Samples"); + return view; + } + }); + + result.add(new AlwaysAvailableWebPartFactory("Data Classes", false, false, WebPartFactory.LOCATION_BODY, WebPartFactory.LOCATION_RIGHT) { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new DataClassWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx, webPart); + } + }); 
+ + BaseWebPartFactory narrowProtocolFactory = new BaseWebPartFactory(PROTOCOL_WEB_PART_NAME, WebPartFactory.LOCATION_RIGHT) + { + @Override + public WebPartView getWebPartView(@NotNull ViewContext portalCtx, @NotNull Portal.WebPart webPart) + { + return new ProtocolWebPart(WebPartFactory.LOCATION_RIGHT.equalsIgnoreCase(webPart.getLocation()), portalCtx); + } + }; + narrowProtocolFactory.addLegacyNames("Narrow Protocols"); + result.add(narrowProtocolFactory); + + return result; + } + + private void addDataResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver() + { + @Override + public WebdavResource resolve(@NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return data.createIndexDocument(null); + } + + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + ExpDataImpl data = ExpDataImpl.fromDocumentId(resourceIdentifier); + if (data == null) + return null; + + return ExperimentJSONConverter.serializeData(data, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) + { + Map idDataMap = ExpDataImpl.fromDocumentIds(resourceIdentifiers); + if (idDataMap == null) + return null; + + Map> searchJsonMap = new HashMap<>(); + for (String resourceIdentifier : idDataMap.keySet()) + searchJsonMap.put(resourceIdentifier, ExperimentJSONConverter.serializeData(idDataMap.get(resourceIdentifier), user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap()); + return searchJsonMap; + } + }); + } + + private void addDataClassResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = 
NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpDataClass dataClass = ExperimentService.get().getDataClass(rowId); + if (dataClass == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(dataClass, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "dataClass" + (dataClass.getCategory() != null ? ":" + dataClass.getCategory() : "")); + + return properties; + } + }); + } + + private void addSampleTypeResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpSampleType sampleType = SampleTypeService.get().getSampleType(rowId); + if (sampleType == null) + return null; + + Map properties = ExperimentJSONConverter.serializeExpObject(sampleType, null, ExperimentJSONConverter.DEFAULT_SETTINGS, user).toMap(); + + //Need to map to proper Icon + properties.put("type", "sampleSet"); + + return properties; + } + }); + } + + private void addSampleResourceResolver(String categoryName) + { + SearchService.get().addResourceResolver(categoryName, new SearchService.ResourceResolver(){ + @Override + public Map getCustomSearchJson(User user, @NotNull String resourceIdentifier) + { + int rowId = NumberUtils.toInt(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId == 0) + return null; + + ExpMaterial material = ExperimentService.get().getExpMaterial(rowId); + if (material == null) + return null; + + return ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap(); + } + + @Override + public Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers) 
+ { + Set rowIds = new HashSet<>(); + Map rowIdIdentifierMap = new LongHashMap<>(); + for (String resourceIdentifier : resourceIdentifiers) + { + long rowId = NumberUtils.toLong(resourceIdentifier.replace(categoryName + ":", "")); + if (rowId != 0) + { + rowIds.add(rowId); + rowIdIdentifierMap.put(rowId, resourceIdentifier); + } + } + + Map> searchJsonMap = new HashMap<>(); + for (ExpMaterial material : ExperimentService.get().getExpMaterials(rowIds)) + { + searchJsonMap.put( + rowIdIdentifierMap.get(material.getRowId()), + ExperimentJSONConverter.serializeMaterial(material, user, ExperimentJSONConverter.DEFAULT_SETTINGS).toMap() + ); + } + + return searchJsonMap; + } + }); + } + + @Override + protected void startupAfterSpringConfig(ModuleContext moduleContext) + { + SearchService ss = SearchService.get(); +// ss.addSearchCategory(OntologyManager.conceptCategory); + ss.addSearchCategory(ExpSampleTypeImpl.searchCategory); + ss.addSearchCategory(ExpSampleTypeImpl.mediaSearchCategory); + ss.addSearchCategory(ExpMaterialImpl.searchCategory); + ss.addSearchCategory(ExpMaterialImpl.mediaSearchCategory); + ss.addSearchCategory(ExpDataClassImpl.SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY); + ss.addSearchCategory(ExpDataImpl.expDataCategory); + ss.addSearchCategory(ExpDataImpl.expMediaDataCategory); + ss.addSearchResultTemplate(new ExpDataImpl.DataSearchResultTemplate()); + addDataResourceResolver(ExpDataImpl.expDataCategory.getName()); + addDataResourceResolver(ExpDataImpl.expMediaDataCategory.getName()); + addDataClassResourceResolver(ExpDataClassImpl.SEARCH_CATEGORY.getName()); + addDataClassResourceResolver(ExpDataClassImpl.MEDIA_SEARCH_CATEGORY.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.searchCategory.getName()); + addSampleTypeResourceResolver(ExpSampleTypeImpl.mediaSearchCategory.getName()); + addSampleResourceResolver(ExpMaterialImpl.searchCategory.getName()); + 
addSampleResourceResolver(ExpMaterialImpl.mediaSearchCategory.getName()); + ss.addDocumentProvider(ExperimentServiceImpl.get()); + + PipelineService.get().registerPipelineProvider(new ExperimentPipelineProvider(this)); + ExperimentService.get().registerExperimentRunTypeSource(container -> Collections.singleton(ExperimentRunType.ALL_RUNS_TYPE)); + ExperimentService.get().registerDataType(new LogDataType()); + + AuditLogService.get().registerAuditType(new DomainAuditProvider()); + AuditLogService.get().registerAuditType(new DomainPropertyAuditProvider()); + AuditLogService.get().registerAuditType(new ExperimentAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTypeAuditProvider()); + AuditLogService.get().registerAuditType(new SampleTimelineAuditProvider()); + + FileContentService fileContentService = FileContentService.get(); + if (null != fileContentService) + { + fileContentService.addFileListener(new ExpDataFileListener()); + fileContentService.addFileListener(new TableUpdaterFileListener(ExperimentService.get().getTinfoExperimentRun(), "FilePathRoot", TableUpdaterFileListener.Type.fileRootPath, "RowId")); + fileContentService.addFileListener(new FileLinkFileListener()); + } + ContainerManager.addContainerListener(new ContainerManager.ContainerListener() + { + @Override + public void containerDeleted(Container c, User user) + { + try + { + ExperimentService.get().deleteAllExpObjInContainer(c, user); + } + catch (ExperimentException ee) + { + throw new RuntimeException(ee); + } + } + }, + // This is in the Last group because when a container is deleted, + // the Experiment listener needs to be called after the Study listener, + // because Study needs the metadata held by Experiment to delete properly. 
+ // but it should be before the CoreContainerListener + ContainerManager.ContainerListener.Order.Last); + + if (ModuleLoader.getInstance().shouldInsertData()) + SystemProperty.registerProperties(); + + FolderSerializationRegistry folderRegistry = FolderSerializationRegistry.get(); + if (null != folderRegistry) + { + folderRegistry.addFactories(new FolderXarWriterFactory(), new FolderXarImporterFactory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDesignWriter.Factory()); + folderRegistry.addWriterFactory(new SampleTypeFolderWriter.SampleTypeDataWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDesignWriter.Factory()); + folderRegistry.addWriterFactory(new DataClassFolderWriter.DataClassDataWriter.Factory()); + folderRegistry.addImportFactory(new SampleTypeFolderImporter.Factory()); + folderRegistry.addImportFactory(new DataClassFolderImporter.Factory()); + folderRegistry.addImportFactory(new SampleStatusFolderImporter.Factory()); + } + + AttachmentService.get().registerAttachmentParentType(ExpDataClassType.get()); + + WebdavService.get().addProvider(new ScriptsResourceProvider()); + + SystemMaintenance.addTask(new FileLinkMetricsMaintenanceTask()); + + UsageMetricsService svc = UsageMetricsService.get(); + if (null != svc) + { + svc.registerUsageMetrics(getName(), () -> { + Map results = new HashMap<>(); + + DbSchema schema = ExperimentService.get().getSchema(); + if (AssayService.get() != null) + { + Map assayMetrics = new HashMap<>(); + SQLFragment baseRunSQL = new SQLFragment("SELECT COUNT(*) FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "r").append(" WHERE lsid LIKE ?"); + SQLFragment baseProtocolSQL = new SQLFragment("SELECT * FROM ").append(ExperimentService.get().getTinfoProtocol(), "p").append(" WHERE lsid LIKE ? 
AND ApplicationType = ?"); + for (AssayProvider assayProvider : AssayService.get().getAssayProviders()) + { + Map protocolMetrics = new HashMap<>(); + + // Run count across all assay designs of this type + SQLFragment runSQL = new SQLFragment(baseRunSQL); + runSQL.add(Lsid.namespaceLikeString(assayProvider.getRunLSIDPrefix())); + protocolMetrics.put("runCount", new SqlSelector(schema, runSQL).getObject(Long.class)); + + // Number of assay designs of this type + SQLFragment protocolSQL = new SQLFragment(baseProtocolSQL); + protocolSQL.add(assayProvider.getProtocolPattern()); + protocolSQL.add(ExpProtocol.ApplicationType.ExperimentRun.toString()); + List protocols = new SqlSelector(schema, protocolSQL).getArrayList(Protocol.class); + protocolMetrics.put("protocolCount", protocols.size()); + + List wrappedProtocols = protocols.stream().map(ExpProtocolImpl::new).collect(Collectors.toList()); + + protocolMetrics.put("resultRowCount", assayProvider.getResultRowCount(wrappedProtocols)); + + // Primary implementation class + protocolMetrics.put("implementingClass", assayProvider.getClass()); + + assayMetrics.put(assayProvider.getName(), protocolMetrics); + } + assayMetrics.put("autoLinkedAssayCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.propertyuri = 'terms.labkey.org#AutoCopyTargetContainer'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active'").getObject(Long.class)); + assayMetrics.put("protocolsWithTransformScriptRunOnEditCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + 
assayMetrics.put("protocolsWithTransformScriptRunOnImportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'TransformScript' AND status = 'Active' AND OP.stringvalue LIKE '%\"INSERT\"%'").getObject(Long.class)); + + assayMetrics.put("standardAssayWithPlateSupportCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.protocol EP JOIN exp.objectPropertiesView OP ON EP.lsid = OP.objecturi WHERE OP.name = 'PlateMetadata' AND floatValue = 1").getObject(Long.class)); + SQLFragment runsWithPlateSQL = new SQLFragment(""" + SELECT COUNT(*) FROM exp.experimentrun r + INNER JOIN exp.object o ON o.objectUri = r.lsid + INNER JOIN exp.objectproperty op ON op.objectId = o.objectId + WHERE op.propertyid IN ( + SELECT propertyid FROM exp.propertydescriptor WHERE name = ? AND lookupquery = ? + )"""); + assayMetrics.put("standardAssayRunsWithPlateTemplate", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateTemplate").add("PlateTemplate")).getObject(Long.class)); + assayMetrics.put("standardAssayRunsWithPlateSet", new SqlSelector(schema, new SQLFragment(runsWithPlateSQL).add("PlateSet").add("PlateSet")).getObject(Long.class)); + + assayMetrics.put("assayRunsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + assayMetrics.put("assayResultsFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.domainUri LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + + // metric to count the number of Luminex and Standard assay runs that were imported with > 1 data file + assayMetrics.put("assayRunsWithMultipleInputFiles", new SqlSelector(schema, """ + SELECT COUNT(*) FROM ( + SELECT sourceapplicationid, COUNT(*) AS count FROM exp.data + WHERE lsid NOT LIKE '%:RelatedFile.%' AND sourceapplicationid IN ( + SELECT rowid FROM exp.protocolapplication + WHERE lsid LIKE '%:SimpleProtocol.CoreStep' AND (protocollsid LIKE '%:LuminexAssayProtocol.%' OR protocollsid LIKE '%:GeneralAssayProtocol.%') + ) + GROUP BY sourceapplicationid + ) x WHERE count > 1""").getObject(Long.class)); + + Map sampleLookupCountMetrics = new HashMap<>(); + SQLFragment baseAssaySampleLookupSQL = new SQLFragment("SELECT COUNT(*) FROM exp.propertydescriptor WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) AND propertyuri LIKE ?"); + + SQLFragment batchAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + batchAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Batch.getPrefix() + ".%"); + sampleLookupCountMetrics.put("batchDomain", new SqlSelector(schema, batchAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment runAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + runAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%"); + sampleLookupCountMetrics.put("runDomain", new SqlSelector(schema, runAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment resultAssaySampleLookupSQL = new SQLFragment(baseAssaySampleLookupSQL); + resultAssaySampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomain", new SqlSelector(schema, resultAssaySampleLookupSQL).getObject(Long.class)); + + SQLFragment 
resultAssayMultipleSampleLookupSQL = new SQLFragment( + """ + SELECT COUNT(*) FROM ( + SELECT PD.domainid, COUNT(*) AS PropCount + FROM exp.propertydescriptor D + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + WHERE (lookupschema = 'samples' OR (lookupschema = 'exp' AND lookupquery = 'Materials')) + AND propertyuri LIKE ? + GROUP BY PD.domainid + ) X WHERE X.PropCount > 1""" + ); + resultAssayMultipleSampleLookupSQL.add("urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%"); + sampleLookupCountMetrics.put("resultDomainWithMultiple", new SqlSelector(schema, resultAssayMultipleSampleLookupSQL).getObject(Long.class)); + + assayMetrics.put("sampleLookupCount", sampleLookupCountMetrics); + + + // Putting these metrics at the same level as the other BooleanColumnCount metrics (e.g., sampleTypeWithBooleanColumnCount) + results.put("assayResultWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Result.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assayRunWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.propertyURI LIKE ? 
AND D.rangeURI = ?""", "urn:lsid:%:" + ExpProtocol.AssayDomainTypes.Run.getPrefix() + ".%", PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + + results.put("assay", assayMetrics); + } + + results.put("autoLinkedSampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource WHERE autoLinkTargetContainer IS NOT NULL").getObject(Long.class)); + results.put("sampleSetCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.materialsource").getObject(Long.class)); + + if (schema.getSqlDialect().isPostgreSQL()) // SQLServer does not support regular expression queries + { + Collection> numSampleCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.material GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM exp.material m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("sampleSetWithNumberNamesCount", numSampleCounts.size()); + results.put("sampleSetWithOnlyNumberNamesCount", numSampleCounts.stream().filter( + map -> (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount") + ).count()); + } + UserSchema userSchema = AuditLogService.getAuditLogSchema(User.getSearchUser(), ContainerManager.getRoot()); + FilteredTable table = (FilteredTable) userSchema.getTable(SampleTimelineAuditEvent.EVENT_TYPE); + + SQLFragment sql = new SQLFragment("SELECT COUNT(*)\n" + + " FROM (\n" + + " -- updates that are marked as lineage updates\n" + + " (SELECT DISTINCT transactionId\n" + + " FROM " + table.getRealTable().getFromSQL("").getSQL() +"\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanTRUE() + "\n" + + " AND comment = 'Sample was updated.'\n" + + " ) a1\n" + + " JOIN\n" + + " -- but have associated entries that are not lineage updates\n" + + " (SELECT DISTINCT transactionid\n" + + " FROM " + 
table.getRealTable().getFromSQL("").getSQL() + "\n" + + " WHERE islineageupdate = " + schema.getSqlDialect().getBooleanFALSE() + ") a2\n" + + " ON a1.transactionid = a2.transactionid\n" + + " )"); + + results.put("sampleLineageAuditDiscrepancyCount", new SqlSelector(schema, sql.getSQL()).getObject(Long.class)); + + results.put("sampleCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material").getObject(Long.class)); + results.put("aliquotCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material where aliquotedfromlsid IS NOT NULL").getObject(Long.class)); + results.put("sampleNullAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount IS NULL").getObject(Long.class)); + results.put("sampleNegativeAmountCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.material WHERE storedamount < 0").getObject(Long.class)); + results.put("sampleUnitsDifferCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.material m JOIN exp.materialSource s ON m.materialsourceid = s.rowid WHERE m.units != s.metricunit").getObject(Long.class)); + results.put("sampleTypesWithoutUnitsCount", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IS NULL").getObject(Long.class)); + results.put("sampleTypesWithMassTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('kg', 'g', 'mg', 'ug', 'ng')").getObject(Long.class)); + results.put("sampleTypesWithVolumeTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit IN ('L', 'mL', 'uL')").getObject(Long.class)); + results.put("sampleTypesWithCountTypeUnit", new SqlSelector(schema, "SELECT COUNT(*) from exp.materialSource WHERE category IS NULL AND metricunit = ?", "unit").getObject(Long.class)); + + results.put("duplicateSampleMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + 
"(SELECT name, cpastype FROM exp.material WHERE cpastype <> 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + results.put("duplicateSpecimenMaterialNameCount", new SqlSelector(schema, "SELECT COUNT(*) as duplicateCount FROM " + + "(SELECT name, cpastype FROM exp.material WHERE cpastype = 'Material' GROUP BY name, cpastype HAVING COUNT(*) > 1) d").getObject(Long.class)); + String duplicateCaseInsensitiveSampleNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.material + WHERE materialsourceid IS NOT NULL + GROUP BY LOWER(name), materialsourceid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + String duplicateCaseInsensitiveDataNameCountSql = """ + SELECT COUNT(*) FROM + ( + SELECT 1 AS found + FROM exp.data + WHERE classid IS NOT NULL + GROUP BY LOWER(name), classid + HAVING COUNT(*) > 1 + ) AS duplicates + """; + results.put("duplicateCaseInsensitiveSampleNameCount", new SqlSelector(schema, duplicateCaseInsensitiveSampleNameCountSql).getObject(Long.class)); + results.put("duplicateCaseInsensitiveDataNameCount", new SqlSelector(schema, duplicateCaseInsensitiveDataNameCountSql).getObject(Long.class)); + + results.put("dataClassCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.dataclass").getObject(Long.class)); + results.put("dataClassRowCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.data WHERE classid IN (SELECT rowid FROM exp.dataclass)").getObject(Long.class)); + results.put("dataWithDataParentsCount", new SqlSelector(schema, "SELECT COUNT(DISTINCT d.sourceApplicationId) FROM exp.data d\n" + + "JOIN exp.datainput di ON di.targetapplicationid = d.sourceapplicationid").getObject(Long.class)); + if (schema.getSqlDialect().isPostgreSQL()) + { + Collection> numDataClassObjectsCounts = new SqlSelector(schema, """ + SELECT totalCount, numberNameCount FROM + (SELECT cpastype, COUNT(*) AS totalCount from exp.data GROUP BY cpastype) t + JOIN + (SELECT cpastype, COUNT(*) AS numberNameCount FROM 
exp.data m WHERE m.name SIMILAR TO '[0-9.]*' GROUP BY cpastype) ns + ON t.cpastype = ns.cpastype""").getMapCollection(); + results.put("dataClassWithNumberNamesCount", numDataClassObjectsCounts.size()); + results.put("dataClassWithOnlyNumberNamesCount", numDataClassObjectsCounts.stream().filter(map -> + (Long) map.get("totalCount") > 0 && map.get("totalCount") == map.get("numberNameCount")).count()); + } + + results.put("ontologyPrincipalConceptCodeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE principalconceptcode IS NOT NULL").getObject(Long.class)); + results.put("ontologyLookupColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", OntologyService.conceptCodeConceptURI).getObject(Long.class)); + results.put("ontologyConceptSubtreeCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptsubtree IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptImportColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptimportcolumn IS NOT NULL").getObject(Long.class)); + results.put("ontologyConceptLabelColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE conceptlabelcolumn IS NOT NULL").getObject(Long.class)); + + results.put("scannableColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE scannable = ?", true).getObject(Long.class)); + results.put("uniqueIdColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + results.put("sampleTypeWithUniqueIdCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.conceptURI = ?""", 
STORAGE_UNIQUE_ID_CONCEPT_URI).getObject(Long.class)); + + results.put("fileColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithFileColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.FILE_LINK.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("sampleTypeWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("sampleTypeAliquotSpecificField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ChildOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentOnlyField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND (D.derivationDataScope = ? OR D.derivationDataScope IS NULL)""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.ParentOnly.name()).getObject(Long.class)); + results.put("sampleTypeParentAndAliquotField", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT D.PropertyURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.derivationDataScope = ?""", SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME, ExpSchema.DerivationDataScopeType.All.name()).getObject(Long.class)); + + results.put("attachmentColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeURI = ?", PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithAttachmentColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? 
AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.ATTACHMENT.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithBooleanColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.BOOLEAN.getTypeUri()).getObject(Long.class)); + results.put("dataClassWithMultiValueColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE DD.storageSchemaName = ? AND D.rangeURI = ?""", DataClassDomainKind.PROVISIONED_SCHEMA_NAME, PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("textChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE concepturi = ?", TEXT_CHOICE_CONCEPT_URI).getObject(Long.class)); + results.put("multiValueTextChoiceColumnCount", new SqlSelector(schema, "SELECT COUNT(*) FROM exp.propertydescriptor WHERE rangeuri = ?", PropertyType.MULTI_CHOICE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE_TIME.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithDateColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = 
DD.domainId + WHERE D.rangeURI = ?""", PropertyType.DATE.getTypeUri()).getObject(Long.class)); + + results.put("domainsWithTimeColumnCount", new SqlSelector(schema, """ + SELECT COUNT(DISTINCT DD.DomainURI) FROM + exp.PropertyDescriptor D\s + JOIN exp.PropertyDomain PD ON D.propertyId = PD.propertyid + JOIN exp.DomainDescriptor DD on PD.domainID = DD.domainId + WHERE D.rangeURI = ?""", PropertyType.TIME.getTypeUri()).getObject(Long.class)); + + results.put("maxObjectObjectId", new SqlSelector(schema, "SELECT MAX(ObjectId) FROM exp.Object").getObject(Long.class)); + results.put("maxMaterialRowId", new SqlSelector(schema, "SELECT MAX(RowId) FROM exp.Material").getObject(Long.class)); + + results.putAll(ExperimentService.get().getDomainMetrics()); + + return results; + }); + } + } + + @Override + public void registerMigrationHandlers(@NotNull DatabaseMigrationService service) + { + ExperimentMigrationSchemaHandler handler = new ExperimentMigrationSchemaHandler(); + service.registerSchemaHandler(handler); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("Exclusions"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("premium", DbSchemaType.Bare).getTable("ExclusionMaps"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("ExclusionId", 
"RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerTableHandler(new MigrationTableHandler() + { + @Override + public TableInfo getTableInfo() + { + return DbSchema.get("assayrequest", DbSchemaType.Bare).getTable("RequestRunsJunction"); + } + + @Override + public void adjustFilter(TableInfo sourceTable, SimpleFilter filter, Set containers) + { + // Include experiment runs that were copied + FilterClause includedClause = handler.getIncludedRowIdClause(sourceTable, FieldKey.fromParts("RunId")); + if (includedClause != null) + filter.addClause(includedClause); + } + }); + service.registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); + DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); + service.registerSchemaHandler(dcHandler); + ExperimentDeleteService.setInstance(dcHandler); + } + + @Override + @NotNull + public Collection getSummary(Container c) + { + Collection list = new LinkedList<>(); + int runGroupCount = ExperimentService.get().getExperiments(c, null, false, true).size(); + if (runGroupCount > 0) + list.add(StringUtilsLabKey.pluralize(runGroupCount, "Run Group")); + + User user = HttpView.currentContext().getUser(); + + Set runTypes = ExperimentService.get().getExperimentRunTypes(c); + for (ExperimentRunType runType : runTypes) + { + if (runType == ExperimentRunType.ALL_RUNS_TYPE) + continue; + + long runCount = runType.getRunCount(user, c); + if (runCount > 0) + list.add(runCount + " runs of type " + runType.getDescription()); + } + + int dataClassCount = ExperimentService.get().getDataClasses(c, false).size(); + if (dataClassCount > 0) + list.add(dataClassCount + " Data Class" + (dataClassCount > 1 ? "es" : "")); + + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if (sampleTypeCount > 0) + list.add(sampleTypeCount + " Sample Type" + (sampleTypeCount > 1 ? 
"s" : "")); + + return list; + } + + @Override + public @NotNull ArrayList getDetailedSummary(Container c, User user) + { + ArrayList summaries = new ArrayList<>(); + + // Assay types + long assayTypeCount = AssayService.get().getAssayProtocols(c).stream().filter(p -> p.getContainer().equals(c)).count(); + if (assayTypeCount > 0) + summaries.add(new Summary(assayTypeCount, "Assay Type")); + + // Run count + int runGroupCount = ExperimentService.get().getExperiments(c, user, false, true).size(); + if (runGroupCount > 0) + summaries.add(new Summary(runGroupCount, "Assay run")); + + // Number of Data Classes + List dataClasses = ExperimentService.get().getDataClasses(c, false); + int dataClassCount = dataClasses.size(); + if (dataClassCount > 0) + summaries.add(new Summary(dataClassCount, "Data Class")); + + ExpSchema expSchema = new ExpSchema(user, c); + + // Individual Data Class row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.DataClasses.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 47919: The "DataCount" column is filtered to only count data in the target container + if (table instanceof ExpDataClassTableImpl tableImpl) + tableImpl.setDataCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpDataClassTable.Column.Name.name()); + columns.add(ExpDataClassTable.Column.DataCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + summaries.add(new Summary(count, entry.getKey())); + } + } + + // Sample Types + int sampleTypeCount = SampleTypeService.get().getSampleTypes(c, false).size(); + if 
(sampleTypeCount > 0) + summaries.add(new Summary(sampleTypeCount, "Sample Type")); + + // Individual Sample Type row counts + { + // The table-level container filter is set to ensure data class types are included + // that may not be defined in the target container but may have rows of data in the target container + TableInfo table = ExpSchema.TableType.SampleSets.createTable(expSchema, null, ContainerFilter.Type.CurrentPlusProjectAndShared.create(c, user)); + + // Issue 51557: The "SampleCount" column is filtered to only count data in the target container + if (table instanceof ExpSampleTypeTableImpl tableImpl) + tableImpl.setSampleCountContainerFilter(ContainerFilter.Type.Current.create(c, user)); + + Set columns = new LinkedHashSet<>(); + columns.add(ExpSampleTypeTable.Column.Name.name()); + columns.add(ExpSampleTypeTable.Column.SampleCount.name()); + + Map results = new TableSelector(table, columns).getValueMap(String.class); + for (var entry : results.entrySet()) + { + long count = entry.getValue().longValue(); + if (count > 0) + { + String name = entry.getKey(); + Summary s = name.equals("MixtureBatches") + ? 
new Summary(count, "Batch") + : new Summary(count, name); + summaries.add(s); + } + } + } + + return summaries; + } + + @Override + public @NotNull Set> getIntegrationTests() + { + return Set.of( + DomainImpl.TestCase.class, + DomainPropertyImpl.TestCase.class, + ExpDataTableImpl.TestCase.class, + ExperimentServiceImpl.AuditDomainUriTest.class, + ExperimentServiceImpl.LineageQueryTestCase.class, + ExperimentServiceImpl.ParseInputOutputAliasTestCase.class, + ExperimentServiceImpl.TestCase.class, + ExperimentStressTest.class, + LineagePerfTest.class, + LineageTest.class, + OntologyManager.TestCase.class, + PropertyServiceImpl.TestCase.class, + SampleTypeServiceImpl.TestCase.class, + StorageNameGenerator.TestCase.class, + StorageProvisionerImpl.TestCase.class, + UniqueValueCounterTestCase.class, + XarTestPipelineJob.TestCase.class + ); + } + + @Override + public @NotNull Collection>> getIntegrationTestFactories() + { + List>> list = new ArrayList<>(super.getIntegrationTestFactories()); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpDataClassDataTestCase.jsp")); + list.add(new JspTestCase("/org/labkey/experiment/api/ExpSampleTypeTestCase.jsp")); + return list; + } + + @Override + public @NotNull Set> getUnitTests() + { + return Set.of( + GraphAlgorithms.TestCase.class, + LSIDRelativizer.TestCase.class, + Lsid.TestCase.class, + LsidUtils.TestCase.class, + PropertyController.TestCase.class, + Quantity.TestCase.class, + Unit.TestCase.class + ); + } + + @Override + @NotNull + public Collection getSchemaNames() + { + return List.of( + ExpSchema.SCHEMA_NAME, + DataClassDomainKind.PROVISIONED_SCHEMA_NAME, + SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME + ); + } + + @NotNull + @Override + public Collection getProvisionedSchemaNames() + { + return PageFlowUtil.set(DataClassDomainKind.PROVISIONED_SCHEMA_NAME, SampleTypeDomainKind.PROVISIONED_SCHEMA_NAME); + } + + @Override + public JSONObject getPageContextJson(ContainerUser context) + { + JSONObject json = 
super.getPageContextJson(context); + json.put(SAMPLE_FILES_TABLE, OptionalFeatureService.get().isFeatureEnabled(SAMPLE_FILES_TABLE)); + return json; + } +} diff --git a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java index 7214de841e8..004be996f2e 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataImpl.java @@ -1,985 +1,985 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.lang3.StringUtils; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.json.JSONObject; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.LongHashMap; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.SimpleFilter; -import org.labkey.api.data.SqlSelector; -import org.labkey.api.data.Table; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.TableSelector; -import org.labkey.api.exp.ExperimentDataHandler; -import org.labkey.api.exp.ExperimentException; -import org.labkey.api.exp.Handler; -import org.labkey.api.exp.ObjectProperty; -import org.labkey.api.exp.XarFormatException; -import org.labkey.api.exp.XarSource; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; -import org.labkey.api.exp.api.ExpDataClass; -import org.labkey.api.exp.api.ExpRun; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.query.ExpDataClassDataTable; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.files.FileContentService; -import org.labkey.api.pipeline.PipeRoot; -import org.labkey.api.pipeline.PipelineJob; -import org.labkey.api.pipeline.PipelineService; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryRowReference; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.ValidationException; -import org.labkey.api.search.SearchResultTemplate; -import org.labkey.api.search.SearchScope; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.DataClassReadPermission; -import org.labkey.api.security.permissions.DeletePermission; -import 
org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.FileUtil; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HtmlString; -import org.labkey.api.util.LinkBuilder; -import org.labkey.api.util.MimeMap; -import org.labkey.api.util.NetworkDrive; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.URLHelper; -import org.labkey.api.util.InputBuilder; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.HttpView; -import org.labkey.api.view.NavTree; -import org.labkey.api.view.ViewContext; -import org.labkey.api.webdav.SimpleDocumentResource; -import org.labkey.api.webdav.WebdavResource; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.vfs.FileLike; -import org.labkey.vfs.FileSystemLike; - -import java.io.File; -import java.net.URI; -import java.net.URISyntaxException; -import java.nio.file.Files; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; - -public class ExpDataImpl extends AbstractRunItemImpl implements ExpData -{ - public enum DataOperations - { - Edit("editing", UpdatePermission.class), - EditLineage("editing lineage", UpdatePermission.class), - Delete("deleting", DeletePermission.class), - Move("moving", MoveEntitiesPermission.class); - - private final String _description; // used as a suffix in messaging users about what is not allowed - private final Class _permissionClass; 
- - DataOperations(String description, Class permissionClass) - { - _description = description; - _permissionClass = permissionClass; - } - - public String getDescription() - { - return _description; - } - - public Class getPermissionClass() - { - return _permissionClass; - } - } - - public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, DataClassReadPermission.class); - } - }; - public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { - @Override - public Set getPermittedContainerIds(User user, Map containers) - { - return getPermittedContainerIds(user, containers, MediaReadPermission.class); - } - }; - - /** Cache this because it can be expensive to recompute */ - private Boolean _finalRunOutput; - - /** - * Temporary mapping until experiment.xml contains the mime type - */ - private static final MimeMap MIME_MAP = new MimeMap(); - - static public List fromDatas(List datas) - { - List ret = new ArrayList<>(datas.size()); - for (Data data : datas) - { - ret.add(new ExpDataImpl(data)); - } - return ret; - } - - // For serialization - protected ExpDataImpl() {} - - public ExpDataImpl(Data data) - { - super(data); - } - - @Override - public void setComment(User user, String comment) throws ValidationException - { - setComment(user, comment, true); - } - - @Override - public void setComment(User user, String comment, boolean index) throws ValidationException - { - super.setComment(user, comment); - - if (index) - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - @Nullable - public ActionURL detailsURL() - { - DataType dataType = getDataType(); - if (dataType != null) - { - ActionURL url = 
dataType.getDetailsURL(this); - if (url != null) - return url; - } - - return _object.detailsURL(); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference() - { - return getQueryRowReference(null); - } - - @Override - public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) - { - ExpDataClassImpl dc = getDataClass(user); - if (dc != null) - return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - - // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace - DataType type = getDataType(); - if (type != null) - { - QueryRowReference queryRowReference = type.getQueryRowReference(this); - if (queryRowReference != null) - return queryRowReference; - } - - return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); - } - - @Override - public List getTargetApplications() - { - return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); - } - - @Override - public List getTargetRuns() - { - return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); - } - - @Override - public DataType getDataType() - { - return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); - } - - @Override - public void setDataFileURI(URI uri) - { - ensureUnlocked(); - _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); - } - - @Override - public void save(User user) - { - // Replace the default "Data" cpastype if the Data belongs to a DataClass - ExpDataClassImpl dataClass = getDataClass(); - if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) - setCpasType(dataClass.getLSID()); - - boolean isNew = getRowId() == 0; - save(user, ExperimentServiceImpl.get().getTinfoData(), true); - - if (isNew) - { - if 
(dataClass != null) - { - Map map = new HashMap<>(); - map.put("lsid", getLSID()); - Table.insert(user, dataClass.getTinfo(), map); - } - } - index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); - } - - @Override - protected void save(User user, TableInfo table, boolean ensureObject) - { - assert ensureObject; - super.save(user, table, true); - } - - @Override - public URI getDataFileURI() - { - String url = _object.getDataFileUrl(); - if (url == null) - return null; - try - { - return new URI(_object.getDataFileUrl()); - } - catch (URISyntaxException use) - { - return null; - } - } - - @Override - public ExperimentDataHandler findDataHandler() - { - return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); - } - - @Override - public String getDataFileUrl() - { - return _object.getDataFileUrl(); - } - - @Override - public boolean hasFileScheme() - { - return !FileUtil.hasCloudScheme(getDataFileUrl()); - } - - @Override - @Nullable - public File getFile() - { - return _object.getFile(); - } - - @Override - public @Nullable FileLike getFileLike() - { - return _object.getFileLike(); - } - - @Override - @Nullable - public java.nio.file.Path getFilePath() - { - return _object.getFilePath(); - } - - @Override - public boolean isInlineImage() - { - return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); - } - - @Override - public void delete(User user) - { - delete(user, true); - } - - @Override - public void delete(User user, boolean deleteRunsUsingData) - { - ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); - } - - public String getMimeType() - { - if (null != getDataFileUrl()) - return MIME_MAP.getContentTypeFor(getDataFileUrl()); - else - return null; - } - - @Override - public boolean isFileOnDisk() - { - java.nio.file.Path f = getFilePath(); - if (f != null) - if 
(!FileUtil.hasCloudScheme(f)) - return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); - else - return Files.exists(f); - else - return false; - } - - public boolean isPathAccessible() - { - java.nio.file.Path path = getFilePath(); - return (null != path && Files.exists(path)); - } - - @Override - public String getCpasType() - { - String result = _object.getCpasType(); - if (result != null) - return result; - - ExpDataClass dataClass = getDataClass(); - if (dataClass != null) - return dataClass.getLSID(); - - return ExpData.DEFAULT_CPAS_TYPE; - } - - public void setGenerated(boolean generated) - { - ensureUnlocked(); - _object.setGenerated(generated); - } - - @Override - public boolean isGenerated() - { - return _object.isGenerated(); - } - - @Override - public boolean isFinalRunOutput() - { - if (_finalRunOutput == null) - { - ExpRun run = getRun(); - _finalRunOutput = run != null && run.isFinalOutput(this); - } - return _finalRunOutput.booleanValue(); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass() - { - return getDataClass(null); - } - - @Override - @Nullable - public ExpDataClassImpl getDataClass(@Nullable User user) - { - if (_object.getClassId() != null && getContainer() != null) - { - if (user == null) - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); - else - return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); - } - - return null; - } - - @Override - public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException - { - String dataFileURL = getDataFileUrl(); - if (dataFileURL == null) - return; - - if (xarSource.shouldIgnoreDataFiles()) - { - job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); - return; - } - - job.debug("Trying to load data file " + dataFileURL + " into the system"); - - java.nio.file.Path path = FileUtil.stringToPath(getContainer(), dataFileURL); - - if 
(!Files.exists(path)) - { - job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); - return; - } - - // Check that the file is under the pipeline root to prevent users from referencing a file that they - // don't have permission to import - PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); - if (!xarSource.allowImport(pr, job.getContainer(), path)) - { - if (pr == null) - { - job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); - return; - } - job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); - return; - } - - ExperimentDataHandler handler = findDataHandler(); - try - { - handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); - } - catch (ExperimentException e) - { - throw new XarFormatException(e); - } - - job.debug("Finished trying to load data file " + dataFileURL + " into the system"); - } - - // Get all text and int strings from the data class for indexing - private void getIndexValues( - Map props, - @NotNull ExpDataClassDataTableImpl table, - Set identifiersHi, - Set identifiersMed, - Set identifiersLo, - Set keywordHi, - Set keywordMed, - Set keywordsLo, - JSONObject jsonData - ) - { - CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); - for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) - skipColumns.add(column.name()); - skipColumns.add("Ancestors"); - skipColumns.add("Container"); - - processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); - } - - @Override - @NotNull - public Collection getAliases() - { - TableInfo mapTi = 
ExperimentService.get().getTinfoDataAliasMap(); - TableInfo ti = ExperimentService.get().getTinfoAlias(); - SQLFragment sql = new SQLFragment() - .append("SELECT a.name FROM ").append(mapTi, "m") - .append(" JOIN ").append(ti, "a") - .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); - sql.add(getLSID()); - ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); - return Collections.unmodifiableList(aliases); - } - - @Override - public String getDocumentId() - { - String dataClassName = "-"; - ExpDataClass dc = getDataClass(); - if (dc != null) - dataClassName = dc.getName(); - // why not just data:rowId? - return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); - } - - @Override - protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) - { - return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); - } - - @Override - public Map getObjectProperties() - { - return getObjectProperties(getDataClass()); - } - - @Override - public Map getObjectProperties(@Nullable User user) - { - return getObjectProperties(getDataClass(user)); - } - - private Map getObjectProperties(ExpDataClassImpl dataClass) - { - HashMap ret = new HashMap<>(super.getObjectProperties()); - var ti = null == dataClass ? 
null : dataClass.getTinfo(); - if (null != ti) - { - ret.putAll(getObjectProperties(ti)); - } - return ret; - } - - private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) - { - if (resourceIdentifier.startsWith("data:")) - resourceIdentifier = resourceIdentifier.substring("data:".length()); - - Path path = Path.parse(resourceIdentifier); - if (path.size() != 3) - return null; - String containerId = path.get(0); - String dataClassName = path.get(1); - String rowIdString = path.get(2); - - long rowId; - try - { - rowId = Long.parseLong(rowIdString); - if (rowId == 0) - return null; - } - catch (NumberFormatException ex) - { - return null; - } - - Container c = ContainerManager.getForId(containerId); - if (c == null) - return null; - - ExpDataClass dc = null; - if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) - { - String dcKey = containerId + '-' + dataClassName; - dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); - } - - return new Pair<>(rowId, dc); - } - - @Nullable - public static ExpDataImpl fromDocumentId(String resourceIdentifier) - { - Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); - if (rowIdDataClass == null) - return null; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - if (dc != null) - return ExperimentServiceImpl.get().getExpData(dc, rowId); - else - return ExperimentServiceImpl.get().getExpData(rowId); - } - - @Nullable - public static Map fromDocumentIds(Collection resourceIdentifiers) - { - Map rowIdIdentifierMap = new LongHashMap<>(); - Map dcCache = new HashMap<>(); - Map dcMap = new LongHashMap<>(); - Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass - List rowIds = new ArrayList<>(); // data rowIds without dataClass - for (String resourceIdentifier : resourceIdentifiers) - { - Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); - if (rowIdDataClass == null) - continue; - - Long rowId = rowIdDataClass.first; - ExpDataClass dc = rowIdDataClass.second; - - rowIdIdentifierMap.put(rowId, resourceIdentifier); - - if (dc != null) - { - dcMap.put(dc.getRowId(), dc); - dcRowIdMap - .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) - .add(rowId); - } - else - rowIds.add(rowId); - } - - List expDatas = new ArrayList<>(); - if (!rowIds.isEmpty()) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); - - if (!dcRowIdMap.isEmpty()) - { - for (Long dataClassId : dcRowIdMap.keySet()) - { - ExpDataClass dc = dcMap.get(dataClassId); - if (dc != null) - expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); - } - } - - Map identifierDatas = new HashMap<>(); - for (ExpData data : expDatas) - { - identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); - } - - return identifierDatas; - } - - @Override - public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) - { - java.nio.file.Path path = getFilePath(); - if (path == null) - { - return null; - } - - Container c = getContainer(); - if (c == null) - { - return null; - } - - return FileContentService.get().getWebDavUrl(path, c, type); - } - - @Override - public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) - { - Container container = getContainer(); - if (container == null) - return null; - - Map props = new HashMap<>(); - JSONObject jsonData = new JSONObject(); - Set keywordsHi = new HashSet<>(); - Set keywordsMed = new HashSet<>(); - Set keywordsLo = new HashSet<>(); - - Set identifiersHi = new HashSet<>(); - Set identifiersMed = new HashSet<>(); - Set identifiersLo = new HashSet<>(); - - StringBuilder body = new StringBuilder(); - - // Name is an identifier with the highest weight - identifiersHi.add(getName()); - keywordsMed.add(getName()); // also add to keywords since 
those are stemmed - - // Description is added as a keywordsLo -- in Biologics it is common for the description to - // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as - // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. - // CONSIDER: tokenize the description and extract identifiers - if (null != getDescription()) - keywordsLo.add(getDescription()); - - String comment = getComment(); - if (comment != null) - keywordsMed.add(comment); - - // Add aliases in parentheses in the title - StringBuilder title = new StringBuilder(getName()); - Collection aliases = getAliases(); - if (!aliases.isEmpty()) - { - title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); - identifiersHi.addAll(aliases); - } - - ExpDataClassImpl dc = getDataClass(User.getSearchUser()); - if (dc != null) - { - ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); - NavTree t = new NavTree(dc.getName(), show); - String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); - props.put(SearchService.PROPERTY.navtrail.toString(), nav); - - props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); - body.append(dc.getName()); - - if (tableInfo == null) - tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); - - if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) - throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); - - if (!expDataClassDataTable.getDataClass().equals(dc)) - throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); - - // Collect other text columns and lookup display columns - getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); - } - - // === Stored, not indexed - if (dc != null && dc.isMedia()) - props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); - else - props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); - props.put(SearchService.PROPERTY.title.toString(), title.toString()); - props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); - - ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); - view.setExtraPath(container.getId()); - String docId = getDocumentId(); - - // Generate a summary explicitly instead of relying on a summary to be extracted - // from the document body. Placing lookup values and the description in the body - // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
- StringBuilder summary = new StringBuilder(); - if (StringUtils.isNotEmpty(getDescription())) - summary.append(getDescription()).append("\n"); - - appendTokens(summary, keywordsMed); - appendTokens(summary, identifiersMed); - appendTokens(summary, identifiersLo); - - props.put(SearchService.PROPERTY.summary.toString(), summary); - - return new ExpDataResource( - getRowId(), - new Path(docId), - docId, - container.getEntityId(), - "text/plain", - body.toString(), - view, - props, - getCreatedBy(), - getCreated(), - getModifiedBy(), - getModified() - ); - } - - private static void appendTokens(StringBuilder sb, Collection toks) - { - if (toks.isEmpty()) - return; - - sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); - } - - private static class ExpDataResource extends SimpleDocumentResource - { - final long _rowId; - - public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) - { - super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); - _rowId = rowId; - } - - @Override - public void setLastIndexed(long ms, long modified) - { - ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); - } - } - - public static class DataSearchResultTemplate implements SearchResultTemplate - { - public static final String NAME = "data"; - public static final String PROPERTY = "dataclass"; - - @Nullable - @Override - public String getName() - { - return NAME; - } - - private ExpDataClass getDataClass() - { - if (HttpView.hasCurrentView()) - { - ViewContext ctx = HttpView.currentContext(); - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); - } - return null; - } - - @Nullable - @Override - public String getCategories() - { - ExpDataClass dataClass = getDataClass(); - - if (dataClass != null && dataClass.isMedia()) - return expMediaDataCategory.getName(); - - return expDataCategory.getName(); - } - - @Nullable - @Override - public SearchScope getSearchScope() - { - return SearchScope.FolderAndSubfolders; - } - - @NotNull - @Override - public String getResultNameSingular() - { - ExpDataClass dc = getDataClass(); - if (dc != null) - return dc.getName(); - return "data"; - } - - @NotNull - @Override - public String getResultNamePlural() - { - return getResultNameSingular(); - } - - @Override - public boolean includeNavigationLinks() - { - return true; - } - - @Override - public boolean includeAdvanceUI() - { - return false; - } - - @Nullable - @Override - public HtmlString getExtraHtml(ViewContext ctx) - { - String q = ctx.getActionURL().getParameter("q"); - - if (StringUtils.isNotBlank(q)) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); - url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); - - StringBuilder html = new StringBuilder(); - html.append("
"); - - appendParam(html, null, dataclass, "All", false, url); - for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) - { - appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); - } - - html.append("
"); - return HtmlString.unsafe(html.toString()); - } - else - { - return null; - } - } - - private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) - { - sb.append(""); - - if (!Objects.equals(dataclass, current)) - { - if (addParam) - url = url.clone().addParameter(PROPERTY, dataclass); - - sb.append(LinkBuilder.simpleLink(label, url)); - } - else - { - sb.append(label); - } - - sb.append(" "); - } - - @Override - public HtmlString getHiddenInputsHtml(ViewContext ctx) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); - } - - return null; - } - - - @Override - public String reviseQuery(ViewContext ctx, String q) - { - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - - if (null != dataclass) - return "+(" + q + ") +" + PROPERTY + ":" + dataclass; - else - return q; - } - - @Override - public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) - { - SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); - - String dataclass = ctx.getActionURL().getParameter(PROPERTY); - if (dataclass != null) - { - String text = root.getText(); - root.setText(text + " - " + dataclass); - } - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.labkey.experiment.api; + +import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.json.JSONObject; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.LongHashMap; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SqlSelector; +import org.labkey.api.data.Table; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.ExperimentDataHandler; +import org.labkey.api.exp.ExperimentException; +import org.labkey.api.exp.Handler; +import org.labkey.api.exp.ObjectProperty; +import org.labkey.api.exp.XarFormatException; +import org.labkey.api.exp.XarSource; +import org.labkey.api.exp.api.DataType; +import org.labkey.api.exp.api.ExpData; +import org.labkey.api.exp.api.ExpDataClass; +import org.labkey.api.exp.api.ExpRun; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataClassDataTable; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.files.FileContentService; +import org.labkey.api.pipeline.PipeRoot; +import org.labkey.api.pipeline.PipelineJob; +import org.labkey.api.pipeline.PipelineService; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryRowReference; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.ValidationException; +import org.labkey.api.search.SearchResultTemplate; +import org.labkey.api.search.SearchScope; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.DataClassReadPermission; +import 
org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.FileUtil; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HtmlString; +import org.labkey.api.util.LinkBuilder; +import org.labkey.api.util.MimeMap; +import org.labkey.api.util.NetworkDrive; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringUtilsLabKey; +import org.labkey.api.util.URLHelper; +import org.labkey.api.util.InputBuilder; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.HttpView; +import org.labkey.api.view.NavTree; +import org.labkey.api.view.ViewContext; +import org.labkey.api.webdav.SimpleDocumentResource; +import org.labkey.api.webdav.WebdavResource; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.vfs.FileLike; +import org.labkey.vfs.FileSystemLike; + +import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.stream.Collectors; + +import static org.labkey.api.exp.query.ExpSchema.SCHEMA_EXP_DATA; + +public class ExpDataImpl extends AbstractRunItemImpl implements ExpData +{ + public enum DataOperations + { + Edit("editing", UpdatePermission.class), + EditLineage("editing lineage", UpdatePermission.class), + Delete("deleting", DeletePermission.class), + Move("moving", MoveEntitiesPermission.class); + + private final String _description; // used as a suffix in messaging users 
about what is not allowed + private final Class _permissionClass; + + DataOperations(String description, Class permissionClass) + { + _description = description; + _permissionClass = permissionClass; + } + + public String getDescription() + { + return _description; + } + + public Class getPermissionClass() + { + return _permissionClass; + } + } + + public static final SearchService.SearchCategory expDataCategory = new SearchService.SearchCategory("data", "ExpData", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, DataClassReadPermission.class); + } + }; + public static final SearchService.SearchCategory expMediaDataCategory = new SearchService.SearchCategory("mediaData", "ExpData for media objects", false) { + @Override + public Set getPermittedContainerIds(User user, Map containers) + { + return getPermittedContainerIds(user, containers, MediaReadPermission.class); + } + }; + + /** Cache this because it can be expensive to recompute */ + private Boolean _finalRunOutput; + + /** + * Temporary mapping until experiment.xml contains the mime type + */ + private static final MimeMap MIME_MAP = new MimeMap(); + + static public List fromDatas(List datas) + { + List ret = new ArrayList<>(datas.size()); + for (Data data : datas) + { + ret.add(new ExpDataImpl(data)); + } + return ret; + } + + // For serialization + protected ExpDataImpl() {} + + public ExpDataImpl(Data data) + { + super(data); + } + + @Override + public void setComment(User user, String comment) throws ValidationException + { + setComment(user, comment, true); + } + + @Override + public void setComment(User user, String comment, boolean index) throws ValidationException + { + super.setComment(user, comment); + + if (index) + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + @Nullable + public ActionURL detailsURL() + { + DataType dataType = 
getDataType(); + if (dataType != null) + { + ActionURL url = dataType.getDetailsURL(this); + if (url != null) + return url; + } + + return _object.detailsURL(); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference() + { + return getQueryRowReference(null); + } + + @Override + public @Nullable QueryRowReference getQueryRowReference(@Nullable User user) + { + ExpDataClassImpl dc = getDataClass(user); + if (dc != null) + return new QueryRowReference(getContainer(), SCHEMA_EXP_DATA, dc.getName(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + + // Issue 40123: see MedImmuneDataHandler MEDIMMUNE_DATA_TYPE, this claims the "Data" namespace + DataType type = getDataType(); + if (type != null) + { + QueryRowReference queryRowReference = type.getQueryRowReference(this); + if (queryRowReference != null) + return queryRowReference; + } + + return new QueryRowReference(getContainer(), ExpSchema.SCHEMA_EXP, ExpSchema.TableType.Data.name(), FieldKey.fromParts(ExpDataTable.Column.RowId), getRowId()); + } + + @Override + public List getTargetApplications() + { + return getTargetApplications(new SimpleFilter(FieldKey.fromParts("DataId"), getRowId()), ExperimentServiceImpl.get().getTinfoDataInput()); + } + + @Override + public List getTargetRuns() + { + return getTargetRuns(ExperimentServiceImpl.get().getTinfoDataInput(), "DataId"); + } + + @Override + public DataType getDataType() + { + return ExperimentService.get().getDataType(getLSIDNamespacePrefix()); + } + + @Override + public void setDataFileURI(URI uri) + { + ensureUnlocked(); + _object.setDataFileUrl(ExpData.normalizeDataFileURI(uri)); + } + + @Override + public void save(User user) + { + // Replace the default "Data" cpastype if the Data belongs to a DataClass + ExpDataClassImpl dataClass = getDataClass(); + if (dataClass != null && ExpData.DEFAULT_CPAS_TYPE.equals(getCpasType())) + setCpasType(dataClass.getLSID()); + + boolean isNew = getRowId() == 0; + save(user, 
ExperimentServiceImpl.get().getTinfoData(), true); + + if (isNew) + { + if (dataClass != null) + { + Map map = new HashMap<>(); + map.put("lsid", getLSID()); + Table.insert(user, dataClass.getTinfo(), map); + } + } + index(SearchService.get().defaultTask().getQueue(getContainer(), SearchService.PRIORITY.modified), null); + } + + @Override + protected void save(User user, TableInfo table, boolean ensureObject) + { + assert ensureObject; + super.save(user, table, true); + } + + @Override + public URI getDataFileURI() + { + String url = _object.getDataFileUrl(); + if (url == null) + return null; + try + { + return new URI(_object.getDataFileUrl()); + } + catch (URISyntaxException use) + { + return null; + } + } + + @Override + public ExperimentDataHandler findDataHandler() + { + return Handler.Priority.findBestHandler(ExperimentServiceImpl.get().getExperimentDataHandlers(), this); + } + + @Override + public String getDataFileUrl() + { + return _object.getDataFileUrl(); + } + + @Override + public boolean hasFileScheme() + { + return !FileUtil.hasCloudScheme(getDataFileUrl()); + } + + @Override + @Nullable + public File getFile() + { + return _object.getFile(); + } + + @Override + public @Nullable FileLike getFileLike() + { + return _object.getFileLike(); + } + + @Override + @Nullable + public java.nio.file.Path getFilePath() + { + return _object.getFilePath(); + } + + @Override + public boolean isInlineImage() + { + return null != getFile() && MIME_MAP.isInlineImageFor(getFile()); + } + + @Override + public void delete(User user) + { + delete(user, true); + } + + @Override + public void delete(User user, boolean deleteRunsUsingData) + { + ExperimentServiceImpl.get().deleteDataByRowIds(user, getContainer(), Collections.singleton(getRowId()), deleteRunsUsingData); + } + + public String getMimeType() + { + if (null != getDataFileUrl()) + return MIME_MAP.getContentTypeFor(getDataFileUrl()); + else + return null; + } + + @Override + public boolean isFileOnDisk() + { + 
java.nio.file.Path f = getFilePath(); + if (f != null) + if (!FileUtil.hasCloudScheme(f)) + return NetworkDrive.exists(f.toFile()) && !Files.isDirectory(f); + else + return Files.exists(f); + else + return false; + } + + public boolean isPathAccessible() + { + java.nio.file.Path path = getFilePath(); + return (null != path && Files.exists(path)); + } + + @Override + public String getCpasType() + { + String result = _object.getCpasType(); + if (result != null) + return result; + + ExpDataClass dataClass = getDataClass(); + if (dataClass != null) + return dataClass.getLSID(); + + return ExpData.DEFAULT_CPAS_TYPE; + } + + public void setGenerated(boolean generated) + { + ensureUnlocked(); + _object.setGenerated(generated); + } + + @Override + public boolean isGenerated() + { + return _object.isGenerated(); + } + + @Override + public boolean isFinalRunOutput() + { + if (_finalRunOutput == null) + { + ExpRun run = getRun(); + _finalRunOutput = run != null && run.isFinalOutput(this); + } + return _finalRunOutput.booleanValue(); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass() + { + return getDataClass(null); + } + + @Override + @Nullable + public ExpDataClassImpl getDataClass(@Nullable User user) + { + if (_object.getClassId() != null && getContainer() != null) + { + if (user == null) + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId()); + else + return ExperimentServiceImpl.get().getDataClass(getContainer(), _object.getClassId(), true); + } + + return null; + } + + @Override + public void importDataFile(PipelineJob job, XarSource xarSource) throws ExperimentException + { + String dataFileURL = getDataFileUrl(); + if (dataFileURL == null) + return; + + if (xarSource.shouldIgnoreDataFiles()) + { + job.debug("Skipping load of data file " + dataFileURL + " based on the XAR source"); + return; + } + + job.debug("Trying to load data file " + dataFileURL + " into the system"); + + java.nio.file.Path path = 
FileUtil.stringToPath(getContainer(), dataFileURL); + + if (!Files.exists(path)) + { + job.debug("Unable to find the data file " + FileUtil.getAbsolutePath(getContainer(), path) + " on disk."); + return; + } + + // Check that the file is under the pipeline root to prevent users from referencing a file that they + // don't have permission to import + PipeRoot pr = PipelineService.get().findPipelineRoot(job.getContainer()); + if (!xarSource.allowImport(pr, job.getContainer(), path)) + { + if (pr == null) + { + job.warn("No pipeline root was set, skipping load of file " + FileUtil.getAbsolutePath(getContainer(), path)); + return; + } + job.debug("The data file " + FileUtil.getAbsolutePath(getContainer(), path) + " is not under the folder's pipeline root: " + pr + ". It will not be loaded directly, but may be loaded if referenced from other files that are under the pipeline root."); + return; + } + + ExperimentDataHandler handler = findDataHandler(); + try + { + handler.importFile(this, FileSystemLike.wrapFile(path), job.getInfo(), job.getLogger(), xarSource.getXarContext()); + } + catch (ExperimentException e) + { + throw new XarFormatException(e); + } + + job.debug("Finished trying to load data file " + dataFileURL + " into the system"); + } + + // Get all text and int strings from the data class for indexing + private void getIndexValues( + Map props, + @NotNull ExpDataClassDataTableImpl table, + Set identifiersHi, + Set identifiersMed, + Set identifiersLo, + Set keywordHi, + Set keywordMed, + Set keywordsLo, + JSONObject jsonData + ) + { + CaseInsensitiveHashSet skipColumns = new CaseInsensitiveHashSet(); + for (ExpDataClassDataTable.Column column : ExpDataClassDataTable.Column.values()) + skipColumns.add(column.name()); + skipColumns.add("Ancestors"); + skipColumns.add("Container"); + + processIndexValues(props, table, skipColumns, identifiersHi, identifiersMed, identifiersLo, keywordHi, keywordMed, keywordsLo, jsonData); + } + + @Override + @NotNull + public 
Collection getAliases() + { + TableInfo mapTi = ExperimentService.get().getTinfoDataAliasMap(); + TableInfo ti = ExperimentService.get().getTinfoAlias(); + SQLFragment sql = new SQLFragment() + .append("SELECT a.name FROM ").append(mapTi, "m") + .append(" JOIN ").append(ti, "a") + .append(" ON m.alias = a.RowId WHERE m.lsid = ? "); + sql.add(getLSID()); + ArrayList aliases = new SqlSelector(mapTi.getSchema(), sql).getArrayList(String.class); + return Collections.unmodifiableList(aliases); + } + + @Override + public String getDocumentId() + { + String dataClassName = "-"; + ExpDataClass dc = getDataClass(); + if (dc != null) + dataClassName = dc.getName(); + // why not just data:rowId? + return "data:" + new Path(getContainer().getId(), dataClassName, Long.toString(getRowId())).encode(); + } + + @Override + protected TableSelector getObjectPropertiesSelector(@NotNull TableInfo table) + { + return new TableSelector(table, new SimpleFilter(ExpDataTable.Column.RowId.fieldKey(), getRowId()), null); + } + + @Override + public Map getObjectProperties() + { + return getObjectProperties(getDataClass()); + } + + @Override + public Map getObjectProperties(@Nullable User user) + { + return getObjectProperties(getDataClass(user)); + } + + private Map getObjectProperties(ExpDataClassImpl dataClass) + { + HashMap ret = new HashMap<>(super.getObjectProperties()); + var ti = null == dataClass ? 
null : dataClass.getTinfo(); + if (null != ti) + { + ret.putAll(getObjectProperties(ti)); + } + return ret; + } + + private static Pair getRowIdClassNameContainerFromDocumentId(String resourceIdentifier, Map dcCache) + { + if (resourceIdentifier.startsWith("data:")) + resourceIdentifier = resourceIdentifier.substring("data:".length()); + + Path path = Path.parse(resourceIdentifier); + if (path.size() != 3) + return null; + String containerId = path.get(0); + String dataClassName = path.get(1); + String rowIdString = path.get(2); + + long rowId; + try + { + rowId = Long.parseLong(rowIdString); + if (rowId == 0) + return null; + } + catch (NumberFormatException ex) + { + return null; + } + + Container c = ContainerManager.getForId(containerId); + if (c == null) + return null; + + ExpDataClass dc = null; + if (!StringUtils.isEmpty(dataClassName) && !dataClassName.equals("-")) + { + String dcKey = containerId + '-' + dataClassName; + dc = dcCache.computeIfAbsent(dcKey, (x) -> ExperimentServiceImpl.get().getDataClass(c, dataClassName)); + } + + return new Pair<>(rowId, dc); + } + + @Nullable + public static ExpDataImpl fromDocumentId(String resourceIdentifier) + { + Pair rowIdDataClass = getRowIdClassNameContainerFromDocumentId(resourceIdentifier, new HashMap<>()); + if (rowIdDataClass == null) + return null; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + if (dc != null) + return ExperimentServiceImpl.get().getExpData(dc, rowId); + else + return ExperimentServiceImpl.get().getExpData(rowId); + } + + @Nullable + public static Map fromDocumentIds(Collection resourceIdentifiers) + { + Map rowIdIdentifierMap = new LongHashMap<>(); + Map dcCache = new HashMap<>(); + Map dcMap = new LongHashMap<>(); + Map> dcRowIdMap = new LongHashMap<>(); // data rowIds with dataClass + List rowIds = new ArrayList<>(); // data rowIds without dataClass + for (String resourceIdentifier : resourceIdentifiers) + { + Pair rowIdDataClass = 
getRowIdClassNameContainerFromDocumentId(resourceIdentifier, dcCache); + if (rowIdDataClass == null) + continue; + + Long rowId = rowIdDataClass.first; + ExpDataClass dc = rowIdDataClass.second; + + rowIdIdentifierMap.put(rowId, resourceIdentifier); + + if (dc != null) + { + dcMap.put(dc.getRowId(), dc); + dcRowIdMap + .computeIfAbsent(dc.getRowId(), (k) -> new ArrayList<>()) + .add(rowId); + } + else + rowIds.add(rowId); + } + + List expDatas = new ArrayList<>(); + if (!rowIds.isEmpty()) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(rowIds)); + + if (!dcRowIdMap.isEmpty()) + { + for (Long dataClassId : dcRowIdMap.keySet()) + { + ExpDataClass dc = dcMap.get(dataClassId); + if (dc != null) + expDatas.addAll(ExperimentServiceImpl.get().getExpDatas(dc, dcRowIdMap.get(dataClassId))); + } + } + + Map identifierDatas = new HashMap<>(); + for (ExpData data : expDatas) + { + identifierDatas.put(rowIdIdentifierMap.get(data.getRowId()), data); + } + + return identifierDatas; + } + + @Override + public @Nullable URI getWebDavURL(@NotNull FileContentService.PathType type) + { + java.nio.file.Path path = getFilePath(); + if (path == null) + { + return null; + } + + Container c = getContainer(); + if (c == null) + { + return null; + } + + return FileContentService.get().getWebDavUrl(path, c, type); + } + + @Override + public @Nullable WebdavResource createIndexDocument(@Nullable TableInfo tableInfo) + { + Container container = getContainer(); + if (container == null) + return null; + + Map props = new HashMap<>(); + JSONObject jsonData = new JSONObject(); + Set keywordsHi = new HashSet<>(); + Set keywordsMed = new HashSet<>(); + Set keywordsLo = new HashSet<>(); + + Set identifiersHi = new HashSet<>(); + Set identifiersMed = new HashSet<>(); + Set identifiersLo = new HashSet<>(); + + StringBuilder body = new StringBuilder(); + + // Name is an identifier with the highest weight + identifiersHi.add(getName()); + keywordsMed.add(getName()); // also add to keywords since 
those are stemmed + + // Description is added as a keywordsLo -- in Biologics it is common for the description to + // contain names of other DataClasses, e.g., "Mature desK of PS-10", which would be tokenized as + // [mature, desk, ps, 10] if added it as a keyword so we lower its priority to avoid useless results. + // CONSIDER: tokenize the description and extract identifiers + if (null != getDescription()) + keywordsLo.add(getDescription()); + + String comment = getComment(); + if (comment != null) + keywordsMed.add(comment); + + // Add aliases in parentheses in the title + StringBuilder title = new StringBuilder(getName()); + Collection aliases = getAliases(); + if (!aliases.isEmpty()) + { + title.append(" (").append(StringUtils.join(aliases, ", ")).append(")"); + identifiersHi.addAll(aliases); + } + + ExpDataClassImpl dc = getDataClass(User.getSearchUser()); + if (dc != null) + { + ActionURL show = new ActionURL(ExperimentController.ShowDataClassAction.class, container).addParameter("rowId", dc.getRowId()); + NavTree t = new NavTree(dc.getName(), show); + String nav = NavTree.toJS(Collections.singleton(t), null, false, true).toString(); + props.put(SearchService.PROPERTY.navtrail.toString(), nav); + + props.put(DataSearchResultTemplate.PROPERTY, dc.getName()); + body.append(dc.getName()); + + if (tableInfo == null) + tableInfo = QueryService.get().getUserSchema(User.getSearchUser(), container, SCHEMA_EXP_DATA).getTable(dc.getName()); + + if (!(tableInfo instanceof ExpDataClassDataTableImpl expDataClassDataTable)) + throw new IllegalArgumentException(String.format("Unable to index data class item in %s. 
Table must be an instance of %s", dc.getName(), ExpDataClassDataTableImpl.class.getName())); + + if (!expDataClassDataTable.getDataClass().equals(dc)) + throw new IllegalArgumentException(String.format("Data class table mismatch for %s", dc.getName())); + + // Collect other text columns and lookup display columns + getIndexValues(props, expDataClassDataTable, identifiersHi, identifiersMed, identifiersLo, keywordsHi, keywordsMed, keywordsLo, jsonData); + } + + // === Stored, not indexed + if (dc != null && dc.isMedia()) + props.put(SearchService.PROPERTY.categories.toString(), expMediaDataCategory.toString()); + else + props.put(SearchService.PROPERTY.categories.toString(), expDataCategory.toString()); + props.put(SearchService.PROPERTY.title.toString(), title.toString()); + props.put(SearchService.PROPERTY.jsonData.toString(), jsonData); + + ActionURL view = ExperimentController.ExperimentUrlsImpl.get().getDataDetailsURL(this); + view.setExtraPath(container.getId()); + String docId = getDocumentId(); + + // Generate a summary explicitly instead of relying on a summary to be extracted + // from the document body. Placing lookup values and the description in the body + // would tokenize using the English analyzer and index "PS-12" as ["ps", "12"] which leads to poor results. 
+ StringBuilder summary = new StringBuilder(); + if (StringUtils.isNotEmpty(getDescription())) + summary.append(getDescription()).append("\n"); + + appendTokens(summary, keywordsMed); + appendTokens(summary, identifiersMed); + appendTokens(summary, identifiersLo); + + props.put(SearchService.PROPERTY.summary.toString(), summary); + + return new ExpDataResource( + getRowId(), + new Path(docId), + docId, + container.getEntityId(), + "text/plain", + body.toString(), + view, + props, + getCreatedBy(), + getCreated(), + getModifiedBy(), + getModified() + ); + } + + private static void appendTokens(StringBuilder sb, Collection toks) + { + if (toks.isEmpty()) + return; + + sb.append(toks.stream().map(s -> s.length() > 30 ? StringUtilsLabKey.leftSurrogatePairFriendly(s, 30) + "\u2026" : s).collect(Collectors.joining(", "))).append("\n"); + } + + private static class ExpDataResource extends SimpleDocumentResource + { + final long _rowId; + + public ExpDataResource(long rowId, Path path, String documentId, GUID containerId, String contentType, String body, URLHelper executeUrl, Map properties, User createdBy, Date created, User modifiedBy, Date modified) + { + super(path, documentId, containerId, contentType, body, executeUrl, createdBy, created, modifiedBy, modified, properties); + _rowId = rowId; + } + + @Override + public void setLastIndexed(long ms, long modified) + { + ExperimentServiceImpl.get().setDataLastIndexed(_rowId, ms); + } + } + + public static class DataSearchResultTemplate implements SearchResultTemplate + { + public static final String NAME = "data"; + public static final String PROPERTY = "dataclass"; + + @Nullable + @Override + public String getName() + { + return NAME; + } + + private ExpDataClass getDataClass() + { + if (HttpView.hasCurrentView()) + { + ViewContext ctx = HttpView.currentContext(); + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + return ExperimentService.get().getDataClass(ctx.getContainer(), 
dataclass, true); + } + return null; + } + + @Nullable + @Override + public String getCategories() + { + ExpDataClass dataClass = getDataClass(); + + if (dataClass != null && dataClass.isMedia()) + return expMediaDataCategory.getName(); + + return expDataCategory.getName(); + } + + @Nullable + @Override + public SearchScope getSearchScope() + { + return SearchScope.FolderAndSubfolders; + } + + @NotNull + @Override + public String getResultNameSingular() + { + ExpDataClass dc = getDataClass(); + if (dc != null) + return dc.getName(); + return "data"; + } + + @NotNull + @Override + public String getResultNamePlural() + { + return getResultNameSingular(); + } + + @Override + public boolean includeNavigationLinks() + { + return true; + } + + @Override + public boolean includeAdvanceUI() + { + return false; + } + + @Nullable + @Override + public HtmlString getExtraHtml(ViewContext ctx) + { + String q = ctx.getActionURL().getParameter("q"); + + if (StringUtils.isNotBlank(q)) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + ActionURL url = ctx.cloneActionURL().deleteParameter(PROPERTY); + url.replaceParameter(ActionURL.Param._dc, (int)Math.round(1000 * Math.random())); + + StringBuilder html = new StringBuilder(); + html.append("
"); + + appendParam(html, null, dataclass, "All", false, url); + for (ExpDataClass dc : ExperimentService.get().getDataClasses(ctx.getContainer(), true)) + { + appendParam(html, dc.getName(), dataclass, dc.getName(), true, url); + } + + html.append("
"); + return HtmlString.unsafe(html.toString()); + } + else + { + return null; + } + } + + private void appendParam(StringBuilder sb, @Nullable String dataclass, @Nullable String current, @NotNull String label, boolean addParam, ActionURL url) + { + sb.append(""); + + if (!Objects.equals(dataclass, current)) + { + if (addParam) + url = url.clone().addParameter(PROPERTY, dataclass); + + sb.append(LinkBuilder.simpleLink(label, url)); + } + else + { + sb.append(label); + } + + sb.append(" "); + } + + @Override + public HtmlString getHiddenInputsHtml(ViewContext ctx) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + return InputBuilder.hidden().id("search-type").name(PROPERTY).value(dataclass).getHtmlString(); + } + + return null; + } + + + @Override + public String reviseQuery(ViewContext ctx, String q) + { + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + + if (null != dataclass) + return "+(" + q + ") +" + PROPERTY + ":" + dataclass; + else + return q; + } + + @Override + public void addNavTrail(NavTree root, ViewContext ctx, @NotNull SearchScope scope, @Nullable String category) + { + SearchResultTemplate.super.addNavTrail(root, ctx, scope, category); + + String dataclass = ctx.getActionURL().getParameter(PROPERTY); + if (dataclass != null) + { + String text = root.getText(); + root.setText(text + " - " + dataclass); + } + } + } +} From 4d9ff01456a44b2988aa6fc805f9cdca397588b0 Mon Sep 17 00:00:00 2001 From: XingY Date: Tue, 10 Mar 2026 20:10:27 -0700 Subject: [PATCH 04/17] fix build --- ...DataClassUpdateAddColumnsDataIterator.java | 167 ++++++++++++++++++ .../dataiterator/StatementDataIterator.java | 7 +- .../api/query/DefaultQueryUpdateService.java | 18 ++ .../api/ExpDataClassDataTableImpl.java | 20 ++- .../experiment/api/ExpDataClassType.java | 3 + .../api/SampleTypeUpdateServiceDI.java | 18 +- 6 files changed, 213 insertions(+), 20 deletions(-) create mode 100644 
api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java new file mode 100644 index 00000000000..277438cae6f --- /dev/null +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -0,0 +1,167 @@ +package org.labkey.api.dataiterator; + +import org.apache.commons.lang3.StringUtils; +import org.labkey.api.collections.IntHashMap; +import org.labkey.api.collections.Sets; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.CompareType; +import org.labkey.api.data.Container; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.FieldKey; + +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Supplier; + +import static org.labkey.api.exp.query.ExpDataTable.Column.LSID; +import static org.labkey.api.exp.query.ExpDataTable.Column.ClassId; +import static org.labkey.api.util.IntegerUtils.asInteger; + +/** + * DataIterator that adds the LSID column for DataClass update operations. + * Queries the LSID from exp.data based on the provided key (rowId or name) and dataClassId. + * The LSID is needed downstream for attachment handling. 
+ */ +public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator +{ + private final Container _targetContainer; + private final TableInfo _tableInfo; + final CachingDataIterator _unwrapped; + + private final long _dataClassId; + final int _lsidColIndex; + final ColumnInfo pkColumn; + final Supplier pkSupplier; + + int lastPrefetchRowNumber = -1; + final IntHashMap lsids = new IntHashMap<>(); + + public DataClassUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, Container container, long dataClassId, String keyColumnName) + { + super(in); + this._unwrapped = (CachingDataIterator)in; + _tableInfo = target; + _targetContainer = container; + _dataClassId = dataClassId; + + var map = DataIteratorUtil.createColumnNameMap(in); + + this._lsidColIndex = map.get(ExpDataTable.Column.LSID.name()); + + Integer index = map.get(keyColumnName); + ColumnInfo col = target.getColumn(keyColumnName); + if (null == index || null == col) + throw new IllegalArgumentException("Key column not found: " + keyColumnName); + pkSupplier = in.getSupplier(index); + pkColumn = col; + } + + @Override + public Supplier getSupplier(int i) + { + if (i != _lsidColIndex) + return _delegate.getSupplier(i); + return () -> get(i); + } + + @Override + public Object get(int i) + { + Integer rowNumber = asInteger(_delegate.get(0)); + + if (i == _lsidColIndex) + return lsids.get(rowNumber); + + return _delegate.get(i); + } + + @Override + public boolean isConstant(int i) + { + if (i != _lsidColIndex) + return _delegate.isConstant(i); + return false; + } + + @Override + public Object getConstantValue(int i) + { + if (i != _lsidColIndex) + return _delegate.getConstantValue(i); + return null; + } + + protected void prefetchExisting() throws BatchValidationException + { + Integer rowNumber = asInteger(_delegate.get(0)); + if (rowNumber <= lastPrefetchRowNumber) + return; + + lsids.clear(); + + int rowsToFetch = 50; + String keyFieldName = pkColumn.getName(); + boolean numericKey = 
pkColumn.isNumericType(); + JdbcType jdbcType = pkColumn.getJdbcType(); + Map rowKeyMap = new LinkedHashMap<>(); + Map keyRowMap = new LinkedHashMap<>(); + do + { + lastPrefetchRowNumber = asInteger(_delegate.get(0)); + Object keyObj = pkSupplier.get(); + Object key = jdbcType.convert(keyObj); + + if (numericKey) + { + if (null == key) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + } + else if (StringUtils.isEmpty((String) key)) + throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); + + rowKeyMap.put(lastPrefetchRowNumber, key); + keyRowMap.put(key, lastPrefetchRowNumber); + lsids.put(lastPrefetchRowNumber, null); + } + while (--rowsToFetch > 0 && _delegate.next()); + + SimpleFilter filter = new SimpleFilter(ClassId.fieldKey(), _dataClassId); + filter.addCondition(pkColumn.getFieldKey(), rowKeyMap.values(), CompareType.IN); + filter.addCondition(FieldKey.fromParts("Container"), _targetContainer); + + Set columns = Sets.newCaseInsensitiveHashSet(keyFieldName, LSID.name()); + Map[] results = new TableSelector(ExperimentService.get().getTinfoData(), columns, filter, null).getMapArray(); + + for (Map result : results) + { + Object key = result.get(keyFieldName); + Object lsidObj = result.get(LSID.name()); + + Integer rowInd = keyRowMap.get(key); + if (lsidObj != null) + lsids.put(rowInd, (String) lsidObj); + } + + // backup to where we started so caller can iterate through them one at a time + _unwrapped.reset(); // unwrapped _delegate + _delegate.next(); + } + + @Override + public boolean next() throws BatchValidationException + { + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached + _unwrapped.mark(); // unwrapped _delegate + boolean ret = super.next(); + if (ret) + prefetchExisting(); + return ret; + } +} \ No newline at end of file diff --git a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java 
b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java index bf55b98b82c..4e4d9f11767 100644 --- a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java @@ -537,7 +537,12 @@ private void checkBackgroundException() throws BatchValidationException public Object get(int i) { if (null != _keyColumnInfo.get(i)) - return _keyValues.get(i); + { + Object value = _keyValues.get(i); + if (value != null) + return value; + } + return _data.get(i); } diff --git a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java index d04ea279c65..79f64079d88 100644 --- a/api/src/org/labkey/api/query/DefaultQueryUpdateService.java +++ b/api/src/org/labkey/api/query/DefaultQueryUpdateService.java @@ -28,6 +28,7 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ConvertHelper; import org.labkey.api.data.ExpDataFileConverter; +import org.labkey.api.data.ImportAliasable; import org.labkey.api.data.JdbcType; import org.labkey.api.data.MvUtil; import org.labkey.api.data.Parameter; @@ -933,4 +934,21 @@ protected void configureCrossFolderImport(DataIteratorBuilder rows, DataIterator context.setCrossFolderImport(false); } } + + public static @Nullable String getKeyColumnAliasForUpdate(TableInfo tableInfo, @NotNull Map columnNameMap) + { + // Currently, SampleUpdateAddColumnsDataIterator and DataClassUpdateAddColumnsDataIterator is being called before a translator is invoked to + // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the + // map of columns for the key column. 
+ var rowIdAliases = ImportAliasable.Helper.createImportSet(tableInfo.getColumn(FieldKey.fromParts("RowId"))); + rowIdAliases.retainAll(columnNameMap.keySet()); + + if (rowIdAliases.size() == 1) + return rowIdAliases.iterator().next(); + if (rowIdAliases.isEmpty()) + return "Name"; + + return null; + } + } diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 2bffcbdb2b9..8926daf89b0 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -56,7 +56,9 @@ import org.labkey.api.data.UpdateableTableInfo; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.dataiterator.AttachmentDataIterator; +import org.labkey.api.dataiterator.CachingDataIterator; import org.labkey.api.dataiterator.CoerceDataIterator; +import org.labkey.api.dataiterator.DataClassUpdateAddColumnsDataIterator; import org.labkey.api.dataiterator.DataIterator; import org.labkey.api.dataiterator.DataIteratorBuilder; import org.labkey.api.dataiterator.DataIteratorContext; @@ -152,11 +154,13 @@ import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.labkey.api.dataiterator.DataIteratorUtil.DUPLICATE_COLUMN_IN_DATA_ERROR; import static org.labkey.api.exp.api.ExpRunItem.PARENT_IMPORT_ALIAS_MAP_PROP; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.Name; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.QueryableInputs; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.RowId; import static org.labkey.api.exp.query.ExpMaterialTable.Column.LSID; +import static org.labkey.api.query.DefaultQueryUpdateService.getKeyColumnAliasForUpdate; import static org.labkey.experiment.ExpDataIterators.incrementCounts; public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl implements 
ExpDataClassDataTable @@ -1064,16 +1068,28 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) ColumnInfo cpasTypeCol = expData.getColumn("cpasType"); step0.addColumn(cpasTypeCol, new SimpleTranslator.ConstantColumn(_dataClass.getLSID())); + Map columnNameMap = DataIteratorUtil.createColumnNameMap(input); + if (context.getInsertOption() == QueryUpdateService.InsertOption.UPDATE) { + String keyColumnAlias = getKeyColumnAliasForUpdate(expData, columnNameMap); + if (keyColumnAlias == null) + { + context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, ExpDataTable.Column.RowId.name()))); + return null; + } + step0.addNullColumn(Column.LSID.name(), JdbcType.VARCHAR); step0.selectAll(); - return LoggingDataIterator.wrap(step0.getDataIterator(context)); + + // add lsid column (for Attachment) but need to re-query it + var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), expData, c ,_dataClass.getRowId(), keyColumnAlias); + return LoggingDataIterator.wrap(added); } step0.selectAll(Sets.newCaseInsensitiveHashSet("lsid", "dataClass", "genId")); //TODO can this be moved up? 
// Ensure we have a name column -- makes the NameExpressionDataIterator easier - if (!DataIteratorUtil.createColumnNameMap(step0).containsKey("name")) + if (!columnNameMap.containsKey("name")) { ColumnInfo nameCol = expData.getColumn("name"); step0.addColumn(nameCol, (Supplier)() -> null); diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java index 749137c049e..877016f47b5 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java @@ -88,6 +88,9 @@ public static AttachmentParentType get() )) .append(" AS Description FROM expdataclass.") .append(domain.getStorageTableName()) + .append(" ds JOIN ") + .append(ExperimentService.get().getTinfoData()) + .append(" d ON d.rowId = ds.rowid") .append(" WHERE ").append(where) ); }); diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 9deaccfce62..7279bda6fa5 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -1329,7 +1329,7 @@ public DataIterator getDataIterator(DataIteratorContext context) addAliquotedFrom.addNullColumn(PARENT_RECOMPUTE_NAME_COL, JdbcType.VARCHAR); addAliquotedFrom.selectAll(); - String keyColumnAlias = getKeyColumnAlias(materialTable, columnNameMap); + String keyColumnAlias = getKeyColumnAliasForUpdate(materialTable, columnNameMap); if (keyColumnAlias == null) { context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, RowId.name()))); @@ -1379,22 +1379,6 @@ public DataIterator getDataIterator(DataIteratorContext context) return LoggingDataIterator.wrap(names); } - private static @Nullable String getKeyColumnAlias(TableInfo materialTable, @NotNull Map columnNameMap) - { 
- // Currently, SampleUpdateAddColumnsDataIterator is being called before a translator is invoked to - // remap column labels to columns (e.g., "Row Id" -> "RowId"). Due to this, we need to search the - // map of columns for the key column. - var rowIdAliases = ImportAliasable.Helper.createImportSet(materialTable.getColumn(RowId.fieldKey())); - rowIdAliases.retainAll(columnNameMap.keySet()); - - if (rowIdAliases.size() == 1) - return rowIdAliases.iterator().next(); - if (rowIdAliases.isEmpty()) - return Name.name(); - - return null; - } - private static boolean isReservedHeader(String name) { if (isNameHeader(name) || isDescriptionHeader(name) || isCommentHeader(name) || "CpasType".equalsIgnoreCase(name) || isAliasHeader(name)) From a68e8a818ff0fd8fc990155c459bc430bcceb707 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 13:15:20 -0700 Subject: [PATCH 05/17] fix merge --- .../labkey/api/dataiterator/ExistingRecordDataIterator.java | 2 ++ .../org/labkey/experiment/api/ExpDataClassDataTableImpl.java | 5 +++++ 2 files changed, 7 insertions(+) diff --git a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java index c39acf459a1..8816c3a6f55 100644 --- a/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/ExistingRecordDataIterator.java @@ -153,6 +153,8 @@ public Supplier getSupplier(int i) @Override public Object get(int i) { + assert(i <= existingColIndex); + if (i)() -> null); } + if (context.getSelectIds() && !columnNameMap.containsKey(RowId.name())) + { + step0.addNullColumn(RowId.name(), JdbcType.INTEGER); + } + ColumnInfo lsidCol = expData.getColumn("lsid"); // TODO: validate dataFileUrl column, it will be saved later From 9d5aa75562e58c846936e6ec5db76692e3b4d3e5 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 13:19:24 -0700 Subject: [PATCH 06/17] fix merge --- 
.../org/labkey/experiment/api/ExpDataClassDataTableImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 7d55127fc28..3d61774d5ed 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -1095,7 +1095,7 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) step0.addColumn(nameCol, (Supplier)() -> null); } - if (context.getSelectIds() && !columnNameMap.containsKey(RowId.name())) + if (Boolean.TRUE.equals(context.getSelectIds()) && !columnNameMap.containsKey(RowId.name())) { step0.addNullColumn(RowId.name(), JdbcType.INTEGER); } From 48936604e982b69f636d5e6efe200a95cf1149a7 Mon Sep 17 00:00:00 2001 From: XingY Date: Wed, 11 Mar 2026 17:12:44 -0700 Subject: [PATCH 07/17] fix update --- ...DataClassUpdateAddColumnsDataIterator.java | 31 ++++++++++++++++--- .../test/integration/DataClassCrud.ispec.ts | 6 ++-- .../api/ExpDataClassDataTableImpl.java | 4 ++- 3 files changed, 32 insertions(+), 9 deletions(-) diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java index 277438cae6f..549825cc65a 100644 --- a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -1,6 +1,7 @@ package org.labkey.api.dataiterator; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; import org.labkey.api.collections.IntHashMap; import org.labkey.api.collections.Sets; import org.labkey.api.data.ColumnInfo; @@ -14,7 +15,9 @@ import org.labkey.api.exp.query.ExpDataTable; import org.labkey.api.query.BatchValidationException; import 
org.labkey.api.query.FieldKey; +import org.labkey.api.query.ValidationException; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; @@ -42,11 +45,13 @@ public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator int lastPrefetchRowNumber = -1; final IntHashMap lsids = new IntHashMap<>(); + final DataIteratorContext _context; - public DataClassUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, Container container, long dataClassId, String keyColumnName) + public DataClassUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataIteratorContext context, TableInfo target, Container container, long dataClassId, String keyColumnName) { super(in); this._unwrapped = (CachingDataIterator)in; + _context = context; _tableInfo = target; _targetContainer = container; _dataClassId = dataClassId; @@ -111,7 +116,9 @@ protected void prefetchExisting() throws BatchValidationException boolean numericKey = pkColumn.isNumericType(); JdbcType jdbcType = pkColumn.getJdbcType(); Map rowKeyMap = new LinkedHashMap<>(); - Map keyRowMap = new LinkedHashMap<>(); + Map> keyRowMap = new LinkedHashMap<>(); + Set notFoundKeys = new HashSet<>(); + do { lastPrefetchRowNumber = asInteger(_delegate.get(0)); @@ -127,7 +134,11 @@ else if (StringUtils.isEmpty((String) key)) throw new IllegalArgumentException(keyFieldName + " value not provided on row " + lastPrefetchRowNumber); rowKeyMap.put(lastPrefetchRowNumber, key); - keyRowMap.put(key, lastPrefetchRowNumber); + notFoundKeys.add(key); + // if keyRowMap doesn't contain key, add new set, then add row number to set for this key + if (!keyRowMap.containsKey(key)) + keyRowMap.put(key, new HashSet<>()); + keyRowMap.get(key).add(lastPrefetchRowNumber); lsids.put(lastPrefetchRowNumber, null); } while (--rowsToFetch > 0 && _delegate.next()); @@ -144,11 +155,18 @@ else if (StringUtils.isEmpty((String) key)) Object key = result.get(keyFieldName); Object lsidObj = 
result.get(LSID.name()); - Integer rowInd = keyRowMap.get(key); + Set rowInds = keyRowMap.get(key); if (lsidObj != null) - lsids.put(rowInd, (String) lsidObj); + { + for (Integer rowInd : rowInds) + lsids.put(rowInd, (String) lsidObj); + notFoundKeys.remove(key); + } } + if (!notFoundKeys.isEmpty()) + _context.getErrors().addRowError(new ValidationException("Data not found for " + notFoundKeys)); + // backup to where we started so caller can iterate through them one at a time _unwrapped.reset(); // unwrapped _delegate _delegate.next(); @@ -157,6 +175,9 @@ else if (StringUtils.isEmpty((String) key)) @Override public boolean next() throws BatchValidationException { + if (_context.getErrors().hasErrors()) + return false; + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached _unwrapped.mark(); // unwrapped _delegate boolean ret = super.next(); diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index f84394137b7..e1c5d7dce95 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -297,11 +297,11 @@ describe('Import with update / merge', () => { // Issue 52922: Blank / bogus id in the file are getting ignored in update from file let blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "UPDATE", subfolder1Options, editorUserOptions); - 
expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\n\tisBlank", dataType, "UPDATE", topFolderOptions, editorUserOptions); - expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); + expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_WITH_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", topFolderOptions, editorUserOptions); expect(blankKeyProvidedError.text.indexOf(BLANK_KEY_UPDATE_ERROR_NO_EXPRESSION) > -1).toBeTruthy(); blankKeyProvidedError = await ExperimentCRUDUtils.importData(server, "Name\tDescription\nData1\tNotblank\n\tisBlank", dataType, "MERGE", subfolder1Options, editorUserOptions); diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 3d61774d5ed..4b7ae4a05a0 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -1082,7 +1082,7 @@ else if (Column.ClassId.name().equalsIgnoreCase(name)) step0.selectAll(); // add lsid column (for Attachment) but need to re-query it - var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), expData, c ,_dataClass.getRowId(), keyColumnAlias); + var added = new DataClassUpdateAddColumnsDataIterator(new CachingDataIterator(step0), context, expData, c ,_dataClass.getRowId(), keyColumnAlias); return LoggingDataIterator.wrap(added); } @@ -1498,6 +1498,8 @@ public void configureDataIteratorContext(DataIteratorContext context) 
context.putConfigParameter(QueryUpdateService.ConfigParameters.CheckForCrossProjectData, true); if (context.getInsertOption() == InsertOption.IMPORT || context.getInsertOption() == InsertOption.MERGE) context.setSelectIds(true); // select rowId because provisioned expdataclass.rowId and QueryUpdateAuditEvent.rowPk needs actual rowId + else if (context.getSelectIds() == null && context.getInsertOption() == InsertOption.UPDATE) + context.setSelectIds(false); // for update, don't add RowId if it wasn't in the input (without setSelectIds(false), rowId col will be added if table.hasTriggers by TableInsertUpdateDataIterator) } @Override From a35f2331b6b516a5d1a73dd0a5f721de30d6c88f Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 6 Apr 2026 21:55:48 -0700 Subject: [PATCH 08/17] fix attachment --- .../labkey/experiment/api/ExpDataClassType.java | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java index f09be848e6e..7a81d954d2b 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java @@ -26,6 +26,7 @@ import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.exp.Lsid; import org.labkey.api.exp.PropertyType; +import org.labkey.api.exp.api.ExpDataClass; import org.labkey.api.exp.api.ExperimentService; import org.labkey.api.exp.property.Domain; import org.labkey.api.exp.property.PropertyService; @@ -81,6 +82,7 @@ public static AttachmentParentType get() if (null != domain) { + ExpDataClass dataClass = ExperimentService.get().getDataClass(lsid); // Enumerate columns on the data class TableInfo since it includes the vocabulary domain columns. // For example, Compound has a built-in Structure2D attachment column supplied by a vocabulary domain.
TableInfo dataClassTable = new DataClassUserSchema(c, User.getSearchUser()).getTable(domain.getName()); @@ -89,6 +91,10 @@ public static AttachmentParentType get() { LOG.warn("DataClass table not found for {}", domain.getName()); } + else if (dataClass == null) + { + LOG.warn("DataClass not found for {}", domain.getName()); + } else if (dataClassTable.getColumns().stream().anyMatch(col -> col.getPropertyType() == PropertyType.ATTACHMENT)) { // Add a select for the ObjectIds in this ExpDataClass if the table includes an attachment column. @@ -103,12 +109,12 @@ else if (dataClassTable.getColumns().stream().anyMatch(col -> col.getPropertyTyp new SQLFragment("':'"), new SQLFragment("Name") )) - .append(" AS Description FROM expdataclass.") - .append(domain.getStorageTableName()) - .append(" ds JOIN ") + .append(" AS Description FROM ") .append(ExperimentService.get().getTinfoData()) - .append(" d ON d.rowId = ds.rowid") - .append(" WHERE ").append(where) + .append(" WHERE classid = ") + .appendValue(dataClass.getRowId()) + .append(" AND ") + .append(where) ); } } From 04f83ffd866ac428402e61ce30c3af342f0ebff1 Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 20 Apr 2026 17:05:38 -0700 Subject: [PATCH 09/17] Fix alias in audit --- api/src/org/labkey/api/audit/AuditHandler.java | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/api/src/org/labkey/api/audit/AuditHandler.java b/api/src/org/labkey/api/audit/AuditHandler.java index 0212dce7002..52619a50656 100644 --- a/api/src/org/labkey/api/audit/AuditHandler.java +++ b/api/src/org/labkey/api/audit/AuditHandler.java @@ -6,6 +6,7 @@ import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; import org.labkey.api.data.MultiValuedForeignKey; +import org.labkey.api.data.MultiValuedRenderContext; import org.labkey.api.data.TableInfo; import org.labkey.api.dataiterator.DataIterator; import org.labkey.api.dataiterator.ExistingRecordDataIterator; @@ -13,6 +14,7 @@ import 
org.labkey.api.exp.api.ExpData; import org.labkey.api.exp.api.ExpMaterial; import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.query.ExpMaterialTable; import org.labkey.api.gwt.client.AuditBehaviorType; import org.labkey.api.query.QueryService; import org.labkey.api.security.User; @@ -122,11 +124,22 @@ static Pair, Map> getOldAndNewRecordForMerge } } + boolean isAliasInput = row.containsKey(ExperimentService.ALIASCOLUMNALIAS) && ExpMaterialTable.Column.Alias.name().equalsIgnoreCase(lcName); + boolean isExtraAuditField = extraFieldsToInclude != null && extraFieldsToInclude.contains(nameFromAlias); - if (!excludedFromDetailDiff.contains(nameFromAlias) && (row.containsKey(nameFromAlias) || isExpInput)) + if (!excludedFromDetailDiff.contains(nameFromAlias) && (row.containsKey(nameFromAlias) || isExpInput || isAliasInput)) { Object oldValue = entry.getValue(); Object newValue = row.get(nameFromAlias); + + // See ExpDataIterator: step1.addColumn(ExperimentService.ALIASCOLUMNALIAS, colNameMap.get(Alias.name())) + if (isAliasInput && newValue == null) + { + newValue = row.get(ExperimentService.ALIASCOLUMNALIAS); + if (oldValue instanceof String aliasStr) + oldValue = Arrays.asList(aliasStr.split(MultiValuedRenderContext.VALUE_DELIMITER_REGEX)); + } + // compare dates using string values to allow for both Date and Timestamp types if (newValue instanceof Date && oldValue != null) { @@ -167,7 +180,7 @@ else if (!Objects.equals(oldValue, newValue) || isExtraAuditField) // If multivalued columns change, the value in this table will remain the key to the junction table // but at this point newValue will look like the newly chosen values not that key. So we skip // this in the diff unless the value changes from non-null to null or vice versa. 
- if (isMultiValued) + if (isMultiValued && !isAliasInput) { if ((oldValue == null && newValue != null) || (newValue == null && oldValue != null)) { From 3446ba460c08af2cebc00b02d4b739031830f3cc Mon Sep 17 00:00:00 2001 From: XingY Date: Mon, 20 Apr 2026 19:33:12 -0700 Subject: [PATCH 10/17] don't check for fail fast for DataClassUpdateAddColumnsDataIterator --- .../DataClassUpdateAddColumnsDataIterator.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java index 549825cc65a..c7e776b87cf 100644 --- a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -181,8 +181,13 @@ public boolean next() throws BatchValidationException // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached _unwrapped.mark(); // unwrapped _delegate boolean ret = super.next(); - if (ret) + if (!_context.getErrors().hasErrors() && ret) + { prefetchExisting(); + if (_context.getErrors().hasErrors()) + return false; + } + return ret; } } \ No newline at end of file From 312a13cb436670cbc8d88acfd7a72cbfffe2cf55 Mon Sep 17 00:00:00 2001 From: XingY Date: Tue, 21 Apr 2026 18:19:18 -0700 Subject: [PATCH 11/17] Make sample consistent with data for failfast --- .../SampleUpdateAddColumnsDataIterator.java | 15 +++++++++++++-- .../experiment/api/SampleTypeUpdateServiceDI.java | 2 +- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java index 68a44b09d91..fb9ef9d7e6c 100644 --- a/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java +++ 
b/api/src/org/labkey/api/dataiterator/SampleUpdateAddColumnsDataIterator.java @@ -1,6 +1,7 @@ package org.labkey.api.dataiterator; import org.apache.commons.lang3.StringUtils; +import org.jetbrains.annotations.NotNull; import org.labkey.api.collections.IntHashMap; import org.labkey.api.collections.Sets; import org.labkey.api.data.ColumnInfo; @@ -33,6 +34,7 @@ public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator final int _aliquotedFromColIndex; final int _rootMaterialRowIdColIndex; final int _currentSampleStateColIndex; + final DataIteratorContext _context; // prefetch of existing records int lastPrefetchRowNumber = -1; @@ -40,10 +42,11 @@ public class SampleUpdateAddColumnsDataIterator extends WrapperDataIterator final IntHashMap aliquotRoots = new IntHashMap<>(); final IntHashMap sampleState = new IntHashMap<>(); - public SampleUpdateAddColumnsDataIterator(DataIterator in, TableInfo target, long sampleTypeId, String keyColumnName) + public SampleUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataIteratorContext context, TableInfo target, long sampleTypeId, String keyColumnName) { super(in); this._unwrapped = (CachingDataIterator)in; + this._context = context; this.target = target; this._sampleTypeId = sampleTypeId; @@ -167,11 +170,19 @@ else if (StringUtils.isEmpty((String) key)) @Override public boolean next() throws BatchValidationException { + if (_context.getErrors().hasErrors()) + return false; + // NOTE: we have to call mark() before we call next() if we want the 'next' row to be cached _unwrapped.mark(); // unwrapped _delegate boolean ret = super.next(); - if (ret) + if (!_context.getErrors().hasErrors() && ret) + { prefetchExisting(); + if (_context.getErrors().hasErrors()) + return false; + } + return ret; } } diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 06b3961420a..79d4359e377 100644 --- 
a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -1345,7 +1345,7 @@ public DataIterator getDataIterator(DataIteratorContext context) context.getErrors().addRowError(new ValidationException(String.format(DUPLICATE_COLUMN_IN_DATA_ERROR, RowId.name()))); return null; } - di = new SampleUpdateAddColumnsDataIterator(new CachingDataIterator(addAliquotedFrom), materialTable, sampleType.getRowId(), keyColumnAlias); + di = new SampleUpdateAddColumnsDataIterator(new CachingDataIterator(addAliquotedFrom), context, materialTable, sampleType.getRowId(), keyColumnAlias); di = new _SamplesCoerceDataIterator(di, context, sampleType, materialTable); context.setWithLookupRemapping(false); From 60898fa84cb62291cad8605eaab0e32ce0431358 Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 23 Apr 2026 11:42:14 -0700 Subject: [PATCH 12/17] Fix reclassify --- experiment/src/org/labkey/experiment/ExpDataIterators.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index cd72052a255..34fe07902a3 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -2482,11 +2482,14 @@ public DataIterator getDataIterator(DataIteratorContext context) .setCommitRowsBeforeContinuing(true) .setFailOnEmptyUpdate(false)); + // Biologics reclassify uses afterUpdate to update other data that depend on the completion of update of the triggering data + boolean shouldCommitRowsBeforeContinuing = colNameMap.containsKey(".reclassify"); // pass in remap columns to help reconcile columns that may be aliased in the virtual table dib = LoggingDataIterator.wrap(new TableInsertDataIteratorBuilder(dib, _propertiesTable, _container) .setKeyColumns(propertyKeyColumns) .setDontUpdate(dontUpdate) 
.setRemapSchemaColumns(((UpdateableTableInfo) _expTable).remapSchemaColumns()) + .setCommitRowsBeforeContinuing(shouldCommitRowsBeforeContinuing) .setFailOnEmptyUpdate(false)); if (colNameMap.containsKey(Flag.name()) || colNameMap.containsKey("comment")) From 988d5fd4234b017eb3acf8f97105aa4b7f477c38 Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 23 Apr 2026 14:46:14 -0700 Subject: [PATCH 13/17] fix folder import --- .../labkey/api/exp/api/ExperimentService.java | 1 + .../org/labkey/api/exp/xar/LSIDRelativizer.java | 2 +- .../api/SampleTypeUpdateServiceDI.java | 17 ++++++++++++----- .../samples/AbstractExpFolderImporter.java | 1 + 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/api/src/org/labkey/api/exp/api/ExperimentService.java b/api/src/org/labkey/api/exp/api/ExperimentService.java index 2575d10582e..aba7d058997 100644 --- a/api/src/org/labkey/api/exp/api/ExperimentService.java +++ b/api/src/org/labkey/api/exp/api/ExperimentService.java @@ -151,6 +151,7 @@ static void setInstance(ExperimentService impl) enum QueryOptions { + UseProvidedLsidForXarImport, GetSampleRecomputeCol, SkipBulkRemapCache, DeferRequiredLineageValidation, diff --git a/api/src/org/labkey/api/exp/xar/LSIDRelativizer.java b/api/src/org/labkey/api/exp/xar/LSIDRelativizer.java index 3436343b347..eba75b6c46a 100644 --- a/api/src/org/labkey/api/exp/xar/LSIDRelativizer.java +++ b/api/src/org/labkey/api/exp/xar/LSIDRelativizer.java @@ -105,7 +105,7 @@ else if ("sfx".equals(lsid.getObjectId())) // else if (MATERIAL_PREFIX_PLACEHOLD } else if ("Sample".equals(prefix) || ExpMaterial.DEFAULT_CPAS_TYPE.equals(prefix)) { - String xarJobId = "." + XAR_JOB_ID_NAME_SUB; // XarJobId is more concise than XarFileId + String xarJobId = "." 
+ XAR_JOB_ID_NAME_SUB + "."; // XarJobId is more concise than XarFileId return lsids.uniquifyRelativizedLSID("urn:lsid:" + XarContext.LSID_AUTHORITY_SUBSTITUTION + ":" + prefix + ".Folder-" + XarContext.CONTAINER_ID_SUBSTITUTION + xarJobId + lsids.getNextSampleId(), lsid.getObjectId(), lsid.getVersion()); } else if (ExpData.DEFAULT_CPAS_TYPE.equals(prefix)) diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index 79d4359e377..bbf1e85c593 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -23,7 +23,6 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.labkey.api.audit.AuditLogService; -import org.labkey.api.audit.TransactionAuditProvider; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.collections.CaseInsensitiveHashSet; import org.labkey.api.collections.LongHashSet; @@ -1319,7 +1318,8 @@ public DataIterator getDataIterator(DataIteratorContext context) context.getErrors().addRowError(new ValidationException(message, LSID.name())); return null; } - + if (context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseProvidedLsidForXarImport)) + drop.remove(LSID.name()); if (!drop.isEmpty()) di = new DropColumnsDataIterator(di, drop); @@ -1488,12 +1488,19 @@ static class _GenerateNamesDataIterator extends SimpleTranslator _nameState = sampleType.getNameGenState(skipDuplicateCheck, true, _container, user); lsidBuilder = sampleType.generateSampleLSID(); - selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); + boolean useProvidedLsidForImport = context.getConfigParameterBoolean(ExperimentService.QueryOptions.UseProvidedLsidForXarImport); + if (useProvidedLsidForImport) + selectAll(CaseInsensitiveHashSet.of(Name.name(), 
RootMaterialRowId.name())); + else + selectAll(CaseInsensitiveHashSet.of(Name.name(), LSID.name(), RootMaterialRowId.name())); addColumn(new BaseColumnInfo(Name.fieldKey(), JdbcType.VARCHAR), (Supplier)() -> generatedName); - DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); - addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + if (!useProvidedLsidForImport) + { + DbSequence lsidDbSeq = sampleType.getSampleLsidDbSeq(batchSize, sampleType.getContainer()); + addColumn(new BaseColumnInfo(LSID.name(), JdbcType.VARCHAR), (Supplier) () -> lsidBuilder.setObjectId(String.valueOf(lsidDbSeq.next())).toString()); + } addColumn(new BaseColumnInfo(CpasType.fieldKey(), JdbcType.VARCHAR), new SimpleTranslator.ConstantColumn(sampleType.getLSID())); addColumn(new BaseColumnInfo(MaterialSourceId.fieldKey(), JdbcType.INTEGER), new SimpleTranslator.ConstantColumn(sampleType.getRowId())); diff --git a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java index 97be2ad7061..eb5163bec66 100644 --- a/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java +++ b/experiment/src/org/labkey/experiment/samples/AbstractExpFolderImporter.java @@ -309,6 +309,7 @@ protected void importTsvData(FolderImportContext ctx, XarContext xarContext, Str if (isUpdate) options.put(QueryUpdateService.ConfigParameters.SkipRequiredFieldValidation, true); options.put(ExperimentService.QueryOptions.DeferRequiredLineageValidation, true); + options.put(ExperimentService.QueryOptions.UseProvidedLsidForXarImport, !isUpdate); context.setConfigParameters(options); DataIterator data = new ResolveLsidAndFileLinkDataIterator(loader.getDataIterator(context), xarContext, expObject instanceof ExpDataClass ? 
"DataClass" : ExpMaterial.DEFAULT_CPAS_TYPE, tinfo); From e07508c94d6f95e08f12f3c588dbc19cdd12178d Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 23 Apr 2026 17:19:02 -0700 Subject: [PATCH 14/17] Remove `altUpdateKeys` from `QueryInfo` --- .../org/labkey/api/audit/AuditHandler.java | 2 +- api/src/org/labkey/api/data/TableInfo.java | 1725 ++++---- ...DataClassUpdateAddColumnsDataIterator.java | 7 +- .../dataiterator/StatementDataIterator.java | 2 +- .../labkey/experiment/ExpDataIterators.java | 4 +- .../experiment/ExperimentUpgradeCode.java | 2 +- .../api/ExpDataClassDataTableImpl.java | 11 +- .../experiment/api/ExpMaterialTableImpl.java | 3753 ++++++++--------- .../controllers/GetQueryDetailsAction.java | 3 - 9 files changed, 2744 insertions(+), 2765 deletions(-) diff --git a/api/src/org/labkey/api/audit/AuditHandler.java b/api/src/org/labkey/api/audit/AuditHandler.java index 52619a50656..0e1cb9b5ff3 100644 --- a/api/src/org/labkey/api/audit/AuditHandler.java +++ b/api/src/org/labkey/api/audit/AuditHandler.java @@ -124,7 +124,7 @@ static Pair, Map> getOldAndNewRecordForMerge } } - boolean isAliasInput = row.containsKey(ExperimentService.ALIASCOLUMNALIAS) && ExpMaterialTable.Column.Alias.name().equalsIgnoreCase(lcName); + boolean isAliasInput = row.containsKey(ExperimentService.ALIASCOLUMNALIAS) && "Alias".equalsIgnoreCase(lcName); boolean isExtraAuditField = extraFieldsToInclude != null && extraFieldsToInclude.contains(nameFromAlias); if (!excludedFromDetailDiff.contains(nameFromAlias) && (row.containsKey(nameFromAlias) || isExpInput || isAliasInput)) diff --git a/api/src/org/labkey/api/data/TableInfo.java b/api/src/org/labkey/api/data/TableInfo.java index 15cde936ec4..cfc9b293d3a 100644 --- a/api/src/org/labkey/api/data/TableInfo.java +++ b/api/src/org/labkey/api/data/TableInfo.java @@ -1,865 +1,860 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in 
compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.api.data; - -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.NamedObjectList; -import org.labkey.api.compliance.ComplianceService; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainKind; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.query.AggregateRowConfig; -import org.labkey.api.query.BatchValidationException; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryAction; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryParseException; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.SchemaTreeNode; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.HasPermission; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.util.ContainerContext; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringExpression; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import 
org.labkey.data.xml.queryCustomView.FilterType; - -import java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Representation of a queryable unit in the database. Might be backed by a "physical" table, a database VIEW, - * or an arbitrary generated chunk of SELECT SQL. - */ -public interface TableInfo extends TableDescription, HasPermission, SchemaTreeNode -{ - /** - * This is used to report problems encountered during construction of the TableInfo. These would generally be - * reported in the schema browser or editor. Failing to construct a TableInfo generally breaks the UI badly, so we - * want to be able to return a usable TableInfo whenever possible and then report problems here if they the issue - * is correctable. - *
- * See also ValidateQueryAction. - *
- * @return warnings collected during TableInfo construction. - */ - default Collection getWarnings() - { - return List.of(); - } - - /** Get title, falling back to the name if title is null **/ - String getTitle(); - - /** Get title field, whether null or not **/ - @Nullable - String getTitleField(); - - /** - * Use getSQLName() instead for generating SQL. - * getSelectName() can still be used for error messages. - */ - @Nullable - default String getSelectName() - { - var sqlf = getSQLName(); - return null == sqlf ? null : sqlf.getSQL(); - } - - /** - * Return a schema.name that can be used directly in SQL statement - * Use only for tables known to be real database tables, usually - * for INSERT/UPDATE/DELETE. For SELECT use getFromSQL(alias). - */ - @Nullable - default SQLFragment getSQLName() - { - return null; - } - - @Nullable - DatabaseIdentifier getMetaDataIdentifier(); - - /** - * SQL representing this table, e.g. - * "Issues.Issues " - * "(SELECT * FROM Issues.Issues WHERE Container='...') " - **/ - @NotNull - SQLFragment getFromSQL(String alias); - - /* For most tables this is the same as getFromSQL(). - * However, for some tables that want to help optimize generated SQL - * we can ask for the columns we need to be accessible. - * - * Only columns returned by getColumn() or resolveColumn() should be handed in (no lookups) - * - * This is really intended for QueryTable - */ - SQLFragment getFromSQL(String alias, Set cols); - - DbSchema getSchema(); - - @Nullable - UserSchema getUserSchema(); - - /** getSchema().getSqlDialect() */ - SqlDialect getSqlDialect(); - - @NotNull List getPkColumns(); - - /** Gets all the constraints that guarantee uniqueness in the underlying table. This includes both PRIMARY KEY and UNIQUE constraints */ - @NotNull - List getUniqueIndices(); - - /** Gets all the indices from the underlying table. 
This includes PRIMARY KEY and UNIQUE constraints, as well as non-unique INDEX */ - @NotNull - List getAllIndices(); - - class _DoNothingAuditHandler implements AuditHandler - { - @Override - public void addSummaryAuditEvent(User user, Container c, TableInfo table, QueryService.AuditAction action, Integer dataRowCount, @Nullable AuditBehaviorType auditBehaviorType, @Nullable String userComment) - { - } - - @Override - public void addAuditEvent(User user, Container c, TableInfo table, @Nullable AuditBehaviorType auditType, @Nullable String userComment, QueryService.AuditAction action, List> rows, @Nullable List> updatedRows, @Nullable List> providedValues, boolean useTransactionAuditCache) - { - } - } - - default AuditHandler getAuditHandler(@Nullable AuditBehaviorType auditBehaviorOverride) - { - if (!supportsAuditTracking() || getEffectiveAuditBehavior(auditBehaviorOverride) == AuditBehaviorType.NONE) - return new _DoNothingAuditHandler(); - return QueryService.get().getDefaultAuditHandler(); - } - - enum IndexType - { - Primary(org.labkey.data.xml.IndexType.Type.PRIMARY, true), - Unique(org.labkey.data.xml.IndexType.Type.UNIQUE, true), - NonUnique(org.labkey.data.xml.IndexType.Type.NON_UNIQUE, false); - - private final org.labkey.data.xml.IndexType.Type.Enum _xmlIndexType; - private final boolean _unique; - - IndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType, boolean unique) - { - _xmlIndexType = xmlIndexType; - _unique = unique; - } - - public org.labkey.data.xml.IndexType.Type.Enum getXmlIndexType() - { - return _xmlIndexType; - } - - public boolean isUnique() - { - return _unique; - } - - public static IndexType getForXmlIndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType) - { - for (IndexType indexType : IndexType.values()) - { - if(indexType.getXmlIndexType().equals(xmlIndexType)){ - return indexType; - } - } - throw new EnumConstantNotPresentException(IndexType.class, xmlIndexType.toString()); - } - } - - record IndexDefinition(String 
name, IndexType indexType, List columns, @Nullable String filterCondition) - { - void addColumn(ColumnInfo column) - { - columns.add(column); - } - } - - /** Get a list of columns that specifies a unique key, may return the same result as getPKColumns() - * However, whereas getPkColumns() will usually return a 'short' pk, such as "rowid" or "lsid", this - * should return a more verbose AK that is most semantically useful. - * - * For instance for a flow result it might return (container, run, sample, stim, populationName) rather - * than (container, analysisId) - * - * NOTE: unlike PK, this does not guarantee that columns are NON-NULL - * NOTE: Postgres does not consider rows with NULL values to be "equal" so NULLs may be repeated! - */ - @NotNull List getAlternateKeyColumns(); - - @NotNull default Set getAltKeysForUpdate() - { - return Collections.emptySet(); - } - - @Nullable default Set getDisabledSystemFields() - { - return Collections.emptySet(); - } - - /** - * @return a List> a list of additionally required fields during import, each field can have a Set of alternative field keys - * Example usage is specifying the set of required lineage columns during sample/dataclass data creation. - */ - @NotNull default List> getAdditionalRequiredInsertColumns() - { - return Collections.emptyList(); - } - - ColumnInfo getVersionColumn(); - - String getVersionColumnName(); - - boolean hasDefaultTitleColumn(); - - DatabaseTableType getTableType(); - - /** - * Get select list for named (hopefully unique!) column to title column, including filter on table. - * If maxRows is exceeded, the NamedObjectList will be marked as incomplete. - * When maxRows is null a default maxRows will be used. To select all rows, set maxRows to {@link Table#ALL_ROWS}. 
- * @see NamedObjectList#isComplete() - */ - @NotNull NamedObjectList getSelectList(String columnName, List filters, Integer maxRows, String titleColumn); - - ColumnInfo getColumn(@NotNull String colName); - - // same as getColumn(colName.getName()), does not need to support lookup columns - ColumnInfo getColumn(@NotNull FieldKey colName); - - List getColumns(); - - List getUserEditableColumns(); - - /** @param colNames comma separated column names */ - List getColumns(String colNames); - - List getColumns(String... colNameArray); - - Set getColumnNameSet(); - - default String getDbSequenceName(String columnName) - { - return (this.getSchema().getName() + ":" + this.getName() + ":" + columnName).toLowerCase(); - } - - /** - * Return a list of ColumnInfos that make up the extended set of - * columns that could be considered a part of this table by default. - * For the majority of tables, this is simply the columns returned from - * {@link TableInfo#getColumns()} plus any additional columns from - * {@link TableInfo#getDefaultVisibleColumns()). - * - * For other tables, the extended column set could include some columns - * from lookup tables. - * - * @param includeHidden Include hidden columns. - * @return All columns. - */ - Map getExtendedColumns(boolean includeHidden); - - - /** - * @return the {@link org.labkey.api.query.FieldKey}s that should be part of the default view of the table, - * assuming that no other default view has been configured. - */ - List getDefaultVisibleColumns(); - - /** - * @param keys the new set of columns to show when there is no explicit default view defined. Use null to indicate - * that the list should be inferred based on the column metadata - */ - void setDefaultVisibleColumns(@Nullable Iterable keys); - - ButtonBarConfig getButtonBarConfig(); - - AggregateRowConfig getAggregateRowConfig(); - - /** - * Return the default query grid view URL for the table or null. 
- * Instead of calling this method directly, callers should pass - * {@link QueryAction#executeQuery} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getGridURL(Container container); - - /** - * Return the insert URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default insert view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#insertQueryRow} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getInsertURL(Container container); - - /** - * Return the import URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default import view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#importData} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getImportDataURL(Container container); - - /** - * Return the delete URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default action will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#deleteQueryRows} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getDeleteURL(Container container); - - /** - * Return the update URL expression for a particular record or null. 
- * If the table provides an update service via {@link #getUpdateService()}, - * a default update view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#updateQueryRow} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - * @param columns if null, implementations should simply return their first (and potentially only) details URL, - * or null if they don't have one. - * If non-null, the set of columns that are available from the ResultSet so that an appropriate - * URL can be chosen - */ - StringExpression getUpdateURL(@Nullable Set columns, Container container); - - /** - * Return the details URL expression for a particular record or null. - * @param columns if null, implementations should simply return their first (and potentially only) details URL, - * or null if they don't have one. - * If non-null, the set of columns that are available from the ResultSet so that an appropriate - * URL can be chosen - */ - StringExpression getDetailsURL(@Nullable Set columns, Container container); - boolean hasDetailsURL(); - - /** - * Return the method of a given name. Methods are accessible via the QueryModule's query - * language. Most tables do not have methods. - */ - default MethodInfo getMethod(String name) - { - return null; - } - - /** - * return the fieldkeys of the columns that methods in on this table depend on. - * We could do this per method, but in practice it's a short list. 
- */ - default Set getMethodRequiredFieldKeys() - { - return Set.of(); - } - - /** - * Returns a string that will appear on the default import page and as the top line - * of the default generated Excel template - */ - String getImportMessage(); - - /** - * Returns a list of templates (label / URL) that should be used as the options for excel upload - * Each URL should either point to a static template file or an action to generate the template. - * If no custom templates have been provided, it will return the default URL - */ - List> getImportTemplates(ViewContext ctx); - - default List> getValidatedImportTemplates(ViewContext ctx) - { - return getImportTemplates(ctx); - } - - /** - * Returns a list of the raw import templates (without substituting the container). This is intended to be - * used by FilteredTable or other instances that need to copy the raw values from a parent table. In general, - * getImportTemplates() should be used instead - */ - List> getRawImportTemplates(); - - // Most datasets do not have a container column - boolean hasContainerColumn(); - - boolean needsContainerClauseAdded(); - - @Nullable - ContainerFilter getContainerFilter(); - - /** - * Finds and applies the first metadata xml from active modules in the schema's container and then the first user-created metadata in the container hierarchy. - * - * @see QueryService#findMetadataOverride(UserSchema, String, boolean, boolean, Collection, Path) - */ - void overlayMetadata(String tableName, UserSchema schema, Collection errors); - - void overlayMetadata(Collection metadata, UserSchema schema, Collection errors); - - /** @return whether this table accepts XML metadata configuration to be overlaid on the default level of metadata */ - boolean isMetadataOverrideable(); - - ColumnInfo getLookupColumn(ColumnInfo parent, String name); - - int getCacheSize(); - - String getDescription(); - - /** - * Get Domain associated with this TableInfo if any. 
- */ - @Nullable - Domain getDomain(); - - /** - * Get Domain associated with this TableInfo if any. - */ - @Nullable - Domain getDomain(boolean forUpdate); - - /** - * Get DomainKind associated with this TableInfo if any. - * Domain may or may not exist even if DomainKind is available. - */ - @Nullable - DomainKind getDomainKind(); - - /** - * Returns a QueryUpdateService implementation for this TableInfo, - * or null if the table/query is not updatable. - * @return A QueryUpdateService implementation for this table/query or null. - */ - @Nullable - QueryUpdateService getUpdateService(); - - enum TriggerType - { - INSERT, UPDATE, DELETE, SELECT, TRUNCATE, MOVE; - - public String getMethodName() - { - String name = name(); - return name.substring(0, 1) + name.toLowerCase().substring(1, name.length()); - } - } - - /** - * Enumeration of trigger methods that may be implemented in trigger scripts. - */ - enum TriggerMethod - { - init, complete, - beforeInsert, afterInsert, - beforeUpdate, afterUpdate, - beforeDelete, afterDelete - } - - - /** - * Queries may have named parameters, SELECT queries (the only kind we have right now) may - * return TableInfos. This is how you find out if a TableInfo representing a query has named - * parameters - */ - - @NotNull - Collection getNamedParameters(); - - /** - * Executes any trigger scripts for this table. - *

- * The trigger should be called once before and once after an entire set of rows for each of the - * INSERT, UPDATE, DELETE trigger types. A trigger script may set up data structures to be used - * during validation. In particular, the trigger script might want to do a query to populate a set of - * legal values. - *

- * The errors parameter holds validation error messages for the entire row set. - * If errors are created during the row level trigger script, they should be added as nested ValidationExceptions. - * The ValidationException will be thrown after executing the trigger scripts if it contains any errors. - *

- * Example usage: - *

-     *   ValidationException errors = new ValidationException();
-     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, true, errors);
-     *
-     *   List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(rows.size());
-     *   for (int i = 0; i < rows.size(); i++)
-     *   {
-     *       try
-     *       {
-     *           Map row = rows.get(i);
-     *           Map oldRow = getRow( ... );
-     *           if (oldRow == null)
-     *               throw new NotFoundException("The existing row was not found.");
-     *
-     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, true, i, row, oldRow);
-     *           Map updatedRow = updateRow(user, container, row, oldRow);
-     *           if (updatedRow == null)
-     *               continue;
-     *
-     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow);
-     *           result.add(updatedRow);
-     *       }
-     *       catch (ValidationException vex)
-     *       {
-     *           errors.addNested(vex);
-     *       }
-     *   }
-     *
-     *   // Firing the after batch trigger will throw a ValidationException if
-     *   // any errors were generated during the row triggers or the during after batch trigger.
-     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, false, errors);
-     * 
- * - * @param c The current Container. - * @param user the current user - * @param type The TriggerType for the event. - * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. - * @param before true if the trigger is before the event, false if after the event. - * @param errors Any errors created by the validation script will be added to the errors collection. - * @param extraContext Optional additional bindings to set in the script's context when evaluating. - * @throws BatchValidationException if the trigger function returns false or the errors map isn't empty. - */ - void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map extraContext) - throws BatchValidationException; - - default void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, - @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) - throws ValidationException - { - fireRowTrigger(c, user, type, null, before, rowNumber, newRow, oldRow, extraContext, null); - } - - /** - * Fire trigger for a single row. - *

- * The trigger is called once before and once after each row for each of the INSERT, UPDATE, DELETE - * trigger types. - *

- * The following table describes the parameters for each of the trigger types: - *

- *
INSERT: - *
    - *
  • before: newRow contains the row values to be inserted, oldRow is null. - *
  • after: newRow contains the inserted row values, oldRow is null. - *
  • - * - *
    UPDATE: - *
      - *
    • before: newRow contains the row values to be updated, oldRow contains the previous version of the row. - *
    • after: newRow contains the updated row values, oldRow contains the previous version of the row. - *
    - * - *
    DELETE: - *
      - *
    • before: oldRow contains the previous version of the row. - *
    • after: newRow is null, oldRow contains the previous version of the row. - *
    • - *
- * - * @param c The current Container. - * @param user the current user - * @param type The TriggerType for the event. - * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. - * @param before true if the trigger is before the event, false if after the event. - * @param newRow The new row for INSERT and UPDATE. - * @param oldRow The previous row for UPDATE and DELETE - * @param extraContext Optional additional bindings to set in the script's context when evaluating. - * @param existingRecord Optional existing record for the row, used for merge operation to differentiate new vs existing row - * @throws ValidationException if the trigger function returns false or the errors map isn't empty. - */ - void fireRowTrigger( - Container c, - User user, - TriggerType type, - @Nullable QueryUpdateService.InsertOption insertOption, - boolean before, - int rowNumber, - @Nullable Map newRow, - @Nullable Map oldRow, - Map extraContext, - @Nullable Map existingRecord - ) throws ValidationException; - - /** - * Return true if there are trigger scripts associated with this table. - */ - boolean hasTriggers(@Nullable Container c); - - /** - * Return true if all trigger scripts support streaming. - */ - default boolean canStreamTriggers(Container c) - { - return false; - } - - /** - * Returns the full set of columns managed by triggers for this TableInfo. - */ - default @Nullable Set getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption) - { - return null; - } - - /** - * Reset the trigger script context by reloading them. Note there could still be caches that need to be reset - * and script init() to rerun. - * - * @param c The current container - */ - void resetTriggers(Container c); - - /** - * Returns true if the underlying database table has triggers. 
- */ - default boolean hasDbTriggers() - { - return false; - } - - /* for asserting that the TableInfo is not changed unexpectedly */ - void setLocked(boolean b); - boolean isLocked(); - - boolean supportsContainerFilter(); - boolean hasUnionTable(); - - /** - * Returns a ContainerContext for this table or null if ContainerContext is not supported. - * - * @return The ContainerContext for this table or null. - */ - @Nullable - ContainerContext getContainerContext(); - - /** - * Return the FieldKey of the Container column for this table. - * If the value is non-null then getContainerContext() will - * return a FieldKeyContext using the container FieldKey. - * - * @return FieldKey of the Container column. - */ - @Nullable FieldKey getContainerFieldKey(); - - /** - * Returns whether this table supports audit tracking of insert, updates and deletes by implementing the - * AuditConfigurable interface. - */ - boolean supportsAuditTracking(); - - /** - * @return set of all columns involved in the query - */ - Set getAllInvolvedColumns(Collection selectColumns); - - /** see interface AuditConfigurable */ - @NotNull - default AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.NONE; - } - - @NotNull - default AuditBehaviorType getEffectiveAuditBehavior() - { - return getXmlAuditBehaviorType() == null ? 
getDefaultAuditBehavior() : getXmlAuditBehaviorType(); - } - - /** - * Retrieves the audit behavior for this table taking into account, in order of precedence: - * - the override value provided - * - the setting from the XML file (always returned if there is a value set) - * - the value supplied by this table's implementation - * - * @param apiOverride value used to override the behavior type provided by the table implementation - * @return audit behavior for this table - */ - @NotNull - default AuditBehaviorType getEffectiveAuditBehavior(@Nullable AuditBehaviorType apiOverride) - { - return AuditBehaviorType.getEffectiveAuditLevel(apiOverride, getEffectiveAuditBehavior()); - } - - default AuditBehaviorType getEffectiveAuditBehavior(@Nullable String apiOverrideValue) - { - if (apiOverrideValue != null) - { - try - { - return getEffectiveAuditBehavior(AuditBehaviorType.valueOf(apiOverrideValue)); - } - catch (IllegalArgumentException ignored) - { - } - } - return getEffectiveAuditBehavior(); - } - - /* Can be used to distinguish AuditBehaviorType.NONE vs absent xml audit config */ - default AuditBehaviorType getXmlAuditBehaviorType() - { - return null; - } - - /* fields to include in detailed UPDATE audit log, even if no change is made to field value */ - @NotNull - default Set getExtraDetailedUpdateAuditFields() - { - return Collections.emptySet(); - } - - - // we exclude these from the detailed record because they are already on the audit record itself and - // depending on the data iterator behavior (e.g., for ExpDataIterators.getDataIterator), these values - // time of creating the audit log may actually already have been updated so the difference shown will be incorrect. 
- Set defaultExcludedDetailedUpdateAuditFields = CaseInsensitiveHashSet.of("Modified", "ModifiedBy", "Created", "CreatedBy"); - - @NotNull - default Set getExcludedDetailedUpdateAuditFields() - { - return defaultExcludedDetailedUpdateAuditFields; - } - - - /** - * Returns the row primary key column to use for audit history details. Note, this must - * be a single key as we don't support multiple column primary keys for audit details. - */ - @Nullable - default FieldKey getAuditRowPk() - { - return null; - } - - default boolean hasInsertURLOverride() - { - return false; - } - - default boolean hasUpdateURLOverride() - { - return false; - } - - default boolean hasDeleteURLOverride() - { - return false; - } - - /** - * Allow QueryView to render insert, update, and other buttons if the metadata xml provides URL - * overrides and the underlying table doesn't support insert/update/delete operations, - * such as a query table. - */ - default boolean allowQueryTableURLOverrides() - { - return false; - } - - /** - * Max PHI across all columns in the table. - */ - default PHI getMaxPhiLevel() - { - return getColumns().stream() - .map(ColumnRenderProperties::getPHI) - .max(Comparator.comparingInt(PHI::getRank)) - .orElse(PHI.NotPHI); - } - - /** - * Get the max allowed PHI for the current user in the current container. - */ - default PHI getUserMaxAllowedPhiLevel() - { - UserSchema schema = getUserSchema(); - if (schema == null) - return PHI.NotPHI; - - return ComplianceService.get().getMaxAllowedPhi(schema.getContainer(), schema.getUser()); - } - - /** - * Return true if the current user is allowed the maximum phi level set across all columns. - */ - default boolean canUserAccessPhi() - { - return getMaxPhiLevel().isLevelAllowed(getUserMaxAllowedPhiLevel()); - } - - /** - * Useful helper to combine permission check/throw. This is especially useful for tables where permissions can - * differ from the containing schema/container. 
- * - * TableInfo users should still always check hasPermission() as early as possible for best error handling, and - * to fail-fast (or degrade gracefully). - */ - default void checkReadBeforeExecute() - { - assert null != getUserSchema(); // this is only for tables in UserSchema - if (null != getUserSchema() && !hasPermission(getUserSchema().getUser(), ReadPermission.class)) - throw new UnauthorizedException(getUserSchema().getSchemaName() + "." + getName()); - } - - default List getSortFields() - { - return List.of(); - } - - default boolean supportsInsertOption(QueryUpdateService.InsertOption option) - { - return true; - } - - default void setAllowCalculatedColumns(boolean allowCalculatedColumns) {} - - /* Many tables should be indexed via controller provided views e.g. issues, announcements, wiki. - * We do not want crawlers to index data in these tables via query-executeQuery for instance. - */ - default boolean allowRobotsIndex() - { - return true; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.api.data; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.NamedObjectList; +import org.labkey.api.compliance.ComplianceService; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainKind; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.query.AggregateRowConfig; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryAction; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryParseException; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.SchemaTreeNode; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.HasPermission; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.util.ContainerContext; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringExpression; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.data.xml.queryCustomView.FilterType; + +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Representation of a queryable unit in the database. Might be backed by a "physical" table, a database VIEW, + * or an arbitrary generated chunk of SELECT SQL. 
+ */ +public interface TableInfo extends TableDescription, HasPermission, SchemaTreeNode +{ + /** + * This is used to report problems encountered during construction of the TableInfo. These would generally be + * reported in the schema browser or editor. Failing to construct a TableInfo generally breaks the UI badly, so we + * want to be able to return a usable TableInfo whenever possible and then report problems here if they the issue + * is correctable. + *
+ * See also ValidateQueryAction. + *
+ * @return warnings collected during TableInfo construction. + */ + default Collection getWarnings() + { + return List.of(); + } + + /** Get title, falling back to the name if title is null **/ + String getTitle(); + + /** Get title field, whether null or not **/ + @Nullable + String getTitleField(); + + /** + * Use getSQLName() instead for generating SQL. + * getSelectName() can still be used for error messages. + */ + @Nullable + default String getSelectName() + { + var sqlf = getSQLName(); + return null == sqlf ? null : sqlf.getSQL(); + } + + /** + * Return a schema.name that can be used directly in SQL statement + * Use only for tables known to be real database tables, usually + * for INSERT/UPDATE/DELETE. For SELECT use getFromSQL(alias). + */ + @Nullable + default SQLFragment getSQLName() + { + return null; + } + + @Nullable + DatabaseIdentifier getMetaDataIdentifier(); + + /** + * SQL representing this table, e.g. + * "Issues.Issues " + * "(SELECT * FROM Issues.Issues WHERE Container='...') " + **/ + @NotNull + SQLFragment getFromSQL(String alias); + + /* For most tables this is the same as getFromSQL(). + * However, for some tables that want to help optimize generated SQL + * we can ask for the columns we need to be accessible. + * + * Only columns returned by getColumn() or resolveColumn() should be handed in (no lookups) + * + * This is really intended for QueryTable + */ + SQLFragment getFromSQL(String alias, Set cols); + + DbSchema getSchema(); + + @Nullable + UserSchema getUserSchema(); + + /** getSchema().getSqlDialect() */ + SqlDialect getSqlDialect(); + + @NotNull List getPkColumns(); + + /** Gets all the constraints that guarantee uniqueness in the underlying table. This includes both PRIMARY KEY and UNIQUE constraints */ + @NotNull + List getUniqueIndices(); + + /** Gets all the indices from the underlying table. 
This includes PRIMARY KEY and UNIQUE constraints, as well as non-unique INDEX */ + @NotNull + List getAllIndices(); + + class _DoNothingAuditHandler implements AuditHandler + { + @Override + public void addSummaryAuditEvent(User user, Container c, TableInfo table, QueryService.AuditAction action, Integer dataRowCount, @Nullable AuditBehaviorType auditBehaviorType, @Nullable String userComment) + { + } + + @Override + public void addAuditEvent(User user, Container c, TableInfo table, @Nullable AuditBehaviorType auditType, @Nullable String userComment, QueryService.AuditAction action, List> rows, @Nullable List> updatedRows, @Nullable List> providedValues, boolean useTransactionAuditCache) + { + } + } + + default AuditHandler getAuditHandler(@Nullable AuditBehaviorType auditBehaviorOverride) + { + if (!supportsAuditTracking() || getEffectiveAuditBehavior(auditBehaviorOverride) == AuditBehaviorType.NONE) + return new _DoNothingAuditHandler(); + return QueryService.get().getDefaultAuditHandler(); + } + + enum IndexType + { + Primary(org.labkey.data.xml.IndexType.Type.PRIMARY, true), + Unique(org.labkey.data.xml.IndexType.Type.UNIQUE, true), + NonUnique(org.labkey.data.xml.IndexType.Type.NON_UNIQUE, false); + + private final org.labkey.data.xml.IndexType.Type.Enum _xmlIndexType; + private final boolean _unique; + + IndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType, boolean unique) + { + _xmlIndexType = xmlIndexType; + _unique = unique; + } + + public org.labkey.data.xml.IndexType.Type.Enum getXmlIndexType() + { + return _xmlIndexType; + } + + public boolean isUnique() + { + return _unique; + } + + public static IndexType getForXmlIndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType) + { + for (IndexType indexType : IndexType.values()) + { + if(indexType.getXmlIndexType().equals(xmlIndexType)){ + return indexType; + } + } + throw new EnumConstantNotPresentException(IndexType.class, xmlIndexType.toString()); + } + } + + record IndexDefinition(String 
name, IndexType indexType, List columns, @Nullable String filterCondition) + { + void addColumn(ColumnInfo column) + { + columns.add(column); + } + } + + /** Get a list of columns that specifies a unique key, may return the same result as getPKColumns() + * However, whereas getPkColumns() will usually return a 'short' pk, such as "rowid" or "lsid", this + * should return a more verbose AK that is most semantically useful. + * + * For instance for a flow result it might return (container, run, sample, stim, populationName) rather + * than (container, analysisId) + * + * NOTE: unlike PK, this does not guarantee that columns are NON-NULL + * NOTE: Postgres does not consider rows with NULL values to be "equal" so NULLs may be repeated! + */ + @NotNull List getAlternateKeyColumns(); + + @Nullable default Set getDisabledSystemFields() + { + return Collections.emptySet(); + } + + /** + * @return a List> a list of additionally required fields during import, each field can have a Set of alternative field keys + * Example usage is specifying the set of required lineage columns during sample/dataclass data creation. + */ + @NotNull default List> getAdditionalRequiredInsertColumns() + { + return Collections.emptyList(); + } + + ColumnInfo getVersionColumn(); + + String getVersionColumnName(); + + boolean hasDefaultTitleColumn(); + + DatabaseTableType getTableType(); + + /** + * Get select list for named (hopefully unique!) column to title column, including filter on table. + * If maxRows is exceeded, the NamedObjectList will be marked as incomplete. + * When maxRows is null a default maxRows will be used. To select all rows, set maxRows to {@link Table#ALL_ROWS}. 
+ * @see NamedObjectList#isComplete() + */ + @NotNull NamedObjectList getSelectList(String columnName, List filters, Integer maxRows, String titleColumn); + + ColumnInfo getColumn(@NotNull String colName); + + // same as getColumn(colName.getName()), does not need to support lookup columns + ColumnInfo getColumn(@NotNull FieldKey colName); + + List getColumns(); + + List getUserEditableColumns(); + + /** @param colNames comma separated column names */ + List getColumns(String colNames); + + List getColumns(String... colNameArray); + + Set getColumnNameSet(); + + default String getDbSequenceName(String columnName) + { + return (this.getSchema().getName() + ":" + this.getName() + ":" + columnName).toLowerCase(); + } + + /** + * Return a list of ColumnInfos that make up the extended set of + * columns that could be considered a part of this table by default. + * For the majority of tables, this is simply the columns returned from + * {@link TableInfo#getColumns()} plus any additional columns from + * {@link TableInfo#getDefaultVisibleColumns()). + * + * For other tables, the extended column set could include some columns + * from lookup tables. + * + * @param includeHidden Include hidden columns. + * @return All columns. + */ + Map getExtendedColumns(boolean includeHidden); + + + /** + * @return the {@link org.labkey.api.query.FieldKey}s that should be part of the default view of the table, + * assuming that no other default view has been configured. + */ + List getDefaultVisibleColumns(); + + /** + * @param keys the new set of columns to show when there is no explicit default view defined. Use null to indicate + * that the list should be inferred based on the column metadata + */ + void setDefaultVisibleColumns(@Nullable Iterable keys); + + ButtonBarConfig getButtonBarConfig(); + + AggregateRowConfig getAggregateRowConfig(); + + /** + * Return the default query grid view URL for the table or null. 
+ * Instead of calling this method directly, callers should pass + * {@link QueryAction#executeQuery} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getGridURL(Container container); + + /** + * Return the insert URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default insert view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#insertQueryRow} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getInsertURL(Container container); + + /** + * Return the import URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default import view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#importData} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getImportDataURL(Container container); + + /** + * Return the delete URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default action will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#deleteQueryRows} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getDeleteURL(Container container); + + /** + * Return the update URL expression for a particular record or null. 
+ * If the table provides an update service via {@link #getUpdateService()}, + * a default update view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#updateQueryRow} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + * @param columns if null, implementations should simply return their first (and potentially only) details URL, + * or null if they don't have one. + * If non-null, the set of columns that are available from the ResultSet so that an appropriate + * URL can be chosen + */ + StringExpression getUpdateURL(@Nullable Set columns, Container container); + + /** + * Return the details URL expression for a particular record or null. + * @param columns if null, implementations should simply return their first (and potentially only) details URL, + * or null if they don't have one. + * If non-null, the set of columns that are available from the ResultSet so that an appropriate + * URL can be chosen + */ + StringExpression getDetailsURL(@Nullable Set columns, Container container); + boolean hasDetailsURL(); + + /** + * Return the method of a given name. Methods are accessible via the QueryModule's query + * language. Most tables do not have methods. + */ + default MethodInfo getMethod(String name) + { + return null; + } + + /** + * return the fieldkeys of the columns that methods in on this table depend on. + * We could do this per method, but in practice it's a short list. 
+ */ + default Set getMethodRequiredFieldKeys() + { + return Set.of(); + } + + /** + * Returns a string that will appear on the default import page and as the top line + * of the default generated Excel template + */ + String getImportMessage(); + + /** + * Returns a list of templates (label / URL) that should be used as the options for excel upload + * Each URL should either point to a static template file or an action to generate the template. + * If no custom templates have been provided, it will return the default URL + */ + List> getImportTemplates(ViewContext ctx); + + default List> getValidatedImportTemplates(ViewContext ctx) + { + return getImportTemplates(ctx); + } + + /** + * Returns a list of the raw import templates (without substituting the container). This is intended to be + * used by FilteredTable or other instances that need to copy the raw values from a parent table. In general, + * getImportTemplates() should be used instead + */ + List> getRawImportTemplates(); + + // Most datasets do not have a container column + boolean hasContainerColumn(); + + boolean needsContainerClauseAdded(); + + @Nullable + ContainerFilter getContainerFilter(); + + /** + * Finds and applies the first metadata xml from active modules in the schema's container and then the first user-created metadata in the container hierarchy. + * + * @see QueryService#findMetadataOverride(UserSchema, String, boolean, boolean, Collection, Path) + */ + void overlayMetadata(String tableName, UserSchema schema, Collection errors); + + void overlayMetadata(Collection metadata, UserSchema schema, Collection errors); + + /** @return whether this table accepts XML metadata configuration to be overlaid on the default level of metadata */ + boolean isMetadataOverrideable(); + + ColumnInfo getLookupColumn(ColumnInfo parent, String name); + + int getCacheSize(); + + String getDescription(); + + /** + * Get Domain associated with this TableInfo if any. 
+ */ + @Nullable + Domain getDomain(); + + /** + * Get Domain associated with this TableInfo if any. + */ + @Nullable + Domain getDomain(boolean forUpdate); + + /** + * Get DomainKind associated with this TableInfo if any. + * Domain may or may not exist even if DomainKind is available. + */ + @Nullable + DomainKind getDomainKind(); + + /** + * Returns a QueryUpdateService implementation for this TableInfo, + * or null if the table/query is not updatable. + * @return A QueryUpdateService implementation for this table/query or null. + */ + @Nullable + QueryUpdateService getUpdateService(); + + enum TriggerType + { + INSERT, UPDATE, DELETE, SELECT, TRUNCATE, MOVE; + + public String getMethodName() + { + String name = name(); + return name.substring(0, 1) + name.toLowerCase().substring(1, name.length()); + } + } + + /** + * Enumeration of trigger methods that may be implemented in trigger scripts. + */ + enum TriggerMethod + { + init, complete, + beforeInsert, afterInsert, + beforeUpdate, afterUpdate, + beforeDelete, afterDelete + } + + + /** + * Queries may have named parameters, SELECT queries (the only kind we have right now) may + * return TableInfos. This is how you find out if a TableInfo representing a query has named + * parameters + */ + + @NotNull + Collection getNamedParameters(); + + /** + * Executes any trigger scripts for this table. + *

+ * The trigger should be called once before and once after an entire set of rows for each of the + * INSERT, UPDATE, DELETE trigger types. A trigger script may set up data structures to be used + * during validation. In particular, the trigger script might want to do a query to populate a set of + * legal values. + *

+ * The errors parameter holds validation error messages for the entire row set. + * If errors are created during the row level trigger script, they should be added as nested ValidationExceptions. + * The ValidationException will be thrown after executing the trigger scripts if it contains any errors. + *

+ * Example usage: + *

+     *   ValidationException errors = new ValidationException();
+     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, true, errors);
+     *
+     *   List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(rows.size());
+     *   for (int i = 0; i < rows.size(); i++)
+     *   {
+     *       try
+     *       {
+     *           Map row = rows.get(i);
+     *           Map oldRow = getRow( ... );
+     *           if (oldRow == null)
+     *               throw new NotFoundException("The existing row was not found.");
+     *
+     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, true, i, row, oldRow);
+     *           Map updatedRow = updateRow(user, container, row, oldRow);
+     *           if (updatedRow == null)
+     *               continue;
+     *
+     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow);
+     *           result.add(updatedRow);
+     *       }
+     *       catch (ValidationException vex)
+     *       {
+     *           errors.addNested(vex);
+     *       }
+     *   }
+     *
+     *   // Firing the after batch trigger will throw a ValidationException if
+     *   // any errors were generated during the row triggers or the during after batch trigger.
+     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, false, errors);
+     * 
+ * + * @param c The current Container. + * @param user the current user + * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. + * @param before true if the trigger is before the event, false if after the event. + * @param errors Any errors created by the validation script will be added to the errors collection. + * @param extraContext Optional additional bindings to set in the script's context when evaluating. + * @throws BatchValidationException if the trigger function returns false or the errors map isn't empty. + */ + void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map extraContext) + throws BatchValidationException; + + default void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, + @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) + throws ValidationException + { + fireRowTrigger(c, user, type, null, before, rowNumber, newRow, oldRow, extraContext, null); + } + + /** + * Fire trigger for a single row. + *

+ * The trigger is called once before and once after each row for each of the INSERT, UPDATE, DELETE + * trigger types. + *

+ * The following table describes the parameters for each of the trigger types: + *

+ *
INSERT: + *
    + *
  • before: newRow contains the row values to be inserted, oldRow is null. + *
  • after: newRow contains the inserted row values, oldRow is null. + *
  • + * + *
    UPDATE: + *
      + *
    • before: newRow contains the row values to be updated, oldRow contains the previous version of the row. + *
    • after: newRow contains the updated row values, oldRow contains the previous version of the row. + *
    + * + *
    DELETE: + *
      + *
    • before: oldRow contains the previous version of the row. + *
    • after: newRow is null, oldRow contains the previous version of the row. + *
    • + *
+ * + * @param c The current Container. + * @param user the current user + * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. + * @param before true if the trigger is before the event, false if after the event. + * @param newRow The new row for INSERT and UPDATE. + * @param oldRow The previous row for UPDATE and DELETE + * @param extraContext Optional additional bindings to set in the script's context when evaluating. + * @param existingRecord Optional existing record for the row, used for merge operation to differentiate new vs existing row + * @throws ValidationException if the trigger function returns false or the errors map isn't empty. + */ + void fireRowTrigger( + Container c, + User user, + TriggerType type, + @Nullable QueryUpdateService.InsertOption insertOption, + boolean before, + int rowNumber, + @Nullable Map newRow, + @Nullable Map oldRow, + Map extraContext, + @Nullable Map existingRecord + ) throws ValidationException; + + /** + * Return true if there are trigger scripts associated with this table. + */ + boolean hasTriggers(@Nullable Container c); + + /** + * Return true if all trigger scripts support streaming. + */ + default boolean canStreamTriggers(Container c) + { + return false; + } + + /** + * Returns the full set of columns managed by triggers for this TableInfo. + */ + default @Nullable Set getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption) + { + return null; + } + + /** + * Reset the trigger script context by reloading them. Note there could still be caches that need to be reset + * and script init() to rerun. + * + * @param c The current container + */ + void resetTriggers(Container c); + + /** + * Returns true if the underlying database table has triggers. 
+ */ + default boolean hasDbTriggers() + { + return false; + } + + /* for asserting that the TableInfo is not changed unexpectedly */ + void setLocked(boolean b); + boolean isLocked(); + + boolean supportsContainerFilter(); + boolean hasUnionTable(); + + /** + * Returns a ContainerContext for this table or null if ContainerContext is not supported. + * + * @return The ContainerContext for this table or null. + */ + @Nullable + ContainerContext getContainerContext(); + + /** + * Return the FieldKey of the Container column for this table. + * If the value is non-null then getContainerContext() will + * return a FieldKeyContext using the container FieldKey. + * + * @return FieldKey of the Container column. + */ + @Nullable FieldKey getContainerFieldKey(); + + /** + * Returns whether this table supports audit tracking of insert, updates and deletes by implementing the + * AuditConfigurable interface. + */ + boolean supportsAuditTracking(); + + /** + * @return set of all columns involved in the query + */ + Set getAllInvolvedColumns(Collection selectColumns); + + /** see interface AuditConfigurable */ + @NotNull + default AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.NONE; + } + + @NotNull + default AuditBehaviorType getEffectiveAuditBehavior() + { + return getXmlAuditBehaviorType() == null ? 
getDefaultAuditBehavior() : getXmlAuditBehaviorType(); + } + + /** + * Retrieves the audit behavior for this table taking into account, in order of precedence: + * - the override value provided + * - the setting from the XML file (always returned if there is a value set) + * - the value supplied by this table's implementation + * + * @param apiOverride value used to override the behavior type provided by the table implementation + * @return audit behavior for this table + */ + @NotNull + default AuditBehaviorType getEffectiveAuditBehavior(@Nullable AuditBehaviorType apiOverride) + { + return AuditBehaviorType.getEffectiveAuditLevel(apiOverride, getEffectiveAuditBehavior()); + } + + default AuditBehaviorType getEffectiveAuditBehavior(@Nullable String apiOverrideValue) + { + if (apiOverrideValue != null) + { + try + { + return getEffectiveAuditBehavior(AuditBehaviorType.valueOf(apiOverrideValue)); + } + catch (IllegalArgumentException ignored) + { + } + } + return getEffectiveAuditBehavior(); + } + + /* Can be used to distinguish AuditBehaviorType.NONE vs absent xml audit config */ + default AuditBehaviorType getXmlAuditBehaviorType() + { + return null; + } + + /* fields to include in detailed UPDATE audit log, even if no change is made to field value */ + @NotNull + default Set getExtraDetailedUpdateAuditFields() + { + return Collections.emptySet(); + } + + + // we exclude these from the detailed record because they are already on the audit record itself and + // depending on the data iterator behavior (e.g., for ExpDataIterators.getDataIterator), these values + // time of creating the audit log may actually already have been updated so the difference shown will be incorrect. 
+ Set defaultExcludedDetailedUpdateAuditFields = CaseInsensitiveHashSet.of("Modified", "ModifiedBy", "Created", "CreatedBy"); + + @NotNull + default Set getExcludedDetailedUpdateAuditFields() + { + return defaultExcludedDetailedUpdateAuditFields; + } + + + /** + * Returns the row primary key column to use for audit history details. Note, this must + * be a single key as we don't support multiple column primary keys for audit details. + */ + @Nullable + default FieldKey getAuditRowPk() + { + return null; + } + + default boolean hasInsertURLOverride() + { + return false; + } + + default boolean hasUpdateURLOverride() + { + return false; + } + + default boolean hasDeleteURLOverride() + { + return false; + } + + /** + * Allow QueryView to render insert, update, and other buttons if the metadata xml provides URL + * overrides and the underlying table doesn't support insert/update/delete operations, + * such as a query table. + */ + default boolean allowQueryTableURLOverrides() + { + return false; + } + + /** + * Max PHI across all columns in the table. + */ + default PHI getMaxPhiLevel() + { + return getColumns().stream() + .map(ColumnRenderProperties::getPHI) + .max(Comparator.comparingInt(PHI::getRank)) + .orElse(PHI.NotPHI); + } + + /** + * Get the max allowed PHI for the current user in the current container. + */ + default PHI getUserMaxAllowedPhiLevel() + { + UserSchema schema = getUserSchema(); + if (schema == null) + return PHI.NotPHI; + + return ComplianceService.get().getMaxAllowedPhi(schema.getContainer(), schema.getUser()); + } + + /** + * Return true if the current user is allowed the maximum phi level set across all columns. + */ + default boolean canUserAccessPhi() + { + return getMaxPhiLevel().isLevelAllowed(getUserMaxAllowedPhiLevel()); + } + + /** + * Useful helper to combine permission check/throw. This is especially useful for tables where permissions can + * differ from the containing schema/container. 
+ * + * TableInfo users should still always check hasPermission() as early as possible for best error handling, and + * to fail-fast (or degrade gracefully). + */ + default void checkReadBeforeExecute() + { + assert null != getUserSchema(); // this is only for tables in UserSchema + if (null != getUserSchema() && !hasPermission(getUserSchema().getUser(), ReadPermission.class)) + throw new UnauthorizedException(getUserSchema().getSchemaName() + "." + getName()); + } + + default List getSortFields() + { + return List.of(); + } + + default boolean supportsInsertOption(QueryUpdateService.InsertOption option) + { + return true; + } + + default void setAllowCalculatedColumns(boolean allowCalculatedColumns) {} + + /* Many tables should be indexed via controller provided views e.g. issues, announcements, wiki. + * We do not want crawlers to index data in these tables via query-executeQuery for instance. + */ + default boolean allowRobotsIndex() + { + return true; + } +} diff --git a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java index c7e776b87cf..1c099654cbe 100644 --- a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -58,7 +58,10 @@ public DataClassUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataItera var map = DataIteratorUtil.createColumnNameMap(in); - this._lsidColIndex = map.get(ExpDataTable.Column.LSID.name()); + Integer lsidIdx = map.get(ExpDataTable.Column.LSID.name()); + if (lsidIdx == null) + throw new IllegalStateException("LSID column not found in input."); + this._lsidColIndex = lsidIdx; Integer index = map.get(keyColumnName); ColumnInfo col = target.getColumn(keyColumnName); @@ -190,4 +193,4 @@ public boolean next() throws BatchValidationException return ret; } -} \ No newline at end of file +} diff --git 
a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java index 4e4d9f11767..e49b47357f5 100644 --- a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java @@ -540,7 +540,7 @@ public Object get(int i) { Object value = _keyValues.get(i); if (value != null) - return value; + return value; // TODO, why is this needed? } return _data.get(i); diff --git a/experiment/src/org/labkey/experiment/ExpDataIterators.java b/experiment/src/org/labkey/experiment/ExpDataIterators.java index 34fe07902a3..4bfe068ccb1 100644 --- a/experiment/src/org/labkey/experiment/ExpDataIterators.java +++ b/experiment/src/org/labkey/experiment/ExpDataIterators.java @@ -914,7 +914,7 @@ public DataIterator getDataIterator(DataIteratorContext context) else if (_isSample) di = new SampleUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); else - di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents);// + di = new DataUpdateDerivationDataIterator(di, context, _container, _user, _currentDataType, _checkRequiredParents); return LoggingDataIterator.wrap(di); } @@ -1185,7 +1185,7 @@ public boolean next() throws BatchValidationException // For each iteration, collect the parent col values if (hasNext) { - String lsid = (String) get(_lsidCol); // why lsid?, insert or merge + String lsid = (String) get(_lsidCol); String name = null; if (_nameCol != null) name = (String) get(_nameCol); diff --git a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java index 08d26a83b48..4d6458313b4 100644 --- a/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java +++ b/experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java @@ -819,7 +819,7 @@ public static void 
shortenAllStorageNames(ModuleContext context) } /** - * Called from exp-26.004-26.005.sql + * Called from exp-26.005-26.006.sql * Drop the lsid column from existing provisioned DataClass tables. */ @SuppressWarnings("unused") diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index 4b7ae4a05a0..e7f637bbef8 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -170,11 +170,9 @@ public class ExpDataClassDataTableImpl extends ExpRunItemTableImpl DATA_CLASS_ALT_MERGE_KEYS; - public static final Set DATA_CLASS_ALT_UPDATE_KEYS; private static final Set ALLOWED_IMPORT_HEADERS; static { DATA_CLASS_ALT_MERGE_KEYS = new HashSet<>(Arrays.asList(Column.ClassId.name(), Name.name())); - DATA_CLASS_ALT_UPDATE_KEYS = new HashSet<>(Arrays.asList(Column.RowId.name())); ALLOWED_IMPORT_HEADERS = new HashSet<>(Arrays.asList("description", "flag", "comment", "alias", "datafileurl")); } @@ -862,13 +860,6 @@ public CaseInsensitiveHashMap remapSchemaColumns() return DATA_CLASS_ALT_MERGE_KEYS; } - @Override - @NotNull - public Set getAltKeysForUpdate() - { - return DATA_CLASS_ALT_UPDATE_KEYS; - } - @Override public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) { @@ -1318,7 +1309,7 @@ public Map> getExistingRows(User user, Container co Set lsids = new HashSet<>(); for (Map dataRow : dataRows.values()) - lsids.add((String) dataRow.get("lsid")); // ? 
+ lsids.add((String) dataRow.get("lsid")); List seeds = ExperimentServiceImpl.get().getExpDatasByLSID(lsids); ExperimentServiceImpl.get().addRowsParentsFields(new HashSet<>(seeds), dataRows, user, container); diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index 33b412786e1..a8e50b11384 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -1,1880 +1,1873 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.collections4.ListUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.math3.util.Precision; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.assay.plate.AssayPlateMetadataService; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.cache.BlockingCache; -import org.labkey.api.cache.CacheManager; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.compliance.TableRules; -import org.labkey.api.compliance.TableRulesManager; -import org.labkey.api.data.ColumnHeaderType; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DataColumn; -import org.labkey.api.data.DataRegion; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MaterializedQueryHelper; -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.data.PHI; -import org.labkey.api.data.RenderContext; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.UnionContainerFilter; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.LoggingDataIterator; -import org.labkey.api.dataiterator.SimpleTranslator; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.api.ExpMaterial; -import 
org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ExperimentUrls; -import org.labkey.api.exp.api.NameExpressionOptionService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.DomainUtil; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpMaterialTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.inventory.InventoryService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.qc.SampleStatusService; -import org.labkey.api.query.AliasedColumn; -import org.labkey.api.query.DetailsURL; -import org.labkey.api.query.ExprColumn; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.LookupForeignKey; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryForeignKey; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.QueryUrls; -import org.labkey.api.query.RowIdForeignKey; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.column.BuiltInColumnTypes; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import 
org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HeartBeat; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.StringExpression; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import org.labkey.experiment.ExpDataIterators; -import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.lineage.LineageMethod; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; -import static 
org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLE_TYPE_FILE_DIRECTORY_NAME; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; -import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; -import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; - -public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable -{ - ExpSampleTypeImpl _ss; - Set _uniqueIdFields; - boolean _supportTableRules = true; - - private static final Set MATERIAL_ALT_KEYS; - private static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); - static { - MATERIAL_ALT_KEYS = CaseInsensitiveHashSet.of(MaterialSourceId.name(), Name.name()); - - Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); - IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); - amountValidator.setName("SampleAmountNonNegative"); - amountValidator.setExpressionValue("~gte=0"); - amountValidator.setErrorMessage("Amounts must be non-negative."); - amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); - AMOUNT_RANGE_VALIDATORS.add(amountValidator); - } - - public ExpMaterialTableImpl(UserSchema schema, ContainerFilter cf, @Nullable ExpSampleType sampleType) - { - super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); - setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); - setPublicSchemaName(ExpSchema.SCHEMA_NAME); - addAllowablePermission(InsertPermission.class); - addAllowablePermission(UpdatePermission.class); - addAllowablePermission(MoveEntitiesPermission.class); - 
setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); - setSampleType(sampleType); - } - - public Set getUniqueIdFields() - { - if (_uniqueIdFields == null) - { - _uniqueIdFields = new CaseInsensitiveHashSet(); - _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); - } - return _uniqueIdFields; - } - - @Override - protected ColumnInfo resolveColumn(String name) - { - ColumnInfo result = super.resolveColumn(name); - if (result == null) - { - if ("CpasType".equalsIgnoreCase(name)) - result = createColumn(SampleSet.name(), SampleSet); - else if (Property.name().equalsIgnoreCase(name)) - result = createPropertyColumn(Property.name()); - else if (QueryableInputs.name().equalsIgnoreCase(name)) - result = createColumn(QueryableInputs.name(), QueryableInputs); - } - return result; - } - - @Override - public ColumnInfo getExpObjectColumn() - { - var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); - ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); - return ret; - } - - @Override - public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) - { - if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) - { - // Special case sample auditing to help build a useful timeline view - return SampleTypeServiceImpl.get(); - } - - return super.getAuditHandler(auditBehaviorType); - } - - @Override - public MutableColumnInfo createColumn(String alias, Column column) - { - switch (column) - { - case Folder -> - { - return wrapColumn(alias, _rootTable.getColumn("Container")); - } - case LSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(LSID.name())); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - return columnInfo; - } - 
case MaterialSourceId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(MaterialSourceId.name())); - columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } - }); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case RootMaterialRowId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(RootMaterialRowId.name())); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), RowId.name())); - columnInfo.setLabel("Root Material"); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - - // NK: Here we mark the column as not required AND nullable which is the opposite of the database where - // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. - // See ExpMaterialValidatorIterator. 
- columnInfo.setRequired(false); - columnInfo.setNullable(true); - - return columnInfo; - } - case AliquotedFromLSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(AliquotedFromLSID.name())); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), LSID.name())); - columnInfo.setLabel("Aliquoted From Parent"); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - return columnInfo; - } - case IsAliquot -> - { - String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RootMaterialRowId.name(); - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); - ExprColumn columnInfo = new ExprColumn(this, IsAliquot.fieldKey(), new SQLFragment( - "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) - .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 - .append(" ELSE NULL END)"), JdbcType.BOOLEAN); - columnInfo.setLabel("Is Aliquot"); - columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(false); - return columnInfo; - } - case Name -> - { - var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); - // shut off this field in insert and update views if user specified names are not allowed - if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) - { - nameCol.setShownInInsertView(false); - nameCol.setShownInUpdateView(false); - } - return nameCol; - } - case RawAmount -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> 
new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - case StoredAmount -> - { - String label = StoredAmount.label(); - Set importAliases = Set.of(label, "Stored Amount"); - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setLabel(label); - columnInfo.setImportAliasesSet(importAliases); - columnInfo.setDescription("The amount of this sample currently on hand."); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - } - case RawUnits -> - { - var columnInfo = 
wrapColumn(alias, _rootTable.getColumn(Units.name())); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - return columnInfo; - } - case Units -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Units.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); - columnInfo.setFk(fk); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - return columnInfo; - } - } - case Description -> - { - return wrapColumn(alias, _rootTable.getColumn(Description.name())); - } - case SampleSet -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); - columnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) - { - @Override - protected ContainerFilter getLookupContainerFilter() - { - // Be sure that we can resolve the sample type if it's defined in a separate container. - // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
- // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has - // parents in current sample type and a sample type in the parent container - Set containers = new HashSet<>(); - if (null != getSampleType()) - containers.add(getSampleType().getContainer()); - containers.add(getContainer()); - if (getContainer().getProject() != null) - containers.add(getContainer().getProject()); - containers.add(ContainerManager.getSharedContainer()); - ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); - - if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) - cf = new UnionContainerFilter(_containerFilter, cf); - return cf; - } - }); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - return columnInfo; - } - case SourceProtocolLSID -> - { - // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module - ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( - "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + - " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); - columnInfo.setLabel("Source Protocol"); - columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case SourceProtocolApplication -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); - 
columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - columnInfo.setAutoIncrement(false); - return columnInfo; - } - case SourceApplicationInput -> - { - var col = createEdgeColumn(alias, SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); - col.setHidden(true); - return col; - } - case RunApplication -> - { - SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") - .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") - .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") - .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) - .append(")"); - - var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); - col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); - col.setUserEditable(false); - col.setReadOnly(true); - col.setHidden(true); - return col; - } - case RunApplicationOutput -> - { - var col = createEdgeColumn(alias, RunApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); - return col; - } - case Run -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); - ret.setFk(getExpSchema().getRunIdForeignKey(getContainerFilter())); - ret.setReadOnly(true); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case RowId -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); - // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary 
key's default sort direction - ret.setSortDirection(Sort.SortDirection.DESC); - ret.setFk(new RowIdForeignKey(ret)); - ret.setUserEditable(false); - ret.setHidden(true); - ret.setShownInInsertView(false); - ret.setHasDbSequence(true); - ret.setIsRootDbSequence(true); - return ret; - } - case Property -> - { - return createPropertyColumn(alias); - } - case Flag -> - { - return createFlagColumn(alias); - } - case Created -> - { - return wrapColumn(alias, _rootTable.getColumn("Created")); - } - case CreatedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("CreatedBy")); - } - case Modified -> - { - return wrapColumn(alias, _rootTable.getColumn("Modified")); - } - case ModifiedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); - } - case Alias -> - { - return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); - } - case Inputs -> - { - return createLineageColumn(this, alias, true, false); - } - case QueryableInputs -> - { - return createLineageColumn(this, alias, true, true); - } - case Outputs -> - { - return createLineageColumn(this, alias, false, false); - } - case Properties -> - { - return createPropertiesColumn(alias); - } - case SampleState -> - { - boolean statusEnabled = isStatusEnabled(getContainer()); - var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); - ret.setLabel("Status"); - ret.setHidden(!statusEnabled); - ret.setShownInDetailsView(statusEnabled); - ret.setShownInInsertView(statusEnabled); - ret.setShownInUpdateView(statusEnabled); - ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - return ret; - } - case AliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); - ret.setLabel(ALIQUOT_COUNT_LABEL); - return ret; - } - case 
RawAliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(RawAliquotVolume.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AliquotVolume -> - { - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AliquotVolume.name(), AliquotUnit.name(), ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The total amount of this sample's aliquots, in the display unit for the sample type, currently on hand."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(ALIQUOT_VOLUME_LABEL); - return ret; - } - } - case RawAvailableAliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(RawAvailableAliquotVolume.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AvailableAliquotVolume -> - { - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AvailableAliquotVolume.name(), AliquotUnit.name(), AVAILABLE_ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The total amount of this sample's available aliquots currently on hand, in the display unit for the sample type."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, 
_rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); - return ret; - } - } - case AvailableAliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); - ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); - return ret; - } - case RawAliquotUnit -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); - ret.setLabel(RawAliquotUnit.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AliquotUnit -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, AliquotUnit.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the AliquotAmount for this sample."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); - ret.setShownInDetailsView(false); - return ret; - } - } - case MaterialExpDate -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(MaterialExpDate.name())); - ret.setLabel("Expiration Date"); - ret.setShownInDetailsView(true); - ret.setShownInInsertView(true); - ret.setShownInUpdateView(true); - return ret; - } - case LastIndexed -> - { - var sql = new SQLFragment("(SELECT LastIndexed FROM ") - .append(ExperimentServiceImpl.get().getTinfoMaterialIndexed(), "mi") - .append(" WHERE mi.MaterialId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId)"); - var ret = new ExprColumn(this, LastIndexed.name(), sql, JdbcType.TIMESTAMP); - ret.setDescription("Date when the material was last full-text search indexed in the system"); - 
ret.setHidden(true); - ret.setReadOnly(true); - ret.setUserEditable(false); - return ret; - } - default -> throw new IllegalArgumentException("Unknown column " + column); - } - } - - @Override - public MutableColumnInfo createPropertyColumn(String alias) - { - var ret = wrapColumn(alias, _rootTable.getColumn(RowId.name())); - - if (_ss != null && _ss.getTinfo() != null) - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getLookupContainerFilter()).table(_ss.getTinfo()).key(RowId.name()).build()); - - ret.setIsUnselectable(true); - ret.setDescription("A holder for any custom fields associated with this sample"); - ret.setHidden(true); - return ret; - } - - private static boolean isStatusEnabled(Container c) - { - return SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(c).isEmpty(); - } - - private Unit getSampleTypeUnit() - { - Unit typeUnit = null; - if (_ss != null && _ss.getMetricUnit() != null) - typeUnit = Unit.fromName(_ss.getMetricUnit()); - return typeUnit; - } - - private void setSampleType(@Nullable ExpSampleType st) - { - checkLocked(); - if (_ss != null) - { - throw new IllegalStateException("Cannot unset sample type"); - } - if (st != null && !(st instanceof ExpSampleTypeImpl)) - { - throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); - } - _ss = (ExpSampleTypeImpl) st; - if (_ss != null) - { - setPublicSchemaName(SamplesSchema.SCHEMA_NAME); - setName(st.getName()); - - String description = _ss.getDescription(); - if (StringUtils.isEmpty(description)) - description = "Contains one row per sample in the " + _ss.getName() + " sample type"; - setDescription(description); - - if (canUserAccessPhi()) - { - ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); - setImportURL(new DetailsURL(url)); - } - } - } - - public ExpSampleType 
getSampleType() - { - return _ss; - } - - @Override - protected void populateColumns() - { - var st = getSampleType(); - var rowIdCol = addColumn(RowId); - addColumn(MaterialSourceId); - addColumn(SourceProtocolApplication); - addColumn(SourceApplicationInput); - addColumn(RunApplication); - addColumn(RunApplicationOutput); - addColumn(SourceProtocolLSID); - - List defaultCols = new ArrayList<>(); - var nameCol = addColumn(Name); - defaultCols.add(Name.fieldKey()); - if (st != null && st.hasNameAsIdCol()) - { - // Show the Name field but don't mark is as required when using name expressions - if (st.hasNameExpression()) - { - var nameExpression = st.getNameExpression(); - nameCol.setNameExpression(nameExpression); - nameCol.setNullable(true); - String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); - String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); - nameCol.setDescription(desc); - } - else - { - nameCol.setNullable(false); - } - } - else - { - nameCol.setReadOnly(true); - nameCol.setShownInInsertView(false); - } - - addColumn(Alias); - addColumn(Description); - addColumn(SampleSet); - addColumn(MaterialExpDate); - defaultCols.add(MaterialExpDate.fieldKey()); - addContainerColumn(Folder, null); - if (getContainer().hasProductFolders()) - defaultCols.add(Folder.fieldKey()); - addColumn(Run); - defaultCols.add(Run.fieldKey()); - addColumn(LSID); - addColumn(RootMaterialRowId); - addColumn(AliquotedFromLSID); - addColumn(IsAliquot); - addColumn(Created); - addColumn(CreatedBy); - addColumn(Modified); - addColumn(ModifiedBy); - if (st == null) - defaultCols.add(SampleSet.fieldKey()); - addColumn(Flag); - addColumn(SampleState); - if (isStatusEnabled(getContainer())) - defaultCols.add(SampleState.fieldKey()); - - // TODO is this a real Domain??? 
- if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) - { - defaultCols.add(Flag.fieldKey()); - addSampleTypeColumns(st, defaultCols); - - setName(_ss.getName()); - - ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); - gridUrl.addParameter("rowId", st.getRowId()); - setGridURL(new DetailsURL(gridUrl)); - } - - List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); - defaultCols.addAll(calculatedFieldKeys); - - addColumn(AliquotCount); - addColumn(AliquotVolume); - addColumn(AliquotUnit); - addColumn(AvailableAliquotCount); - addColumn(AvailableAliquotVolume); - - addColumn(StoredAmount); - defaultCols.add(StoredAmount.fieldKey()); - - addColumn(Units); - defaultCols.add(Units.fieldKey()); - - addColumn(RawAmount).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(StoredAmount.fieldKey()))); - addColumn(RawUnits).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(Units.fieldKey()))); - addColumn(RawAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotVolume.fieldKey()))); - addColumn(RawAvailableAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AvailableAliquotVolume.fieldKey()))); - addColumn(RawAliquotUnit).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotUnit.fieldKey()))); - - if (InventoryService.get() != null && (st == null || !st.isMedia())) - defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); - - SQLFragment sql; - UserSchema plateUserSchema; - // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset - // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying - // against tables in that schema. 
- if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) - plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); - else - plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); - - if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) - { - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); - SQLFragment existsSubquery = new SQLFragment() - .append("SELECT 1 FROM ") - .append(plateUserSchema.getTable("Well"), "well") - .append(" WHERE well.sampleid = ").append(rowIdField); - - sql = new SQLFragment() - .append("CASE WHEN EXISTS (") - .append(existsSubquery) - .append(") THEN 'Plated' ") - .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 - .append("ELSE NULL END"); - } - else - { - sql = new SQLFragment("(SELECT NULL)"); - } - var col = new ExprColumn(this, IsPlated.name(), sql, JdbcType.VARCHAR); - col.setDescription("Whether the sample that has been plated, if plating is supported."); - col.setUserEditable(false); - col.setReadOnly(true); - col.setShownInDetailsView(false); - col.setShownInInsertView(false); - col.setShownInUpdateView(false); - if (plateUserSchema != null) - col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + RowId.name() + "}")); - addColumn(col); - - addVocabularyDomains(); - addColumn(LastIndexed); - addColumn(Properties); - - var colInputs = addColumn(Inputs); - addMethod(Inputs.name(), new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); - - var colOutputs = addColumn(Outputs); - addMethod(Outputs.name(), new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); - - addExpObjectMethod(); - - ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); - DetailsURL 
url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); - nameCol.setURL(url); - rowIdCol.setURL(url); - setDetailsURL(url); - - if (canUserAccessPhi()) - { - ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); - setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); - - ActionURL insertActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); - setInsertURL(new DetailsURL(insertActionURL)); - } - else - { - setImportURL(LINK_DISABLER); - setInsertURL(LINK_DISABLER); - setUpdateURL(LINK_DISABLER); - } - - setTitleColumn(Name.toString()); - - setDefaultVisibleColumns(defaultCols); - - MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn(RowId.name()), _ss, true); - addColumn(lineageLookup); - } - - private ContainerFilter getSampleStatusLookupContainerFilter() - { - // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared - // for the sample status lookup since in the app project context we want to share status definitions across - // a given project instead of creating duplicate statuses in each subfolder project. - ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); - type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; - return type.create(getUserSchema()); - } - - @Override - public Domain getDomain() - { - return getDomain(false); - } - - @Override - public Domain getDomain(boolean forUpdate) - { - return _ss == null ? 
null : _ss.getDomain(forUpdate); - } - - public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) - { - if (nameExpression == null) - return currentDescription; - - StringBuilder sb = new StringBuilder(); - if (currentDescription != null && !currentDescription.isEmpty()) - { - sb.append(currentDescription); - if (!currentDescription.endsWith(".")) - sb.append("."); - sb.append("\n"); - } - - sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); - sb.append(nameExpression); - sb.append("."); - if (!StringUtils.isEmpty(nameExpressionPreview)) - { - sb.append("\nExample of name that will be generated from the current pattern: \n"); - sb.append(nameExpressionPreview); - } - - return sb.toString(); - } - - private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) - { - TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); - if (null == dbTable) - return; - - UserSchema schema = getUserSchema(); - Domain domain = st.getDomain(); - ColumnInfo rowIdColumn = getColumn(RowId); - ColumnInfo nameColumn = getColumn(Name); - - visibleColumns.remove(Run.fieldKey()); - - // When not using name expressions, mark the ID columns as required. - // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. - final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); - - Set mvColumns = domain.getProperties().stream() - .filter(ImportAliasable::isMvEnabled) - .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) - .collect(Collectors.toSet()); - - for (ColumnInfo dbColumn : dbTable.getColumns()) - { - // Don't include PHI columns in full text search index - // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() - if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) - continue; - - if ( - rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) - ) - { - continue; - } - - var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); - - // TODO missing values? comments? flags? - DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); - var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); - if (propColumn.getName().equalsIgnoreCase("genid")) - { - propColumn.setHidden(true); - propColumn.setUserEditable(false); - propColumn.setShownInDetailsView(false); - propColumn.setShownInInsertView(false); - propColumn.setShownInUpdateView(false); - } - if (null != dp) - { - PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), - SamplesSchema.SCHEMA_SAMPLES, st.getName(), RowId.fieldKey(), null, getLookupContainerFilter()); - - if (idCols.contains(dp)) - { - propColumn.setNullable(false); - propColumn.setDisplayColumnFactory(c -> new DataColumn(c) - { - @Override - protected boolean isDisabledInput(RenderContext ctx) - { - return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; - } - }); - } - - // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected - if (!propColumn.isHidden()) - { - visibleColumns.add(propColumn.getFieldKey()); - } - - if (propColumn.isMvEnabled()) - { - // The column in the physical table has a "_MVIndicator" suffix, but we want to expose - // it with a "MVIndicator" suffix (no underscore) - var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, - StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); - 
mvColumn.setLabel(dp.getLabel() != null ? dp.getLabel() : dp.getName() + " MV Indicator"); - mvColumn.setSqlTypeName("VARCHAR"); - mvColumn.setPropertyURI(dp.getPropertyURI()); - mvColumn.setNullable(true); - mvColumn.setUserEditable(false); - mvColumn.setHidden(true); - mvColumn.setMvIndicatorColumn(true); - - addColumn(mvColumn); - propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); - } - } - - if (!mvColumns.contains(propColumn.getFieldKey())) - addColumn(propColumn); - - } - - setDefaultVisibleColumns(visibleColumns); - } - - // These are mostly fields that are wrapped by fields with different names (see createColumn()) - // we could handle each case separately, but this is easier - static final Set wrappedFieldKeys = Set.of( - new FieldKey(null, "objectid"), - new FieldKey(null, "RowId"), - new FieldKey(null, "LSID"), // Flag - new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication - new FieldKey(null, "runId"), // Run, RunApplication - new FieldKey(null, "CpasType")); // SampleSet - static final Set ALL_COLUMNS = Set.of(); - - private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) - { - if (null == selectedColumns) - return ALL_COLUMNS; - selectedColumns = new TreeSet<>(selectedColumns); - if (selectedColumns.contains(new FieldKey(null, StoredAmount))) - selectedColumns.add(new FieldKey(null, Units)); - if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) - selectedColumns.add(new FieldKey(null, AliquotedFromLSID.name())); - if (selectedColumns.contains(new FieldKey(null, IsAliquot.name()))) - selectedColumns.add(new FieldKey(null, RootMaterialRowId.name())); - if (selectedColumns.contains(new FieldKey(null, AliquotVolume.name())) || selectedColumns.contains(new FieldKey(null, AvailableAliquotVolume.name()))) - selectedColumns.add(new FieldKey(null, AliquotUnit.name())); - selectedColumns.addAll(wrappedFieldKeys); - if (null != getFilter()) - 
selectedColumns.addAll(getFilter().getAllFieldKeys()); - return selectedColumns; - } - - @NotNull - @Override - public SQLFragment getFromSQL(String alias) - { - return getFromSQL(alias, null); - } - - @Override - public SQLFragment getFromSQLExpanded(String alias, Set selectedColumns) - { - SQLFragment sql = new SQLFragment("("); - boolean usedMaterialized; - - - // SELECT FROM - /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) - * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. - * Also, we may use RemapCache for material lookup outside a transaction - */ - boolean onlyMaterialColums = false; - if (null != selectedColumns && !selectedColumns.isEmpty()) - onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); - if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) - { - sql.append(getMaterializedSQL()); - usedMaterialized = true; - } - else - { - sql.append(getJoinSQL(selectedColumns)); - usedMaterialized = false; - } - - // WHERE - SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); - sql.append("\n").append(filterFrag); - if (_ss != null && !usedMaterialized) - { - if (!filterFrag.isEmpty()) - sql.append(" AND "); - else - sql.append(" WHERE "); - sql.append("CpasType = ").appendValue(_ss.getLSID()); - } - sql.append(") ").appendIdentifier(alias); - - return getTransformedFromSQL(sql); - } - - @Override - public void setSupportTableRules(boolean b) - { - this._supportTableRules = b; - } - - @Override - public boolean supportTableRules() // intentional override - { - return _supportTableRules; - } - - @Override - protected @NotNull TableRules findTableRules() - { - Container definitionContainer = getUserSchema().getContainer(); - if (null != _ss) - definitionContainer = 
_ss.getContainer(); - return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); - } - - - static class InvalidationCounters - { - public final AtomicLong update, insert, delete, rollup; - InvalidationCounters() - { - long l = System.currentTimeMillis(); - update = new AtomicLong(l); - insert = new AtomicLong(l); - delete = new AtomicLong(l); - rollup = new AtomicLong(l); - } - } - - static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); - static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); - static final AtomicBoolean initializedListeners = new AtomicBoolean(false); - - // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() - public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - var scope = ExperimentServiceImpl.getExpSchema().getScope(); - var runnable = new RefreshMaterializedViewRunnable(lsid, reason); - scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); - } - - private static class RefreshMaterializedViewRunnable implements Runnable - { - private final String _lsid; - private final SampleTypeServiceImpl.SampleChangeType _reason; - - public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - _lsid = lsid; - _reason = reason; - } - - @Override - public void run() - { - if (_reason == schema) - { - /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. - * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the - * MQH from the cache to force the SQL to be regenerated. 
- */ - _materializedQueries.remove(_lsid); - return; - } - var counters = getInvalidateCounters(_lsid); - switch (_reason) - { - case insert -> counters.insert.incrementAndGet(); - case rollup -> counters.rollup.incrementAndGet(); - case update -> counters.update.incrementAndGet(); - case delete -> counters.delete.incrementAndGet(); - default -> throw new IllegalStateException("Unexpected value: " + _reason); - } - } - - @Override - public boolean equals(Object obj) - { - return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); - } - } - - private static InvalidationCounters getInvalidateCounters(String lsid) - { - if (!initializedListeners.getAndSet(true)) - { - CacheManager.addListener(_invalidationCounters::clear); - } - return _invalidationCounters.computeIfAbsent(lsid, (unused) -> - new InvalidationCounters() - ); - } - - /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ - private SQLFragment getMaterializedSQL() - { - if (null == _ss) - return getJoinSQL(null); - - var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> - { - /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. - * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, - * even if just to help with race-conditions. - * - * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
- */ - SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); - return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") - .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") - .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") - .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) - .build(); - }); - return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); - } - - - /** - * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. - * It does not help with incremental updates (except for providing the upsert() method). - * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
- */ - static class _MaterializedQueryHelper extends MaterializedQueryHelper - { - final String _lsid; - - static class Builder extends MaterializedQueryHelper.Builder - { - String _lsid; - - public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) - { - super(prefix, scope, select); - this._lsid = lsid; - } - - @Override - public _MaterializedQueryHelper build() - { - return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); - } - } - - _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, - boolean isSelectIntoSql) - { - super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); - this._lsid = lsid; - } - - @Override - protected Materialized createMaterialized(String txCacheKey) - { - DbSchema temp = DbSchema.getTemp(); - String name = _prefix + "_" + GUID.makeHash(); - _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); - initMaterialized(materialized); - return materialized; - } - - @Override - protected void incrementalUpdateBeforeSelect(Materialized m) - { - _Materialized materialized = (_Materialized) m; - - boolean lockAcquired = false; - try - { - lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); - if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) - throw materialized._loadException; - - if (!materialized.incrementalDeleteCheck.stillValid(0)) - executeIncrementalDelete(); - if (!materialized.incrementalRollupCheck.stillValid(0)) - executeIncrementalRollup(); - if (!materialized.incrementalInsertCheck.stillValid(0)) - executeIncrementalInsert(); - } - catch (RuntimeException|InterruptedException ex) - { - RuntimeException rex = UnexpectedException.wrap(ex); - 
materialized.setError(rex); - // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. - - // Ensure that next refresh starts clean - _materializedQueries.remove(_lsid); - getInvalidateCounters(_lsid).update.incrementAndGet(); - throw rex; - } - finally - { - if (lockAcquired) - materialized.getLock().unlock(); - } - } - - void upsertWithRetry(SQLFragment sql) - { - // not actually read-only, but we don't want to start an explicit transaction - _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); - } - - void executeIncrementalInsert() - { - SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") - .append("SELECT * FROM (") - .append(getViewSourceSql()).append(") viewsource_\n") - .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); - upsertWithRetry(incremental); - } - - void executeIncrementalDelete() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) - // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) - SQLFragment incremental = new SQLFragment() - .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") - .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); - upsertWithRetry(incremental); - } - - void executeIncrementalRollup() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - SQLFragment incremental = new SQLFragment(); - if (d.isPostgreSQL()) - { - incremental - .append("UPDATE temp.${NAME} AS st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM exp.Material AS expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") - .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") - .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") - .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") - .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") - .append(")"); - } - else - { - // SQL Server 2022 supports IS DISTINCT FROM - incremental - .append("UPDATE st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM temp.${NAME} st, exp.Material expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") - .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") - .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") - .append(")"); - } - upsertWithRetry(incremental); - } - } - - static class _Materialized extends MaterializedQueryHelper.Materialized - { - final MaterializedQueryHelper.Invalidator incrementalInsertCheck; - final MaterializedQueryHelper.Invalidator incrementalRollupCheck; - final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; - - _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) - { - super(mqh, tableName, cacheKey, created, sql); - final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); - incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); - incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); - incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); - } - - @Override - public void reset() - { - super.reset(); - long now = HeartBeat.currentTimeMillis(); - incrementalInsertCheck.stillValid(now); - incrementalRollupCheck.stillValid(now); - incrementalDeleteCheck.stillValid(now); - } - - Lock getLock() - { - return _loadingLock; - } - } - - - /* SELECT and JOIN, does not include WHERE */ - private SQLFragment getJoinSQL(Set selectedColumns) - { - TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); - Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); - provisionedCols.remove(RowId.name()); - provisionedCols.remove(Name.name()); - boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); - - boolean hasSampleColumns = false; - boolean hasAliquotColumns = false; - - Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); - selectedColumns = computeInnerSelectedColumns(selectedColumns); - - SQLFragment sql = new SQLFragment(); - sql.appendComment("", getSqlDialect()); - sql.append("SELECT "); - String comma = ""; - for (String materialCol : materialCols) - { - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) - { - sql.append(comma).append("m.").appendIdentifier(materialCol); - comma = ", "; - } - } - if (null != provisioned && hasProvisionedColumns) - { - for (ColumnInfo propertyColumn : provisioned.getColumns()) - { - // don't select twice - if ( - RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) - ) - { - continue; - } - - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) - { - sql.append(comma); - boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) - || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); - if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - else if (rootField) - { - sql.append(propertyColumn.getValueSql("m_sample")).append(" AS 
").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasSampleColumns = true; - } - else - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - comma = ", "; - } - } - } - - sql.append("\nFROM "); - sql.append(_rootTable, "m"); - if (hasSampleColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); - if (hasAliquotColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); - - sql.appendComment("", getSqlDialect()); - return sql; - } - - private static class SampleTypeAmountDisplayColumn extends ExprColumn - { - public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) - { - super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" / ? AS ") - .append(parent.getSqlDialect().isPostgreSQL() ? "DECIMAL" : "DOUBLE PRECISION") - .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.getValue()), - JdbcType.DOUBLE); - - setLabel(label); - setImportAliasesSet(importAliases); - } - } - - private static class SampleTypeUnitDisplayColumn extends ExprColumn - { - public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) - { - super(parent, FieldKey.fromParts(unitFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? THEN ? 
ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.toString()), - JdbcType.VARCHAR); - } - } - - @Override - public QueryUpdateService getUpdateService() - { - return new SampleTypeUpdateServiceDI(this, _ss); - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) - { - if (_ss == null) - { - // Allow read and delete for exp.Materials. - // Don't allow insert/update on exp.Materials without a sample type. - if (perm == DeletePermission.class || perm == ReadPermission.class) - return getContainer().hasPermission(user, perm); - return false; - } - - if (_ss.isMedia() && perm == ReadPermission.class) - return getContainer().hasPermission(user, MediaReadPermission.class); - - return super.hasPermission(user, perm); - } - - @NotNull - @Override - public List getUniqueIndices() - { - // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" - // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. 
- // Some FKs to ExpMaterialTable don't include the "Name" column (e.g., NabBaseTable.Specimen) - String indexName = "idx_material_ak"; - List ret = new ArrayList<>(super.getUniqueIndices()); - if (getColumn("Name") != null) - ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); - else - ret.removeIf(def -> def.name().equals(indexName)); - return Collections.unmodifiableList(ret); - } - - // - // UpdatableTableInfo - // - - @Override - public @Nullable Long getOwnerObjectId() - { - return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); - } - - @Nullable - @Override - public CaseInsensitiveHashMap remapSchemaColumns() - { - CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); - - if (null != getRealTable().getColumn("container") && null != getColumn("folder")) - { - m.put("container", "folder"); - } - - for (ColumnInfo col : getColumns()) - { - if (col.getMvColumnName() != null) - m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); - } - - return m; - } - - @Override - public Set getAltMergeKeys(DataIteratorContext context) - { - return MATERIAL_ALT_KEYS; - } - - @Override - public @NotNull Set getAltKeysForUpdate() - { - return MATERIAL_ALT_KEYS; - } - - @Override - public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) - { - Set keyColumnNames = new CaseInsensitiveHashSet(); - - if (context.getInsertOption().allowUpdate) - { - if (context.getInsertOption().updateOnly) - { - if (colNameMap.containsKey(RowId.name())) - keyColumnNames.add(RowId.name()); - else - { - for (String altKey : getAltKeysForUpdate()) - { - if (colNameMap.containsKey(altKey)) - keyColumnNames.add(altKey); - } - } - } - else - { - Set altMergeKeys = getAltMergeKeys(context); - if (altMergeKeys != null) - keyColumnNames.addAll(altMergeKeys); - } - } - - return keyColumnNames.isEmpty() ? 
null : keyColumnNames; - } - - @Override - public @Nullable Set getExistingRecordSharedKeyColumnNames() - { - return CaseInsensitiveHashSet.of(MaterialSourceId.name()); - } - - @Override - @NotNull - public List> getAdditionalRequiredInsertColumns() - { - if (getSampleType() == null) - return Collections.emptyList(); - - try - { - return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); - } - catch (IOException e) - { - return Collections.emptyList(); - } - } - - @Override - public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) - { - TableInfo propertiesTable = _ss.getTinfo(); - - // The specimens sample type doesn't have a properties table - if (propertiesTable == null) - return data; - - try - { - final Container container = getContainer(); - final User user = getUserSchema().getUser(); - ExperimentServiceImpl expService = ExperimentServiceImpl.get(); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); - - DataIteratorBuilder dib = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, container, user, _ss.getImportAliasesIncludingAliquot()) - .setFileLinkDirectory(SAMPLE_TYPE_FILE_DIRECTORY_NAME) - .setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> { - List lsids = searchIndexDataKeys.lsids(); - List orderedRowIds = searchIndexDataKeys.orderedRowIds(); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : expService.getExpMaterials(sublist)) - expMaterial.index(q, this); - }) - ); - - ListUtils.partition(lsids, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : expService.getExpMaterialsByLsid(sublist)) - expMaterial.index(q, this); - }) - ); - }, 
DbScope.CommitTaskOption.POSTCOMMIT) - ); - - dib = LoggingDataIterator.wrap(dib); - return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(dib, container, user, expService.getTinfoMaterialAliasMap(), _ss, true)); - } - catch (IOException e) - { - throw new UncheckedIOException(e); - } - } - - @Override - @NotNull - public AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.DETAILED; - } - - static final Set excludeFromDetailedAuditField; - static - { - var set = new CaseInsensitiveHashSet(); - set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); - set.addAll(ExpDataIterators.MATERIAL_NOT_FOR_UPDATE); - // We don't want the inventory columns to show up in the sample timeline audit record; - // they are captured in their own audit record. - set.addAll(InventoryService.InventoryStatusColumn.names()); - excludeFromDetailedAuditField = Collections.unmodifiableSet(set); - } - - @Override - public @NotNull Set getExcludedDetailedUpdateAuditFields() - { - // uniqueId fields don't change in reality, so exclude them from the audit updates - Set excluded = new CaseInsensitiveHashSet(); - excluded.addAll(this.getUniqueIdFields()); - excluded.addAll(excludeFromDetailedAuditField); - return excluded; - } - - @Override - public List> getImportTemplates(ViewContext ctx) - { - // respect any metadata overrides - if (getRawImportTemplates() != null) - return super.getImportTemplates(ctx); - - List> templates = new ArrayList<>(); - ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); - url.addParameter("headerType", ColumnHeaderType.ImportField.name()); - try - { - if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) - { - for (String aliasKey : getSampleType().getImportAliases().keySet()) - url.addParameter("includeColumn", aliasKey); - } - } - catch (IOException e) - {} - templates.add(Pair.of("Download Template", url.toString())); - 
return templates; - } - - @Override - public void overlayMetadata(String tableName, UserSchema schema, Collection errors) - { - if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) - { - Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); - if (null != metadata) - { - overlayMetadata(metadata, schema, errors); - } - } - super.overlayMetadata(tableName, schema, errors); - } - - static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn - { - private final Unit typeUnit; - private final boolean applySampleTypePrecision; - - public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) - { - super(col, false); - this.typeUnit = typeUnit; - this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user - } - - @Override - public Object getDisplayValue(RenderContext ctx) - { - Object value = super.getDisplayValue(ctx); - if (this.applySampleTypePrecision && value != null) - { - int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); - value = Precision.round(Double.valueOf(value.toString()), scale); - } - return value; - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.experiment.api; + +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Precision; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.assay.plate.AssayPlateMetadataService; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.cache.BlockingCache; +import org.labkey.api.cache.CacheManager; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.compliance.TableRules; +import org.labkey.api.compliance.TableRulesManager; +import org.labkey.api.data.ColumnHeaderType; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DataColumn; +import org.labkey.api.data.DataRegion; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MaterializedQueryHelper; +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.data.PHI; +import org.labkey.api.data.RenderContext; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.UnionContainerFilter; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.LoggingDataIterator; +import org.labkey.api.dataiterator.SimpleTranslator; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.api.ExpMaterial; +import 
org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ExperimentUrls; +import org.labkey.api.exp.api.NameExpressionOptionService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.DomainUtil; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpMaterialTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.inventory.InventoryService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.qc.SampleStatusService; +import org.labkey.api.query.AliasedColumn; +import org.labkey.api.query.DetailsURL; +import org.labkey.api.query.ExprColumn; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.LookupForeignKey; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryForeignKey; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.QueryUrls; +import org.labkey.api.query.RowIdForeignKey; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.column.BuiltInColumnTypes; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import 
org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HeartBeat; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.StringExpression; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.experiment.ExpDataIterators; +import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.lineage.LineageMethod; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; +import static 
org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLE_TYPE_FILE_DIRECTORY_NAME; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; +import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; +import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; + +public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable +{ + ExpSampleTypeImpl _ss; + Set _uniqueIdFields; + boolean _supportTableRules = true; + + private static final Set MATERIAL_ALT_KEYS; + private static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); + static { + MATERIAL_ALT_KEYS = CaseInsensitiveHashSet.of(MaterialSourceId.name(), Name.name()); + + Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); + IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); + amountValidator.setName("SampleAmountNonNegative"); + amountValidator.setExpressionValue("~gte=0"); + amountValidator.setErrorMessage("Amounts must be non-negative."); + amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); + AMOUNT_RANGE_VALIDATORS.add(amountValidator); + } + + public ExpMaterialTableImpl(UserSchema schema, ContainerFilter cf, @Nullable ExpSampleType sampleType) + { + super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); + setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); + setPublicSchemaName(ExpSchema.SCHEMA_NAME); + addAllowablePermission(InsertPermission.class); + addAllowablePermission(UpdatePermission.class); + addAllowablePermission(MoveEntitiesPermission.class); + 
setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); + setSampleType(sampleType); + } + + public Set getUniqueIdFields() + { + if (_uniqueIdFields == null) + { + _uniqueIdFields = new CaseInsensitiveHashSet(); + _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); + } + return _uniqueIdFields; + } + + @Override + protected ColumnInfo resolveColumn(String name) + { + ColumnInfo result = super.resolveColumn(name); + if (result == null) + { + if ("CpasType".equalsIgnoreCase(name)) + result = createColumn(SampleSet.name(), SampleSet); + else if (Property.name().equalsIgnoreCase(name)) + result = createPropertyColumn(Property.name()); + else if (QueryableInputs.name().equalsIgnoreCase(name)) + result = createColumn(QueryableInputs.name(), QueryableInputs); + } + return result; + } + + @Override + public ColumnInfo getExpObjectColumn() + { + var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); + ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); + return ret; + } + + @Override + public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) + { + if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) + { + // Special case sample auditing to help build a useful timeline view + return SampleTypeServiceImpl.get(); + } + + return super.getAuditHandler(auditBehaviorType); + } + + @Override + public MutableColumnInfo createColumn(String alias, Column column) + { + switch (column) + { + case Folder -> + { + return wrapColumn(alias, _rootTable.getColumn("Container")); + } + case LSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(LSID.name())); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + return columnInfo; + } + 
case MaterialSourceId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(MaterialSourceId.name())); + columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case RootMaterialRowId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(RootMaterialRowId.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), RowId.name())); + columnInfo.setLabel("Root Material"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + + // NK: Here we mark the column as not required AND nullable which is the opposite of the database where + // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. + // See ExpMaterialValidatorIterator. 
+ columnInfo.setRequired(false); + columnInfo.setNullable(true); + + return columnInfo; + } + case AliquotedFromLSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(AliquotedFromLSID.name())); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), LSID.name())); + columnInfo.setLabel("Aliquoted From Parent"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + return columnInfo; + } + case IsAliquot -> + { + String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RootMaterialRowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); + ExprColumn columnInfo = new ExprColumn(this, IsAliquot.fieldKey(), new SQLFragment( + "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) + .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 + .append(" ELSE NULL END)"), JdbcType.BOOLEAN); + columnInfo.setLabel("Is Aliquot"); + columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(false); + return columnInfo; + } + case Name -> + { + var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); + // shut off this field in insert and update views if user specified names are not allowed + if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) + { + nameCol.setShownInInsertView(false); + nameCol.setShownInUpdateView(false); + } + return nameCol; + } + case RawAmount -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> 
new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + case StoredAmount -> + { + String label = StoredAmount.label(); + Set importAliases = Set.of(label, "Stored Amount"); + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setLabel(label); + columnInfo.setImportAliasesSet(importAliases); + columnInfo.setDescription("The amount of this sample currently on hand."); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + } + case RawUnits -> + { + var columnInfo = 
wrapColumn(alias, _rootTable.getColumn(Units.name())); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + return columnInfo; + } + case Units -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Units.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); + columnInfo.setFk(fk); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + return columnInfo; + } + } + case Description -> + { + return wrapColumn(alias, _rootTable.getColumn(Description.name())); + } + case SampleSet -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); + columnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) + { + @Override + protected ContainerFilter getLookupContainerFilter() + { + // Be sure that we can resolve the sample type if it's defined in a separate container. + // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
+ // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has + // parents in current sample type and a sample type in the parent container + Set containers = new HashSet<>(); + if (null != getSampleType()) + containers.add(getSampleType().getContainer()); + containers.add(getContainer()); + if (getContainer().getProject() != null) + containers.add(getContainer().getProject()); + containers.add(ContainerManager.getSharedContainer()); + ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); + + if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) + cf = new UnionContainerFilter(_containerFilter, cf); + return cf; + } + }); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + return columnInfo; + } + case SourceProtocolLSID -> + { + // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module + ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( + "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + + " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); + columnInfo.setLabel("Source Protocol"); + columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case SourceProtocolApplication -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); + 
columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + columnInfo.setAutoIncrement(false); + return columnInfo; + } + case SourceApplicationInput -> + { + var col = createEdgeColumn(alias, SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); + col.setHidden(true); + return col; + } + case RunApplication -> + { + SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") + .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") + .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") + .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) + .append(")"); + + var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); + col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); + col.setUserEditable(false); + col.setReadOnly(true); + col.setHidden(true); + return col; + } + case RunApplicationOutput -> + { + var col = createEdgeColumn(alias, RunApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); + return col; + } + case Run -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); + ret.setFk(getExpSchema().getRunIdForeignKey(getContainerFilter())); + ret.setReadOnly(true); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case RowId -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); + // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary 
key's default sort direction + ret.setSortDirection(Sort.SortDirection.DESC); + ret.setFk(new RowIdForeignKey(ret)); + ret.setUserEditable(false); + ret.setHidden(true); + ret.setShownInInsertView(false); + ret.setHasDbSequence(true); + ret.setIsRootDbSequence(true); + return ret; + } + case Property -> + { + return createPropertyColumn(alias); + } + case Flag -> + { + return createFlagColumn(alias); + } + case Created -> + { + return wrapColumn(alias, _rootTable.getColumn("Created")); + } + case CreatedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("CreatedBy")); + } + case Modified -> + { + return wrapColumn(alias, _rootTable.getColumn("Modified")); + } + case ModifiedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); + } + case Alias -> + { + return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); + } + case Inputs -> + { + return createLineageColumn(this, alias, true, false); + } + case QueryableInputs -> + { + return createLineageColumn(this, alias, true, true); + } + case Outputs -> + { + return createLineageColumn(this, alias, false, false); + } + case Properties -> + { + return createPropertiesColumn(alias); + } + case SampleState -> + { + boolean statusEnabled = isStatusEnabled(getContainer()); + var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); + ret.setLabel("Status"); + ret.setHidden(!statusEnabled); + ret.setShownInDetailsView(statusEnabled); + ret.setShownInInsertView(statusEnabled); + ret.setShownInUpdateView(statusEnabled); + ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + return ret; + } + case AliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); + ret.setLabel(ALIQUOT_COUNT_LABEL); + return ret; + } + case 
RawAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(RawAliquotVolume.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AliquotVolume -> + { + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AliquotVolume.name(), AliquotUnit.name(), ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The total amount of this sample's aliquots, in the display unit for the sample type, currently on hand."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(ALIQUOT_VOLUME_LABEL); + return ret; + } + } + case RawAvailableAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(RawAvailableAliquotVolume.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AvailableAliquotVolume -> + { + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AvailableAliquotVolume.name(), AliquotUnit.name(), AVAILABLE_ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The total amount of this sample's available aliquots currently on hand, in the display unit for the sample type."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, 
_rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); + return ret; + } + } + case AvailableAliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); + ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); + return ret; + } + case RawAliquotUnit -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); + ret.setLabel(RawAliquotUnit.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AliquotUnit -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, AliquotUnit.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the AliquotAmount for this sample."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); + ret.setShownInDetailsView(false); + return ret; + } + } + case MaterialExpDate -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(MaterialExpDate.name())); + ret.setLabel("Expiration Date"); + ret.setShownInDetailsView(true); + ret.setShownInInsertView(true); + ret.setShownInUpdateView(true); + return ret; + } + case LastIndexed -> + { + var sql = new SQLFragment("(SELECT LastIndexed FROM ") + .append(ExperimentServiceImpl.get().getTinfoMaterialIndexed(), "mi") + .append(" WHERE mi.MaterialId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId)"); + var ret = new ExprColumn(this, LastIndexed.name(), sql, JdbcType.TIMESTAMP); + ret.setDescription("Date when the material was last full-text search indexed in the system"); + 
ret.setHidden(true); + ret.setReadOnly(true); + ret.setUserEditable(false); + return ret; + } + default -> throw new IllegalArgumentException("Unknown column " + column); + } + } + + @Override + public MutableColumnInfo createPropertyColumn(String alias) + { + var ret = wrapColumn(alias, _rootTable.getColumn(RowId.name())); + + if (_ss != null && _ss.getTinfo() != null) + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getLookupContainerFilter()).table(_ss.getTinfo()).key(RowId.name()).build()); + + ret.setIsUnselectable(true); + ret.setDescription("A holder for any custom fields associated with this sample"); + ret.setHidden(true); + return ret; + } + + private static boolean isStatusEnabled(Container c) + { + return SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(c).isEmpty(); + } + + private Unit getSampleTypeUnit() + { + Unit typeUnit = null; + if (_ss != null && _ss.getMetricUnit() != null) + typeUnit = Unit.fromName(_ss.getMetricUnit()); + return typeUnit; + } + + private void setSampleType(@Nullable ExpSampleType st) + { + checkLocked(); + if (_ss != null) + { + throw new IllegalStateException("Cannot unset sample type"); + } + if (st != null && !(st instanceof ExpSampleTypeImpl)) + { + throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); + } + _ss = (ExpSampleTypeImpl) st; + if (_ss != null) + { + setPublicSchemaName(SamplesSchema.SCHEMA_NAME); + setName(st.getName()); + + String description = _ss.getDescription(); + if (StringUtils.isEmpty(description)) + description = "Contains one row per sample in the " + _ss.getName() + " sample type"; + setDescription(description); + + if (canUserAccessPhi()) + { + ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); + setImportURL(new DetailsURL(url)); + } + } + } + + public ExpSampleType 
getSampleType() + { + return _ss; + } + + @Override + protected void populateColumns() + { + var st = getSampleType(); + var rowIdCol = addColumn(RowId); + addColumn(MaterialSourceId); + addColumn(SourceProtocolApplication); + addColumn(SourceApplicationInput); + addColumn(RunApplication); + addColumn(RunApplicationOutput); + addColumn(SourceProtocolLSID); + + List defaultCols = new ArrayList<>(); + var nameCol = addColumn(Name); + defaultCols.add(Name.fieldKey()); + if (st != null && st.hasNameAsIdCol()) + { + // Show the Name field but don't mark it as required when using name expressions + if (st.hasNameExpression()) + { + var nameExpression = st.getNameExpression(); + nameCol.setNameExpression(nameExpression); + nameCol.setNullable(true); + String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); + String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); + nameCol.setDescription(desc); + } + else + { + nameCol.setNullable(false); + } + } + else + { + nameCol.setReadOnly(true); + nameCol.setShownInInsertView(false); + } + + addColumn(Alias); + addColumn(Description); + addColumn(SampleSet); + addColumn(MaterialExpDate); + defaultCols.add(MaterialExpDate.fieldKey()); + addContainerColumn(Folder, null); + if (getContainer().hasProductFolders()) + defaultCols.add(Folder.fieldKey()); + addColumn(Run); + defaultCols.add(Run.fieldKey()); + addColumn(LSID); + addColumn(RootMaterialRowId); + addColumn(AliquotedFromLSID); + addColumn(IsAliquot); + addColumn(Created); + addColumn(CreatedBy); + addColumn(Modified); + addColumn(ModifiedBy); + if (st == null) + defaultCols.add(SampleSet.fieldKey()); + addColumn(Flag); + addColumn(SampleState); + if (isStatusEnabled(getContainer())) + defaultCols.add(SampleState.fieldKey()); + + // TODO is this a real Domain???
+ if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) + { + defaultCols.add(Flag.fieldKey()); + addSampleTypeColumns(st, defaultCols); + + setName(_ss.getName()); + + ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); + gridUrl.addParameter("rowId", st.getRowId()); + setGridURL(new DetailsURL(gridUrl)); + } + + List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); + defaultCols.addAll(calculatedFieldKeys); + + addColumn(AliquotCount); + addColumn(AliquotVolume); + addColumn(AliquotUnit); + addColumn(AvailableAliquotCount); + addColumn(AvailableAliquotVolume); + + addColumn(StoredAmount); + defaultCols.add(StoredAmount.fieldKey()); + + addColumn(Units); + defaultCols.add(Units.fieldKey()); + + addColumn(RawAmount).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(StoredAmount.fieldKey()))); + addColumn(RawUnits).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(Units.fieldKey()))); + addColumn(RawAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotVolume.fieldKey()))); + addColumn(RawAvailableAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AvailableAliquotVolume.fieldKey()))); + addColumn(RawAliquotUnit).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotUnit.fieldKey()))); + + if (InventoryService.get() != null && (st == null || !st.isMedia())) + defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); + + SQLFragment sql; + UserSchema plateUserSchema; + // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset + // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying + // against tables in that schema. 
+ if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) + plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); + else + plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); + + if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) + { + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); + SQLFragment existsSubquery = new SQLFragment() + .append("SELECT 1 FROM ") + .append(plateUserSchema.getTable("Well"), "well") + .append(" WHERE well.sampleid = ").append(rowIdField); + + sql = new SQLFragment() + .append("CASE WHEN EXISTS (") + .append(existsSubquery) + .append(") THEN 'Plated' ") + .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 + .append("ELSE NULL END"); + } + else + { + sql = new SQLFragment("(SELECT NULL)"); + } + var col = new ExprColumn(this, IsPlated.name(), sql, JdbcType.VARCHAR); + col.setDescription("Whether the sample that has been plated, if plating is supported."); + col.setUserEditable(false); + col.setReadOnly(true); + col.setShownInDetailsView(false); + col.setShownInInsertView(false); + col.setShownInUpdateView(false); + if (plateUserSchema != null) + col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + RowId.name() + "}")); + addColumn(col); + + addVocabularyDomains(); + addColumn(LastIndexed); + addColumn(Properties); + + var colInputs = addColumn(Inputs); + addMethod(Inputs.name(), new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); + + var colOutputs = addColumn(Outputs); + addMethod(Outputs.name(), new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); + + addExpObjectMethod(); + + ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); + DetailsURL 
url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); + nameCol.setURL(url); + rowIdCol.setURL(url); + setDetailsURL(url); + + if (canUserAccessPhi()) + { + ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); + setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); + + ActionURL insertActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); + setInsertURL(new DetailsURL(insertActionURL)); + } + else + { + setImportURL(LINK_DISABLER); + setInsertURL(LINK_DISABLER); + setUpdateURL(LINK_DISABLER); + } + + setTitleColumn(Name.toString()); + + setDefaultVisibleColumns(defaultCols); + + MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn(RowId.name()), _ss, true); + addColumn(lineageLookup); + } + + private ContainerFilter getSampleStatusLookupContainerFilter() + { + // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared + // for the sample status lookup since in the app project context we want to share status definitions across + // a given project instead of creating duplicate statuses in each subfolder project. + ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); + type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; + return type.create(getUserSchema()); + } + + @Override + public Domain getDomain() + { + return getDomain(false); + } + + @Override + public Domain getDomain(boolean forUpdate) + { + return _ss == null ? 
null : _ss.getDomain(forUpdate); + } + + public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) + { + if (nameExpression == null) + return currentDescription; + + StringBuilder sb = new StringBuilder(); + if (currentDescription != null && !currentDescription.isEmpty()) + { + sb.append(currentDescription); + if (!currentDescription.endsWith(".")) + sb.append("."); + sb.append("\n"); + } + + sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); + sb.append(nameExpression); + sb.append("."); + if (!StringUtils.isEmpty(nameExpressionPreview)) + { + sb.append("\nExample of name that will be generated from the current pattern: \n"); + sb.append(nameExpressionPreview); + } + + return sb.toString(); + } + + private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) + { + TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); + if (null == dbTable) + return; + + UserSchema schema = getUserSchema(); + Domain domain = st.getDomain(); + ColumnInfo rowIdColumn = getColumn(RowId); + ColumnInfo nameColumn = getColumn(Name); + + visibleColumns.remove(Run.fieldKey()); + + // When not using name expressions, mark the ID columns as required. + // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. + final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); + + Set mvColumns = domain.getProperties().stream() + .filter(ImportAliasable::isMvEnabled) + .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) + .collect(Collectors.toSet()); + + for (ColumnInfo dbColumn : dbTable.getColumns()) + { + // Don't include PHI columns in full text search index + // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() + if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) + continue; + + if ( + rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) + ) + { + continue; + } + + var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); + + // TODO missing values? comments? flags? + DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); + var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); + if (propColumn.getName().equalsIgnoreCase("genid")) + { + propColumn.setHidden(true); + propColumn.setUserEditable(false); + propColumn.setShownInDetailsView(false); + propColumn.setShownInInsertView(false); + propColumn.setShownInUpdateView(false); + } + if (null != dp) + { + PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), + SamplesSchema.SCHEMA_SAMPLES, st.getName(), RowId.fieldKey(), null, getLookupContainerFilter()); + + if (idCols.contains(dp)) + { + propColumn.setNullable(false); + propColumn.setDisplayColumnFactory(c -> new DataColumn(c) + { + @Override + protected boolean isDisabledInput(RenderContext ctx) + { + return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; + } + }); + } + + // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected + if (!propColumn.isHidden()) + { + visibleColumns.add(propColumn.getFieldKey()); + } + + if (propColumn.isMvEnabled()) + { + // The column in the physical table has a "_MVIndicator" suffix, but we want to expose + // it with a "MVIndicator" suffix (no underscore) + var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, + StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); + 
mvColumn.setLabel(dp.getLabel() != null ? dp.getLabel() : dp.getName() + " MV Indicator"); + mvColumn.setSqlTypeName("VARCHAR"); + mvColumn.setPropertyURI(dp.getPropertyURI()); + mvColumn.setNullable(true); + mvColumn.setUserEditable(false); + mvColumn.setHidden(true); + mvColumn.setMvIndicatorColumn(true); + + addColumn(mvColumn); + propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); + } + } + + if (!mvColumns.contains(propColumn.getFieldKey())) + addColumn(propColumn); + + } + + setDefaultVisibleColumns(visibleColumns); + } + + // These are mostly fields that are wrapped by fields with different names (see createColumn()) + // we could handle each case separately, but this is easier + static final Set wrappedFieldKeys = Set.of( + new FieldKey(null, "objectid"), + new FieldKey(null, "RowId"), + new FieldKey(null, "LSID"), // Flag + new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication + new FieldKey(null, "runId"), // Run, RunApplication + new FieldKey(null, "CpasType")); // SampleSet + static final Set ALL_COLUMNS = Set.of(); + + private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) + { + if (null == selectedColumns) + return ALL_COLUMNS; + selectedColumns = new TreeSet<>(selectedColumns); + if (selectedColumns.contains(new FieldKey(null, StoredAmount))) + selectedColumns.add(new FieldKey(null, Units)); + if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) + selectedColumns.add(new FieldKey(null, AliquotedFromLSID.name())); + if (selectedColumns.contains(new FieldKey(null, IsAliquot.name()))) + selectedColumns.add(new FieldKey(null, RootMaterialRowId.name())); + if (selectedColumns.contains(new FieldKey(null, AliquotVolume.name())) || selectedColumns.contains(new FieldKey(null, AvailableAliquotVolume.name()))) + selectedColumns.add(new FieldKey(null, AliquotUnit.name())); + selectedColumns.addAll(wrappedFieldKeys); + if (null != getFilter()) + 
selectedColumns.addAll(getFilter().getAllFieldKeys()); + return selectedColumns; + } + + @NotNull + @Override + public SQLFragment getFromSQL(String alias) + { + return getFromSQL(alias, null); + } + + @Override + public SQLFragment getFromSQLExpanded(String alias, Set selectedColumns) + { + SQLFragment sql = new SQLFragment("("); + boolean usedMaterialized; + + + // SELECT FROM + /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) + * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. + * Also, we may use RemapCache for material lookup outside a transaction + */ + boolean onlyMaterialColums = false; + if (null != selectedColumns && !selectedColumns.isEmpty()) + onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); + if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) + { + sql.append(getMaterializedSQL()); + usedMaterialized = true; + } + else + { + sql.append(getJoinSQL(selectedColumns)); + usedMaterialized = false; + } + + // WHERE + SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); + sql.append("\n").append(filterFrag); + if (_ss != null && !usedMaterialized) + { + if (!filterFrag.isEmpty()) + sql.append(" AND "); + else + sql.append(" WHERE "); + sql.append("CpasType = ").appendValue(_ss.getLSID()); + } + sql.append(") ").appendIdentifier(alias); + + return getTransformedFromSQL(sql); + } + + @Override + public void setSupportTableRules(boolean b) + { + this._supportTableRules = b; + } + + @Override + public boolean supportTableRules() // intentional override + { + return _supportTableRules; + } + + @Override + protected @NotNull TableRules findTableRules() + { + Container definitionContainer = getUserSchema().getContainer(); + if (null != _ss) + definitionContainer = 
_ss.getContainer(); + return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); + } + + + static class InvalidationCounters + { + public final AtomicLong update, insert, delete, rollup; + InvalidationCounters() + { + long l = System.currentTimeMillis(); + update = new AtomicLong(l); + insert = new AtomicLong(l); + delete = new AtomicLong(l); + rollup = new AtomicLong(l); + } + } + + static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); + static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); + static final AtomicBoolean initializedListeners = new AtomicBoolean(false); + + // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() + public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + var scope = ExperimentServiceImpl.getExpSchema().getScope(); + var runnable = new RefreshMaterializedViewRunnable(lsid, reason); + scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); + } + + private static class RefreshMaterializedViewRunnable implements Runnable + { + private final String _lsid; + private final SampleTypeServiceImpl.SampleChangeType _reason; + + public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + _lsid = lsid; + _reason = reason; + } + + @Override + public void run() + { + if (_reason == schema) + { + /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. + * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the + * MQH from the cache to force the SQL to be regenerated. 
+ */ + _materializedQueries.remove(_lsid); + return; + } + var counters = getInvalidateCounters(_lsid); + switch (_reason) + { + case insert -> counters.insert.incrementAndGet(); + case rollup -> counters.rollup.incrementAndGet(); + case update -> counters.update.incrementAndGet(); + case delete -> counters.delete.incrementAndGet(); + default -> throw new IllegalStateException("Unexpected value: " + _reason); + } + } + + @Override + public boolean equals(Object obj) + { + return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); + } + } + + private static InvalidationCounters getInvalidateCounters(String lsid) + { + if (!initializedListeners.getAndSet(true)) + { + CacheManager.addListener(_invalidationCounters::clear); + } + return _invalidationCounters.computeIfAbsent(lsid, (unused) -> + new InvalidationCounters() + ); + } + + /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ + private SQLFragment getMaterializedSQL() + { + if (null == _ss) + return getJoinSQL(null); + + var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> + { + /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. + * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, + * even if just to help with race-conditions. + * + * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
+ */ + SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); + return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") + .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") + .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") + .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) + .build(); + }); + return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); + } + + + /** + * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. + * It does not help with incremental updates (except for providing the upsert() method). + * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
+ */ + static class _MaterializedQueryHelper extends MaterializedQueryHelper + { + final String _lsid; + + static class Builder extends MaterializedQueryHelper.Builder + { + String _lsid; + + public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) + { + super(prefix, scope, select); + this._lsid = lsid; + } + + @Override + public _MaterializedQueryHelper build() + { + return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); + } + } + + _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, + boolean isSelectIntoSql) + { + super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); + this._lsid = lsid; + } + + @Override + protected Materialized createMaterialized(String txCacheKey) + { + DbSchema temp = DbSchema.getTemp(); + String name = _prefix + "_" + GUID.makeHash(); + _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); + initMaterialized(materialized); + return materialized; + } + + @Override + protected void incrementalUpdateBeforeSelect(Materialized m) + { + _Materialized materialized = (_Materialized) m; + + boolean lockAcquired = false; + try + { + lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); + if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) + throw materialized._loadException; + + if (!materialized.incrementalDeleteCheck.stillValid(0)) + executeIncrementalDelete(); + if (!materialized.incrementalRollupCheck.stillValid(0)) + executeIncrementalRollup(); + if (!materialized.incrementalInsertCheck.stillValid(0)) + executeIncrementalInsert(); + } + catch (RuntimeException|InterruptedException ex) + { + RuntimeException rex = UnexpectedException.wrap(ex); + 
materialized.setError(rex); + // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. + + // Ensure that next refresh starts clean + _materializedQueries.remove(_lsid); + getInvalidateCounters(_lsid).update.incrementAndGet(); + throw rex; + } + finally + { + if (lockAcquired) + materialized.getLock().unlock(); + } + } + + void upsertWithRetry(SQLFragment sql) + { + // not actually read-only, but we don't want to start an explicit transaction + _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); + } + + void executeIncrementalInsert() + { + SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") + .append("SELECT * FROM (") + .append(getViewSourceSql()).append(") viewsource_\n") + .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); + upsertWithRetry(incremental); + } + + void executeIncrementalDelete() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) + // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) + SQLFragment incremental = new SQLFragment() + .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") + .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); + upsertWithRetry(incremental); + } + + void executeIncrementalRollup() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + SQLFragment incremental = new SQLFragment(); + if (d.isPostgreSQL()) + { + incremental + .append("UPDATE temp.${NAME} AS st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM exp.Material AS expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") + .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") + .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") + .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") + .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") + .append(")"); + } + else + { + // SQL Server 2022 supports IS DISTINCT FROM + incremental + .append("UPDATE st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM temp.${NAME} st, exp.Material expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") + .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") + .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") + .append(")"); + } + upsertWithRetry(incremental); + } + } + + static class _Materialized extends MaterializedQueryHelper.Materialized + { + final MaterializedQueryHelper.Invalidator incrementalInsertCheck; + final MaterializedQueryHelper.Invalidator incrementalRollupCheck; + final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; + + _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) + { + super(mqh, tableName, cacheKey, created, sql); + final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); + incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); + incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); + incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); + } + + @Override + public void reset() + { + super.reset(); + long now = HeartBeat.currentTimeMillis(); + incrementalInsertCheck.stillValid(now); + incrementalRollupCheck.stillValid(now); + incrementalDeleteCheck.stillValid(now); + } + + Lock getLock() + { + return _loadingLock; + } + } + + + /* SELECT and JOIN, does not include WHERE */ + private SQLFragment getJoinSQL(Set selectedColumns) + { + TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); + Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); + provisionedCols.remove(RowId.name()); + provisionedCols.remove(Name.name()); + boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); + + boolean hasSampleColumns = false; + boolean hasAliquotColumns = false; + + Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); + selectedColumns = computeInnerSelectedColumns(selectedColumns); + + SQLFragment sql = new SQLFragment(); + sql.appendComment("", getSqlDialect()); + sql.append("SELECT "); + String comma = ""; + for (String materialCol : materialCols) + { + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) + { + sql.append(comma).append("m.").appendIdentifier(materialCol); + comma = ", "; + } + } + if (null != provisioned && hasProvisionedColumns) + { + for (ColumnInfo propertyColumn : provisioned.getColumns()) + { + // don't select twice + if ( + RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) + ) + { + continue; + } + + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) + { + sql.append(comma); + boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) + || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); + if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + else if (rootField) + { + sql.append(propertyColumn.getValueSql("m_sample")).append(" AS 
").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasSampleColumns = true; + } + else + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + comma = ", "; + } + } + } + + sql.append("\nFROM "); + sql.append(_rootTable, "m"); + if (hasSampleColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); + if (hasAliquotColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); + + sql.appendComment("", getSqlDialect()); + return sql; + } + + private static class SampleTypeAmountDisplayColumn extends ExprColumn + { + public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) + { + super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" / ? AS ") + .append(parent.getSqlDialect().isPostgreSQL() ? "DECIMAL" : "DOUBLE PRECISION") + .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.getValue()), + JdbcType.DOUBLE); + + setLabel(label); + setImportAliasesSet(importAliases); + } + } + + private static class SampleTypeUnitDisplayColumn extends ExprColumn + { + public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) + { + super(parent, FieldKey.fromParts(unitFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? THEN ? 
ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.toString()), + JdbcType.VARCHAR); + } + } + + @Override + public QueryUpdateService getUpdateService() + { + return new SampleTypeUpdateServiceDI(this, _ss); + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) + { + if (_ss == null) + { + // Allow read and delete for exp.Materials. + // Don't allow insert/update on exp.Materials without a sample type. + if (perm == DeletePermission.class || perm == ReadPermission.class) + return getContainer().hasPermission(user, perm); + return false; + } + + if (_ss.isMedia() && perm == ReadPermission.class) + return getContainer().hasPermission(user, MediaReadPermission.class); + + return super.hasPermission(user, perm); + } + + @NotNull + @Override + public List getUniqueIndices() + { + // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" + // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. 
+ // Some FKs to ExpMaterialTable don't include the "Name" column (e.g., NabBaseTable.Specimen) + String indexName = "idx_material_ak"; + List ret = new ArrayList<>(super.getUniqueIndices()); + if (getColumn("Name") != null) + ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); + else + ret.removeIf(def -> def.name().equals(indexName)); + return Collections.unmodifiableList(ret); + } + + // + // UpdatableTableInfo + // + + @Override + public @Nullable Long getOwnerObjectId() + { + return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); + } + + @Nullable + @Override + public CaseInsensitiveHashMap remapSchemaColumns() + { + CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); + + if (null != getRealTable().getColumn("container") && null != getColumn("folder")) + { + m.put("container", "folder"); + } + + for (ColumnInfo col : getColumns()) + { + if (col.getMvColumnName() != null) + m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); + } + + return m; + } + + @Override + public Set getAltMergeKeys(DataIteratorContext context) + { + return MATERIAL_ALT_KEYS; + } + + @Override + public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) + { + Set keyColumnNames = new CaseInsensitiveHashSet(); + + if (context.getInsertOption().allowUpdate) + { + if (context.getInsertOption().updateOnly) + { + if (colNameMap.containsKey(RowId.name())) + keyColumnNames.add(RowId.name()); + else if (colNameMap.containsKey(Name.name())) + { + keyColumnNames.add(MaterialSourceId.name()); + keyColumnNames.add(Name.name()); + } + else + throw new IllegalArgumentException("Either RowId or Name is required for update."); + } + else + { + Set altMergeKeys = getAltMergeKeys(context); + if (altMergeKeys != null) + keyColumnNames.addAll(altMergeKeys); + } + } + + return keyColumnNames.isEmpty() ? 
null : keyColumnNames; + } + + @Override + public @Nullable Set getExistingRecordSharedKeyColumnNames() + { + return CaseInsensitiveHashSet.of(MaterialSourceId.name()); + } + + @Override + @NotNull + public List> getAdditionalRequiredInsertColumns() + { + if (getSampleType() == null) + return Collections.emptyList(); + + try + { + return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); + } + catch (IOException e) + { + return Collections.emptyList(); + } + } + + @Override + public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) + { + TableInfo propertiesTable = _ss.getTinfo(); + + // The specimens sample type doesn't have a properties table + if (propertiesTable == null) + return data; + + try + { + final Container container = getContainer(); + final User user = getUserSchema().getUser(); + ExperimentServiceImpl expService = ExperimentServiceImpl.get(); + SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); + + DataIteratorBuilder dib = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, container, user, _ss.getImportAliasesIncludingAliquot()) + .setFileLinkDirectory(SAMPLE_TYPE_FILE_DIRECTORY_NAME) + .setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> { + List lsids = searchIndexDataKeys.lsids(); + List orderedRowIds = searchIndexDataKeys.orderedRowIds(); + + // Issue 51263: order by RowId to reduce deadlock + ListUtils.partition(orderedRowIds, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterials(sublist)) + expMaterial.index(q, this); + }) + ); + + ListUtils.partition(lsids, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterialsByLsid(sublist)) + expMaterial.index(q, this); + }) + ); + }, 
DbScope.CommitTaskOption.POSTCOMMIT) + ); + + dib = LoggingDataIterator.wrap(dib); + return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(dib, container, user, expService.getTinfoMaterialAliasMap(), _ss, true)); + } + catch (IOException e) + { + throw new UncheckedIOException(e); + } + } + + @Override + @NotNull + public AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.DETAILED; + } + + static final Set excludeFromDetailedAuditField; + static + { + var set = new CaseInsensitiveHashSet(); + set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); + set.addAll(ExpDataIterators.MATERIAL_NOT_FOR_UPDATE); + // We don't want the inventory columns to show up in the sample timeline audit record; + // they are captured in their own audit record. + set.addAll(InventoryService.InventoryStatusColumn.names()); + excludeFromDetailedAuditField = Collections.unmodifiableSet(set); + } + + @Override + public @NotNull Set getExcludedDetailedUpdateAuditFields() + { + // uniqueId fields don't change in reality, so exclude them from the audit updates + Set excluded = new CaseInsensitiveHashSet(); + excluded.addAll(this.getUniqueIdFields()); + excluded.addAll(excludeFromDetailedAuditField); + return excluded; + } + + @Override + public List> getImportTemplates(ViewContext ctx) + { + // respect any metadata overrides + if (getRawImportTemplates() != null) + return super.getImportTemplates(ctx); + + List> templates = new ArrayList<>(); + ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); + url.addParameter("headerType", ColumnHeaderType.ImportField.name()); + try + { + if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) + { + for (String aliasKey : getSampleType().getImportAliases().keySet()) + url.addParameter("includeColumn", aliasKey); + } + } + catch (IOException e) + {} + templates.add(Pair.of("Download Template", url.toString())); + 
return templates; + } + + @Override + public void overlayMetadata(String tableName, UserSchema schema, Collection errors) + { + if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) + { + Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); + if (null != metadata) + { + overlayMetadata(metadata, schema, errors); + } + } + super.overlayMetadata(tableName, schema, errors); + } + + static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn + { + private final Unit typeUnit; + private final boolean applySampleTypePrecision; + + public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) + { + super(col, false); + this.typeUnit = typeUnit; + this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user + } + + @Override + public Object getDisplayValue(RenderContext ctx) + { + Object value = super.getDisplayValue(ctx); + if (this.applySampleTypePrecision && value != null) + { + int scale = this.typeUnit == null ? 
Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); + value = Precision.round(Double.valueOf(value.toString()), scale); + } + return value; + } + } +} diff --git a/query/src/org/labkey/query/controllers/GetQueryDetailsAction.java b/query/src/org/labkey/query/controllers/GetQueryDetailsAction.java index a90c5a14d0d..88da57ff66a 100644 --- a/query/src/org/labkey/query/controllers/GetQueryDetailsAction.java +++ b/query/src/org/labkey/query/controllers/GetQueryDetailsAction.java @@ -302,9 +302,6 @@ public ApiResponse execute(Form form, BindException errors) } resp.put("indices", jsonIndices); - if (!tinfo.getAltKeysForUpdate().isEmpty()) - resp.put("altUpdateKeys", tinfo.getAltKeysForUpdate()); - Set disabledSystemFields = tinfo.getDisabledSystemFields(); if (disabledSystemFields != null && !disabledSystemFields.isEmpty()) resp.put("disabledSystemFields", disabledSystemFields); From 896c5cbcc15c0acf41f424fe571ab6c0d4dc1a3c Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 23 Apr 2026 21:54:27 -0700 Subject: [PATCH 15/17] CC review --- api/src/org/labkey/api/audit/AuditHandler.java | 1 - .../DataClassUpdateAddColumnsDataIterator.java | 2 -- .../client/test/integration/DataClassCrud.ispec.ts | 4 ++-- .../experiment/api/ExpDataClassDataTableImpl.java | 12 ++---------- .../experiment/api/SampleTypeUpdateServiceDI.java | 2 +- 5 files changed, 5 insertions(+), 16 deletions(-) diff --git a/api/src/org/labkey/api/audit/AuditHandler.java b/api/src/org/labkey/api/audit/AuditHandler.java index 0e1cb9b5ff3..e395dfab1e4 100644 --- a/api/src/org/labkey/api/audit/AuditHandler.java +++ b/api/src/org/labkey/api/audit/AuditHandler.java @@ -14,7 +14,6 @@ import org.labkey.api.exp.api.ExpData; import org.labkey.api.exp.api.ExpMaterial; import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.query.ExpMaterialTable; import org.labkey.api.gwt.client.AuditBehaviorType; import org.labkey.api.query.QueryService; import org.labkey.api.security.User; diff --git 
a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java index 1c099654cbe..10008b3bc1c 100644 --- a/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/DataClassUpdateAddColumnsDataIterator.java @@ -35,7 +35,6 @@ public class DataClassUpdateAddColumnsDataIterator extends WrapperDataIterator { private final Container _targetContainer; - private final TableInfo _tableInfo; final CachingDataIterator _unwrapped; private final long _dataClassId; @@ -52,7 +51,6 @@ public DataClassUpdateAddColumnsDataIterator(DataIterator in, @NotNull DataItera super(in); this._unwrapped = (CachingDataIterator)in; _context = context; - _tableInfo = target; _targetContainer = container; _dataClassId = dataClassId; diff --git a/experiment/src/client/test/integration/DataClassCrud.ispec.ts b/experiment/src/client/test/integration/DataClassCrud.ispec.ts index 26d78102d0c..0aeaff03e2e 100644 --- a/experiment/src/client/test/integration/DataClassCrud.ispec.ts +++ b/experiment/src/client/test/integration/DataClassCrud.ispec.ts @@ -445,7 +445,7 @@ describe('Duplicate IDs', () => { expect(errorResp['exception']).toBe('Duplicate key provided: ' + data2RowId); }); - // update date twice specifying the name across multiple partitions + // update data twice specifying the name across multiple partitions await server.post('query', 'updateRows', { schemaName: 'exp.data', queryName: dataType, @@ -464,7 +464,7 @@ describe('Duplicate IDs', () => { expect(errorResp['exception']).toBe('Duplicate key provided: ' + dataName1); }); - // update date twice specifying the rowId across multiple partitions + // update data twice specifying the rowId across multiple partitions await server.post('query', 'updateRows', { schemaName: 'exp.data', queryName: dataType, diff --git 
a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java index e7f637bbef8..fd509d1dc8b 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassDataTableImpl.java @@ -21,14 +21,9 @@ import org.apache.commons.collections4.MapUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Strings; -import org.apache.logging.log4j.Level; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; -import org.labkey.api.attachments.AttachmentFile; -import org.labkey.api.attachments.AttachmentParent; import org.labkey.api.attachments.AttachmentParentFactory; -import org.labkey.api.attachments.AttachmentService; -import org.labkey.api.audit.TransactionAuditProvider; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.collections.CaseInsensitiveHashSet; import org.labkey.api.collections.Sets; @@ -125,7 +120,6 @@ import org.labkey.api.util.PageFlowUtil; import org.labkey.api.util.Pair; import org.labkey.api.util.StringExpressionFactory; -import org.labkey.api.util.UnexpectedException; import org.labkey.api.view.ActionURL; import org.labkey.api.view.UnauthorizedException; import org.labkey.api.view.ViewContext; @@ -138,7 +132,6 @@ import java.io.IOException; import java.io.UncheckedIOException; -import java.nio.file.Path; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; @@ -149,7 +142,6 @@ import java.util.LinkedHashSet; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -159,7 +151,7 @@ import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.Name; import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.QueryableInputs; import static 
org.labkey.api.exp.query.ExpDataClassDataTable.Column.RowId; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.LSID; +import static org.labkey.api.exp.query.ExpDataClassDataTable.Column.LSID; import static org.labkey.api.query.DefaultQueryUpdateService.getKeyColumnAliasForUpdate; import static org.labkey.experiment.ExpDataIterators.incrementCounts; @@ -1439,7 +1431,7 @@ public List> updateRows(User user, Container container, List List> results; Map finalConfigParameters = configParameters == null ? new HashMap<>() : configParameters; - recordDataIteratorUsed(configParameters); + recordDataIteratorUsed(finalConfigParameters); try { diff --git a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java index bbf1e85c593..25128b80488 100644 --- a/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java +++ b/experiment/src/org/labkey/experiment/api/SampleTypeUpdateServiceDI.java @@ -535,7 +535,7 @@ public List> updateRows( List> results; Map finalConfigParameters = configParameters == null ? 
new HashMap<>() : configParameters; - recordDataIteratorUsed(configParameters); + recordDataIteratorUsed(finalConfigParameters); try { From 53b876f58fbe681418e79a279a27347d5e6c9249 Mon Sep 17 00:00:00 2001 From: XingY Date: Thu, 23 Apr 2026 22:06:06 -0700 Subject: [PATCH 16/17] revert null values --- .../org/labkey/api/dataiterator/StatementDataIterator.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java index e49b47357f5..0de013d9048 100644 --- a/api/src/org/labkey/api/dataiterator/StatementDataIterator.java +++ b/api/src/org/labkey/api/dataiterator/StatementDataIterator.java @@ -537,11 +537,7 @@ private void checkBackgroundException() throws BatchValidationException public Object get(int i) { if (null != _keyColumnInfo.get(i)) - { - Object value = _keyValues.get(i); - if (value != null) - return value; // TODO, why is this needed? - } + return _keyValues.get(i); return _data.get(i); } From edca8443a1b79d0ef60d05cdef5738cdb1640686 Mon Sep 17 00:00:00 2001 From: XingY Date: Fri, 24 Apr 2026 10:30:16 -0700 Subject: [PATCH 17/17] crlf --- api/src/org/labkey/api/data/TableInfo.java | 1720 ++++---- .../experiment/api/ExpMaterialTableImpl.java | 3746 ++++++++--------- 2 files changed, 2733 insertions(+), 2733 deletions(-) diff --git a/api/src/org/labkey/api/data/TableInfo.java b/api/src/org/labkey/api/data/TableInfo.java index cfc9b293d3a..e015cb15c18 100644 --- a/api/src/org/labkey/api/data/TableInfo.java +++ b/api/src/org/labkey/api/data/TableInfo.java @@ -1,860 +1,860 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.labkey.api.data; - -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.collections.NamedObjectList; -import org.labkey.api.compliance.ComplianceService; -import org.labkey.api.data.dialect.SqlDialect; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainKind; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.query.AggregateRowConfig; -import org.labkey.api.query.BatchValidationException; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.QueryAction; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryParseException; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.SchemaTreeNode; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.ValidationException; -import org.labkey.api.security.HasPermission; -import org.labkey.api.security.User; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.util.ContainerContext; -import org.labkey.api.util.Pair; -import org.labkey.api.util.Path; -import org.labkey.api.util.StringExpression; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.UnauthorizedException; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import org.labkey.data.xml.queryCustomView.FilterType; - -import 
java.util.Collection; -import java.util.Collections; -import java.util.Comparator; -import java.util.List; -import java.util.Map; -import java.util.Set; - -/** - * Representation of a queryable unit in the database. Might be backed by a "physical" table, a database VIEW, - * or an arbitrary generated chunk of SELECT SQL. - */ -public interface TableInfo extends TableDescription, HasPermission, SchemaTreeNode -{ - /** - * This is used to report problems encountered during construction of the TableInfo. These would generally be - * reported in the schema browser or editor. Failing to construct a TableInfo generally breaks the UI badly, so we - * want to be able to return a usable TableInfo whenever possible and then report problems here if they the issue - * is correctable. - *
- * See also ValidateQueryAction. - *
- * @return warnings collected during TableInfo construction. - */ - default Collection getWarnings() - { - return List.of(); - } - - /** Get title, falling back to the name if title is null **/ - String getTitle(); - - /** Get title field, whether null or not **/ - @Nullable - String getTitleField(); - - /** - * Use getSQLName() instead for generating SQL. - * getSelectName() can still be used for error messages. - */ - @Nullable - default String getSelectName() - { - var sqlf = getSQLName(); - return null == sqlf ? null : sqlf.getSQL(); - } - - /** - * Return a schema.name that can be used directly in SQL statement - * Use only for tables known to be real database tables, usually - * for INSERT/UPDATE/DELETE. For SELECT use getFromSQL(alias). - */ - @Nullable - default SQLFragment getSQLName() - { - return null; - } - - @Nullable - DatabaseIdentifier getMetaDataIdentifier(); - - /** - * SQL representing this table, e.g. - * "Issues.Issues " - * "(SELECT * FROM Issues.Issues WHERE Container='...') " - **/ - @NotNull - SQLFragment getFromSQL(String alias); - - /* For most tables this is the same as getFromSQL(). - * However, for some tables that want to help optimize generated SQL - * we can ask for the columns we need to be accessible. - * - * Only columns returned by getColumn() or resolveColumn() should be handed in (no lookups) - * - * This is really intended for QueryTable - */ - SQLFragment getFromSQL(String alias, Set cols); - - DbSchema getSchema(); - - @Nullable - UserSchema getUserSchema(); - - /** getSchema().getSqlDialect() */ - SqlDialect getSqlDialect(); - - @NotNull List getPkColumns(); - - /** Gets all the constraints that guarantee uniqueness in the underlying table. This includes both PRIMARY KEY and UNIQUE constraints */ - @NotNull - List getUniqueIndices(); - - /** Gets all the indices from the underlying table. 
This includes PRIMARY KEY and UNIQUE constraints, as well as non-unique INDEX */ - @NotNull - List getAllIndices(); - - class _DoNothingAuditHandler implements AuditHandler - { - @Override - public void addSummaryAuditEvent(User user, Container c, TableInfo table, QueryService.AuditAction action, Integer dataRowCount, @Nullable AuditBehaviorType auditBehaviorType, @Nullable String userComment) - { - } - - @Override - public void addAuditEvent(User user, Container c, TableInfo table, @Nullable AuditBehaviorType auditType, @Nullable String userComment, QueryService.AuditAction action, List> rows, @Nullable List> updatedRows, @Nullable List> providedValues, boolean useTransactionAuditCache) - { - } - } - - default AuditHandler getAuditHandler(@Nullable AuditBehaviorType auditBehaviorOverride) - { - if (!supportsAuditTracking() || getEffectiveAuditBehavior(auditBehaviorOverride) == AuditBehaviorType.NONE) - return new _DoNothingAuditHandler(); - return QueryService.get().getDefaultAuditHandler(); - } - - enum IndexType - { - Primary(org.labkey.data.xml.IndexType.Type.PRIMARY, true), - Unique(org.labkey.data.xml.IndexType.Type.UNIQUE, true), - NonUnique(org.labkey.data.xml.IndexType.Type.NON_UNIQUE, false); - - private final org.labkey.data.xml.IndexType.Type.Enum _xmlIndexType; - private final boolean _unique; - - IndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType, boolean unique) - { - _xmlIndexType = xmlIndexType; - _unique = unique; - } - - public org.labkey.data.xml.IndexType.Type.Enum getXmlIndexType() - { - return _xmlIndexType; - } - - public boolean isUnique() - { - return _unique; - } - - public static IndexType getForXmlIndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType) - { - for (IndexType indexType : IndexType.values()) - { - if(indexType.getXmlIndexType().equals(xmlIndexType)){ - return indexType; - } - } - throw new EnumConstantNotPresentException(IndexType.class, xmlIndexType.toString()); - } - } - - record IndexDefinition(String 
name, IndexType indexType, List columns, @Nullable String filterCondition) - { - void addColumn(ColumnInfo column) - { - columns.add(column); - } - } - - /** Get a list of columns that specifies a unique key, may return the same result as getPKColumns() - * However, whereas getPkColumns() will usually return a 'short' pk, such as "rowid" or "lsid", this - * should return a more verbose AK that is most semantically useful. - * - * For instance for a flow result it might return (container, run, sample, stim, populationName) rather - * than (container, analysisId) - * - * NOTE: unlike PK, this does not guarantee that columns are NON-NULL - * NOTE: Postgres does not consider rows with NULL values to be "equal" so NULLs may be repeated! - */ - @NotNull List getAlternateKeyColumns(); - - @Nullable default Set getDisabledSystemFields() - { - return Collections.emptySet(); - } - - /** - * @return a List> a list of additionally required fields during import, each field can have a Set of alternative field keys - * Example usage is specifying the set of required lineage columns during sample/dataclass data creation. - */ - @NotNull default List> getAdditionalRequiredInsertColumns() - { - return Collections.emptyList(); - } - - ColumnInfo getVersionColumn(); - - String getVersionColumnName(); - - boolean hasDefaultTitleColumn(); - - DatabaseTableType getTableType(); - - /** - * Get select list for named (hopefully unique!) column to title column, including filter on table. - * If maxRows is exceeded, the NamedObjectList will be marked as incomplete. - * When maxRows is null a default maxRows will be used. To select all rows, set maxRows to {@link Table#ALL_ROWS}. 
- * @see NamedObjectList#isComplete() - */ - @NotNull NamedObjectList getSelectList(String columnName, List filters, Integer maxRows, String titleColumn); - - ColumnInfo getColumn(@NotNull String colName); - - // same as getColumn(colName.getName()), does not need to support lookup columns - ColumnInfo getColumn(@NotNull FieldKey colName); - - List getColumns(); - - List getUserEditableColumns(); - - /** @param colNames comma separated column names */ - List getColumns(String colNames); - - List getColumns(String... colNameArray); - - Set getColumnNameSet(); - - default String getDbSequenceName(String columnName) - { - return (this.getSchema().getName() + ":" + this.getName() + ":" + columnName).toLowerCase(); - } - - /** - * Return a list of ColumnInfos that make up the extended set of - * columns that could be considered a part of this table by default. - * For the majority of tables, this is simply the columns returned from - * {@link TableInfo#getColumns()} plus any additional columns from - * {@link TableInfo#getDefaultVisibleColumns()). - * - * For other tables, the extended column set could include some columns - * from lookup tables. - * - * @param includeHidden Include hidden columns. - * @return All columns. - */ - Map getExtendedColumns(boolean includeHidden); - - - /** - * @return the {@link org.labkey.api.query.FieldKey}s that should be part of the default view of the table, - * assuming that no other default view has been configured. - */ - List getDefaultVisibleColumns(); - - /** - * @param keys the new set of columns to show when there is no explicit default view defined. Use null to indicate - * that the list should be inferred based on the column metadata - */ - void setDefaultVisibleColumns(@Nullable Iterable keys); - - ButtonBarConfig getButtonBarConfig(); - - AggregateRowConfig getAggregateRowConfig(); - - /** - * Return the default query grid view URL for the table or null. 
- * Instead of calling this method directly, callers should pass - * {@link QueryAction#executeQuery} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getGridURL(Container container); - - /** - * Return the insert URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default insert view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#insertQueryRow} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getInsertURL(Container container); - - /** - * Return the import URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default import view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#importData} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getImportDataURL(Container container); - - /** - * Return the delete URL expression for the table or null. - * If the table provides an update service via {@link #getUpdateService()}, - * a default action will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#deleteQueryRows} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - */ - ActionURL getDeleteURL(Container container); - - /** - * Return the update URL expression for a particular record or null. 
- * If the table provides an update service via {@link #getUpdateService()}, - * a default update view will be provided. - * Instead of calling this method directly, callers should pass - * {@link QueryAction#updateQueryRow} to - * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or - * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. - * @param columns if null, implementations should simply return their first (and potentially only) details URL, - * or null if they don't have one. - * If non-null, the set of columns that are available from the ResultSet so that an appropriate - * URL can be chosen - */ - StringExpression getUpdateURL(@Nullable Set columns, Container container); - - /** - * Return the details URL expression for a particular record or null. - * @param columns if null, implementations should simply return their first (and potentially only) details URL, - * or null if they don't have one. - * If non-null, the set of columns that are available from the ResultSet so that an appropriate - * URL can be chosen - */ - StringExpression getDetailsURL(@Nullable Set columns, Container container); - boolean hasDetailsURL(); - - /** - * Return the method of a given name. Methods are accessible via the QueryModule's query - * language. Most tables do not have methods. - */ - default MethodInfo getMethod(String name) - { - return null; - } - - /** - * return the fieldkeys of the columns that methods in on this table depend on. - * We could do this per method, but in practice it's a short list. 
- */ - default Set getMethodRequiredFieldKeys() - { - return Set.of(); - } - - /** - * Returns a string that will appear on the default import page and as the top line - * of the default generated Excel template - */ - String getImportMessage(); - - /** - * Returns a list of templates (label / URL) that should be used as the options for excel upload - * Each URL should either point to a static template file or an action to generate the template. - * If no custom templates have been provided, it will return the default URL - */ - List> getImportTemplates(ViewContext ctx); - - default List> getValidatedImportTemplates(ViewContext ctx) - { - return getImportTemplates(ctx); - } - - /** - * Returns a list of the raw import templates (without substituting the container). This is intended to be - * used by FilteredTable or other instances that need to copy the raw values from a parent table. In general, - * getImportTemplates() should be used instead - */ - List> getRawImportTemplates(); - - // Most datasets do not have a container column - boolean hasContainerColumn(); - - boolean needsContainerClauseAdded(); - - @Nullable - ContainerFilter getContainerFilter(); - - /** - * Finds and applies the first metadata xml from active modules in the schema's container and then the first user-created metadata in the container hierarchy. - * - * @see QueryService#findMetadataOverride(UserSchema, String, boolean, boolean, Collection, Path) - */ - void overlayMetadata(String tableName, UserSchema schema, Collection errors); - - void overlayMetadata(Collection metadata, UserSchema schema, Collection errors); - - /** @return whether this table accepts XML metadata configuration to be overlaid on the default level of metadata */ - boolean isMetadataOverrideable(); - - ColumnInfo getLookupColumn(ColumnInfo parent, String name); - - int getCacheSize(); - - String getDescription(); - - /** - * Get Domain associated with this TableInfo if any. 
- */ - @Nullable - Domain getDomain(); - - /** - * Get Domain associated with this TableInfo if any. - */ - @Nullable - Domain getDomain(boolean forUpdate); - - /** - * Get DomainKind associated with this TableInfo if any. - * Domain may or may not exist even if DomainKind is available. - */ - @Nullable - DomainKind getDomainKind(); - - /** - * Returns a QueryUpdateService implementation for this TableInfo, - * or null if the table/query is not updatable. - * @return A QueryUpdateService implementation for this table/query or null. - */ - @Nullable - QueryUpdateService getUpdateService(); - - enum TriggerType - { - INSERT, UPDATE, DELETE, SELECT, TRUNCATE, MOVE; - - public String getMethodName() - { - String name = name(); - return name.substring(0, 1) + name.toLowerCase().substring(1, name.length()); - } - } - - /** - * Enumeration of trigger methods that may be implemented in trigger scripts. - */ - enum TriggerMethod - { - init, complete, - beforeInsert, afterInsert, - beforeUpdate, afterUpdate, - beforeDelete, afterDelete - } - - - /** - * Queries may have named parameters, SELECT queries (the only kind we have right now) may - * return TableInfos. This is how you find out if a TableInfo representing a query has named - * parameters - */ - - @NotNull - Collection getNamedParameters(); - - /** - * Executes any trigger scripts for this table. - *

- * The trigger should be called once before and once after an entire set of rows for each of the - * INSERT, UPDATE, DELETE trigger types. A trigger script may set up data structures to be used - * during validation. In particular, the trigger script might want to do a query to populate a set of - * legal values. - *

- * The errors parameter holds validation error messages for the entire row set. - * If errors are created during the row level trigger script, they should be added as nested ValidationExceptions. - * The ValidationException will be thrown after executing the trigger scripts if it contains any errors. - *

- * Example usage: - *

-     *   ValidationException errors = new ValidationException();
-     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, true, errors);
-     *
-     *   List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(rows.size());
-     *   for (int i = 0; i < rows.size(); i++)
-     *   {
-     *       try
-     *       {
-     *           Map row = rows.get(i);
-     *           Map oldRow = getRow( ... );
-     *           if (oldRow == null)
-     *               throw new NotFoundException("The existing row was not found.");
-     *
-     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, true, i, row, oldRow);
-     *           Map updatedRow = updateRow(user, container, row, oldRow);
-     *           if (updatedRow == null)
-     *               continue;
-     *
-     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow);
-     *           result.add(updatedRow);
-     *       }
-     *       catch (ValidationException vex)
-     *       {
-     *           errors.addNested(vex);
-     *       }
-     *   }
-     *
-     *   // Firing the after batch trigger will throw a ValidationException if
-     *   // any errors were generated during the row triggers or the during after batch trigger.
-     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, false, errors);
-     * 
- * - * @param c The current Container. - * @param user the current user - * @param type The TriggerType for the event. - * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. - * @param before true if the trigger is before the event, false if after the event. - * @param errors Any errors created by the validation script will be added to the errors collection. - * @param extraContext Optional additional bindings to set in the script's context when evaluating. - * @throws BatchValidationException if the trigger function returns false or the errors map isn't empty. - */ - void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map extraContext) - throws BatchValidationException; - - default void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, - @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) - throws ValidationException - { - fireRowTrigger(c, user, type, null, before, rowNumber, newRow, oldRow, extraContext, null); - } - - /** - * Fire trigger for a single row. - *

- * The trigger is called once before and once after each row for each of the INSERT, UPDATE, DELETE - * trigger types. - *

- * The following table describes the parameters for each of the trigger types: - *

- *
INSERT: - *
    - *
  • before: newRow contains the row values to be inserted, oldRow is null. - *
  • after: newRow contains the inserted row values, oldRow is null. - *
  • - * - *
    UPDATE: - *
      - *
    • before: newRow contains the row values to be updated, oldRow contains the previous version of the row. - *
    • after: newRow contains the updated row values, oldRow contains the previous version of the row. - *
    - * - *
    DELETE: - *
      - *
    • before: oldRow contains the previous version of the row. - *
    • after: newRow is null, oldRow contains the previous version of the row. - *
    • - *
- * - * @param c The current Container. - * @param user the current user - * @param type The TriggerType for the event. - * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. - * @param before true if the trigger is before the event, false if after the event. - * @param newRow The new row for INSERT and UPDATE. - * @param oldRow The previous row for UPDATE and DELETE - * @param extraContext Optional additional bindings to set in the script's context when evaluating. - * @param existingRecord Optional existing record for the row, used for merge operation to differentiate new vs existing row - * @throws ValidationException if the trigger function returns false or the errors map isn't empty. - */ - void fireRowTrigger( - Container c, - User user, - TriggerType type, - @Nullable QueryUpdateService.InsertOption insertOption, - boolean before, - int rowNumber, - @Nullable Map newRow, - @Nullable Map oldRow, - Map extraContext, - @Nullable Map existingRecord - ) throws ValidationException; - - /** - * Return true if there are trigger scripts associated with this table. - */ - boolean hasTriggers(@Nullable Container c); - - /** - * Return true if all trigger scripts support streaming. - */ - default boolean canStreamTriggers(Container c) - { - return false; - } - - /** - * Returns the full set of columns managed by triggers for this TableInfo. - */ - default @Nullable Set getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption) - { - return null; - } - - /** - * Reset the trigger script context by reloading them. Note there could still be caches that need to be reset - * and script init() to rerun. - * - * @param c The current container - */ - void resetTriggers(Container c); - - /** - * Returns true if the underlying database table has triggers. 
- */ - default boolean hasDbTriggers() - { - return false; - } - - /* for asserting that the TableInfo is not changed unexpectedly */ - void setLocked(boolean b); - boolean isLocked(); - - boolean supportsContainerFilter(); - boolean hasUnionTable(); - - /** - * Returns a ContainerContext for this table or null if ContainerContext is not supported. - * - * @return The ContainerContext for this table or null. - */ - @Nullable - ContainerContext getContainerContext(); - - /** - * Return the FieldKey of the Container column for this table. - * If the value is non-null then getContainerContext() will - * return a FieldKeyContext using the container FieldKey. - * - * @return FieldKey of the Container column. - */ - @Nullable FieldKey getContainerFieldKey(); - - /** - * Returns whether this table supports audit tracking of insert, updates and deletes by implementing the - * AuditConfigurable interface. - */ - boolean supportsAuditTracking(); - - /** - * @return set of all columns involved in the query - */ - Set getAllInvolvedColumns(Collection selectColumns); - - /** see interface AuditConfigurable */ - @NotNull - default AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.NONE; - } - - @NotNull - default AuditBehaviorType getEffectiveAuditBehavior() - { - return getXmlAuditBehaviorType() == null ? 
getDefaultAuditBehavior() : getXmlAuditBehaviorType(); - } - - /** - * Retrieves the audit behavior for this table taking into account, in order of precedence: - * - the override value provided - * - the setting from the XML file (always returned if there is a value set) - * - the value supplied by this table's implementation - * - * @param apiOverride value used to override the behavior type provided by the table implementation - * @return audit behavior for this table - */ - @NotNull - default AuditBehaviorType getEffectiveAuditBehavior(@Nullable AuditBehaviorType apiOverride) - { - return AuditBehaviorType.getEffectiveAuditLevel(apiOverride, getEffectiveAuditBehavior()); - } - - default AuditBehaviorType getEffectiveAuditBehavior(@Nullable String apiOverrideValue) - { - if (apiOverrideValue != null) - { - try - { - return getEffectiveAuditBehavior(AuditBehaviorType.valueOf(apiOverrideValue)); - } - catch (IllegalArgumentException ignored) - { - } - } - return getEffectiveAuditBehavior(); - } - - /* Can be used to distinguish AuditBehaviorType.NONE vs absent xml audit config */ - default AuditBehaviorType getXmlAuditBehaviorType() - { - return null; - } - - /* fields to include in detailed UPDATE audit log, even if no change is made to field value */ - @NotNull - default Set getExtraDetailedUpdateAuditFields() - { - return Collections.emptySet(); - } - - - // we exclude these from the detailed record because they are already on the audit record itself and - // depending on the data iterator behavior (e.g., for ExpDataIterators.getDataIterator), these values - // time of creating the audit log may actually already have been updated so the difference shown will be incorrect. 
- Set defaultExcludedDetailedUpdateAuditFields = CaseInsensitiveHashSet.of("Modified", "ModifiedBy", "Created", "CreatedBy"); - - @NotNull - default Set getExcludedDetailedUpdateAuditFields() - { - return defaultExcludedDetailedUpdateAuditFields; - } - - - /** - * Returns the row primary key column to use for audit history details. Note, this must - * be a single key as we don't support multiple column primary keys for audit details. - */ - @Nullable - default FieldKey getAuditRowPk() - { - return null; - } - - default boolean hasInsertURLOverride() - { - return false; - } - - default boolean hasUpdateURLOverride() - { - return false; - } - - default boolean hasDeleteURLOverride() - { - return false; - } - - /** - * Allow QueryView to render insert, update, and other buttons if the metadata xml provides URL - * overrides and the underlying table doesn't support insert/update/delete operations, - * such as a query table. - */ - default boolean allowQueryTableURLOverrides() - { - return false; - } - - /** - * Max PHI across all columns in the table. - */ - default PHI getMaxPhiLevel() - { - return getColumns().stream() - .map(ColumnRenderProperties::getPHI) - .max(Comparator.comparingInt(PHI::getRank)) - .orElse(PHI.NotPHI); - } - - /** - * Get the max allowed PHI for the current user in the current container. - */ - default PHI getUserMaxAllowedPhiLevel() - { - UserSchema schema = getUserSchema(); - if (schema == null) - return PHI.NotPHI; - - return ComplianceService.get().getMaxAllowedPhi(schema.getContainer(), schema.getUser()); - } - - /** - * Return true if the current user is allowed the maximum phi level set across all columns. - */ - default boolean canUserAccessPhi() - { - return getMaxPhiLevel().isLevelAllowed(getUserMaxAllowedPhiLevel()); - } - - /** - * Useful helper to combine permission check/throw. This is especially useful for tables where permissions can - * differ from the containing schema/container. 
- * - * TableInfo users should still always check hasPermission() as early as possible for best error handling, and - * to fail-fast (or degrade gracefully). - */ - default void checkReadBeforeExecute() - { - assert null != getUserSchema(); // this is only for tables in UserSchema - if (null != getUserSchema() && !hasPermission(getUserSchema().getUser(), ReadPermission.class)) - throw new UnauthorizedException(getUserSchema().getSchemaName() + "." + getName()); - } - - default List getSortFields() - { - return List.of(); - } - - default boolean supportsInsertOption(QueryUpdateService.InsertOption option) - { - return true; - } - - default void setAllowCalculatedColumns(boolean allowCalculatedColumns) {} - - /* Many tables should be indexed via controller provided views e.g. issues, announcements, wiki. - * We do not want crawlers to index data in these tables via query-executeQuery for instance. - */ - default boolean allowRobotsIndex() - { - return true; - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.api.data; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.collections.NamedObjectList; +import org.labkey.api.compliance.ComplianceService; +import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainKind; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.query.AggregateRowConfig; +import org.labkey.api.query.BatchValidationException; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.QueryAction; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryParseException; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.SchemaTreeNode; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.ValidationException; +import org.labkey.api.security.HasPermission; +import org.labkey.api.security.User; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.util.ContainerContext; +import org.labkey.api.util.Pair; +import org.labkey.api.util.Path; +import org.labkey.api.util.StringExpression; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.UnauthorizedException; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.data.xml.queryCustomView.FilterType; + +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +/** + * Representation of a queryable unit in the database. Might be backed by a "physical" table, a database VIEW, + * or an arbitrary generated chunk of SELECT SQL. 
+ */ +public interface TableInfo extends TableDescription, HasPermission, SchemaTreeNode +{ + /** + * This is used to report problems encountered during construction of the TableInfo. These would generally be + * reported in the schema browser or editor. Failing to construct a TableInfo generally breaks the UI badly, so we + * want to be able to return a usable TableInfo whenever possible and then report problems here if they the issue + * is correctable. + *
+ * See also ValidateQueryAction. + *
+ * @return warnings collected during TableInfo construction. + */ + default Collection getWarnings() + { + return List.of(); + } + + /** Get title, falling back to the name if title is null **/ + String getTitle(); + + /** Get title field, whether null or not **/ + @Nullable + String getTitleField(); + + /** + * Use getSQLName() instead for generating SQL. + * getSelectName() can still be used for error messages. + */ + @Nullable + default String getSelectName() + { + var sqlf = getSQLName(); + return null == sqlf ? null : sqlf.getSQL(); + } + + /** + * Return a schema.name that can be used directly in SQL statement + * Use only for tables known to be real database tables, usually + * for INSERT/UPDATE/DELETE. For SELECT use getFromSQL(alias). + */ + @Nullable + default SQLFragment getSQLName() + { + return null; + } + + @Nullable + DatabaseIdentifier getMetaDataIdentifier(); + + /** + * SQL representing this table, e.g. + * "Issues.Issues " + * "(SELECT * FROM Issues.Issues WHERE Container='...') " + **/ + @NotNull + SQLFragment getFromSQL(String alias); + + /* For most tables this is the same as getFromSQL(). + * However, for some tables that want to help optimize generated SQL + * we can ask for the columns we need to be accessible. + * + * Only columns returned by getColumn() or resolveColumn() should be handed in (no lookups) + * + * This is really intended for QueryTable + */ + SQLFragment getFromSQL(String alias, Set cols); + + DbSchema getSchema(); + + @Nullable + UserSchema getUserSchema(); + + /** getSchema().getSqlDialect() */ + SqlDialect getSqlDialect(); + + @NotNull List getPkColumns(); + + /** Gets all the constraints that guarantee uniqueness in the underlying table. This includes both PRIMARY KEY and UNIQUE constraints */ + @NotNull + List getUniqueIndices(); + + /** Gets all the indices from the underlying table. 
This includes PRIMARY KEY and UNIQUE constraints, as well as non-unique INDEX */ + @NotNull + List getAllIndices(); + + class _DoNothingAuditHandler implements AuditHandler + { + @Override + public void addSummaryAuditEvent(User user, Container c, TableInfo table, QueryService.AuditAction action, Integer dataRowCount, @Nullable AuditBehaviorType auditBehaviorType, @Nullable String userComment) + { + } + + @Override + public void addAuditEvent(User user, Container c, TableInfo table, @Nullable AuditBehaviorType auditType, @Nullable String userComment, QueryService.AuditAction action, List> rows, @Nullable List> updatedRows, @Nullable List> providedValues, boolean useTransactionAuditCache) + { + } + } + + default AuditHandler getAuditHandler(@Nullable AuditBehaviorType auditBehaviorOverride) + { + if (!supportsAuditTracking() || getEffectiveAuditBehavior(auditBehaviorOverride) == AuditBehaviorType.NONE) + return new _DoNothingAuditHandler(); + return QueryService.get().getDefaultAuditHandler(); + } + + enum IndexType + { + Primary(org.labkey.data.xml.IndexType.Type.PRIMARY, true), + Unique(org.labkey.data.xml.IndexType.Type.UNIQUE, true), + NonUnique(org.labkey.data.xml.IndexType.Type.NON_UNIQUE, false); + + private final org.labkey.data.xml.IndexType.Type.Enum _xmlIndexType; + private final boolean _unique; + + IndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType, boolean unique) + { + _xmlIndexType = xmlIndexType; + _unique = unique; + } + + public org.labkey.data.xml.IndexType.Type.Enum getXmlIndexType() + { + return _xmlIndexType; + } + + public boolean isUnique() + { + return _unique; + } + + public static IndexType getForXmlIndexType(org.labkey.data.xml.IndexType.Type.Enum xmlIndexType) + { + for (IndexType indexType : IndexType.values()) + { + if(indexType.getXmlIndexType().equals(xmlIndexType)){ + return indexType; + } + } + throw new EnumConstantNotPresentException(IndexType.class, xmlIndexType.toString()); + } + } + + record IndexDefinition(String 
name, IndexType indexType, List columns, @Nullable String filterCondition) + { + void addColumn(ColumnInfo column) + { + columns.add(column); + } + } + + /** Get a list of columns that specifies a unique key, may return the same result as getPKColumns() + * However, whereas getPkColumns() will usually return a 'short' pk, such as "rowid" or "lsid", this + * should return a more verbose AK that is most semantically useful. + * + * For instance for a flow result it might return (container, run, sample, stim, populationName) rather + * than (container, analysisId) + * + * NOTE: unlike PK, this does not guarantee that columns are NON-NULL + * NOTE: Postgres does not consider rows with NULL values to be "equal" so NULLs may be repeated! + */ + @NotNull List getAlternateKeyColumns(); + + @Nullable default Set getDisabledSystemFields() + { + return Collections.emptySet(); + } + + /** + * @return a List> a list of additionally required fields during import, each field can have a Set of alternative field keys + * Example usage is specifying the set of required lineage columns during sample/dataclass data creation. + */ + @NotNull default List> getAdditionalRequiredInsertColumns() + { + return Collections.emptyList(); + } + + ColumnInfo getVersionColumn(); + + String getVersionColumnName(); + + boolean hasDefaultTitleColumn(); + + DatabaseTableType getTableType(); + + /** + * Get select list for named (hopefully unique!) column to title column, including filter on table. + * If maxRows is exceeded, the NamedObjectList will be marked as incomplete. + * When maxRows is null a default maxRows will be used. To select all rows, set maxRows to {@link Table#ALL_ROWS}. 
+ * @see NamedObjectList#isComplete() + */ + @NotNull NamedObjectList getSelectList(String columnName, List filters, Integer maxRows, String titleColumn); + + ColumnInfo getColumn(@NotNull String colName); + + // same as getColumn(colName.getName()), does not need to support lookup columns + ColumnInfo getColumn(@NotNull FieldKey colName); + + List getColumns(); + + List getUserEditableColumns(); + + /** @param colNames comma separated column names */ + List getColumns(String colNames); + + List getColumns(String... colNameArray); + + Set getColumnNameSet(); + + default String getDbSequenceName(String columnName) + { + return (this.getSchema().getName() + ":" + this.getName() + ":" + columnName).toLowerCase(); + } + + /** + * Return a list of ColumnInfos that make up the extended set of + * columns that could be considered a part of this table by default. + * For the majority of tables, this is simply the columns returned from + * {@link TableInfo#getColumns()} plus any additional columns from + * {@link TableInfo#getDefaultVisibleColumns()). + * + * For other tables, the extended column set could include some columns + * from lookup tables. + * + * @param includeHidden Include hidden columns. + * @return All columns. + */ + Map getExtendedColumns(boolean includeHidden); + + + /** + * @return the {@link org.labkey.api.query.FieldKey}s that should be part of the default view of the table, + * assuming that no other default view has been configured. + */ + List getDefaultVisibleColumns(); + + /** + * @param keys the new set of columns to show when there is no explicit default view defined. Use null to indicate + * that the list should be inferred based on the column metadata + */ + void setDefaultVisibleColumns(@Nullable Iterable keys); + + ButtonBarConfig getButtonBarConfig(); + + AggregateRowConfig getAggregateRowConfig(); + + /** + * Return the default query grid view URL for the table or null. 
+ * Instead of calling this method directly, callers should pass + * {@link QueryAction#executeQuery} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getGridURL(Container container); + + /** + * Return the insert URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default insert view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#insertQueryRow} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getInsertURL(Container container); + + /** + * Return the import URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default import view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#importData} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getImportDataURL(Container container); + + /** + * Return the delete URL expression for the table or null. + * If the table provides an update service via {@link #getUpdateService()}, + * a default action will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#deleteQueryRows} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + */ + ActionURL getDeleteURL(Container container); + + /** + * Return the update URL expression for a particular record or null. 
+ * If the table provides an update service via {@link #getUpdateService()}, + * a default update view will be provided. + * Instead of calling this method directly, callers should pass + * {@link QueryAction#updateQueryRow} to + * {@link org.labkey.api.query.QueryView#urlFor(QueryAction)} or + * {@link org.labkey.api.query.UserSchema#urlFor(org.labkey.api.query.QueryAction)}. + * @param columns if null, implementations should simply return their first (and potentially only) details URL, + * or null if they don't have one. + * If non-null, the set of columns that are available from the ResultSet so that an appropriate + * URL can be chosen + */ + StringExpression getUpdateURL(@Nullable Set columns, Container container); + + /** + * Return the details URL expression for a particular record or null. + * @param columns if null, implementations should simply return their first (and potentially only) details URL, + * or null if they don't have one. + * If non-null, the set of columns that are available from the ResultSet so that an appropriate + * URL can be chosen + */ + StringExpression getDetailsURL(@Nullable Set columns, Container container); + boolean hasDetailsURL(); + + /** + * Return the method of a given name. Methods are accessible via the QueryModule's query + * language. Most tables do not have methods. + */ + default MethodInfo getMethod(String name) + { + return null; + } + + /** + * return the fieldkeys of the columns that methods in on this table depend on. + * We could do this per method, but in practice it's a short list. 
+ */ + default Set getMethodRequiredFieldKeys() + { + return Set.of(); + } + + /** + * Returns a string that will appear on the default import page and as the top line + * of the default generated Excel template + */ + String getImportMessage(); + + /** + * Returns a list of templates (label / URL) that should be used as the options for excel upload + * Each URL should either point to a static template file or an action to generate the template. + * If no custom templates have been provided, it will return the default URL + */ + List> getImportTemplates(ViewContext ctx); + + default List> getValidatedImportTemplates(ViewContext ctx) + { + return getImportTemplates(ctx); + } + + /** + * Returns a list of the raw import templates (without substituting the container). This is intended to be + * used by FilteredTable or other instances that need to copy the raw values from a parent table. In general, + * getImportTemplates() should be used instead + */ + List> getRawImportTemplates(); + + // Most datasets do not have a container column + boolean hasContainerColumn(); + + boolean needsContainerClauseAdded(); + + @Nullable + ContainerFilter getContainerFilter(); + + /** + * Finds and applies the first metadata xml from active modules in the schema's container and then the first user-created metadata in the container hierarchy. + * + * @see QueryService#findMetadataOverride(UserSchema, String, boolean, boolean, Collection, Path) + */ + void overlayMetadata(String tableName, UserSchema schema, Collection errors); + + void overlayMetadata(Collection metadata, UserSchema schema, Collection errors); + + /** @return whether this table accepts XML metadata configuration to be overlaid on the default level of metadata */ + boolean isMetadataOverrideable(); + + ColumnInfo getLookupColumn(ColumnInfo parent, String name); + + int getCacheSize(); + + String getDescription(); + + /** + * Get Domain associated with this TableInfo if any. 
+ */ + @Nullable + Domain getDomain(); + + /** + * Get Domain associated with this TableInfo if any. + */ + @Nullable + Domain getDomain(boolean forUpdate); + + /** + * Get DomainKind associated with this TableInfo if any. + * Domain may or may not exist even if DomainKind is available. + */ + @Nullable + DomainKind getDomainKind(); + + /** + * Returns a QueryUpdateService implementation for this TableInfo, + * or null if the table/query is not updatable. + * @return A QueryUpdateService implementation for this table/query or null. + */ + @Nullable + QueryUpdateService getUpdateService(); + + enum TriggerType + { + INSERT, UPDATE, DELETE, SELECT, TRUNCATE, MOVE; + + public String getMethodName() + { + String name = name(); + return name.substring(0, 1) + name.toLowerCase().substring(1, name.length()); + } + } + + /** + * Enumeration of trigger methods that may be implemented in trigger scripts. + */ + enum TriggerMethod + { + init, complete, + beforeInsert, afterInsert, + beforeUpdate, afterUpdate, + beforeDelete, afterDelete + } + + + /** + * Queries may have named parameters, SELECT queries (the only kind we have right now) may + * return TableInfos. This is how you find out if a TableInfo representing a query has named + * parameters + */ + + @NotNull + Collection getNamedParameters(); + + /** + * Executes any trigger scripts for this table. + *

+ * The trigger should be called once before and once after an entire set of rows for each of the + * INSERT, UPDATE, DELETE trigger types. A trigger script may set up data structures to be used + * during validation. In particular, the trigger script might want to do a query to populate a set of + * legal values. + *

+ * The errors parameter holds validation error messages for the entire row set. + * If errors are created during the row level trigger script, they should be added as nested ValidationExceptions. + * The ValidationException will be thrown after executing the trigger scripts if it contains any errors. + *

+ * Example usage: + *

+     *   ValidationException errors = new ValidationException();
+     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, true, errors);
+     *
+     *   List<Map<String, Object>> result = new ArrayList<Map<String, Object>>(rows.size());
+     *   for (int i = 0; i < rows.size(); i++)
+     *   {
+     *       try
+     *       {
+     *           Map row = rows.get(i);
+     *           Map oldRow = getRow( ... );
+     *           if (oldRow == null)
+     *               throw new NotFoundException("The existing row was not found.");
+     *
+     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, true, i, row, oldRow);
+     *           Map updatedRow = updateRow(user, container, row, oldRow);
+     *           if (updatedRow == null)
+     *               continue;
+     *
+     *           getQueryTable().fireRowTrigger(container, TableInfo.TriggerType.UPDATE, false, i, updatedRow, oldRow);
+     *           result.add(updatedRow);
+     *       }
+     *       catch (ValidationException vex)
+     *       {
+     *           errors.addNested(vex);
+     *       }
+     *   }
+     *
+     *   // Firing the after batch trigger will throw a ValidationException if
+     *   // any errors were generated during the row triggers or the during after batch trigger.
+     *   getQueryTable().fireBatchTrigger(container, TableInfo.TriggerType.UPDATE, false, errors);
+     * 
+ * + * @param c The current Container. + * @param user the current user + * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. + * @param before true if the trigger is before the event, false if after the event. + * @param errors Any errors created by the validation script will be added to the errors collection. + * @param extraContext Optional additional bindings to set in the script's context when evaluating. + * @throws BatchValidationException if the trigger function returns false or the errors map isn't empty. + */ + void fireBatchTrigger(Container c, User user, TriggerType type, @Nullable QueryUpdateService.InsertOption insertOption, boolean before, BatchValidationException errors, Map extraContext) + throws BatchValidationException; + + default void fireRowTrigger(Container c, User user, TriggerType type, boolean before, int rowNumber, + @Nullable Map newRow, @Nullable Map oldRow, Map extraContext) + throws ValidationException + { + fireRowTrigger(c, user, type, null, before, rowNumber, newRow, oldRow, extraContext, null); + } + + /** + * Fire trigger for a single row. + *

+ * The trigger is called once before and once after each row for each of the INSERT, UPDATE, DELETE + * trigger types. + *

+ * The following table describes the parameters for each of the trigger types: + *

+ *
INSERT: + *
    + *
  • before: newRow contains the row values to be inserted, oldRow is null. + *
  • after: newRow contains the inserted row values, oldRow is null. + *
  • + * + *
    UPDATE: + *
      + *
    • before: newRow contains the row values to be updated, oldRow contains the previous version of the row. + *
    • after: newRow contains the updated row values, oldRow contains the previous version of the row. + *
    + * + *
    DELETE: + *
      + *
    • before: oldRow contains the previous version of the row. + *
    • after: newRow is null, oldRow contains the previous version of the row. + *
    • + *
+ * + * @param c The current Container. + * @param user the current user + * @param type The TriggerType for the event. + * @param insertOption The insertOption that invoked this trigger. Will be null when invoked outside a data iterator. + * @param before true if the trigger is before the event, false if after the event. + * @param newRow The new row for INSERT and UPDATE. + * @param oldRow The previous row for UPDATE and DELETE + * @param extraContext Optional additional bindings to set in the script's context when evaluating. + * @param existingRecord Optional existing record for the row, used for merge operation to differentiate new vs existing row + * @throws ValidationException if the trigger function returns false or the errors map isn't empty. + */ + void fireRowTrigger( + Container c, + User user, + TriggerType type, + @Nullable QueryUpdateService.InsertOption insertOption, + boolean before, + int rowNumber, + @Nullable Map newRow, + @Nullable Map oldRow, + Map extraContext, + @Nullable Map existingRecord + ) throws ValidationException; + + /** + * Return true if there are trigger scripts associated with this table. + */ + boolean hasTriggers(@Nullable Container c); + + /** + * Return true if all trigger scripts support streaming. + */ + default boolean canStreamTriggers(Container c) + { + return false; + } + + /** + * Returns the full set of columns managed by triggers for this TableInfo. + */ + default @Nullable Set getTriggerManagedColumns(@Nullable Container c, QueryUpdateService.InsertOption insertOption) + { + return null; + } + + /** + * Reset the trigger script context by reloading them. Note there could still be caches that need to be reset + * and script init() to rerun. + * + * @param c The current container + */ + void resetTriggers(Container c); + + /** + * Returns true if the underlying database table has triggers. 
+ */ + default boolean hasDbTriggers() + { + return false; + } + + /* for asserting that the TableInfo is not changed unexpectedly */ + void setLocked(boolean b); + boolean isLocked(); + + boolean supportsContainerFilter(); + boolean hasUnionTable(); + + /** + * Returns a ContainerContext for this table or null if ContainerContext is not supported. + * + * @return The ContainerContext for this table or null. + */ + @Nullable + ContainerContext getContainerContext(); + + /** + * Return the FieldKey of the Container column for this table. + * If the value is non-null then getContainerContext() will + * return a FieldKeyContext using the container FieldKey. + * + * @return FieldKey of the Container column. + */ + @Nullable FieldKey getContainerFieldKey(); + + /** + * Returns whether this table supports audit tracking of insert, updates and deletes by implementing the + * AuditConfigurable interface. + */ + boolean supportsAuditTracking(); + + /** + * @return set of all columns involved in the query + */ + Set getAllInvolvedColumns(Collection selectColumns); + + /** see interface AuditConfigurable */ + @NotNull + default AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.NONE; + } + + @NotNull + default AuditBehaviorType getEffectiveAuditBehavior() + { + return getXmlAuditBehaviorType() == null ? 
getDefaultAuditBehavior() : getXmlAuditBehaviorType(); + } + + /** + * Retrieves the audit behavior for this table taking into account, in order of precedence: + * - the override value provided + * - the setting from the XML file (always returned if there is a value set) + * - the value supplied by this table's implementation + * + * @param apiOverride value used to override the behavior type provided by the table implementation + * @return audit behavior for this table + */ + @NotNull + default AuditBehaviorType getEffectiveAuditBehavior(@Nullable AuditBehaviorType apiOverride) + { + return AuditBehaviorType.getEffectiveAuditLevel(apiOverride, getEffectiveAuditBehavior()); + } + + default AuditBehaviorType getEffectiveAuditBehavior(@Nullable String apiOverrideValue) + { + if (apiOverrideValue != null) + { + try + { + return getEffectiveAuditBehavior(AuditBehaviorType.valueOf(apiOverrideValue)); + } + catch (IllegalArgumentException ignored) + { + } + } + return getEffectiveAuditBehavior(); + } + + /* Can be used to distinguish AuditBehaviorType.NONE vs absent xml audit config */ + default AuditBehaviorType getXmlAuditBehaviorType() + { + return null; + } + + /* fields to include in detailed UPDATE audit log, even if no change is made to field value */ + @NotNull + default Set getExtraDetailedUpdateAuditFields() + { + return Collections.emptySet(); + } + + + // we exclude these from the detailed record because they are already on the audit record itself and + // depending on the data iterator behavior (e.g., for ExpDataIterators.getDataIterator), these values + // time of creating the audit log may actually already have been updated so the difference shown will be incorrect. 
+ Set defaultExcludedDetailedUpdateAuditFields = CaseInsensitiveHashSet.of("Modified", "ModifiedBy", "Created", "CreatedBy"); + + @NotNull + default Set getExcludedDetailedUpdateAuditFields() + { + return defaultExcludedDetailedUpdateAuditFields; + } + + + /** + * Returns the row primary key column to use for audit history details. Note, this must + * be a single key as we don't support multiple column primary keys for audit details. + */ + @Nullable + default FieldKey getAuditRowPk() + { + return null; + } + + default boolean hasInsertURLOverride() + { + return false; + } + + default boolean hasUpdateURLOverride() + { + return false; + } + + default boolean hasDeleteURLOverride() + { + return false; + } + + /** + * Allow QueryView to render insert, update, and other buttons if the metadata xml provides URL + * overrides and the underlying table doesn't support insert/update/delete operations, + * such as a query table. + */ + default boolean allowQueryTableURLOverrides() + { + return false; + } + + /** + * Max PHI across all columns in the table. + */ + default PHI getMaxPhiLevel() + { + return getColumns().stream() + .map(ColumnRenderProperties::getPHI) + .max(Comparator.comparingInt(PHI::getRank)) + .orElse(PHI.NotPHI); + } + + /** + * Get the max allowed PHI for the current user in the current container. + */ + default PHI getUserMaxAllowedPhiLevel() + { + UserSchema schema = getUserSchema(); + if (schema == null) + return PHI.NotPHI; + + return ComplianceService.get().getMaxAllowedPhi(schema.getContainer(), schema.getUser()); + } + + /** + * Return true if the current user is allowed the maximum phi level set across all columns. + */ + default boolean canUserAccessPhi() + { + return getMaxPhiLevel().isLevelAllowed(getUserMaxAllowedPhiLevel()); + } + + /** + * Useful helper to combine permission check/throw. This is especially useful for tables where permissions can + * differ from the containing schema/container. 
+ * + * TableInfo users should still always check hasPermission() as early as possible for best error handling, and + * to fail-fast (or degrade gracefully). + */ + default void checkReadBeforeExecute() + { + assert null != getUserSchema(); // this is only for tables in UserSchema + if (null != getUserSchema() && !hasPermission(getUserSchema().getUser(), ReadPermission.class)) + throw new UnauthorizedException(getUserSchema().getSchemaName() + "." + getName()); + } + + default List getSortFields() + { + return List.of(); + } + + default boolean supportsInsertOption(QueryUpdateService.InsertOption option) + { + return true; + } + + default void setAllowCalculatedColumns(boolean allowCalculatedColumns) {} + + /* Many tables should be indexed via controller provided views e.g. issues, announcements, wiki. + * We do not want crawlers to index data in these tables via query-executeQuery for instance. + */ + default boolean allowRobotsIndex() + { + return true; + } +} diff --git a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java index a8e50b11384..fc60c528479 100644 --- a/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java +++ b/experiment/src/org/labkey/experiment/api/ExpMaterialTableImpl.java @@ -1,1873 +1,1873 @@ -/* - * Copyright (c) 2008-2019 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.labkey.experiment.api; - -import org.apache.commons.collections4.ListUtils; -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.math3.util.Precision; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.assay.plate.AssayPlateMetadataService; -import org.labkey.api.audit.AuditHandler; -import org.labkey.api.cache.BlockingCache; -import org.labkey.api.cache.CacheManager; -import org.labkey.api.collections.CaseInsensitiveHashMap; -import org.labkey.api.collections.CaseInsensitiveHashSet; -import org.labkey.api.compliance.TableRules; -import org.labkey.api.compliance.TableRulesManager; -import org.labkey.api.data.ColumnHeaderType; -import org.labkey.api.data.ColumnInfo; -import org.labkey.api.data.Container; -import org.labkey.api.data.ContainerFilter; -import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DataColumn; -import org.labkey.api.data.DataRegion; -import org.labkey.api.data.DbSchema; -import org.labkey.api.data.DbScope; -import org.labkey.api.data.ForeignKey; -import org.labkey.api.data.ImportAliasable; -import org.labkey.api.data.JdbcType; -import org.labkey.api.data.MaterializedQueryHelper; -import org.labkey.api.data.MutableColumnInfo; -import org.labkey.api.data.PHI; -import org.labkey.api.data.RenderContext; -import org.labkey.api.data.SQLFragment; -import org.labkey.api.data.Sort; -import org.labkey.api.data.TableInfo; -import org.labkey.api.data.UnionContainerFilter; -import org.labkey.api.dataiterator.DataIteratorBuilder; -import org.labkey.api.dataiterator.DataIteratorContext; -import org.labkey.api.dataiterator.LoggingDataIterator; -import org.labkey.api.dataiterator.SimpleTranslator; -import org.labkey.api.exp.Lsid; -import org.labkey.api.exp.MvColumn; -import org.labkey.api.exp.OntologyManager; -import org.labkey.api.exp.PropertyColumn; -import org.labkey.api.exp.api.ExpMaterial; -import 
org.labkey.api.exp.api.ExpProtocol; -import org.labkey.api.exp.api.ExpSampleType; -import org.labkey.api.exp.api.ExperimentService; -import org.labkey.api.exp.api.ExperimentUrls; -import org.labkey.api.exp.api.NameExpressionOptionService; -import org.labkey.api.exp.api.StorageProvisioner; -import org.labkey.api.exp.property.DefaultPropertyValidator; -import org.labkey.api.exp.property.Domain; -import org.labkey.api.exp.property.DomainProperty; -import org.labkey.api.exp.property.DomainUtil; -import org.labkey.api.exp.property.IPropertyValidator; -import org.labkey.api.exp.property.PropertyService; -import org.labkey.api.exp.query.ExpDataTable; -import org.labkey.api.exp.query.ExpMaterialTable; -import org.labkey.api.exp.query.ExpSampleTypeTable; -import org.labkey.api.exp.query.ExpSchema; -import org.labkey.api.exp.query.SamplesSchema; -import org.labkey.api.gwt.client.AuditBehaviorType; -import org.labkey.api.gwt.client.model.PropertyValidatorType; -import org.labkey.api.inventory.InventoryService; -import org.labkey.api.ontology.Quantity; -import org.labkey.api.ontology.Unit; -import org.labkey.api.qc.SampleStatusService; -import org.labkey.api.query.AliasedColumn; -import org.labkey.api.query.DetailsURL; -import org.labkey.api.query.ExprColumn; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.LookupForeignKey; -import org.labkey.api.query.QueryException; -import org.labkey.api.query.QueryForeignKey; -import org.labkey.api.query.QueryService; -import org.labkey.api.query.QueryUpdateService; -import org.labkey.api.query.QueryUrls; -import org.labkey.api.query.RowIdForeignKey; -import org.labkey.api.query.UserSchema; -import org.labkey.api.query.column.BuiltInColumnTypes; -import org.labkey.api.search.SearchService; -import org.labkey.api.security.User; -import org.labkey.api.security.UserPrincipal; -import org.labkey.api.security.permissions.DeletePermission; -import org.labkey.api.security.permissions.InsertPermission; -import 
org.labkey.api.security.permissions.MediaReadPermission; -import org.labkey.api.security.permissions.MoveEntitiesPermission; -import org.labkey.api.security.permissions.Permission; -import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.security.permissions.UpdatePermission; -import org.labkey.api.util.GUID; -import org.labkey.api.util.HeartBeat; -import org.labkey.api.util.PageFlowUtil; -import org.labkey.api.util.Pair; -import org.labkey.api.util.StringExpression; -import org.labkey.api.util.UnexpectedException; -import org.labkey.api.view.ActionURL; -import org.labkey.api.view.ViewContext; -import org.labkey.data.xml.TableType; -import org.labkey.experiment.ExpDataIterators; -import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; -import org.labkey.experiment.controllers.exp.ExperimentController; -import org.labkey.experiment.lineage.LineageMethod; - -import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.TreeSet; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.locks.Lock; -import java.util.function.Supplier; -import java.util.stream.Collectors; - -import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; -import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; -import static 
org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; -import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLE_TYPE_FILE_DIRECTORY_NAME; -import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; -import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; -import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; - -public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable -{ - ExpSampleTypeImpl _ss; - Set _uniqueIdFields; - boolean _supportTableRules = true; - - private static final Set MATERIAL_ALT_KEYS; - private static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); - static { - MATERIAL_ALT_KEYS = CaseInsensitiveHashSet.of(MaterialSourceId.name(), Name.name()); - - Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); - IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); - amountValidator.setName("SampleAmountNonNegative"); - amountValidator.setExpressionValue("~gte=0"); - amountValidator.setErrorMessage("Amounts must be non-negative."); - amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); - AMOUNT_RANGE_VALIDATORS.add(amountValidator); - } - - public ExpMaterialTableImpl(UserSchema schema, ContainerFilter cf, @Nullable ExpSampleType sampleType) - { - super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); - setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); - setPublicSchemaName(ExpSchema.SCHEMA_NAME); - addAllowablePermission(InsertPermission.class); - addAllowablePermission(UpdatePermission.class); - addAllowablePermission(MoveEntitiesPermission.class); - 
setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); - setSampleType(sampleType); - } - - public Set getUniqueIdFields() - { - if (_uniqueIdFields == null) - { - _uniqueIdFields = new CaseInsensitiveHashSet(); - _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); - } - return _uniqueIdFields; - } - - @Override - protected ColumnInfo resolveColumn(String name) - { - ColumnInfo result = super.resolveColumn(name); - if (result == null) - { - if ("CpasType".equalsIgnoreCase(name)) - result = createColumn(SampleSet.name(), SampleSet); - else if (Property.name().equalsIgnoreCase(name)) - result = createPropertyColumn(Property.name()); - else if (QueryableInputs.name().equalsIgnoreCase(name)) - result = createColumn(QueryableInputs.name(), QueryableInputs); - } - return result; - } - - @Override - public ColumnInfo getExpObjectColumn() - { - var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); - ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); - return ret; - } - - @Override - public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) - { - if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) - { - // Special case sample auditing to help build a useful timeline view - return SampleTypeServiceImpl.get(); - } - - return super.getAuditHandler(auditBehaviorType); - } - - @Override - public MutableColumnInfo createColumn(String alias, Column column) - { - switch (column) - { - case Folder -> - { - return wrapColumn(alias, _rootTable.getColumn("Container")); - } - case LSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(LSID.name())); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - return columnInfo; - } - 
case MaterialSourceId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(MaterialSourceId.name())); - columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") - { - @Override - public TableInfo getLookupTableInfo() - { - ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); - sampleTypeTable.populate(); - return sampleTypeTable; - } - - @Override - public StringExpression getURL(ColumnInfo parent) - { - return super.getURL(parent, true); - } - }); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case RootMaterialRowId -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(RootMaterialRowId.name())); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), RowId.name())); - columnInfo.setLabel("Root Material"); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - - // NK: Here we mark the column as not required AND nullable which is the opposite of the database where - // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. - // See ExpMaterialValidatorIterator. 
- columnInfo.setRequired(false); - columnInfo.setNullable(true); - - return columnInfo; - } - case AliquotedFromLSID -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(AliquotedFromLSID.name())); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), LSID.name())); - columnInfo.setLabel("Aliquoted From Parent"); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInUpdateView(false); - return columnInfo; - } - case IsAliquot -> - { - String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RootMaterialRowId.name(); - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); - ExprColumn columnInfo = new ExprColumn(this, IsAliquot.fieldKey(), new SQLFragment( - "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) - .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 - .append(" ELSE NULL END)"), JdbcType.BOOLEAN); - columnInfo.setLabel("Is Aliquot"); - columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(false); - return columnInfo; - } - case Name -> - { - var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); - // shut off this field in insert and update views if user specified names are not allowed - if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) - { - nameCol.setShownInInsertView(false); - nameCol.setShownInUpdateView(false); - } - return nameCol; - } - case RawAmount -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> 
new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - case StoredAmount -> - { - String label = StoredAmount.label(); - Set importAliases = Set.of(label, "Stored Amount"); - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); - columnInfo.setLabel(label); - columnInfo.setImportAliasesSet(importAliases); - columnInfo.setDescription("The amount of this sample currently on hand."); - columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); - columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); - return columnInfo; - } - } - case RawUnits -> - { - var columnInfo = 
wrapColumn(alias, _rootTable.getColumn(Units.name())); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - columnInfo.setHidden(true); - columnInfo.setReadOnly(true); - columnInfo.setShownInDetailsView(false); - columnInfo.setShownInInsertView(false); - columnInfo.setShownInUpdateView(false); - columnInfo.setUserEditable(false); - return columnInfo; - } - case Units -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Units.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); - columnInfo.setShownInUpdateView(true); - columnInfo.setShownInInsertView(true); - columnInfo.setUserEditable(true); - columnInfo.setCalculated(false); - return columnInfo; - } - else - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); - columnInfo.setFk(fk); - columnInfo.setDescription("The units associated with the Stored Amount for this sample."); - return columnInfo; - } - } - case Description -> - { - return wrapColumn(alias, _rootTable.getColumn(Description.name())); - } - case SampleSet -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); - columnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) - { - @Override - protected ContainerFilter getLookupContainerFilter() - { - // Be sure that we can resolve the sample type if it's defined in a separate container. - // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
- // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has - // parents in current sample type and a sample type in the parent container - Set containers = new HashSet<>(); - if (null != getSampleType()) - containers.add(getSampleType().getContainer()); - containers.add(getContainer()); - if (getContainer().getProject() != null) - containers.add(getContainer().getProject()); - containers.add(ContainerManager.getSharedContainer()); - ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); - - if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) - cf = new UnionContainerFilter(_containerFilter, cf); - return cf; - } - }); - columnInfo.setReadOnly(true); - columnInfo.setUserEditable(false); - columnInfo.setShownInInsertView(false); - return columnInfo; - } - case SourceProtocolLSID -> - { - // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module - ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( - "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + - " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); - columnInfo.setSqlTypeName("lsidtype"); - columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); - columnInfo.setLabel("Source Protocol"); - columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - return columnInfo; - } - case SourceProtocolApplication -> - { - var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); - 
columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - columnInfo.setUserEditable(false); - columnInfo.setReadOnly(true); - columnInfo.setHidden(true); - columnInfo.setAutoIncrement(false); - return columnInfo; - } - case SourceApplicationInput -> - { - var col = createEdgeColumn(alias, SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); - col.setHidden(true); - return col; - } - case RunApplication -> - { - SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") - .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") - .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") - .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) - .append(")"); - - var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); - col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); - col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); - col.setUserEditable(false); - col.setReadOnly(true); - col.setHidden(true); - return col; - } - case RunApplicationOutput -> - { - var col = createEdgeColumn(alias, RunApplication, ExpSchema.TableType.MaterialInputs); - col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); - return col; - } - case Run -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); - ret.setFk(getExpSchema().getRunIdForeignKey(getContainerFilter())); - ret.setReadOnly(true); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case RowId -> - { - var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); - // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary 
key's default sort direction - ret.setSortDirection(Sort.SortDirection.DESC); - ret.setFk(new RowIdForeignKey(ret)); - ret.setUserEditable(false); - ret.setHidden(true); - ret.setShownInInsertView(false); - ret.setHasDbSequence(true); - ret.setIsRootDbSequence(true); - return ret; - } - case Property -> - { - return createPropertyColumn(alias); - } - case Flag -> - { - return createFlagColumn(alias); - } - case Created -> - { - return wrapColumn(alias, _rootTable.getColumn("Created")); - } - case CreatedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("CreatedBy")); - } - case Modified -> - { - return wrapColumn(alias, _rootTable.getColumn("Modified")); - } - case ModifiedBy -> - { - return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); - } - case Alias -> - { - return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); - } - case Inputs -> - { - return createLineageColumn(this, alias, true, false); - } - case QueryableInputs -> - { - return createLineageColumn(this, alias, true, true); - } - case Outputs -> - { - return createLineageColumn(this, alias, false, false); - } - case Properties -> - { - return createPropertiesColumn(alias); - } - case SampleState -> - { - boolean statusEnabled = isStatusEnabled(getContainer()); - var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); - ret.setLabel("Status"); - ret.setHidden(!statusEnabled); - ret.setShownInDetailsView(statusEnabled); - ret.setShownInInsertView(statusEnabled); - ret.setShownInUpdateView(statusEnabled); - ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) - .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); - return ret; - } - case AliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); - ret.setLabel(ALIQUOT_COUNT_LABEL); - return ret; - } - case 
RawAliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(RawAliquotVolume.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AliquotVolume -> - { - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AliquotVolume.name(), AliquotUnit.name(), ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The total amount of this sample's aliquots, in the display unit for the sample type, currently on hand."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); - ret.setLabel(ALIQUOT_VOLUME_LABEL); - return ret; - } - } - case RawAvailableAliquotVolume -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(RawAvailableAliquotVolume.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AvailableAliquotVolume -> - { - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AvailableAliquotVolume.name(), AliquotUnit.name(), AVAILABLE_ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); - columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); - columnInfo.setDescription("The total amount of this sample's available aliquots currently on hand, in the display unit for the sample type."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, 
_rootTable.getColumn(AvailableAliquotVolume.name())); - ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); - return ret; - } - } - case AvailableAliquotCount -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); - ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); - return ret; - } - case RawAliquotUnit -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); - ret.setLabel(RawAliquotUnit.label()); - ret.setHidden(true); - ret.setShownInDetailsView(false); - ret.setShownInInsertView(false); - ret.setShownInUpdateView(false); - return ret; - } - case AliquotUnit -> - { - ForeignKey fk = new LookupForeignKey("Value", "Value") - { - @Override - public @Nullable TableInfo getLookupTableInfo() - { - return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); - } - }; - - Unit typeUnit = getSampleTypeUnit(); - if (typeUnit != null) - { - SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, AliquotUnit.name(), typeUnit); - columnInfo.setFk(fk); - columnInfo.setDescription("The sample type display units associated with the AliquotAmount for this sample."); - return columnInfo; - } - else - { - var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); - ret.setShownInDetailsView(false); - return ret; - } - } - case MaterialExpDate -> - { - var ret = wrapColumn(alias, _rootTable.getColumn(MaterialExpDate.name())); - ret.setLabel("Expiration Date"); - ret.setShownInDetailsView(true); - ret.setShownInInsertView(true); - ret.setShownInUpdateView(true); - return ret; - } - case LastIndexed -> - { - var sql = new SQLFragment("(SELECT LastIndexed FROM ") - .append(ExperimentServiceImpl.get().getTinfoMaterialIndexed(), "mi") - .append(" WHERE mi.MaterialId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId)"); - var ret = new ExprColumn(this, LastIndexed.name(), sql, JdbcType.TIMESTAMP); - ret.setDescription("Date when the material was last full-text search indexed in the system"); - 
ret.setHidden(true); - ret.setReadOnly(true); - ret.setUserEditable(false); - return ret; - } - default -> throw new IllegalArgumentException("Unknown column " + column); - } - } - - @Override - public MutableColumnInfo createPropertyColumn(String alias) - { - var ret = wrapColumn(alias, _rootTable.getColumn(RowId.name())); - - if (_ss != null && _ss.getTinfo() != null) - ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getLookupContainerFilter()).table(_ss.getTinfo()).key(RowId.name()).build()); - - ret.setIsUnselectable(true); - ret.setDescription("A holder for any custom fields associated with this sample"); - ret.setHidden(true); - return ret; - } - - private static boolean isStatusEnabled(Container c) - { - return SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(c).isEmpty(); - } - - private Unit getSampleTypeUnit() - { - Unit typeUnit = null; - if (_ss != null && _ss.getMetricUnit() != null) - typeUnit = Unit.fromName(_ss.getMetricUnit()); - return typeUnit; - } - - private void setSampleType(@Nullable ExpSampleType st) - { - checkLocked(); - if (_ss != null) - { - throw new IllegalStateException("Cannot unset sample type"); - } - if (st != null && !(st instanceof ExpSampleTypeImpl)) - { - throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); - } - _ss = (ExpSampleTypeImpl) st; - if (_ss != null) - { - setPublicSchemaName(SamplesSchema.SCHEMA_NAME); - setName(st.getName()); - - String description = _ss.getDescription(); - if (StringUtils.isEmpty(description)) - description = "Contains one row per sample in the " + _ss.getName() + " sample type"; - setDescription(description); - - if (canUserAccessPhi()) - { - ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); - setImportURL(new DetailsURL(url)); - } - } - } - - public ExpSampleType 
getSampleType() - { - return _ss; - } - - @Override - protected void populateColumns() - { - var st = getSampleType(); - var rowIdCol = addColumn(RowId); - addColumn(MaterialSourceId); - addColumn(SourceProtocolApplication); - addColumn(SourceApplicationInput); - addColumn(RunApplication); - addColumn(RunApplicationOutput); - addColumn(SourceProtocolLSID); - - List defaultCols = new ArrayList<>(); - var nameCol = addColumn(Name); - defaultCols.add(Name.fieldKey()); - if (st != null && st.hasNameAsIdCol()) - { - // Show the Name field but don't mark is as required when using name expressions - if (st.hasNameExpression()) - { - var nameExpression = st.getNameExpression(); - nameCol.setNameExpression(nameExpression); - nameCol.setNullable(true); - String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); - String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); - nameCol.setDescription(desc); - } - else - { - nameCol.setNullable(false); - } - } - else - { - nameCol.setReadOnly(true); - nameCol.setShownInInsertView(false); - } - - addColumn(Alias); - addColumn(Description); - addColumn(SampleSet); - addColumn(MaterialExpDate); - defaultCols.add(MaterialExpDate.fieldKey()); - addContainerColumn(Folder, null); - if (getContainer().hasProductFolders()) - defaultCols.add(Folder.fieldKey()); - addColumn(Run); - defaultCols.add(Run.fieldKey()); - addColumn(LSID); - addColumn(RootMaterialRowId); - addColumn(AliquotedFromLSID); - addColumn(IsAliquot); - addColumn(Created); - addColumn(CreatedBy); - addColumn(Modified); - addColumn(ModifiedBy); - if (st == null) - defaultCols.add(SampleSet.fieldKey()); - addColumn(Flag); - addColumn(SampleState); - if (isStatusEnabled(getContainer())) - defaultCols.add(SampleState.fieldKey()); - - // TODO is this a real Domain??? 
- if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) - { - defaultCols.add(Flag.fieldKey()); - addSampleTypeColumns(st, defaultCols); - - setName(_ss.getName()); - - ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); - gridUrl.addParameter("rowId", st.getRowId()); - setGridURL(new DetailsURL(gridUrl)); - } - - List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); - defaultCols.addAll(calculatedFieldKeys); - - addColumn(AliquotCount); - addColumn(AliquotVolume); - addColumn(AliquotUnit); - addColumn(AvailableAliquotCount); - addColumn(AvailableAliquotVolume); - - addColumn(StoredAmount); - defaultCols.add(StoredAmount.fieldKey()); - - addColumn(Units); - defaultCols.add(Units.fieldKey()); - - addColumn(RawAmount).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(StoredAmount.fieldKey()))); - addColumn(RawUnits).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(Units.fieldKey()))); - addColumn(RawAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotVolume.fieldKey()))); - addColumn(RawAvailableAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AvailableAliquotVolume.fieldKey()))); - addColumn(RawAliquotUnit).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotUnit.fieldKey()))); - - if (InventoryService.get() != null && (st == null || !st.isMedia())) - defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); - - SQLFragment sql; - UserSchema plateUserSchema; - // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset - // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying - // against tables in that schema. 
- if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) - plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); - else - plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); - - if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) - { - String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); - SQLFragment existsSubquery = new SQLFragment() - .append("SELECT 1 FROM ") - .append(plateUserSchema.getTable("Well"), "well") - .append(" WHERE well.sampleid = ").append(rowIdField); - - sql = new SQLFragment() - .append("CASE WHEN EXISTS (") - .append(existsSubquery) - .append(") THEN 'Plated' ") - .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 - .append("ELSE NULL END"); - } - else - { - sql = new SQLFragment("(SELECT NULL)"); - } - var col = new ExprColumn(this, IsPlated.name(), sql, JdbcType.VARCHAR); - col.setDescription("Whether the sample that has been plated, if plating is supported."); - col.setUserEditable(false); - col.setReadOnly(true); - col.setShownInDetailsView(false); - col.setShownInInsertView(false); - col.setShownInUpdateView(false); - if (plateUserSchema != null) - col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + RowId.name() + "}")); - addColumn(col); - - addVocabularyDomains(); - addColumn(LastIndexed); - addColumn(Properties); - - var colInputs = addColumn(Inputs); - addMethod(Inputs.name(), new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); - - var colOutputs = addColumn(Outputs); - addMethod(Outputs.name(), new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); - - addExpObjectMethod(); - - ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); - DetailsURL 
url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); - nameCol.setURL(url); - rowIdCol.setURL(url); - setDetailsURL(url); - - if (canUserAccessPhi()) - { - ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); - setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); - - ActionURL insertActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); - setInsertURL(new DetailsURL(insertActionURL)); - } - else - { - setImportURL(LINK_DISABLER); - setInsertURL(LINK_DISABLER); - setUpdateURL(LINK_DISABLER); - } - - setTitleColumn(Name.toString()); - - setDefaultVisibleColumns(defaultCols); - - MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn(RowId.name()), _ss, true); - addColumn(lineageLookup); - } - - private ContainerFilter getSampleStatusLookupContainerFilter() - { - // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared - // for the sample status lookup since in the app project context we want to share status definitions across - // a given project instead of creating duplicate statuses in each subfolder project. - ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); - type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; - return type.create(getUserSchema()); - } - - @Override - public Domain getDomain() - { - return getDomain(false); - } - - @Override - public Domain getDomain(boolean forUpdate) - { - return _ss == null ? 
null : _ss.getDomain(forUpdate); - } - - public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) - { - if (nameExpression == null) - return currentDescription; - - StringBuilder sb = new StringBuilder(); - if (currentDescription != null && !currentDescription.isEmpty()) - { - sb.append(currentDescription); - if (!currentDescription.endsWith(".")) - sb.append("."); - sb.append("\n"); - } - - sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); - sb.append(nameExpression); - sb.append("."); - if (!StringUtils.isEmpty(nameExpressionPreview)) - { - sb.append("\nExample of name that will be generated from the current pattern: \n"); - sb.append(nameExpressionPreview); - } - - return sb.toString(); - } - - private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) - { - TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); - if (null == dbTable) - return; - - UserSchema schema = getUserSchema(); - Domain domain = st.getDomain(); - ColumnInfo rowIdColumn = getColumn(RowId); - ColumnInfo nameColumn = getColumn(Name); - - visibleColumns.remove(Run.fieldKey()); - - // When not using name expressions, mark the ID columns as required. - // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. - final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); - - Set mvColumns = domain.getProperties().stream() - .filter(ImportAliasable::isMvEnabled) - .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) - .collect(Collectors.toSet()); - - for (ColumnInfo dbColumn : dbTable.getColumns()) - { - // Don't include PHI columns in full text search index - // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() - if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) - continue; - - if ( - rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || - nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) - ) - { - continue; - } - - var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); - - // TODO missing values? comments? flags? - DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); - var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); - if (propColumn.getName().equalsIgnoreCase("genid")) - { - propColumn.setHidden(true); - propColumn.setUserEditable(false); - propColumn.setShownInDetailsView(false); - propColumn.setShownInInsertView(false); - propColumn.setShownInUpdateView(false); - } - if (null != dp) - { - PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), - SamplesSchema.SCHEMA_SAMPLES, st.getName(), RowId.fieldKey(), null, getLookupContainerFilter()); - - if (idCols.contains(dp)) - { - propColumn.setNullable(false); - propColumn.setDisplayColumnFactory(c -> new DataColumn(c) - { - @Override - protected boolean isDisabledInput(RenderContext ctx) - { - return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; - } - }); - } - - // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected - if (!propColumn.isHidden()) - { - visibleColumns.add(propColumn.getFieldKey()); - } - - if (propColumn.isMvEnabled()) - { - // The column in the physical table has a "_MVIndicator" suffix, but we want to expose - // it with a "MVIndicator" suffix (no underscore) - var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, - StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); - 
mvColumn.setLabel(dp.getLabel() != null ? dp.getLabel() : dp.getName() + " MV Indicator"); - mvColumn.setSqlTypeName("VARCHAR"); - mvColumn.setPropertyURI(dp.getPropertyURI()); - mvColumn.setNullable(true); - mvColumn.setUserEditable(false); - mvColumn.setHidden(true); - mvColumn.setMvIndicatorColumn(true); - - addColumn(mvColumn); - propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); - } - } - - if (!mvColumns.contains(propColumn.getFieldKey())) - addColumn(propColumn); - - } - - setDefaultVisibleColumns(visibleColumns); - } - - // These are mostly fields that are wrapped by fields with different names (see createColumn()) - // we could handle each case separately, but this is easier - static final Set wrappedFieldKeys = Set.of( - new FieldKey(null, "objectid"), - new FieldKey(null, "RowId"), - new FieldKey(null, "LSID"), // Flag - new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication - new FieldKey(null, "runId"), // Run, RunApplication - new FieldKey(null, "CpasType")); // SampleSet - static final Set ALL_COLUMNS = Set.of(); - - private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) - { - if (null == selectedColumns) - return ALL_COLUMNS; - selectedColumns = new TreeSet<>(selectedColumns); - if (selectedColumns.contains(new FieldKey(null, StoredAmount))) - selectedColumns.add(new FieldKey(null, Units)); - if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) - selectedColumns.add(new FieldKey(null, AliquotedFromLSID.name())); - if (selectedColumns.contains(new FieldKey(null, IsAliquot.name()))) - selectedColumns.add(new FieldKey(null, RootMaterialRowId.name())); - if (selectedColumns.contains(new FieldKey(null, AliquotVolume.name())) || selectedColumns.contains(new FieldKey(null, AvailableAliquotVolume.name()))) - selectedColumns.add(new FieldKey(null, AliquotUnit.name())); - selectedColumns.addAll(wrappedFieldKeys); - if (null != getFilter()) - 
selectedColumns.addAll(getFilter().getAllFieldKeys()); - return selectedColumns; - } - - @NotNull - @Override - public SQLFragment getFromSQL(String alias) - { - return getFromSQL(alias, null); - } - - @Override - public SQLFragment getFromSQLExpanded(String alias, Set selectedColumns) - { - SQLFragment sql = new SQLFragment("("); - boolean usedMaterialized; - - - // SELECT FROM - /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) - * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. - * Also, we may use RemapCache for material lookup outside a transaction - */ - boolean onlyMaterialColums = false; - if (null != selectedColumns && !selectedColumns.isEmpty()) - onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); - if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) - { - sql.append(getMaterializedSQL()); - usedMaterialized = true; - } - else - { - sql.append(getJoinSQL(selectedColumns)); - usedMaterialized = false; - } - - // WHERE - SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); - sql.append("\n").append(filterFrag); - if (_ss != null && !usedMaterialized) - { - if (!filterFrag.isEmpty()) - sql.append(" AND "); - else - sql.append(" WHERE "); - sql.append("CpasType = ").appendValue(_ss.getLSID()); - } - sql.append(") ").appendIdentifier(alias); - - return getTransformedFromSQL(sql); - } - - @Override - public void setSupportTableRules(boolean b) - { - this._supportTableRules = b; - } - - @Override - public boolean supportTableRules() // intentional override - { - return _supportTableRules; - } - - @Override - protected @NotNull TableRules findTableRules() - { - Container definitionContainer = getUserSchema().getContainer(); - if (null != _ss) - definitionContainer = 
_ss.getContainer(); - return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); - } - - - static class InvalidationCounters - { - public final AtomicLong update, insert, delete, rollup; - InvalidationCounters() - { - long l = System.currentTimeMillis(); - update = new AtomicLong(l); - insert = new AtomicLong(l); - delete = new AtomicLong(l); - rollup = new AtomicLong(l); - } - } - - static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); - static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); - static final AtomicBoolean initializedListeners = new AtomicBoolean(false); - - // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() - public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - var scope = ExperimentServiceImpl.getExpSchema().getScope(); - var runnable = new RefreshMaterializedViewRunnable(lsid, reason); - scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); - } - - private static class RefreshMaterializedViewRunnable implements Runnable - { - private final String _lsid; - private final SampleTypeServiceImpl.SampleChangeType _reason; - - public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) - { - _lsid = lsid; - _reason = reason; - } - - @Override - public void run() - { - if (_reason == schema) - { - /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. - * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the - * MQH from the cache to force the SQL to be regenerated. 
- */ - _materializedQueries.remove(_lsid); - return; - } - var counters = getInvalidateCounters(_lsid); - switch (_reason) - { - case insert -> counters.insert.incrementAndGet(); - case rollup -> counters.rollup.incrementAndGet(); - case update -> counters.update.incrementAndGet(); - case delete -> counters.delete.incrementAndGet(); - default -> throw new IllegalStateException("Unexpected value: " + _reason); - } - } - - @Override - public boolean equals(Object obj) - { - return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); - } - } - - private static InvalidationCounters getInvalidateCounters(String lsid) - { - if (!initializedListeners.getAndSet(true)) - { - CacheManager.addListener(_invalidationCounters::clear); - } - return _invalidationCounters.computeIfAbsent(lsid, (unused) -> - new InvalidationCounters() - ); - } - - /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ - private SQLFragment getMaterializedSQL() - { - if (null == _ss) - return getJoinSQL(null); - - var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> - { - /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. - * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, - * even if just to help with race-conditions. - * - * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
- */ - SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); - return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") - .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") - .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") - .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") - .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) - .build(); - }); - return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); - } - - - /** - * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. - * It does not help with incremental updates (except for providing the upsert() method). - * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
- */ - static class _MaterializedQueryHelper extends MaterializedQueryHelper - { - final String _lsid; - - static class Builder extends MaterializedQueryHelper.Builder - { - String _lsid; - - public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) - { - super(prefix, scope, select); - this._lsid = lsid; - } - - @Override - public _MaterializedQueryHelper build() - { - return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); - } - } - - _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, - boolean isSelectIntoSql) - { - super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); - this._lsid = lsid; - } - - @Override - protected Materialized createMaterialized(String txCacheKey) - { - DbSchema temp = DbSchema.getTemp(); - String name = _prefix + "_" + GUID.makeHash(); - _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); - initMaterialized(materialized); - return materialized; - } - - @Override - protected void incrementalUpdateBeforeSelect(Materialized m) - { - _Materialized materialized = (_Materialized) m; - - boolean lockAcquired = false; - try - { - lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); - if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) - throw materialized._loadException; - - if (!materialized.incrementalDeleteCheck.stillValid(0)) - executeIncrementalDelete(); - if (!materialized.incrementalRollupCheck.stillValid(0)) - executeIncrementalRollup(); - if (!materialized.incrementalInsertCheck.stillValid(0)) - executeIncrementalInsert(); - } - catch (RuntimeException|InterruptedException ex) - { - RuntimeException rex = UnexpectedException.wrap(ex); - 
materialized.setError(rex); - // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. - - // Ensure that next refresh starts clean - _materializedQueries.remove(_lsid); - getInvalidateCounters(_lsid).update.incrementAndGet(); - throw rex; - } - finally - { - if (lockAcquired) - materialized.getLock().unlock(); - } - } - - void upsertWithRetry(SQLFragment sql) - { - // not actually read-only, but we don't want to start an explicit transaction - _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); - } - - void executeIncrementalInsert() - { - SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") - .append("SELECT * FROM (") - .append(getViewSourceSql()).append(") viewsource_\n") - .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); - upsertWithRetry(incremental); - } - - void executeIncrementalDelete() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) - // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) - SQLFragment incremental = new SQLFragment() - .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") - .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); - upsertWithRetry(incremental); - } - - void executeIncrementalRollup() - { - var d = CoreSchema.getInstance().getSchema().getSqlDialect(); - SQLFragment incremental = new SQLFragment(); - if (d.isPostgreSQL()) - { - incremental - .append("UPDATE temp.${NAME} AS st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM exp.Material AS expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") - .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") - .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") - .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") - .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") - .append(")"); - } - else - { - // SQL Server 2022 supports IS DISTINCT FROM - incremental - .append("UPDATE st\n") - .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") - .append("FROM temp.${NAME} st, exp.Material expm\n") - .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") - .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") - .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") - .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") - .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") - .append(")"); - } - upsertWithRetry(incremental); - } - } - - static class _Materialized extends MaterializedQueryHelper.Materialized - { - final MaterializedQueryHelper.Invalidator incrementalInsertCheck; - final MaterializedQueryHelper.Invalidator incrementalRollupCheck; - final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; - - _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) - { - super(mqh, tableName, cacheKey, created, sql); - final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); - incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); - incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); - incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); - } - - @Override - public void reset() - { - super.reset(); - long now = HeartBeat.currentTimeMillis(); - incrementalInsertCheck.stillValid(now); - incrementalRollupCheck.stillValid(now); - incrementalDeleteCheck.stillValid(now); - } - - Lock getLock() - { - return _loadingLock; - } - } - - - /* SELECT and JOIN, does not include WHERE */ - private SQLFragment getJoinSQL(Set selectedColumns) - { - TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); - Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); - provisionedCols.remove(RowId.name()); - provisionedCols.remove(Name.name()); - boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); - - boolean hasSampleColumns = false; - boolean hasAliquotColumns = false; - - Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); - selectedColumns = computeInnerSelectedColumns(selectedColumns); - - SQLFragment sql = new SQLFragment(); - sql.appendComment("", getSqlDialect()); - sql.append("SELECT "); - String comma = ""; - for (String materialCol : materialCols) - { - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) - { - sql.append(comma).append("m.").appendIdentifier(materialCol); - comma = ", "; - } - } - if (null != provisioned && hasProvisionedColumns) - { - for (ColumnInfo propertyColumn : provisioned.getColumns()) - { - // don't select twice - if ( - RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || - Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) - ) - { - continue; - } - - // don't need to generate SQL for columns that aren't selected - if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) - { - sql.append(comma); - boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) - || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); - if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - else if (rootField) - { - sql.append(propertyColumn.getValueSql("m_sample")).append(" AS 
").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasSampleColumns = true; - } - else - { - sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); - hasAliquotColumns = true; - } - comma = ", "; - } - } - } - - sql.append("\nFROM "); - sql.append(_rootTable, "m"); - if (hasSampleColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); - if (hasAliquotColumns) - sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); - - sql.appendComment("", getSqlDialect()); - return sql; - } - - private static class SampleTypeAmountDisplayColumn extends ExprColumn - { - public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) - { - super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" / ? AS ") - .append(parent.getSqlDialect().isPostgreSQL() ? "DECIMAL" : "DOUBLE PRECISION") - .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.getValue()), - JdbcType.DOUBLE); - - setLabel(label); - setImportAliasesSet(importAliases); - } - } - - private static class SampleTypeUnitDisplayColumn extends ExprColumn - { - public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) - { - super(parent, FieldKey.fromParts(unitFieldName), new SQLFragment( - "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" = ? THEN ? 
ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) - .append(" END)") - .add(typeUnit.getBase().toString()) - .add(typeUnit.toString()), - JdbcType.VARCHAR); - } - } - - @Override - public QueryUpdateService getUpdateService() - { - return new SampleTypeUpdateServiceDI(this, _ss); - } - - @Override - public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) - { - if (_ss == null) - { - // Allow read and delete for exp.Materials. - // Don't allow insert/update on exp.Materials without a sample type. - if (perm == DeletePermission.class || perm == ReadPermission.class) - return getContainer().hasPermission(user, perm); - return false; - } - - if (_ss.isMedia() && perm == ReadPermission.class) - return getContainer().hasPermission(user, MediaReadPermission.class); - - return super.hasPermission(user, perm); - } - - @NotNull - @Override - public List getUniqueIndices() - { - // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" - // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. 
- // Some FKs to ExpMaterialTable don't include the "Name" column (e.g., NabBaseTable.Specimen) - String indexName = "idx_material_ak"; - List ret = new ArrayList<>(super.getUniqueIndices()); - if (getColumn("Name") != null) - ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); - else - ret.removeIf(def -> def.name().equals(indexName)); - return Collections.unmodifiableList(ret); - } - - // - // UpdatableTableInfo - // - - @Override - public @Nullable Long getOwnerObjectId() - { - return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); - } - - @Nullable - @Override - public CaseInsensitiveHashMap remapSchemaColumns() - { - CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); - - if (null != getRealTable().getColumn("container") && null != getColumn("folder")) - { - m.put("container", "folder"); - } - - for (ColumnInfo col : getColumns()) - { - if (col.getMvColumnName() != null) - m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); - } - - return m; - } - - @Override - public Set getAltMergeKeys(DataIteratorContext context) - { - return MATERIAL_ALT_KEYS; - } - - @Override - public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) - { - Set keyColumnNames = new CaseInsensitiveHashSet(); - - if (context.getInsertOption().allowUpdate) - { - if (context.getInsertOption().updateOnly) - { - if (colNameMap.containsKey(RowId.name())) - keyColumnNames.add(RowId.name()); - else if (colNameMap.containsKey(Name.name())) - { - keyColumnNames.add(MaterialSourceId.name()); - keyColumnNames.add(Name.name()); - } - else - throw new IllegalArgumentException("Either RowId or Name is required for update."); - } - else - { - Set altMergeKeys = getAltMergeKeys(context); - if (altMergeKeys != null) - keyColumnNames.addAll(altMergeKeys); - } - } - - return keyColumnNames.isEmpty() ? 
null : keyColumnNames; - } - - @Override - public @Nullable Set getExistingRecordSharedKeyColumnNames() - { - return CaseInsensitiveHashSet.of(MaterialSourceId.name()); - } - - @Override - @NotNull - public List> getAdditionalRequiredInsertColumns() - { - if (getSampleType() == null) - return Collections.emptyList(); - - try - { - return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); - } - catch (IOException e) - { - return Collections.emptyList(); - } - } - - @Override - public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) - { - TableInfo propertiesTable = _ss.getTinfo(); - - // The specimens sample type doesn't have a properties table - if (propertiesTable == null) - return data; - - try - { - final Container container = getContainer(); - final User user = getUserSchema().getUser(); - ExperimentServiceImpl expService = ExperimentServiceImpl.get(); - SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); - - DataIteratorBuilder dib = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, container, user, _ss.getImportAliasesIncludingAliquot()) - .setFileLinkDirectory(SAMPLE_TYPE_FILE_DIRECTORY_NAME) - .setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> { - List lsids = searchIndexDataKeys.lsids(); - List orderedRowIds = searchIndexDataKeys.orderedRowIds(); - - // Issue 51263: order by RowId to reduce deadlock - ListUtils.partition(orderedRowIds, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : expService.getExpMaterials(sublist)) - expMaterial.index(q, this); - }) - ); - - ListUtils.partition(lsids, 100).forEach(sublist -> - queue.addRunnable((q) -> - { - for (ExpMaterialImpl expMaterial : expService.getExpMaterialsByLsid(sublist)) - expMaterial.index(q, this); - }) - ); - }, 
DbScope.CommitTaskOption.POSTCOMMIT) - ); - - dib = LoggingDataIterator.wrap(dib); - return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(dib, container, user, expService.getTinfoMaterialAliasMap(), _ss, true)); - } - catch (IOException e) - { - throw new UncheckedIOException(e); - } - } - - @Override - @NotNull - public AuditBehaviorType getDefaultAuditBehavior() - { - return AuditBehaviorType.DETAILED; - } - - static final Set excludeFromDetailedAuditField; - static - { - var set = new CaseInsensitiveHashSet(); - set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); - set.addAll(ExpDataIterators.MATERIAL_NOT_FOR_UPDATE); - // We don't want the inventory columns to show up in the sample timeline audit record; - // they are captured in their own audit record. - set.addAll(InventoryService.InventoryStatusColumn.names()); - excludeFromDetailedAuditField = Collections.unmodifiableSet(set); - } - - @Override - public @NotNull Set getExcludedDetailedUpdateAuditFields() - { - // uniqueId fields don't change in reality, so exclude them from the audit updates - Set excluded = new CaseInsensitiveHashSet(); - excluded.addAll(this.getUniqueIdFields()); - excluded.addAll(excludeFromDetailedAuditField); - return excluded; - } - - @Override - public List> getImportTemplates(ViewContext ctx) - { - // respect any metadata overrides - if (getRawImportTemplates() != null) - return super.getImportTemplates(ctx); - - List> templates = new ArrayList<>(); - ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); - url.addParameter("headerType", ColumnHeaderType.ImportField.name()); - try - { - if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) - { - for (String aliasKey : getSampleType().getImportAliases().keySet()) - url.addParameter("includeColumn", aliasKey); - } - } - catch (IOException e) - {} - templates.add(Pair.of("Download Template", url.toString())); - 
return templates; - } - - @Override - public void overlayMetadata(String tableName, UserSchema schema, Collection errors) - { - if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) - { - Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); - if (null != metadata) - { - overlayMetadata(metadata, schema, errors); - } - } - super.overlayMetadata(tableName, schema, errors); - } - - static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn - { - private final Unit typeUnit; - private final boolean applySampleTypePrecision; - - public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) - { - super(col, false); - this.typeUnit = typeUnit; - this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user - } - - @Override - public Object getDisplayValue(RenderContext ctx) - { - Object value = super.getDisplayValue(ctx); - if (this.applySampleTypePrecision && value != null) - { - int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); - value = Precision.round(Double.valueOf(value.toString()), scale); - } - return value; - } - } -} +/* + * Copyright (c) 2008-2019 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.labkey.experiment.api; + +import org.apache.commons.collections4.ListUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.math3.util.Precision; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.assay.plate.AssayPlateMetadataService; +import org.labkey.api.audit.AuditHandler; +import org.labkey.api.cache.BlockingCache; +import org.labkey.api.cache.CacheManager; +import org.labkey.api.collections.CaseInsensitiveHashMap; +import org.labkey.api.collections.CaseInsensitiveHashSet; +import org.labkey.api.compliance.TableRules; +import org.labkey.api.compliance.TableRulesManager; +import org.labkey.api.data.ColumnHeaderType; +import org.labkey.api.data.ColumnInfo; +import org.labkey.api.data.Container; +import org.labkey.api.data.ContainerFilter; +import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.DataColumn; +import org.labkey.api.data.DataRegion; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.ForeignKey; +import org.labkey.api.data.ImportAliasable; +import org.labkey.api.data.JdbcType; +import org.labkey.api.data.MaterializedQueryHelper; +import org.labkey.api.data.MutableColumnInfo; +import org.labkey.api.data.PHI; +import org.labkey.api.data.RenderContext; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.Sort; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.UnionContainerFilter; +import org.labkey.api.dataiterator.DataIteratorBuilder; +import org.labkey.api.dataiterator.DataIteratorContext; +import org.labkey.api.dataiterator.LoggingDataIterator; +import org.labkey.api.dataiterator.SimpleTranslator; +import org.labkey.api.exp.Lsid; +import org.labkey.api.exp.MvColumn; +import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.PropertyColumn; +import org.labkey.api.exp.api.ExpMaterial; +import 
org.labkey.api.exp.api.ExpProtocol; +import org.labkey.api.exp.api.ExpSampleType; +import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.exp.api.ExperimentUrls; +import org.labkey.api.exp.api.NameExpressionOptionService; +import org.labkey.api.exp.api.StorageProvisioner; +import org.labkey.api.exp.property.DefaultPropertyValidator; +import org.labkey.api.exp.property.Domain; +import org.labkey.api.exp.property.DomainProperty; +import org.labkey.api.exp.property.DomainUtil; +import org.labkey.api.exp.property.IPropertyValidator; +import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.exp.query.ExpDataTable; +import org.labkey.api.exp.query.ExpMaterialTable; +import org.labkey.api.exp.query.ExpSampleTypeTable; +import org.labkey.api.exp.query.ExpSchema; +import org.labkey.api.exp.query.SamplesSchema; +import org.labkey.api.gwt.client.AuditBehaviorType; +import org.labkey.api.gwt.client.model.PropertyValidatorType; +import org.labkey.api.inventory.InventoryService; +import org.labkey.api.ontology.Quantity; +import org.labkey.api.ontology.Unit; +import org.labkey.api.qc.SampleStatusService; +import org.labkey.api.query.AliasedColumn; +import org.labkey.api.query.DetailsURL; +import org.labkey.api.query.ExprColumn; +import org.labkey.api.query.FieldKey; +import org.labkey.api.query.LookupForeignKey; +import org.labkey.api.query.QueryException; +import org.labkey.api.query.QueryForeignKey; +import org.labkey.api.query.QueryService; +import org.labkey.api.query.QueryUpdateService; +import org.labkey.api.query.QueryUrls; +import org.labkey.api.query.RowIdForeignKey; +import org.labkey.api.query.UserSchema; +import org.labkey.api.query.column.BuiltInColumnTypes; +import org.labkey.api.search.SearchService; +import org.labkey.api.security.User; +import org.labkey.api.security.UserPrincipal; +import org.labkey.api.security.permissions.DeletePermission; +import org.labkey.api.security.permissions.InsertPermission; +import 
org.labkey.api.security.permissions.MediaReadPermission; +import org.labkey.api.security.permissions.MoveEntitiesPermission; +import org.labkey.api.security.permissions.Permission; +import org.labkey.api.security.permissions.ReadPermission; +import org.labkey.api.security.permissions.UpdatePermission; +import org.labkey.api.util.GUID; +import org.labkey.api.util.HeartBeat; +import org.labkey.api.util.PageFlowUtil; +import org.labkey.api.util.Pair; +import org.labkey.api.util.StringExpression; +import org.labkey.api.util.UnexpectedException; +import org.labkey.api.view.ActionURL; +import org.labkey.api.view.ViewContext; +import org.labkey.data.xml.TableType; +import org.labkey.experiment.ExpDataIterators; +import org.labkey.experiment.ExpDataIterators.AliasDataIteratorBuilder; +import org.labkey.experiment.controllers.exp.ExperimentController; +import org.labkey.experiment.lineage.LineageMethod; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Lock; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import static org.labkey.api.audit.AuditHandler.PROVIDED_DATA_PREFIX; +import static org.labkey.api.data.ColumnRenderPropertiesImpl.NON_NEGATIVE_NUMBER_CONCEPT_URI; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_COUNT_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_COUNT_LABEL; +import static 
org.labkey.api.exp.api.SampleTypeDomainKind.AVAILABLE_ALIQUOT_VOLUME_LABEL; +import static org.labkey.api.exp.api.SampleTypeDomainKind.SAMPLE_TYPE_FILE_DIRECTORY_NAME; +import static org.labkey.api.exp.query.ExpMaterialTable.Column.*; +import static org.labkey.api.util.StringExpressionFactory.AbstractStringExpression.NullValueBehavior.NullResult; +import static org.labkey.experiment.api.SampleTypeServiceImpl.SampleChangeType.schema; + +public class ExpMaterialTableImpl extends ExpRunItemTableImpl implements ExpMaterialTable +{ + ExpSampleTypeImpl _ss; + Set _uniqueIdFields; + boolean _supportTableRules = true; + + private static final Set MATERIAL_ALT_KEYS; + private static final List AMOUNT_RANGE_VALIDATORS = new ArrayList<>(); + static { + MATERIAL_ALT_KEYS = CaseInsensitiveHashSet.of(MaterialSourceId.name(), Name.name()); + + Lsid rangeValidatorLsid = DefaultPropertyValidator.createValidatorURI(PropertyValidatorType.Range); + IPropertyValidator amountValidator = PropertyService.get().createValidator(rangeValidatorLsid.toString()); + amountValidator.setName("SampleAmountNonNegative"); + amountValidator.setExpressionValue("~gte=0"); + amountValidator.setErrorMessage("Amounts must be non-negative."); + amountValidator.setColumnNameProvidedData(PROVIDED_DATA_PREFIX + Column.StoredAmount.name()); + AMOUNT_RANGE_VALIDATORS.add(amountValidator); + } + + public ExpMaterialTableImpl(UserSchema schema, ContainerFilter cf, @Nullable ExpSampleType sampleType) + { + super(ExpSchema.TableType.Materials.name(), ExperimentServiceImpl.get().getTinfoMaterial(), schema, cf); + setDetailsURL(new DetailsURL(new ActionURL(ExperimentController.ShowMaterialAction.class, schema.getContainer()), Collections.singletonMap("rowId", "rowId"), NullResult)); + setPublicSchemaName(ExpSchema.SCHEMA_NAME); + addAllowablePermission(InsertPermission.class); + addAllowablePermission(UpdatePermission.class); + addAllowablePermission(MoveEntitiesPermission.class); + 
setAllowedInsertOption(QueryUpdateService.InsertOption.MERGE); + setSampleType(sampleType); + } + + public Set getUniqueIdFields() + { + if (_uniqueIdFields == null) + { + _uniqueIdFields = new CaseInsensitiveHashSet(); + _uniqueIdFields.addAll(getColumns().stream().filter(ColumnInfo::isUniqueIdField).map(ColumnInfo::getName).collect(Collectors.toSet())); + } + return _uniqueIdFields; + } + + @Override + protected ColumnInfo resolveColumn(String name) + { + ColumnInfo result = super.resolveColumn(name); + if (result == null) + { + if ("CpasType".equalsIgnoreCase(name)) + result = createColumn(SampleSet.name(), SampleSet); + else if (Property.name().equalsIgnoreCase(name)) + result = createPropertyColumn(Property.name()); + else if (QueryableInputs.name().equalsIgnoreCase(name)) + result = createColumn(QueryableInputs.name(), QueryableInputs); + } + return result; + } + + @Override + public ColumnInfo getExpObjectColumn() + { + var ret = wrapColumn("ExpMaterialTableImpl_object_", _rootTable.getColumn("objectid")); + ret.setConceptURI(BuiltInColumnTypes.EXPOBJECTID_CONCEPT_URI); + return ret; + } + + @Override + public AuditHandler getAuditHandler(AuditBehaviorType auditBehaviorType) + { + if (getUserSchema().getName().equalsIgnoreCase(SamplesSchema.SCHEMA_NAME)) + { + // Special case sample auditing to help build a useful timeline view + return SampleTypeServiceImpl.get(); + } + + return super.getAuditHandler(auditBehaviorType); + } + + @Override + public MutableColumnInfo createColumn(String alias, Column column) + { + switch (column) + { + case Folder -> + { + return wrapColumn(alias, _rootTable.getColumn("Container")); + } + case LSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(LSID.name())); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + return columnInfo; + } + 
case MaterialSourceId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(MaterialSourceId.name())); + columnInfo.setFk(new LookupForeignKey(getLookupContainerFilter(), null, null, null, null, "RowId", "Name") + { + @Override + public TableInfo getLookupTableInfo() + { + ExpSampleTypeTable sampleTypeTable = ExperimentService.get().createSampleTypeTable(ExpSchema.TableType.SampleSets.toString(), _userSchema, getLookupContainerFilter()); + sampleTypeTable.populate(); + return sampleTypeTable; + } + + @Override + public StringExpression getURL(ColumnInfo parent) + { + return super.getURL(parent, true); + } + }); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case RootMaterialRowId -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(RootMaterialRowId.name())); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), RowId.name())); + columnInfo.setLabel("Root Material"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + + // NK: Here we mark the column as not required AND nullable which is the opposite of the database where + // a NOT NULL constraint is in place. This is done to avoid the RequiredValidator check upon updating a row. + // See ExpMaterialValidatorIterator. 
+ columnInfo.setRequired(false); + columnInfo.setNullable(true); + + return columnInfo; + } + case AliquotedFromLSID -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(AliquotedFromLSID.name())); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getMaterialForeignKey(getLookupContainerFilter(), LSID.name())); + columnInfo.setLabel("Aliquoted From Parent"); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInUpdateView(false); + return columnInfo; + } + case IsAliquot -> + { + String rootMaterialRowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RootMaterialRowId.name(); + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); + ExprColumn columnInfo = new ExprColumn(this, IsAliquot.fieldKey(), new SQLFragment( + "(CASE WHEN (" + rootMaterialRowIdField + " = " + rowIdField + ") THEN ").append(getSqlDialect().getBooleanFALSE()) + .append(" WHEN ").append(rowIdField).append(" IS NOT NULL THEN ").append(getSqlDialect().getBooleanTRUE()) // Issue 52745 + .append(" ELSE NULL END)"), JdbcType.BOOLEAN); + columnInfo.setLabel("Is Aliquot"); + columnInfo.setDescription("Identifies if the material is a sample or an aliquot"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(false); + return columnInfo; + } + case Name -> + { + var nameCol = wrapColumn(alias, _rootTable.getColumn(column.toString())); + // shut off this field in insert and update views if user specified names are not allowed + if (!NameExpressionOptionService.get().getAllowUserSpecificNamesValue(getContainer())) + { + nameCol.setShownInInsertView(false); + nameCol.setShownInUpdateView(false); + } + return nameCol; + } + case RawAmount -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> 
new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setDescription("The amount of this sample, in the base unit for the sample type's display unit (if defined), currently on hand."); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + case StoredAmount -> + { + String label = StoredAmount.label(); + Set importAliases = Set.of(label, "Stored Amount"); + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.StoredAmount.name(), Column.Units.name(), label, importAliases, typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The amount of this sample, in the display unit for the sample type, currently on hand."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(StoredAmount.name())); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, null)); + columnInfo.setLabel(label); + columnInfo.setImportAliasesSet(importAliases); + columnInfo.setDescription("The amount of this sample currently on hand."); + columnInfo.setConceptURI(NON_NEGATIVE_NUMBER_CONCEPT_URI); + columnInfo.setValidators(AMOUNT_RANGE_VALIDATORS); + return columnInfo; + } + } + case RawUnits -> + { + var columnInfo = 
wrapColumn(alias, _rootTable.getColumn(Units.name())); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + columnInfo.setHidden(true); + columnInfo.setReadOnly(true); + columnInfo.setShownInDetailsView(false); + columnInfo.setShownInInsertView(false); + columnInfo.setShownInUpdateView(false); + columnInfo.setUserEditable(false); + return columnInfo; + } + case Units -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, Units.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the Amount for this sample."); + columnInfo.setShownInUpdateView(true); + columnInfo.setShownInInsertView(true); + columnInfo.setUserEditable(true); + columnInfo.setCalculated(false); + return columnInfo; + } + else + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn(Units.name())); + columnInfo.setFk(fk); + columnInfo.setDescription("The units associated with the Stored Amount for this sample."); + return columnInfo; + } + } + case Description -> + { + return wrapColumn(alias, _rootTable.getColumn(Description.name())); + } + case SampleSet -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("CpasType")); + columnInfo.setFk(new QueryForeignKey(_userSchema, getContainerFilter(), ExpSchema.SCHEMA_NAME, getContainer(), null, ExpSchema.TableType.SampleSets.name(), "lsid", null) + { + @Override + protected ContainerFilter getLookupContainerFilter() + { + // Be sure that we can resolve the sample type if it's defined in a separate container. + // Same as CurrentPlusProjectAndShared but includes SampleSet's container as well. 
+ // Issue 37982: Sample Type: Link to precursor sample type does not resolve correctly if sample has + // parents in current sample type and a sample type in the parent container + Set containers = new HashSet<>(); + if (null != getSampleType()) + containers.add(getSampleType().getContainer()); + containers.add(getContainer()); + if (getContainer().getProject() != null) + containers.add(getContainer().getProject()); + containers.add(ContainerManager.getSharedContainer()); + ContainerFilter cf = new ContainerFilter.CurrentPlusExtras(_userSchema.getContainer(), _userSchema.getUser(), containers); + + if (null != _containerFilter && _containerFilter.getType() != ContainerFilter.Type.Current) + cf = new UnionContainerFilter(_containerFilter, cf); + return cf; + } + }); + columnInfo.setReadOnly(true); + columnInfo.setUserEditable(false); + columnInfo.setShownInInsertView(false); + return columnInfo; + } + case SourceProtocolLSID -> + { + // NOTE: This column is incorrectly named "Protocol", but we are keeping it for backwards compatibility to avoid breaking queries in hvtnFlow module + ExprColumn columnInfo = new ExprColumn(this, ExpDataTable.Column.Protocol.toString(), new SQLFragment( + "(SELECT ProtocolLSID FROM " + ExperimentServiceImpl.get().getTinfoProtocolApplication() + " pa " + + " WHERE pa.RowId = " + ExprColumn.STR_TABLE_ALIAS + ".SourceApplicationId)"), JdbcType.VARCHAR); + columnInfo.setSqlTypeName("lsidtype"); + columnInfo.setFk(getExpSchema().getProtocolForeignKey(getContainerFilter(), "LSID")); + columnInfo.setLabel("Source Protocol"); + columnInfo.setDescription("Contains a reference to the protocol for the protocol application that created this sample"); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + return columnInfo; + } + case SourceProtocolApplication -> + { + var columnInfo = wrapColumn(alias, _rootTable.getColumn("SourceApplicationId")); + 
columnInfo.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + columnInfo.setUserEditable(false); + columnInfo.setReadOnly(true); + columnInfo.setHidden(true); + columnInfo.setAutoIncrement(false); + return columnInfo; + } + case SourceApplicationInput -> + { + var col = createEdgeColumn(alias, SourceProtocolApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's SourceProtocolApplication"); + col.setHidden(true); + return col; + } + case RunApplication -> + { + SQLFragment sql = new SQLFragment("(SELECT pa.rowId FROM ") + .append(ExperimentService.get().getTinfoProtocolApplication(), "pa") + .append(" WHERE pa.runId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".runId") + .append(" AND pa.cpasType = ").appendValue(ExpProtocol.ApplicationType.ExperimentRunOutput) + .append(")"); + + var col = new ExprColumn(this, alias, sql, JdbcType.INTEGER); + col.setFk(getExpSchema().getProtocolApplicationForeignKey(getContainerFilter())); + col.setDescription("Contains a reference to the ExperimentRunOutput protocol application of the run that created this sample"); + col.setUserEditable(false); + col.setReadOnly(true); + col.setHidden(true); + return col; + } + case RunApplicationOutput -> + { + var col = createEdgeColumn(alias, RunApplication, ExpSchema.TableType.MaterialInputs); + col.setDescription("Contains a reference to the MaterialInput row between this ExpMaterial and it's RunOutputApplication"); + return col; + } + case Run -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RunId")); + ret.setFk(getExpSchema().getRunIdForeignKey(getContainerFilter())); + ret.setReadOnly(true); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case RowId -> + { + var ret = wrapColumn(alias, _rootTable.getColumn("RowId")); + // When no sorts are added by views, QueryServiceImpl.createDefaultSort() adds the primary 
key's default sort direction + ret.setSortDirection(Sort.SortDirection.DESC); + ret.setFk(new RowIdForeignKey(ret)); + ret.setUserEditable(false); + ret.setHidden(true); + ret.setShownInInsertView(false); + ret.setHasDbSequence(true); + ret.setIsRootDbSequence(true); + return ret; + } + case Property -> + { + return createPropertyColumn(alias); + } + case Flag -> + { + return createFlagColumn(alias); + } + case Created -> + { + return wrapColumn(alias, _rootTable.getColumn("Created")); + } + case CreatedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("CreatedBy")); + } + case Modified -> + { + return wrapColumn(alias, _rootTable.getColumn("Modified")); + } + case ModifiedBy -> + { + return createUserColumn(alias, _rootTable.getColumn("ModifiedBy")); + } + case Alias -> + { + return createAliasColumn(alias, ExperimentService.get()::getTinfoMaterialAliasMap); + } + case Inputs -> + { + return createLineageColumn(this, alias, true, false); + } + case QueryableInputs -> + { + return createLineageColumn(this, alias, true, true); + } + case Outputs -> + { + return createLineageColumn(this, alias, false, false); + } + case Properties -> + { + return createPropertiesColumn(alias); + } + case SampleState -> + { + boolean statusEnabled = isStatusEnabled(getContainer()); + var ret = wrapColumn(alias, _rootTable.getColumn(column.name())); + ret.setLabel("Status"); + ret.setHidden(!statusEnabled); + ret.setShownInDetailsView(statusEnabled); + ret.setShownInInsertView(statusEnabled); + ret.setShownInUpdateView(statusEnabled); + ret.setRemapMissingBehavior(SimpleTranslator.RemapMissingBehavior.Error); + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getSampleStatusLookupContainerFilter()) + .schema(getExpSchema()).table(ExpSchema.TableType.SampleStatus).display("Label")); + return ret; + } + case AliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotCount.name())); + ret.setLabel(ALIQUOT_COUNT_LABEL); + return ret; + } + case 
RawAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(RawAliquotVolume.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AliquotVolume -> + { + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AliquotVolume.name(), AliquotUnit.name(), ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The total amount of this sample's aliquots, in the display unit for the sample type, currently on hand."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotVolume.name())); + ret.setLabel(ALIQUOT_VOLUME_LABEL); + return ret; + } + } + case RawAvailableAliquotVolume -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(RawAvailableAliquotVolume.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AvailableAliquotVolume -> + { + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeAmountDisplayColumn columnInfo = new SampleTypeAmountDisplayColumn(this, Column.AvailableAliquotVolume.name(), AliquotUnit.name(), AVAILABLE_ALIQUOT_VOLUME_LABEL, Collections.emptySet(), typeUnit); + columnInfo.setDisplayColumnFactory(colInfo -> new SampleTypeAmountPrecisionDisplayColumn(colInfo, typeUnit)); + columnInfo.setDescription("The total amount of this sample's available aliquots currently on hand, in the display unit for the sample type."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, 
_rootTable.getColumn(AvailableAliquotVolume.name())); + ret.setLabel(AVAILABLE_ALIQUOT_VOLUME_LABEL); + return ret; + } + } + case AvailableAliquotCount -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AvailableAliquotCount.name())); + ret.setLabel(AVAILABLE_ALIQUOT_COUNT_LABEL); + return ret; + } + case RawAliquotUnit -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); + ret.setLabel(RawAliquotUnit.label()); + ret.setHidden(true); + ret.setShownInDetailsView(false); + ret.setShownInInsertView(false); + ret.setShownInUpdateView(false); + return ret; + } + case AliquotUnit -> + { + ForeignKey fk = new LookupForeignKey("Value", "Value") + { + @Override + public @Nullable TableInfo getLookupTableInfo() + { + return getExpSchema().getTable(ExpSchema.MEASUREMENT_UNITS_TABLE); + } + }; + + Unit typeUnit = getSampleTypeUnit(); + if (typeUnit != null) + { + SampleTypeUnitDisplayColumn columnInfo = new SampleTypeUnitDisplayColumn(this, AliquotUnit.name(), typeUnit); + columnInfo.setFk(fk); + columnInfo.setDescription("The sample type display units associated with the AliquotAmount for this sample."); + return columnInfo; + } + else + { + var ret = wrapColumn(alias, _rootTable.getColumn(AliquotUnit.name())); + ret.setShownInDetailsView(false); + return ret; + } + } + case MaterialExpDate -> + { + var ret = wrapColumn(alias, _rootTable.getColumn(MaterialExpDate.name())); + ret.setLabel("Expiration Date"); + ret.setShownInDetailsView(true); + ret.setShownInInsertView(true); + ret.setShownInUpdateView(true); + return ret; + } + case LastIndexed -> + { + var sql = new SQLFragment("(SELECT LastIndexed FROM ") + .append(ExperimentServiceImpl.get().getTinfoMaterialIndexed(), "mi") + .append(" WHERE mi.MaterialId = ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId)"); + var ret = new ExprColumn(this, LastIndexed.name(), sql, JdbcType.TIMESTAMP); + ret.setDescription("Date when the material was last full-text search indexed in the system"); + 
ret.setHidden(true); + ret.setReadOnly(true); + ret.setUserEditable(false); + return ret; + } + default -> throw new IllegalArgumentException("Unknown column " + column); + } + } + + @Override + public MutableColumnInfo createPropertyColumn(String alias) + { + var ret = wrapColumn(alias, _rootTable.getColumn(RowId.name())); + + if (_ss != null && _ss.getTinfo() != null) + ret.setFk(new QueryForeignKey.Builder(getUserSchema(), getLookupContainerFilter()).table(_ss.getTinfo()).key(RowId.name()).build()); + + ret.setIsUnselectable(true); + ret.setDescription("A holder for any custom fields associated with this sample"); + ret.setHidden(true); + return ret; + } + + private static boolean isStatusEnabled(Container c) + { + return SampleStatusService.get().supportsSampleStatus() && !SampleStatusService.get().getAllProjectStates(c).isEmpty(); + } + + private Unit getSampleTypeUnit() + { + Unit typeUnit = null; + if (_ss != null && _ss.getMetricUnit() != null) + typeUnit = Unit.fromName(_ss.getMetricUnit()); + return typeUnit; + } + + private void setSampleType(@Nullable ExpSampleType st) + { + checkLocked(); + if (_ss != null) + { + throw new IllegalStateException("Cannot unset sample type"); + } + if (st != null && !(st instanceof ExpSampleTypeImpl)) + { + throw new IllegalArgumentException("Expected sample type to be an instance of " + ExpSampleTypeImpl.class.getName() + " but was a " + st.getClass().getName()); + } + _ss = (ExpSampleTypeImpl) st; + if (_ss != null) + { + setPublicSchemaName(SamplesSchema.SCHEMA_NAME); + setName(st.getName()); + + String description = _ss.getDescription(); + if (StringUtils.isEmpty(description)) + description = "Contains one row per sample in the " + _ss.getName() + " sample type"; + setDescription(description); + + if (canUserAccessPhi()) + { + ActionURL url = PageFlowUtil.urlProvider(ExperimentUrls.class).getImportSamplesURL(getContainer(), _ss.getName()); + setImportURL(new DetailsURL(url)); + } + } + } + + public ExpSampleType 
getSampleType() + { + return _ss; + } + + @Override + protected void populateColumns() + { + var st = getSampleType(); + var rowIdCol = addColumn(RowId); + addColumn(MaterialSourceId); + addColumn(SourceProtocolApplication); + addColumn(SourceApplicationInput); + addColumn(RunApplication); + addColumn(RunApplicationOutput); + addColumn(SourceProtocolLSID); + + List defaultCols = new ArrayList<>(); + var nameCol = addColumn(Name); + defaultCols.add(Name.fieldKey()); + if (st != null && st.hasNameAsIdCol()) + { + // Show the Name field but don't mark is as required when using name expressions + if (st.hasNameExpression()) + { + var nameExpression = st.getNameExpression(); + nameCol.setNameExpression(nameExpression); + nameCol.setNullable(true); + String nameExpressionPreview = getExpNameExpressionPreview(getUserSchema().getSchemaName(), st.getName(), getUserSchema().getUser()); + String desc = appendNameExpressionDescription(nameCol.getDescription(), nameExpression, nameExpressionPreview); + nameCol.setDescription(desc); + } + else + { + nameCol.setNullable(false); + } + } + else + { + nameCol.setReadOnly(true); + nameCol.setShownInInsertView(false); + } + + addColumn(Alias); + addColumn(Description); + addColumn(SampleSet); + addColumn(MaterialExpDate); + defaultCols.add(MaterialExpDate.fieldKey()); + addContainerColumn(Folder, null); + if (getContainer().hasProductFolders()) + defaultCols.add(Folder.fieldKey()); + addColumn(Run); + defaultCols.add(Run.fieldKey()); + addColumn(LSID); + addColumn(RootMaterialRowId); + addColumn(AliquotedFromLSID); + addColumn(IsAliquot); + addColumn(Created); + addColumn(CreatedBy); + addColumn(Modified); + addColumn(ModifiedBy); + if (st == null) + defaultCols.add(SampleSet.fieldKey()); + addColumn(Flag); + addColumn(SampleState); + if (isStatusEnabled(getContainer())) + defaultCols.add(SampleState.fieldKey()); + + // TODO is this a real Domain??? 
+ if (st != null && !"urn:lsid:labkey.com:SampleSource:Default".equals(st.getDomain().getTypeURI())) + { + defaultCols.add(Flag.fieldKey()); + addSampleTypeColumns(st, defaultCols); + + setName(_ss.getName()); + + ActionURL gridUrl = new ActionURL(ExperimentController.ShowSampleTypeAction.class, getContainer()); + gridUrl.addParameter("rowId", st.getRowId()); + setGridURL(new DetailsURL(gridUrl)); + } + + List calculatedFieldKeys = DomainUtil.getCalculatedFieldsForDefaultView(this); + defaultCols.addAll(calculatedFieldKeys); + + addColumn(AliquotCount); + addColumn(AliquotVolume); + addColumn(AliquotUnit); + addColumn(AvailableAliquotCount); + addColumn(AvailableAliquotVolume); + + addColumn(StoredAmount); + defaultCols.add(StoredAmount.fieldKey()); + + addColumn(Units); + defaultCols.add(Units.fieldKey()); + + addColumn(RawAmount).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(StoredAmount.fieldKey()))); + addColumn(RawUnits).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(Units.fieldKey()))); + addColumn(RawAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotVolume.fieldKey()))); + addColumn(RawAvailableAliquotVolume).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AvailableAliquotVolume.fieldKey()))); + addColumn(RawAliquotUnit).getRenderer().addQueryFieldKeys(new HashSet<>(Set.of(AliquotUnit.fieldKey()))); + + if (InventoryService.get() != null && (st == null || !st.isMedia())) + defaultCols.addAll(InventoryService.get().addInventoryStatusColumns(st == null ? null : st.getMetricUnit(), this, getContainer(), _userSchema.getUser())); + + SQLFragment sql; + UserSchema plateUserSchema; + // Issue 53194 : this would be the case for linked to study samples. The contextual role is set up from the study dataset + // for the source sample, we want to allow the plate schema to inherit any contextual roles to allow querying + // against tables in that schema. 
+ if (_userSchema instanceof UserSchema.HasContextualRoles samplesSchema && !samplesSchema.getContextualRoles().isEmpty()) + plateUserSchema = AssayPlateMetadataService.get().getPlateSchema(_userSchema, samplesSchema.getContextualRoles()); + else + plateUserSchema = QueryService.get().getUserSchema(_userSchema.getUser(), _userSchema.getContainer(), "plate"); + + if (plateUserSchema != null && plateUserSchema.getTable("Well") != null) + { + String rowIdField = ExprColumn.STR_TABLE_ALIAS + "." + RowId.name(); + SQLFragment existsSubquery = new SQLFragment() + .append("SELECT 1 FROM ") + .append(plateUserSchema.getTable("Well"), "well") + .append(" WHERE well.sampleid = ").append(rowIdField); + + sql = new SQLFragment() + .append("CASE WHEN EXISTS (") + .append(existsSubquery) + .append(") THEN 'Plated' ") + .append("WHEN ").append(ExprColumn.STR_TABLE_ALIAS).append(".RowId").append(" IS NOT NULL THEN 'Not Plated' ")// Issue 52745 + .append("ELSE NULL END"); + } + else + { + sql = new SQLFragment("(SELECT NULL)"); + } + var col = new ExprColumn(this, IsPlated.name(), sql, JdbcType.VARCHAR); + col.setDescription("Whether the sample that has been plated, if plating is supported."); + col.setUserEditable(false); + col.setReadOnly(true); + col.setShownInDetailsView(false); + col.setShownInInsertView(false); + col.setShownInUpdateView(false); + if (plateUserSchema != null) + col.setURL(DetailsURL.fromString("plate-isPlated.api?sampleId=${" + RowId.name() + "}")); + addColumn(col); + + addVocabularyDomains(); + addColumn(LastIndexed); + addColumn(Properties); + + var colInputs = addColumn(Inputs); + addMethod(Inputs.name(), new LineageMethod(colInputs, true), Set.of(colInputs.getFieldKey())); + + var colOutputs = addColumn(Outputs); + addMethod(Outputs.name(), new LineageMethod(colOutputs, false), Set.of(colOutputs.getFieldKey())); + + addExpObjectMethod(); + + ActionURL detailsUrl = new ActionURL(ExperimentController.ShowMaterialAction.class, getContainer()); + DetailsURL 
url = new DetailsURL(detailsUrl, Collections.singletonMap("rowId", "RowId"), NullResult); + nameCol.setURL(url); + rowIdCol.setURL(url); + setDetailsURL(url); + + if (canUserAccessPhi()) + { + ActionURL updateActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getUpdateMaterialQueryRowAction(getContainer(), this); + setUpdateURL(new DetailsURL(updateActionURL, Collections.singletonMap("RowId", "RowId"))); + + ActionURL insertActionURL = PageFlowUtil.urlProvider(ExperimentUrls.class).getInsertMaterialQueryRowAction(getContainer(), this); + setInsertURL(new DetailsURL(insertActionURL)); + } + else + { + setImportURL(LINK_DISABLER); + setInsertURL(LINK_DISABLER); + setUpdateURL(LINK_DISABLER); + } + + setTitleColumn(Name.toString()); + + setDefaultVisibleColumns(defaultCols); + + MutableColumnInfo lineageLookup = ClosureQueryHelper.createAncestorLookupColumnInfo("Ancestors", this, _rootTable.getColumn(RowId.name()), _ss, true); + addColumn(lineageLookup); + } + + private ContainerFilter getSampleStatusLookupContainerFilter() + { + // The default lookup container filter is Current, but we want to have the default be CurrentPlusProjectAndShared + // for the sample status lookup since in the app project context we want to share status definitions across + // a given project instead of creating duplicate statuses in each subfolder project. + ContainerFilter.Type type = QueryService.get().getContainerFilterTypeForLookups(getContainer()); + type = type == null ? ContainerFilter.Type.CurrentPlusProjectAndShared : type; + return type.create(getUserSchema()); + } + + @Override + public Domain getDomain() + { + return getDomain(false); + } + + @Override + public Domain getDomain(boolean forUpdate) + { + return _ss == null ? 
null : _ss.getDomain(forUpdate); + } + + public static String appendNameExpressionDescription(String currentDescription, String nameExpression, String nameExpressionPreview) + { + if (nameExpression == null) + return currentDescription; + + StringBuilder sb = new StringBuilder(); + if (currentDescription != null && !currentDescription.isEmpty()) + { + sb.append(currentDescription); + if (!currentDescription.endsWith(".")) + sb.append("."); + sb.append("\n"); + } + + sb.append("\nIf not provided, a unique name will be generated from the expression:\n"); + sb.append(nameExpression); + sb.append("."); + if (!StringUtils.isEmpty(nameExpressionPreview)) + { + sb.append("\nExample of name that will be generated from the current pattern: \n"); + sb.append(nameExpressionPreview); + } + + return sb.toString(); + } + + private void addSampleTypeColumns(ExpSampleType st, List visibleColumns) + { + TableInfo dbTable = ((ExpSampleTypeImpl)st).getTinfo(); + if (null == dbTable) + return; + + UserSchema schema = getUserSchema(); + Domain domain = st.getDomain(); + ColumnInfo rowIdColumn = getColumn(RowId); + ColumnInfo nameColumn = getColumn(Name); + + visibleColumns.remove(Run.fieldKey()); + + // When not using name expressions, mark the ID columns as required. + // NOTE: If not explicitly set, the first domain property will be chosen as the ID column. + final List idCols = st.hasNameExpression() ? Collections.emptyList() : st.getIdCols(); + + Set mvColumns = domain.getProperties().stream() + .filter(ImportAliasable::isMvEnabled) + .map(dp -> FieldKey.fromParts(dp.getPropertyDescriptor().getMvIndicatorStorageColumnName())) + .collect(Collectors.toSet()); + + for (ColumnInfo dbColumn : dbTable.getColumns()) + { + // Don't include PHI columns in full text search index + // CONSIDER: Can we move this to a base class? 
Maybe in .addColumn() + if (schema.getUser().isSearchUser() && !dbColumn.getPHI().isLevelAllowed(PHI.NotPHI)) + continue; + + if ( + rowIdColumn.getFieldKey().equals(dbColumn.getFieldKey()) || + nameColumn.getFieldKey().equals(dbColumn.getFieldKey()) + ) + { + continue; + } + + var wrapped = wrapColumnFromJoinedTable(dbColumn.getName(), dbColumn); + + // TODO missing values? comments? flags? + DomainProperty dp = domain.getPropertyByURI(dbColumn.getPropertyURI()); + var propColumn = copyColumnFromJoinedTable(null==dp ? dbColumn.getName() : dp.getName(), wrapped); + if (propColumn.getName().equalsIgnoreCase("genid")) + { + propColumn.setHidden(true); + propColumn.setUserEditable(false); + propColumn.setShownInDetailsView(false); + propColumn.setShownInInsertView(false); + propColumn.setShownInUpdateView(false); + } + if (null != dp) + { + PropertyColumn.copyAttributes(schema.getUser(), propColumn, dp.getPropertyDescriptor(), schema.getContainer(), + SamplesSchema.SCHEMA_SAMPLES, st.getName(), RowId.fieldKey(), null, getLookupContainerFilter()); + + if (idCols.contains(dp)) + { + propColumn.setNullable(false); + propColumn.setDisplayColumnFactory(c -> new DataColumn(c) + { + @Override + protected boolean isDisabledInput(RenderContext ctx) + { + return !super.isDisabledInput() && ctx.getMode() != DataRegion.MODE_INSERT; + } + }); + } + + // Issue 38341: domain designer advanced settings 'show in default view' setting is not respected + if (!propColumn.isHidden()) + { + visibleColumns.add(propColumn.getFieldKey()); + } + + if (propColumn.isMvEnabled()) + { + // The column in the physical table has a "_MVIndicator" suffix, but we want to expose + // it with a "MVIndicator" suffix (no underscore) + var mvColumn = new AliasedColumn(this, dp.getName() + MvColumn.MV_INDICATOR_SUFFIX, + StorageProvisioner.get().getMvIndicatorColumn(dbTable, dp.getPropertyDescriptor(), "No MV column found for '" + dp.getName() + "' in sample type '" + getName() + "'")); + 
mvColumn.setLabel(dp.getLabel() != null ? dp.getLabel() : dp.getName() + " MV Indicator"); + mvColumn.setSqlTypeName("VARCHAR"); + mvColumn.setPropertyURI(dp.getPropertyURI()); + mvColumn.setNullable(true); + mvColumn.setUserEditable(false); + mvColumn.setHidden(true); + mvColumn.setMvIndicatorColumn(true); + + addColumn(mvColumn); + propColumn.setMvColumnName(FieldKey.fromParts(dp.getName() + MvColumn.MV_INDICATOR_SUFFIX)); + } + } + + if (!mvColumns.contains(propColumn.getFieldKey())) + addColumn(propColumn); + + } + + setDefaultVisibleColumns(visibleColumns); + } + + // These are mostly fields that are wrapped by fields with different names (see createColumn()) + // we could handle each case separately, but this is easier + static final Set wrappedFieldKeys = Set.of( + new FieldKey(null, "objectid"), + new FieldKey(null, "RowId"), + new FieldKey(null, "LSID"), // Flag + new FieldKey(null, "SourceApplicationId"), // SourceProtocolApplication + new FieldKey(null, "runId"), // Run, RunApplication + new FieldKey(null, "CpasType")); // SampleSet + static final Set ALL_COLUMNS = Set.of(); + + private @NotNull Set computeInnerSelectedColumns(Set selectedColumns) + { + if (null == selectedColumns) + return ALL_COLUMNS; + selectedColumns = new TreeSet<>(selectedColumns); + if (selectedColumns.contains(new FieldKey(null, StoredAmount))) + selectedColumns.add(new FieldKey(null, Units)); + if (selectedColumns.contains(new FieldKey(null, ExpMaterial.ALIQUOTED_FROM_INPUT))) + selectedColumns.add(new FieldKey(null, AliquotedFromLSID.name())); + if (selectedColumns.contains(new FieldKey(null, IsAliquot.name()))) + selectedColumns.add(new FieldKey(null, RootMaterialRowId.name())); + if (selectedColumns.contains(new FieldKey(null, AliquotVolume.name())) || selectedColumns.contains(new FieldKey(null, AvailableAliquotVolume.name()))) + selectedColumns.add(new FieldKey(null, AliquotUnit.name())); + selectedColumns.addAll(wrappedFieldKeys); + if (null != getFilter()) + 
selectedColumns.addAll(getFilter().getAllFieldKeys()); + return selectedColumns; + } + + @NotNull + @Override + public SQLFragment getFromSQL(String alias) + { + return getFromSQL(alias, null); + } + + @Override + public SQLFragment getFromSQLExpanded(String alias, Set selectedColumns) + { + SQLFragment sql = new SQLFragment("("); + boolean usedMaterialized; + + + // SELECT FROM + /* NOTE We want to avoid caching in paths where the table is actively being updated (e.g. loadRows) + * Unfortunately, we don't _really_ know when this is, but if we in a transaction that's a good guess. + * Also, we may use RemapCache for material lookup outside a transaction + */ + boolean onlyMaterialColums = false; + if (null != selectedColumns && !selectedColumns.isEmpty()) + onlyMaterialColums = selectedColumns.stream().allMatch(fk -> fk.getName().equalsIgnoreCase("Folder") || null != _rootTable.getColumn(fk)); + if (!onlyMaterialColums && null != _ss && null != _ss.getTinfo() && !getExpSchema().getDbSchema().getScope().isTransactionActive()) + { + sql.append(getMaterializedSQL()); + usedMaterialized = true; + } + else + { + sql.append(getJoinSQL(selectedColumns)); + usedMaterialized = false; + } + + // WHERE + SQLFragment filterFrag = getFilter().getSQLFragment(_rootTable, null); + sql.append("\n").append(filterFrag); + if (_ss != null && !usedMaterialized) + { + if (!filterFrag.isEmpty()) + sql.append(" AND "); + else + sql.append(" WHERE "); + sql.append("CpasType = ").appendValue(_ss.getLSID()); + } + sql.append(") ").appendIdentifier(alias); + + return getTransformedFromSQL(sql); + } + + @Override + public void setSupportTableRules(boolean b) + { + this._supportTableRules = b; + } + + @Override + public boolean supportTableRules() // intentional override + { + return _supportTableRules; + } + + @Override + protected @NotNull TableRules findTableRules() + { + Container definitionContainer = getUserSchema().getContainer(); + if (null != _ss) + definitionContainer = 
_ss.getContainer(); + return TableRulesManager.get().getTableRules(definitionContainer, getUserSchema().getUser(), getUserSchema().getContainer()); + } + + + static class InvalidationCounters + { + public final AtomicLong update, insert, delete, rollup; + InvalidationCounters() + { + long l = System.currentTimeMillis(); + update = new AtomicLong(l); + insert = new AtomicLong(l); + delete = new AtomicLong(l); + rollup = new AtomicLong(l); + } + } + + static final BlockingCache _materializedQueries = CacheManager.getBlockingStringKeyCache(CacheManager.UNLIMITED, CacheManager.HOUR, "materialized sample types", null); + static final Map _invalidationCounters = Collections.synchronizedMap(new HashMap<>()); + static final AtomicBoolean initializedListeners = new AtomicBoolean(false); + + // used by SampleTypeServiceImpl.refreshSampleTypeMaterializedView() + public static void refreshMaterializedView(final String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + var scope = ExperimentServiceImpl.getExpSchema().getScope(); + var runnable = new RefreshMaterializedViewRunnable(lsid, reason); + scope.addCommitTask(runnable, DbScope.CommitTaskOption.POSTCOMMIT); + } + + private static class RefreshMaterializedViewRunnable implements Runnable + { + private final String _lsid; + private final SampleTypeServiceImpl.SampleChangeType _reason; + + public RefreshMaterializedViewRunnable(String lsid, SampleTypeServiceImpl.SampleChangeType reason) + { + _lsid = lsid; + _reason = reason; + } + + @Override + public void run() + { + if (_reason == schema) + { + /* NOTE: MaterializedQueryHelper can detect data changes and refresh the materialized view using the provided SQL. + * It does not handle schema changes where the SQL itself needs to be updated. In this case, we remove the + * MQH from the cache to force the SQL to be regenerated. 
+ */ + _materializedQueries.remove(_lsid); + return; + } + var counters = getInvalidateCounters(_lsid); + switch (_reason) + { + case insert -> counters.insert.incrementAndGet(); + case rollup -> counters.rollup.incrementAndGet(); + case update -> counters.update.incrementAndGet(); + case delete -> counters.delete.incrementAndGet(); + default -> throw new IllegalStateException("Unexpected value: " + _reason); + } + } + + @Override + public boolean equals(Object obj) + { + return obj instanceof RefreshMaterializedViewRunnable other && _lsid.equals(other._lsid) && _reason.equals(other._reason); + } + } + + private static InvalidationCounters getInvalidateCounters(String lsid) + { + if (!initializedListeners.getAndSet(true)) + { + CacheManager.addListener(_invalidationCounters::clear); + } + return _invalidationCounters.computeIfAbsent(lsid, (unused) -> + new InvalidationCounters() + ); + } + + /* SELECT and JOIN, does not include WHERE, same as getJoinSQL() */ + private SQLFragment getMaterializedSQL() + { + if (null == _ss) + return getJoinSQL(null); + + var mqh = _materializedQueries.get(_ss.getLSID(), null, (unusedKey, unusedArg) -> + { + /* NOTE: MaterializedQueryHelper does have a pattern to help with detecting schema changes. + * Previously it has been used on non-provisioned tables. It might be helpful to have a pattern, + * even if just to help with race-conditions. + * + * Maybe have a callback to generate the SQL dynamically, and verify that the sql is unchanged. 
+ */ + SQLFragment viewSql = getJoinSQL(null).append(" WHERE CpasType = ").appendValue(_ss.getLSID()); + return (_MaterializedQueryHelper) new _MaterializedQueryHelper.Builder(_ss.getLSID(), "", getExpSchema().getDbSchema().getScope(), viewSql) + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_rowid ON temp.${NAME} (rowid)") + .addIndex("CREATE UNIQUE INDEX uq_${NAME}_lsid ON temp.${NAME} (lsid)") + .addIndex("CREATE INDEX idx_${NAME}_container ON temp.${NAME} (container)") + .addIndex("CREATE INDEX idx_${NAME}_root ON temp.${NAME} (rootmaterialrowid)") + .addInvalidCheck(() -> String.valueOf(getInvalidateCounters(_ss.getLSID()).update.get())) + .build(); + }); + return new SQLFragment("SELECT * FROM ").append(mqh.getFromSql("_cached_view_")); + } + + + /** + * MaterializedQueryHelper has a built-in mechanism for tracking when a temp table needs to be recomputed. + * It does not help with incremental updates (except for providing the upsert() method). + * _MaterializedQueryHelper and _Materialized copy the pattern using class Invalidator. 
+ */ + static class _MaterializedQueryHelper extends MaterializedQueryHelper + { + final String _lsid; + + static class Builder extends MaterializedQueryHelper.Builder + { + String _lsid; + + public Builder(String lsid, String prefix, DbScope scope, SQLFragment select) + { + super(prefix, scope, select); + this._lsid = lsid; + } + + @Override + public _MaterializedQueryHelper build() + { + return new _MaterializedQueryHelper(_lsid, _prefix, _scope, _select, _uptodate, _supplier, _indexes, _max, _isSelectInto); + } + } + + _MaterializedQueryHelper(String lsid, String prefix, DbScope scope, SQLFragment select, @Nullable SQLFragment uptodate, Supplier supplier, @Nullable Collection indexes, long maxTimeToCache, + boolean isSelectIntoSql) + { + super(prefix, scope, select, uptodate, supplier, indexes, maxTimeToCache, isSelectIntoSql); + this._lsid = lsid; + } + + @Override + protected Materialized createMaterialized(String txCacheKey) + { + DbSchema temp = DbSchema.getTemp(); + String name = _prefix + "_" + GUID.makeHash(); + _Materialized materialized = new _Materialized(this, name, txCacheKey, HeartBeat.currentTimeMillis(), "\"" + temp.getName() + "\".\"" + name + "\""); + initMaterialized(materialized); + return materialized; + } + + @Override + protected void incrementalUpdateBeforeSelect(Materialized m) + { + _Materialized materialized = (_Materialized) m; + + boolean lockAcquired = false; + try + { + lockAcquired = materialized.getLock().tryLock(1, TimeUnit.MINUTES); + if (Materialized.LoadingState.ERROR == materialized._loadingState.get()) + throw materialized._loadException; + + if (!materialized.incrementalDeleteCheck.stillValid(0)) + executeIncrementalDelete(); + if (!materialized.incrementalRollupCheck.stillValid(0)) + executeIncrementalRollup(); + if (!materialized.incrementalInsertCheck.stillValid(0)) + executeIncrementalInsert(); + } + catch (RuntimeException|InterruptedException ex) + { + RuntimeException rex = UnexpectedException.wrap(ex); + 
materialized.setError(rex); + // The only time I'd expect an error is due to a schema change race-condition, but that can happen in any code path. + + // Ensure that next refresh starts clean + _materializedQueries.remove(_lsid); + getInvalidateCounters(_lsid).update.incrementAndGet(); + throw rex; + } + finally + { + if (lockAcquired) + materialized.getLock().unlock(); + } + } + + void upsertWithRetry(SQLFragment sql) + { + // not actually read-only, but we don't want to start an explicit transaction + _scope.executeWithRetryReadOnly((tx) -> upsert(sql)); + } + + void executeIncrementalInsert() + { + SQLFragment incremental = new SQLFragment("INSERT INTO temp.${NAME}\n") + .append("SELECT * FROM (") + .append(getViewSourceSql()).append(") viewsource_\n") + .append("WHERE rowid > (SELECT COALESCE(MAX(rowid),0) FROM temp.${NAME})"); + upsertWithRetry(incremental); + } + + void executeIncrementalDelete() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + // POSTGRES bug??? 
the obvious query is _very_ slow O(n^2) + // DELETE FROM temp.${NAME} WHERE rowid NOT IN (SELECT rowid FROM exp.material WHERE cpastype = <<_lsid>>) + SQLFragment incremental = new SQLFragment() + .append("WITH deleted AS (SELECT rowid FROM temp.${NAME} EXCEPT SELECT rowid FROM exp.material WHERE cpastype = ").appendValue(_lsid,d).append(")\n") + .append("DELETE FROM temp.${NAME} WHERE rowid IN (SELECT rowid from deleted)\n"); + upsertWithRetry(incremental); + } + + void executeIncrementalRollup() + { + var d = CoreSchema.getInstance().getSchema().getSqlDialect(); + SQLFragment incremental = new SQLFragment(); + if (d.isPostgreSQL()) + { + incremental + .append("UPDATE temp.${NAME} AS st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM exp.Material AS expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" st.aliquotcount IS DISTINCT FROM expm.aliquotcount OR ") + .append(" st.availablealiquotcount IS DISTINCT FROM expm.availablealiquotcount OR ") + .append(" st.aliquotvolume IS DISTINCT FROM expm.aliquotvolume OR ") + .append(" st.availablealiquotvolume IS DISTINCT FROM expm.availablealiquotvolume OR ") + .append(" st.aliquotunit IS DISTINCT FROM expm.aliquotunit") + .append(")"); + } + else + { + // SQL Server 2022 supports IS DISTINCT FROM + incremental + .append("UPDATE st\n") + .append("SET aliquotcount = expm.aliquotcount, availablealiquotcount = expm.availablealiquotcount, aliquotvolume = expm.aliquotvolume, availablealiquotvolume = expm.availablealiquotvolume, aliquotunit = expm.aliquotunit\n") + .append("FROM temp.${NAME} st, exp.Material expm\n") + .append("WHERE expm.rowid = st.rowid AND expm.cpastype = ").appendValue(_lsid,d).append(" AND (\n") + .append(" COALESCE(st.aliquotcount,-2147483648) 
<> COALESCE(expm.aliquotcount,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotcount,-2147483648) <> COALESCE(expm.availablealiquotcount,-2147483648) OR ") + .append(" COALESCE(st.aliquotvolume,-2147483648) <> COALESCE(expm.aliquotvolume,-2147483648) OR ") + .append(" COALESCE(st.availablealiquotvolume,-2147483648) <> COALESCE(expm.availablealiquotvolume,-2147483648) OR ") + .append(" COALESCE(st.aliquotunit,'-') <> COALESCE(expm.aliquotunit,'-')") + .append(")"); + } + upsertWithRetry(incremental); + } + } + + static class _Materialized extends MaterializedQueryHelper.Materialized + { + final MaterializedQueryHelper.Invalidator incrementalInsertCheck; + final MaterializedQueryHelper.Invalidator incrementalRollupCheck; + final MaterializedQueryHelper.Invalidator incrementalDeleteCheck; + + _Materialized(_MaterializedQueryHelper mqh, String tableName, String cacheKey, long created, String sql) + { + super(mqh, tableName, cacheKey, created, sql); + final InvalidationCounters counters = getInvalidateCounters(mqh._lsid); + incrementalInsertCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.insert.get())); + incrementalRollupCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.rollup.get())); + incrementalDeleteCheck = new MaterializedQueryHelper.SupplierInvalidator(() -> String.valueOf(counters.delete.get())); + } + + @Override + public void reset() + { + super.reset(); + long now = HeartBeat.currentTimeMillis(); + incrementalInsertCheck.stillValid(now); + incrementalRollupCheck.stillValid(now); + incrementalDeleteCheck.stillValid(now); + } + + Lock getLock() + { + return _loadingLock; + } + } + + + /* SELECT and JOIN, does not include WHERE */ + private SQLFragment getJoinSQL(Set selectedColumns) + { + TableInfo provisioned = null == _ss ? null : _ss.getTinfo(); + Set provisionedCols = new CaseInsensitiveHashSet(provisioned != null ? 
provisioned.getColumnNameSet() : Collections.emptySet()); + provisionedCols.remove(RowId.name()); + provisionedCols.remove(Name.name()); + boolean hasProvisionedColumns = containsProvisionedColumns(selectedColumns, provisionedCols); + + boolean hasSampleColumns = false; + boolean hasAliquotColumns = false; + + Set materialCols = new CaseInsensitiveHashSet(_rootTable.getColumnNameSet()); + selectedColumns = computeInnerSelectedColumns(selectedColumns); + + SQLFragment sql = new SQLFragment(); + sql.appendComment("", getSqlDialect()); + sql.append("SELECT "); + String comma = ""; + for (String materialCol : materialCols) + { + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(new FieldKey(null, materialCol))) + { + sql.append(comma).append("m.").appendIdentifier(materialCol); + comma = ", "; + } + } + if (null != provisioned && hasProvisionedColumns) + { + for (ColumnInfo propertyColumn : provisioned.getColumns()) + { + // don't select twice + if ( + RowId.name().equalsIgnoreCase(propertyColumn.getColumnName()) || + Name.name().equalsIgnoreCase(propertyColumn.getColumnName()) + ) + { + continue; + } + + // don't need to generate SQL for columns that aren't selected + if (ALL_COLUMNS == selectedColumns || selectedColumns.contains(propertyColumn.getFieldKey()) || propertyColumn.isMvIndicatorColumn()) + { + sql.append(comma); + boolean rootField = StringUtils.isEmpty(propertyColumn.getDerivationDataScope()) + || ExpSchema.DerivationDataScopeType.ParentOnly.name().equalsIgnoreCase(propertyColumn.getDerivationDataScope()); + if ("genid".equalsIgnoreCase(propertyColumn.getColumnName()) || propertyColumn.isUniqueIdField()) + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + else if (rootField) + { + sql.append(propertyColumn.getValueSql("m_sample")).append(" AS 
").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasSampleColumns = true; + } + else + { + sql.append(propertyColumn.getValueSql("m_aliquot")).append(" AS ").appendIdentifier(propertyColumn.getSelectIdentifier()); + hasAliquotColumns = true; + } + comma = ", "; + } + } + } + + sql.append("\nFROM "); + sql.append(_rootTable, "m"); + if (hasSampleColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_sample").append(" ON m.RootMaterialRowId = m_sample.RowId"); + if (hasAliquotColumns) + sql.append(" INNER JOIN ").append(provisioned, "m_aliquot").append(" ON m.RowId = m_aliquot.RowId"); + + sql.appendComment("", getSqlDialect()); + return sql; + } + + private static class SampleTypeAmountDisplayColumn extends ExprColumn + { + public SampleTypeAmountDisplayColumn(TableInfo parent, String amountFieldName, String unitFieldName, String label, Set importAliases, Unit typeUnit) + { + super(parent, FieldKey.fromParts(amountFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? AND ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" IS NOT NULL THEN CAST(").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" / ? AS ") + .append(parent.getSqlDialect().isPostgreSQL() ? "DECIMAL" : "DOUBLE PRECISION") + .append(") ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(amountFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.getValue()), + JdbcType.DOUBLE); + + setLabel(label); + setImportAliasesSet(importAliases); + } + } + + private static class SampleTypeUnitDisplayColumn extends ExprColumn + { + public SampleTypeUnitDisplayColumn(TableInfo parent, String unitFieldName, Unit typeUnit) + { + super(parent, FieldKey.fromParts(unitFieldName), new SQLFragment( + "(CASE WHEN ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" = ? THEN ? 
ELSE ").append(ExprColumn.STR_TABLE_ALIAS + ".").append(unitFieldName) + .append(" END)") + .add(typeUnit.getBase().toString()) + .add(typeUnit.toString()), + JdbcType.VARCHAR); + } + } + + @Override + public QueryUpdateService getUpdateService() + { + return new SampleTypeUpdateServiceDI(this, _ss); + } + + @Override + public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class perm) + { + if (_ss == null) + { + // Allow read and delete for exp.Materials. + // Don't allow insert/update on exp.Materials without a sample type. + if (perm == DeletePermission.class || perm == ReadPermission.class) + return getContainer().hasPermission(user, perm); + return false; + } + + if (_ss.isMedia() && perm == ReadPermission.class) + return getContainer().hasPermission(user, MediaReadPermission.class); + + return super.hasPermission(user, perm); + } + + @NotNull + @Override + public List getUniqueIndices() + { + // Rewrite the "idx_material_ak" unique index over "Folder", "SampleSet", "Name" to just "Name" + // Issue 25397: Don't include the "idx_material_ak" index if the "Name" column hasn't been added to the table. 
+ // Some FKs to ExpMaterialTable don't include the "Name" column (e.g., NabBaseTable.Specimen) + String indexName = "idx_material_ak"; + List ret = new ArrayList<>(super.getUniqueIndices()); + if (getColumn("Name") != null) + ret.add(new IndexDefinition(indexName, IndexType.Unique, Arrays.asList(getColumn("Name")), null)); + else + ret.removeIf(def -> def.name().equals(indexName)); + return Collections.unmodifiableList(ret); + } + + // + // UpdatableTableInfo + // + + @Override + public @Nullable Long getOwnerObjectId() + { + return OntologyManager.ensureObject(_ss.getContainer(), _ss.getLSID(), (Long) null); + } + + @Nullable + @Override + public CaseInsensitiveHashMap remapSchemaColumns() + { + CaseInsensitiveHashMap m = new CaseInsensitiveHashMap<>(); + + if (null != getRealTable().getColumn("container") && null != getColumn("folder")) + { + m.put("container", "folder"); + } + + for (ColumnInfo col : getColumns()) + { + if (col.getMvColumnName() != null) + m.put(col.getName() + "_" + MvColumn.MV_INDICATOR_SUFFIX, col.getMvColumnName().getName()); + } + + return m; + } + + @Override + public Set getAltMergeKeys(DataIteratorContext context) + { + return MATERIAL_ALT_KEYS; + } + + @Override + public @Nullable Set getExistingRecordKeyColumnNames(DataIteratorContext context, Map colNameMap) + { + Set keyColumnNames = new CaseInsensitiveHashSet(); + + if (context.getInsertOption().allowUpdate) + { + if (context.getInsertOption().updateOnly) + { + if (colNameMap.containsKey(RowId.name())) + keyColumnNames.add(RowId.name()); + else if (colNameMap.containsKey(Name.name())) + { + keyColumnNames.add(MaterialSourceId.name()); + keyColumnNames.add(Name.name()); + } + else + throw new IllegalArgumentException("Either RowId or Name is required for update."); + } + else + { + Set altMergeKeys = getAltMergeKeys(context); + if (altMergeKeys != null) + keyColumnNames.addAll(altMergeKeys); + } + } + + return keyColumnNames.isEmpty() ? 
null : keyColumnNames; + } + + @Override + public @Nullable Set getExistingRecordSharedKeyColumnNames() + { + return CaseInsensitiveHashSet.of(MaterialSourceId.name()); + } + + @Override + @NotNull + public List> getAdditionalRequiredInsertColumns() + { + if (getSampleType() == null) + return Collections.emptyList(); + + try + { + return getRequiredParentImportFields(getSampleType().getRequiredImportAliases()); + } + catch (IOException e) + { + return Collections.emptyList(); + } + } + + @Override + public DataIteratorBuilder persistRows(DataIteratorBuilder data, DataIteratorContext context) + { + TableInfo propertiesTable = _ss.getTinfo(); + + // The specimens sample type doesn't have a properties table + if (propertiesTable == null) + return data; + + try + { + final Container container = getContainer(); + final User user = getUserSchema().getUser(); + ExperimentServiceImpl expService = ExperimentServiceImpl.get(); + SearchService.TaskIndexingQueue queue = SearchService.get().defaultTask().getQueue(container, SearchService.PRIORITY.modified); + + DataIteratorBuilder dib = new ExpDataIterators.PersistDataIteratorBuilder(data, this, propertiesTable, _ss, container, user, _ss.getImportAliasesIncludingAliquot()) + .setFileLinkDirectory(SAMPLE_TYPE_FILE_DIRECTORY_NAME) + .setIndexFunction(searchIndexDataKeys -> propertiesTable.getSchema().getScope().addCommitTask(() -> { + List lsids = searchIndexDataKeys.lsids(); + List orderedRowIds = searchIndexDataKeys.orderedRowIds(); + + // Issue 51263: order by RowId to reduce deadlock + ListUtils.partition(orderedRowIds, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterials(sublist)) + expMaterial.index(q, this); + }) + ); + + ListUtils.partition(lsids, 100).forEach(sublist -> + queue.addRunnable((q) -> + { + for (ExpMaterialImpl expMaterial : expService.getExpMaterialsByLsid(sublist)) + expMaterial.index(q, this); + }) + ); + }, 
DbScope.CommitTaskOption.POSTCOMMIT) + ); + + dib = LoggingDataIterator.wrap(dib); + return LoggingDataIterator.wrap(new AliasDataIteratorBuilder(dib, container, user, expService.getTinfoMaterialAliasMap(), _ss, true)); + } + catch (IOException e) + { + throw new UncheckedIOException(e); + } + } + + @Override + @NotNull + public AuditBehaviorType getDefaultAuditBehavior() + { + return AuditBehaviorType.DETAILED; + } + + static final Set excludeFromDetailedAuditField; + static + { + var set = new CaseInsensitiveHashSet(); + set.addAll(TableInfo.defaultExcludedDetailedUpdateAuditFields); + set.addAll(ExpDataIterators.MATERIAL_NOT_FOR_UPDATE); + // We don't want the inventory columns to show up in the sample timeline audit record; + // they are captured in their own audit record. + set.addAll(InventoryService.InventoryStatusColumn.names()); + excludeFromDetailedAuditField = Collections.unmodifiableSet(set); + } + + @Override + public @NotNull Set getExcludedDetailedUpdateAuditFields() + { + // uniqueId fields don't change in reality, so exclude them from the audit updates + Set excluded = new CaseInsensitiveHashSet(); + excluded.addAll(this.getUniqueIdFields()); + excluded.addAll(excludeFromDetailedAuditField); + return excluded; + } + + @Override + public List> getImportTemplates(ViewContext ctx) + { + // respect any metadata overrides + if (getRawImportTemplates() != null) + return super.getImportTemplates(ctx); + + List> templates = new ArrayList<>(); + ActionURL url = PageFlowUtil.urlProvider(QueryUrls.class).urlCreateExcelTemplate(ctx.getContainer(), getPublicSchemaName(), getName()); + url.addParameter("headerType", ColumnHeaderType.ImportField.name()); + try + { + if (getSampleType() != null && !getSampleType().getImportAliases().isEmpty()) + { + for (String aliasKey : getSampleType().getImportAliases().keySet()) + url.addParameter("includeColumn", aliasKey); + } + } + catch (IOException e) + {} + templates.add(Pair.of("Download Template", url.toString())); + 
return templates; + } + + @Override + public void overlayMetadata(String tableName, UserSchema schema, Collection errors) + { + if (SamplesSchema.SCHEMA_NAME.equals(schema.getName())) + { + Collection metadata = QueryService.get().findMetadataOverride(schema, SamplesSchema.SCHEMA_METADATA_NAME, false, false, errors, null); + if (null != metadata) + { + overlayMetadata(metadata, schema, errors); + } + } + super.overlayMetadata(tableName, schema, errors); + } + + static class SampleTypeAmountPrecisionDisplayColumn extends DataColumn + { + private final Unit typeUnit; + private final boolean applySampleTypePrecision; + + public SampleTypeAmountPrecisionDisplayColumn(ColumnInfo col, Unit typeUnit) + { + super(col, false); + this.typeUnit = typeUnit; + this.applySampleTypePrecision = col.getFormat() == null; // only apply if no custom format is set by user + } + + @Override + public Object getDisplayValue(RenderContext ctx) + { + Object value = super.getDisplayValue(ctx); + if (this.applySampleTypePrecision && value != null) + { + int scale = this.typeUnit == null ? Quantity.DEFAULT_PRECISION_SCALE : this.typeUnit.getPrecisionScale(); + value = Precision.round(Double.valueOf(value.toString()), scale); + } + return value; + } + } +}