Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion api/src/org/labkey/api/data/RecordFactory.java
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ public RecordFactory(Class<K> clazz)
{
Object[] params = Arrays.stream(_parameters).map(p -> {
Object value = m.get(p.getName());
return ConvertUtils.convert(value, p.getType());
return value != null ? ConvertUtils.convert(value, p.getType()) : null;
}).toArray();

try
Expand Down
1 change: 1 addition & 0 deletions api/src/org/labkey/api/data/SqlScriptExecutor.java
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@ public void execute()
{
LOG.info("Executing {}", upgradeMethod.getDisplayName());
_moduleContext.invokeUpgradeMethod(upgradeMethod);
LOG.info("Finished executing {}", upgradeMethod.getDisplayName());
}
}
catch (NoSuchMethodException e)
Expand Down
18 changes: 18 additions & 0 deletions api/src/org/labkey/api/data/dialect/PostgreSqlService.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
package org.labkey.api.data.dialect;

import org.labkey.api.services.ServiceRegistry;

/**
 * Service that gives API-level code access to a PostgreSQL dialect implementation without a compile-time
 * dependency on the dialect factory. CoreModule registers the implementation via a method reference
 * (PostgreSqlDialectFactory::getLatestSupportedDialect), so this interface must remain functional —
 * the annotation below makes the compiler enforce the single-abstract-method contract.
 */
@FunctionalInterface
public interface PostgreSqlService
{
    /** @return the implementation registered with the ServiceRegistry (may be null before registration) */
    static PostgreSqlService get()
    {
        return ServiceRegistry.get().getService(PostgreSqlService.class);
    }

    /** Register the service implementation; called once at startup (see CoreModule). */
    static void setInstance(PostgreSqlService impl)
    {
        ServiceRegistry.get().registerService(PostgreSqlService.class, impl);
    }

    /** @return a PostgreSQL dialect; the CoreModule registration supplies the latest supported dialect */
    BasePostgreSqlDialect getDialect();
}
7 changes: 4 additions & 3 deletions core/src/org/labkey/core/CoreModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@
import org.labkey.api.data.TestSchema;
import org.labkey.api.data.WorkbookContainerType;
import org.labkey.api.data.dialect.BasePostgreSqlDialect;
import org.labkey.api.data.dialect.PostgreSqlService;
import org.labkey.api.data.dialect.SqlDialect;
import org.labkey.api.data.dialect.SqlDialectManager;
import org.labkey.api.data.dialect.SqlDialectRegistry;
Expand All @@ -88,6 +89,7 @@
import org.labkey.api.files.FileBrowserConfigWriter;
import org.labkey.api.files.FileContentService;
import org.labkey.api.markdown.MarkdownService;
import org.labkey.api.mcp.McpService;
import org.labkey.api.message.settings.MessageConfigService;
import org.labkey.api.migration.DatabaseMigrationService;
import org.labkey.api.module.FolderType;
Expand All @@ -98,7 +100,6 @@
import org.labkey.api.module.SchemaUpdateType;
import org.labkey.api.module.SpringModule;
import org.labkey.api.module.Summary;
import org.labkey.api.mcp.McpService;
import org.labkey.api.notification.EmailMessage;
import org.labkey.api.notification.EmailService;
import org.labkey.api.notification.NotificationMenuView;
Expand Down Expand Up @@ -253,9 +254,9 @@
import org.labkey.core.login.DbLoginAuthenticationProvider;
import org.labkey.core.login.DbLoginManager;
import org.labkey.core.login.LoginController;
import org.labkey.core.mcp.McpServiceImpl;
import org.labkey.core.metrics.SimpleMetricsServiceImpl;
import org.labkey.core.metrics.WebSocketConnectionManager;
import org.labkey.core.mcp.McpServiceImpl;
import org.labkey.core.notification.EmailPreferenceConfigServiceImpl;
import org.labkey.core.notification.EmailPreferenceContainerListener;
import org.labkey.core.notification.EmailPreferenceUserListener;
Expand Down Expand Up @@ -560,11 +561,11 @@ public QuerySchema createSchema(DefaultSchema schema, Module module)
ScriptEngineManagerImpl.registerEncryptionMigrationHandler();

McpService.get().register(new CoreMcp());
PostgreSqlService.setInstance(PostgreSqlDialectFactory::getLatestSupportedDialect);

deleteTempFiles();
}


private void deleteTempFiles()
{
try
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1605,6 +1605,7 @@ public boolean handlePost(UpgradeCodeForm form, BindException errors) throws Exc
{
LOG.info("Executing {}.{}(ModuleContext moduleContext)", _method.getDeclaringClass().getSimpleName(), _method.getName());
_method.invoke(_code, _ctx);
LOG.info("Finished executing {}.{}(ModuleContext moduleContext)", _method.getDeclaringClass().getSimpleName(), _method.getName());
return true;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import org.junit.Assert;
import org.junit.Test;
import org.labkey.api.data.dialect.AbstractDialectRetrievalTestCase;
import org.labkey.api.data.dialect.BasePostgreSqlDialect;
import org.labkey.api.data.dialect.DatabaseNotSupportedException;
import org.labkey.api.data.dialect.JdbcHelperTest;
import org.labkey.api.data.dialect.PostgreSqlServerType;
Expand Down Expand Up @@ -145,6 +146,11 @@ public static PostgreSql_13_Dialect getOldestSupportedDialect()
return new PostgreSql_13_Dialect();
}

public static BasePostgreSqlDialect getLatestSupportedDialect()
{
return new PostgreSql_18_Dialect();
}

public static class DialectRetrievalTestCase extends AbstractDialectRetrievalTestCase
{
@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
-- SQL Server only: invokes upgrade code that renames provisioned storage tables/columns whose names are
-- too long for PostgreSQL, so a later SQL Server -> PostgreSQL migration won't fail (GitHub Issue 869)
EXEC core.executeJavaUpgradeCode 'shortenAllStorageNames';
Original file line number Diff line number Diff line change
Expand Up @@ -178,23 +178,27 @@ public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema s

DbScope sourceScope = configuration.getSourceScope();
DbScope targetScope = configuration.getTargetScope();
DbSchema biologicsSourceSchema = sourceScope.getSchema("biologics", DbSchemaType.Migration);
DbSchema biologicsTargetSchema = targetScope.getSchema("biologics", DbSchemaType.Module);

if (biologicsSourceSchema.existsInDatabase() && biologicsTargetSchema.existsInDatabase())
if (sourceScope.getSchemaNames().contains("biologics") && targetScope.getSchemaNames().contains("biologics"))
{
TableInfo sourceTable = biologicsSourceSchema.getTable("SequenceIdentity");
TableInfo targetTable = biologicsTargetSchema.getTable("SequenceIdentity");
DbSchema biologicsSourceSchema = sourceScope.getSchema("biologics", DbSchemaType.Migration);
DbSchema biologicsTargetSchema = targetScope.getSchema("biologics", DbSchemaType.Module);

DatabaseMigrationService.get().copySourceTableToTargetTable(configuration, sourceTable, targetTable, DbSchemaType.Module, true, null, new DefaultMigrationSchemaHandler(biologicsTargetSchema)
if (biologicsSourceSchema.existsInDatabase() && biologicsTargetSchema.existsInDatabase())
{
@Override
public FilterClause getTableFilterClause(TableInfo sourceTable, Set<GUID> containers)
TableInfo sourceTable = biologicsSourceSchema.getTable("SequenceIdentity");
TableInfo targetTable = biologicsTargetSchema.getTable("SequenceIdentity");

DatabaseMigrationService.get().copySourceTableToTargetTable(configuration, sourceTable, targetTable, DbSchemaType.Module, true, null, new DefaultMigrationSchemaHandler(biologicsTargetSchema)
{
// This is a global table, so no container clause. Just query and copy the sequence IDs referenced by data class rows we copied.
return new InClause(FieldKey.fromParts("SequenceId"), SEQUENCE_IDS);
}
});
@Override
public FilterClause getTableFilterClause(TableInfo sourceTable, Set<GUID> containers)
{
// This is a global table, so no container clause. Just query and copy the sequence IDs referenced by data class rows we copied.
return new InClause(FieldKey.fromParts("SequenceId"), SEQUENCE_IDS);
}
});
}
}
}

Expand Down
2 changes: 1 addition & 1 deletion experiment/src/org/labkey/experiment/ExperimentModule.java
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ public String getName()
@Override
public Double getSchemaVersion()
{
return 26.004;
return 26.005;
}

@Nullable
Expand Down
155 changes: 154 additions & 1 deletion experiment/src/org/labkey/experiment/ExperimentUpgradeCode.java
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@
*/
package org.labkey.experiment;

import org.apache.commons.collections4.MultiValuedMap;
import org.apache.commons.collections4.multimap.ArrayListValuedHashMap;
import org.apache.commons.lang3.StringUtils;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
Expand All @@ -25,7 +27,10 @@
import org.labkey.api.audit.SampleTimelineAuditEvent;
import org.labkey.api.audit.TransactionAuditProvider;
import org.labkey.api.collections.CaseInsensitiveHashSet;
import org.labkey.api.collections.CsvSet;
import org.labkey.api.collections.LabKeyCollectors;
import org.labkey.api.data.ColumnInfo;
import org.labkey.api.data.CompareType;
import org.labkey.api.data.Container;
import org.labkey.api.data.ContainerManager;
import org.labkey.api.data.DbSchema;
Expand All @@ -39,11 +44,16 @@
import org.labkey.api.data.SQLFragment;
import org.labkey.api.data.SchemaTableInfo;
import org.labkey.api.data.Selector;
import org.labkey.api.data.SimpleFilter;
import org.labkey.api.data.SqlExecutor;
import org.labkey.api.data.SqlSelector;
import org.labkey.api.data.Table;
import org.labkey.api.data.TableInfo;
import org.labkey.api.data.TableSelector;
import org.labkey.api.data.UpgradeCode;
import org.labkey.api.data.dialect.BasePostgreSqlDialect;
import org.labkey.api.data.dialect.PostgreSqlService;
import org.labkey.api.exp.OntologyManager;
import org.labkey.api.exp.PropertyDescriptor;
import org.labkey.api.exp.api.ExpSampleType;
import org.labkey.api.exp.api.ExperimentService;
Expand All @@ -64,6 +74,8 @@
import org.labkey.api.security.User;
import org.labkey.api.security.roles.SiteAdminRole;
import org.labkey.api.settings.AppProps;
import org.labkey.api.util.PageFlowUtil;
import org.labkey.api.util.StringUtilsLabKey;
import org.labkey.api.util.logging.LogHelper;
import org.labkey.experiment.api.DataClass;
import org.labkey.experiment.api.DataClassDomainKind;
Expand All @@ -73,11 +85,14 @@
import org.labkey.experiment.api.MaterialSource;
import org.labkey.experiment.api.property.DomainImpl;
import org.labkey.experiment.api.property.DomainPropertyImpl;
import org.labkey.experiment.api.property.StorageNameGenerator;
import org.labkey.experiment.api.property.StorageProvisionerImpl;

import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
Expand Down Expand Up @@ -174,7 +189,6 @@ public static void upgradeAmountsAndUnits(ModuleContext context)
}
ExperimentService.get().clearCaches();
}

}

private static void getAmountAndUnitUpdates(Map<String, Object> sampleMap, Parameter unitsCol, Set<Parameter> amountCols, Unit currentDisplayUnit, Map<String, Object> oldDataMap, Map<String, Object> newDataMap, Map<String, Integer> sampleCounts, boolean aliquotFields)
Expand Down Expand Up @@ -656,5 +670,144 @@ private static void fillRowId(ExpDataClassImpl dc, Domain domain, DbScope scope)
LOG.info("DataClass '{}' ({}) populated 'rowId' column, count={}", dc.getName(), dc.getRowId(), count);
}

record DomainRecord(Container container, int domainId, String name, String storageSchemaName, String storageTableName)
{
    /** Schema-qualified storage table name ("schema.table"). */
    String fullName()
    {
        return String.join(".", storageSchemaName, storageTableName);
    }
}

record Property(int domainId, int propertyId, String domainName, String name, String storageSchemaName, String storageTableName, String storageColumnName)
{
    /** Schema-qualified storage column reference ("schema.table.[column]"). */
    String fullName()
    {
        // The column portion is bracketed because storage column names can contain
        // special characters (such as dots) that would break an unquoted reference
        return String.join(".", storageSchemaName, storageTableName, bracketIt(storageColumnName));
    }

    /** Wrap the name in square brackets, doubling any embedded closing brackets to escape them. */
    private String bracketIt(String name)
    {
        String escaped = name.replace("]", "]]");
        return '[' + escaped + ']';
    }
}

/**
 * Called from exp-26.004-26.005.sql, on SQL Server only
 * GitHub Issue 869: Long table/column names cause SQL Server migration to fail
 * Query all table & column storage names and rename the ones that are too long for PostgreSQL
 * TODO: When this upgrade code is removed, get rid of the StorageProvisionerImpl.makeTableName() method it uses.
 *
 * @param context the module upgrade context; this is a no-op on a new install (no legacy names to shorten)
 */
@SuppressWarnings("unused")
public static void shortenAllStorageNames(ModuleContext context)
{
    if (context.isNewInstall())
        return;

    // The PostgreSQL dialect knows which names are too long
    BasePostgreSqlDialect dialect = PostgreSqlService.get().getDialect();
    DbScope scope = DbScope.getLabKeyScope();
    SqlExecutor executor = new SqlExecutor(scope);

    // Stream all the storage table names and rename the ones that are too long for PostgreSQL. The filtering must
    // be done in code by the dialect; SQL Server has BYTELENGTH(), but that function returns values that are not
    // consistent with our dialect check. Also, it looks like the function's behavior changed starting in SS 2019.
    TableInfo tinfoDomainDescriptor = OntologyManager.getTinfoDomainDescriptor();
    SimpleFilter filter = new SimpleFilter(FieldKey.fromString("StorageSchemaName"), null, CompareType.NONBLANK);
    filter.addCondition(FieldKey.fromString("StorageTableName"), null, CompareType.NONBLANK);

    new TableSelector(tinfoDomainDescriptor, new CsvSet("Container, DomainId, Name, StorageSchemaName, StorageTableName"), filter, null)
        .setJdbcCaching(false)
        .stream(DomainRecord.class)
        .filter(domain -> dialect.isIdentifierTooLong(domain.storageTableName()))
        .forEach(domain -> {
            String oldName = domain.fullName();
            String newName = StorageProvisionerImpl.get().makeTableName(dialect, domain.container(), domain.domainId(), domain.name());

            // Rename the physical table first, then record the new name on the DomainDescriptor row
            executor.execute(new SQLFragment("EXEC sp_rename ?, ?").add(oldName).add(newName));
            Table.update(null, tinfoDomainDescriptor, PageFlowUtil.map("StorageTableName", newName), domain.domainId());

            LOG.info("  Table \"{}\" renamed to \"{}\" ({} bytes)", oldName, newName, newName.getBytes(StandardCharsets.UTF_8).length);
        });

    // Sanity check: re-query and verify no storage table names remain over the limit
    List<String> badTableNames = new TableSelector(tinfoDomainDescriptor, new CsvSet("StorageTableName"), filter, null)
        .setJdbcCaching(false)
        .stream(String.class)
        .filter(dialect::isIdentifierTooLong)
        .toList();

    if (!badTableNames.isEmpty())
        LOG.error("Some storage table names are still too long!! {}", badTableNames);

    // Collect all the domains that have one or more storage columns names that are too long for PostgreSQL.
    // The non-blank filtering is handled entirely by the WHERE clause below. (A redundant SimpleFilter that
    // was previously rebuilt here was a dead store — it was never passed to any selector — and was removed.)
    TableInfo tinfoPropertyDomain = OntologyManager.getTinfoPropertyDomain();
    TableInfo tinfoPropertyDescriptor = OntologyManager.getTinfoPropertyDescriptor();
    SQLFragment sql = new SQLFragment("SELECT dd.DomainId, dd.Name AS DomainName, px.PropertyId, StorageSchemaName, StorageTableName, StorageColumnName, px.Name FROM ")
        .append(tinfoDomainDescriptor, "dd")
        .append(" INNER JOIN ")
        .append(tinfoPropertyDomain, "pd")
        .append(" ON dd.DomainId = pd.DomainId INNER JOIN ")
        .append(tinfoPropertyDescriptor, "px")
        .append(" ON pd.PropertyId = px.PropertyId ")
        .append("WHERE StorageSchemaName IS NOT NULL AND StorageTableName IS NOT NULL AND StorageColumnName IS NOT NULL");

    MultiValuedMap<DomainRecord, Property> badDomainMap = new SqlSelector(scope, sql)
        .setJdbcCaching(false)
        .stream(Property.class)
        .filter(property -> dialect.isIdentifierTooLong(property.storageColumnName()))
        .collect(LabKeyCollectors.toMultiValuedMap(
            property -> new DomainRecord(null, property.domainId(), property.domainName(), property.storageSchemaName(), property.storageTableName()),
            property -> property,
            ArrayListValuedHashMap::new)
        );

    if (!badDomainMap.isEmpty())
        LOG.info("  Found {} with storage column names that are too long for PostgreSQL:", StringUtilsLabKey.pluralize(badDomainMap.keySet().size(), "domain"));

    // Now enumerate the bad domains and rename their bad storage columns using the PostgreSQL truncation rules
    badDomainMap.keySet()
        .forEach(domain -> {
            Collection<Property> badColumns = badDomainMap.get(domain);
            List<String> badColumnNames = badColumns.stream().map(Property::storageColumnName).toList();

            // First, populate a new StorageNameGenerator with all the "good" names in this domain so we don't
            // accidentally try to re-use one of them
            StorageNameGenerator nameGenerator = new StorageNameGenerator(dialect);
            SQLFragment domainSql = new SQLFragment("SELECT StorageColumnName FROM ")
                .append(tinfoPropertyDomain, "pd")
                .append(" INNER JOIN ")
                .append(tinfoPropertyDescriptor, "px")
                .append(" ON pd.PropertyId = px.PropertyId ")
                .append("WHERE DomainId = ? AND StorageColumnName NOT ")
                .add(domain.domainId())
                .appendInClause(badColumnNames, scope.getSqlDialect());
            new SqlSelector(scope, domainSql).forEach(String.class, nameGenerator::claimName);

            LOG.info("  Renaming {} in table \"{}\"", StringUtilsLabKey.pluralize(badColumns.size(), "column"), domain.fullName());

            // Now use that StorageNameGenerator to create new names. Rename the column and update the PropertyDescriptor table.
            badColumns.forEach(property -> {
                String oldName = property.fullName();
                String newName = nameGenerator.generateColumnName(property.name()); // No need to bracket or quote or escape: JDBC parameter takes care of all special characters

                executor.execute(new SQLFragment("EXEC sp_rename ?, ?, 'COLUMN'").add(oldName).add(newName));
                Table.update(null, tinfoPropertyDescriptor, PageFlowUtil.map("StorageColumnName", newName), property.propertyId());

                LOG.info("    Column \"{}\" renamed to \"{}\" ({} bytes)", oldName, newName, newName.getBytes(StandardCharsets.UTF_8).length);
            });
        });

    // Final sanity check: verify no storage column names remain over the limit
    List<String> badColumnNames = new TableSelector(tinfoPropertyDescriptor, new CsvSet("StorageColumnName"), new SimpleFilter(FieldKey.fromString("StorageColumnName"), null, CompareType.NONBLANK), null)
        .setJdbcCaching(false)
        .stream(String.class)
        .filter(dialect::isIdentifierTooLong)
        .toList();

    if (!badColumnNames.isEmpty())
        LOG.error("Some storage column names are still too long!! {}", badColumnNames);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import org.junit.Test;
import org.labkey.api.collections.CaseInsensitiveHashSet;
import org.labkey.api.data.DbScope;
import org.labkey.api.data.dialect.PostgreSqlService;
import org.labkey.api.data.dialect.SqlDialect;
import org.labkey.api.exp.OntologyManager;
import org.labkey.api.util.StringUtilsLabKey;
Expand All @@ -32,7 +33,8 @@ public class StorageNameGenerator

public StorageNameGenerator(@NotNull SqlDialect dialect)
{
_dialect = dialect;
// GitHub Issue 869: Create SQL Server storage names using PostgreSQL's rules to ensure all tables and columns can migrate
_dialect = dialect.isSqlServer() ? PostgreSqlService.get().getDialect() : dialect;
}

public String claimName(String name)
Expand Down
Loading