Skip to content

CAY-2424 #291

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions RELEASE-NOTES.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ CAY-2414 Modeler: new icon design
CAY-2415 Transaction isolation and propagation support
CAY-2416 Change TreeMap for HashMap to store data in Cayenne model classes
CAY-2422 Modeler: Open driver setup window on driver load error
CAY-2424 Modeler: Migrate DB error query is not skipped, further execution is not possible

Bug Fixes:

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,14 +50,15 @@ public List<String> createSql(DbAdapter adapter) {
}

@Override
public void execute(MergerContext mergerContext) {
public void execute(final MergerContext mergerContext) {
try {
DataNode node = mergerContext.getDataNode();
DbAdapter adapter = node.getAdapter();
if(needAutoPkSupport()) {
final DataNode node = mergerContext.getDataNode();
final DbAdapter adapter = node.getAdapter();
if (needAutoPkSupport()) {
adapter.getPkGenerator().createAutoPk(
node,
Collections.singletonList(getEntity()));
Collections.singletonList(getEntity()),
mergerContext.getDataMap().getDefaultCatalog());
}
executeSql(mergerContext, adapter.createTable(getEntity()));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,22 +73,26 @@ public JdbcAdapter getAdapter() {
return this.adapter;
}

public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
public void createAutoPk(final DataNode node, final List<DbEntity> dbEntities) throws Exception {
createAutoPk(node, dbEntities, null);
}

public void createAutoPk(final DataNode node, final List<DbEntity> dbEntities, final String catalog) throws Exception {
// check if a table exists

// create AUTO_PK_SUPPORT table
if (!autoPkTableExists(node)) {
runUpdate(node, pkTableCreateString());
runUpdate(node, pkTableCreateString(), catalog);
}

// delete any existing pk entries
if (!dbEntities.isEmpty()) {
runUpdate(node, pkDeleteString(dbEntities));
runUpdate(node, pkDeleteString(dbEntities), catalog);
}

// insert all needed entries
for (DbEntity ent : dbEntities) {
runUpdate(node, pkCreateString(ent.getName()));
for (final DbEntity ent : dbEntities) {
runUpdate(node, pkCreateString(ent.getName()), catalog);
}
}

Expand Down Expand Up @@ -175,15 +179,29 @@ protected boolean autoPkTableExists(DataNode node) throws SQLException {
*
* @throws SQLException in case of query failure.
*/
public int runUpdate(DataNode node, String sql) throws SQLException {
public int runUpdate(final DataNode node, final String sql) throws SQLException {
return runUpdate(node, sql, null);
}


/**
* Runs a JDBC update over a Connection obtained from the DataNode, using the preferred
* catalog name. Returns the number of rows affected by the update.
*
* @throws SQLException
* in case of query failure.
*/
public int runUpdate(final DataNode node, final String sql, final String catalog) throws SQLException {
adapter.getJdbcEventLogger().log(sql);

try (Connection con = node.getDataSource().getConnection()) {
try (Statement upd = con.createStatement()) {
return upd.executeUpdate(sql);
}
}
}
try (Connection con = node.getDataSource().getConnection()) {
if (catalog != null) {
con.setCatalog(catalog);
}try (Statement upd = con.createStatement()) {
return upd.executeUpdate(sql);
}
}
}

/**
* Generates a unique and non-repeating primary key for specified dbEntity.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,19 @@ public interface PkGenerator {
* @param dbEntities a list of entities that require primary key auto-generation
* support
*/
@Deprecated
void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception;

/**
* Generates necessary database objects to provide automatic primary key support for specified catalog.
*
* @param node node that provides access to a DataSource.
* @param dbEntities a list of entities that require primary key auto-generation
* support
* @param catalog catalog name
*/
void createAutoPk(DataNode node, List<DbEntity> dbEntities, String catalog) throws Exception;

/**
* Returns a list of SQL strings needed to generates database objects to provide
* automatic primary support for the list of entities. No actual database operations
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ public int getPkCacheSize() {
}

@Override
public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
public void createAutoPk(final DataNode node, final List<DbEntity> dbEntities, final String catalog) throws Exception {
// For each entity (re)set the unique counter
for (DbEntity entity : dbEntities) {
runUpdate(node, pkCreateString(entity.getName()));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ protected String newIDString(DbEntity ent) {
}

@Override
public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
public void createAutoPk(final DataNode node, final List<DbEntity> dbEntities, final String catalog) throws Exception {
// looks like generating a PK on top of an existing one does not
// result in errors...

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ protected OraclePkGenerator(JdbcAdapter adapter) {
private static final String _SEQUENCE_PREFIX = "pk_";

@Override
public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
public void createAutoPk(final DataNode node, final List<DbEntity> dbEntities, final String catalog) throws Exception {
List<String> sequences = getExistingSequences(node);
// create needed sequences
for (DbEntity dbEntity : dbEntities) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,8 +98,8 @@ protected String pkTableCreateString() {
* node that provides access to a DataSource.
*/
@Override
public void createAutoPk(DataNode node, List<DbEntity> dbEntities) throws Exception {
super.createAutoPk(node, dbEntities);
public void createAutoPk(DataNode node, List<DbEntity> dbEntities, String catalog) throws Exception {
super.createAutoPk(node, dbEntities, catalog);
super.runUpdate(node, safePkProcDrop());
super.runUpdate(node, unsafePkProcCreate());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ public void testLongPk() throws Exception {
!DerbyPkGenerator.class.equals(adapter.getPkGenerator().getClass())) { // AUTO_PK_SUPPORT doesn't allow dropping PK support for a single entity
pkGenerator.dropAutoPk(node, Collections.singletonList(artistEntity));
}
pkGenerator.createAutoPk(node, Collections.singletonList(artistEntity));
pkGenerator.createAutoPk(node, Collections.singletonList(artistEntity), null);
pkGenerator.reset();

Object pk = pkGenerator.generatePk(node, pkAttribute);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ public void setUp() throws Exception {

List<DbEntity> list = new ArrayList<DbEntity>();
list.add(paintingEntity);
pkGenerator.createAutoPk(node, list);
pkGenerator.createAutoPk(node, list, null);
pkGenerator.reset();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -374,7 +374,7 @@ private void dropPKSupport(DataNode node, DataMap map) throws Exception {

private void createPKSupport(DataNode node, DataMap map) throws Exception {
List<DbEntity> filteredEntities = dbEntitiesInInsertOrder(node, map);
node.getAdapter().getPkGenerator().createAutoPk(node, filteredEntities);
node.getAdapter().getPkGenerator().createAutoPk(node, filteredEntities, null);
}

private void createSchema(DataNode node, DataMap map) throws Exception {
Expand Down