fix: migrator-integration-test tests not running #191 (#192)
* rename migrator integration test package

* fix migrator dependency

* extra ci job for migrator it

* move configuration to it module

* Increase waiting time.

* fix issue migrator, clean code

* remove header and remove code change

* skip SQLServer

* skip Run Migrator in old DAO

* fix oracle schema override

* remove deprecated soft delete SQL

* fix ci condition

* fix oracle clean up

* fix sqlserver insert
Roiocam committed May 21, 2024
1 parent 018447e commit ed3dafc
Showing 39 changed files with 210 additions and 376 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/mysql-tests.yml
@@ -48,6 +48,10 @@ jobs:
- name: Run Integration tests for ${{ matrix.name }}
run: sbt ++2.13 "integration/testOnly org.apache.pekko.persistence.jdbc.integration.MySQL*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Run Migrator Integration tests for ${{ matrix.name }}
if: matrix.name == 'MySQL'
run: sbt ++2.13 "migratorIntegration/testOnly org.apache.pekko.persistence.jdbc.migrator.integration.MySQL*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Print logs on failure
if: ${{ failure() }}
run: find . -name "*.log" -exec ./scripts/cat-log.sh {} \;
4 changes: 4 additions & 0 deletions .github/workflows/oracle-tests.yml
@@ -48,6 +48,10 @@ jobs:
- name: Run Integration tests for ${{ matrix.name }}
run: sbt ++2.13 "integration/testOnly org.apache.pekko.persistence.jdbc.integration.Oracle*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Run Migrator Integration tests for ${{ matrix.name }}
if: matrix.name == 'Oracle'
run: sbt ++2.13 "migratorIntegration/testOnly org.apache.pekko.persistence.jdbc.migrator.integration.Oracle*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Print logs on failure
if: ${{ failure() }}
run: find . -name "*.log" -exec ./scripts/cat-log.sh {} \;
4 changes: 4 additions & 0 deletions .github/workflows/postgres-tests.yml
@@ -48,6 +48,10 @@ jobs:
- name: Run Integration tests for ${{ matrix.name }}
run: sbt ++2.13 "integration/testOnly org.apache.pekko.persistence.jdbc.integration.Postgres*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Run Migrator Integration tests for ${{ matrix.name }}
if: matrix.name == 'Postgres'
run: sbt ++2.13 "migratorIntegration/testOnly org.apache.pekko.persistence.jdbc.migrator.integration.Postgres*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Print logs on failure
if: ${{ failure() }}
run: find . -name "*.log" -exec ./scripts/cat-log.sh {} \;
4 changes: 4 additions & 0 deletions .github/workflows/sqlserver-tests.yml
@@ -48,6 +48,10 @@ jobs:
- name: Run Integration tests for ${{ matrix.name }}
run: sbt ++2.13 "integration/testOnly org.apache.pekko.persistence.jdbc.integration.SqlServer*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Run Migrator Integration tests for ${{ matrix.name }}
if: matrix.name == 'SqlServer'
run: sbt ++2.13 "migratorIntegration/testOnly org.apache.pekko.persistence.jdbc.migrator.integration.SqlServer*" ${{ matrix.extraOpts }} -J-XX:+UnlockExperimentalVMOptions -J-XX:+UseJVMCICompiler

- name: Print logs on failure
if: ${{ failure() }}
run: find . -name "*.log" -exec ./scripts/cat-log.sh {} \;
4 changes: 2 additions & 2 deletions build.sbt
@@ -25,7 +25,7 @@ lazy val `pekko-persistence-jdbc` = project
.in(file("."))
.enablePlugins(ScalaUnidocPlugin)
.disablePlugins(MimaPlugin, SitePlugin)
-.aggregate(core, migrator, integration, docs)
+.aggregate(core, migrator, docs)
.settings(
name := "pekko-persistence-jdbc-root",
publish / skip := true)
@@ -63,7 +63,7 @@ lazy val migratorIntegration = project
.settings(name := "pekko-persistence-jdbc-migrator-integration", libraryDependencies ++= Dependencies.Libraries)
.settings(publish / skip := true, doc / sources := Seq.empty, Test / fork := true)
.disablePlugins(MimaPlugin, SitePlugin)
-.dependsOn(core % "compile->compile;test->test")
+.dependsOn(core % "compile->compile;test->test", migrator % "compile->compile;test->test")

lazy val themeSettings = Seq(
pekkoParadoxGithub := Some("https://github.com/apache/pekko-persistence-jdbc"))
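Taken together, the two build.sbt changes drop `integration` from the root aggregate and give `migratorIntegration` a test-scoped dependency on `migrator` in addition to `core`, presumably so the integration specs can reuse test code from the migrator module. A sketch of how the module reads after the change — the `.in(file(...))` path is an assumption, the rest is lifted from the hunk above:

```scala
// Sketch only: the directory name passed to file(...) is hypothetical.
lazy val migratorIntegration = project
  .in(file("migrator-integration-test"))
  .settings(name := "pekko-persistence-jdbc-migrator-integration", libraryDependencies ++= Dependencies.Libraries)
  .settings(publish / skip := true, doc / sources := Seq.empty, Test / fork := true)
  .disablePlugins(MimaPlugin, SitePlugin)
  // The added migrator % "compile->compile;test->test" is the dependency fix:
  // it makes migrator's test sources visible to these specs.
  .dependsOn(core % "compile->compile;test->test", migrator % "compile->compile;test->test")
```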
@@ -8,9 +8,6 @@ DROP TABLE "journal" CASCADE CONSTRAINT
DROP TABLE "snapshot" CASCADE CONSTRAINT
/

DROP TABLE "deleted_to" CASCADE CONSTRAINT
/

DROP TRIGGER "ordering_seq_trigger"
/

File renamed without changes.
@@ -0,0 +1,125 @@
# SPDX-License-Identifier: Apache-2.0

# Oracle does not support returning a column with a case sensitive name so all the column names and table names need
# to be caps. See:
# https://github.com/slick/slick/issues/47
# https://groups.google.com/g/scalaquery/c/U431n-Z2cwM

jdbc-snapshot-store {
tables {

legacy_snapshot {
tableName = "LEGACY_SNAPSHOT"
schemaName = "SYSTEM"
columnNames {
persistenceId = "PERSISTENCE_ID"
sequenceNumber = "SEQUENCE_NUMBER"
created = "CREATED"
snapshot = "SNAPSHOT"
}
}


snapshot {
tableName = "SNAPSHOT"
schemaName = "SYSTEM"
columnNames {
persistenceId = "PERSISTENCE_ID"
sequenceNumber = "SEQUENCE_NUMBER"
created = "CREATED"

snapshotPayload = "SNAPSHOT_PAYLOAD"
snapshotSerId = "SNAPSHOT_SER_ID"
snapshotSerManifest = "SNAPSHOT_SER_MANIFEST"

metaPayload = "META_PAYLOAD"
metaSerId = "META_SER_ID"
metaSerManifest = "META_SER_MANIFEST"
}
}
}
}

jdbc-read-journal {
tables {
event_journal {
tableName = "EVENT_JOURNAL"
schemaName = "SYSTEM"
}
legacy_journal {
tableName = "JOURNAL"
schemaName = "SYSTEM"
}
}
}

jdbc-journal {
tables {
legacy_journal {
tableName = "JOURNAL"
schemaName = "SYSTEM"

columnNames {
ordering = "ORDERING"
deleted = "DELETED"
persistenceId = "PERSISTENCE_ID"
sequenceNumber = "SEQUENCE_NUMBER"
created = "CREATED"
tags = "TAGS"
message = "MESSAGE"
}
}

event_journal {
tableName = "EVENT_JOURNAL"
schemaName = "SYSTEM"

columnNames {
ordering = "ORDERING"
deleted = "DELETED"
persistenceId = "PERSISTENCE_ID"
sequenceNumber = "SEQUENCE_NUMBER"
writer = "WRITER",
writeTimestamp = "WRITE_TIMESTAMP"
adapterManifest = "ADAPTER_MANIFEST"
eventPayload = "EVENT_PAYLOAD"
eventSerId = "EVENT_SER_ID"
eventSerManifest = "EVENT_SER_MANIFEST"
metaPayload = "META_PAYLOAD"
metaSerId = "META_SER_ID"
metaSerManifest = "META_SER_MANIFEST"
}
}

event_tag {
tableName = "EVENT_TAG"
schemaName = "SYSTEM"

columnNames {
eventId = "EVENT_ID"
tag = "TAG"
}
}
}

}

jdbc-durable-state-store {
tables {
durable_state {
tableName = "DURABLE_STATE"
schemaName = "SYSTEM"

columnNames {
globalOffset = "GLOBAL_OFFSET"
persistenceId = "PERSISTENCE_ID"
revision = "REVISION"
statePayload = "STATE_PAYLOAD"
stateSerId = "STATE_SERIAL_ID"
stateSerManifest = "STATE_SERIAL_MANIFEST"
tag = "TAG"
stateTimestamp = "STATE_TIMESTAMP"
}
}
}
}
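A minimal sketch of how a test might layer this Oracle override onto the reference configuration — the resource name and the wiring are assumptions, not something this commit shows:

```scala
import com.typesafe.config.ConfigFactory

object OracleConfigSketch extends App {
  // "oracle-application.conf" is a hypothetical name for the resource above.
  val config = ConfigFactory
    .parseResources("oracle-application.conf")
    .withFallback(ConfigFactory.defaultReference())
    .resolve()

  // The SYSTEM schema setting is presumably what "fix oracle schema override" refers to.
  println(config.getString("jdbc-journal.tables.event_journal.schemaName"))  // SYSTEM
  println(config.getString("jdbc-snapshot-store.tables.snapshot.tableName")) // SNAPSHOT
}
```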
@@ -0,0 +1,41 @@
CREATE SEQUENCE ORDERING__SEQ START WITH 1 INCREMENT BY 1 NOMAXVALUE
/

CREATE TABLE JOURNAL (
ORDERING NUMERIC UNIQUE,
DELETED CHAR(1) DEFAULT 0 NOT NULL check (DELETED in (0, 1)),
PERSISTENCE_ID VARCHAR(255) NOT NULL,
SEQUENCE_NUMBER NUMERIC NOT NULL,
TAGS VARCHAR(255) DEFAULT NULL,
MESSAGE BLOB NOT NULL,
PRIMARY KEY(PERSISTENCE_ID, SEQUENCE_NUMBER)
)
/

CREATE TABLE LEGACY_SNAPSHOT (
PERSISTENCE_ID VARCHAR(255) NOT NULL,
SEQUENCE_NUMBER NUMERIC NOT NULL,
CREATED NUMERIC NOT NULL,
SNAPSHOT BLOB NOT NULL,
PRIMARY KEY(PERSISTENCE_ID,SEQUENCE_NUMBER)
)
/

CREATE OR REPLACE TRIGGER JOURNAL__ORDERING_TRG
BEFORE INSERT ON JOURNAL
FOR EACH ROW
BEGIN
SELECT ORDERING__SEQ.NEXTVAL INTO :NEW.ORDERING FROM DUAL;
END;
/

CREATE OR REPLACE PROCEDURE "reset_legacy_sequence"
IS
l_value NUMBER;
BEGIN
EXECUTE IMMEDIATE 'SELECT ORDERING__SEQ.nextval FROM dual' INTO l_value;
EXECUTE IMMEDIATE 'ALTER SEQUENCE ORDERING__SEQ INCREMENT BY -' || l_value || ' MINVALUE 0';
EXECUTE IMMEDIATE 'SELECT ORDERING__SEQ.nextval FROM dual' INTO l_value;
EXECUTE IMMEDIATE 'ALTER SEQUENCE ORDERING__SEQ INCREMENT BY 1 MINVALUE 0';
END;
/
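The `"reset_legacy_sequence"` procedure rewinds `ORDERING__SEQ` without dropping it: it reads the next value, temporarily sets the increment to the negative of that value so the following NEXTVAL lands back on 0, then restores the increment to 1. A minimal sketch of invoking it from test cleanup over plain JDBC — connection settings are placeholders, and the spec itself goes through its `withStatement` helper (see the MigratorSpec hunk further down):

```scala
import java.sql.DriverManager

// Sketch only: url/user/password are placeholders for whatever the test config provides.
def resetLegacyOrdering(url: String, user: String, password: String): Unit = {
  val conn = DriverManager.getConnection(url, user, password)
  try {
    val stmt = conn.createStatement()
    // Anonymous PL/SQL block; the double quotes preserve the case-sensitive
    // procedure name created above.
    try stmt.executeUpdate("""BEGIN "reset_legacy_sequence"; END;""")
    finally stmt.close()
  } finally conn.close()
}
```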
@@ -0,0 +1,18 @@
-- (ddl lock timeout in seconds) this allows tests which are still writing to the db to finish gracefully
ALTER SESSION SET ddl_lock_timeout = 150
/

DROP TRIGGER JOURNAL__ORDERING_TRG
/

DROP PROCEDURE "reset_legacy_sequence"
/

DROP SEQUENCE ORDERING__SEQ
/

DROP TABLE JOURNAL CASCADE CONSTRAINT
/

DROP TABLE LEGACY_SNAPSHOT CASCADE CONSTRAINT
/
@@ -1,5 +1,3 @@
-IF NOT EXISTS (SELECT 1 FROM sys.objects WHERE object_id = OBJECT_ID(N'"journal"') AND type in (N'U'))
-begin
CREATE TABLE journal (
"ordering" BIGINT IDENTITY(1,1) NOT NULL,
"deleted" BIT DEFAULT 0 NOT NULL,
@@ -9,16 +9,12 @@ CREATE TABLE journal (
"message" VARBINARY(max) NOT NULL,
PRIMARY KEY ("persistence_id", "sequence_number")
)
-CREATE UNIQUE INDEX journal_ordering_idx ON journal (ordering)
-end;
+CREATE UNIQUE INDEX journal_ordering_idx ON journal (ordering);


-IF NOT EXISTS (SELECT 1 FROM sys.objects WHERE object_id = OBJECT_ID(N'"snapshot"') AND type in (N'U'))
CREATE TABLE legacy_snapshot (
"persistence_id" VARCHAR(255) NOT NULL,
"sequence_number" NUMERIC(10,0) NOT NULL,
"created" NUMERIC NOT NULL,
"snapshot" VARBINARY(max) NOT NULL,
PRIMARY KEY ("persistence_id", "sequence_number")
);
-end;
@@ -342,6 +342,7 @@ object MigratorSpec {
tables.foreach { name =>
withStatement(stmt => stmt.executeUpdate(s"""DELETE FROM "$name" """))(db)
}
+withStatement(stmt => stmt.executeUpdate("""BEGIN "reset_legacy_sequence"; END; """))(db)
withStatement(stmt => stmt.executeUpdate("""BEGIN "reset_sequence"; END; """))(db)
}

@@ -144,14 +144,12 @@ final case class JournalMigrator(profile: JdbcProfile)(implicit system: ActorSys
private def writeJournalRowsStatements(
journalSerializedRow: JournalPekkoSerializationRow,
tags: Set[String]): DBIO[Unit] = {
-val journalInsert: DBIO[Long] = newJournalQueries.JournalTable
-.returning(newJournalQueries.JournalTable.map(_.ordering))
-.forceInsert(journalSerializedRow)
-
-val tagInserts =
-newJournalQueries.TagTable ++= tags.map(tag => TagRow(journalSerializedRow.ordering, tag)).toSeq
-
-journalInsert.flatMap(_ => tagInserts.asInstanceOf[DBIO[Unit]])
+for {
+id <- newJournalQueries.JournalTable
+.returning(newJournalQueries.JournalTable.map(_.ordering)) += journalSerializedRow
+tagInserts = tags.map(tag => TagRow(id, tag))
+_ <- newJournalQueries.TagTable ++= tagInserts
+} yield ()
}
}

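The change to `writeJournalRowsStatements` above replaces the `forceInsert` of the legacy `ordering` value with a plain `+=` under `returning`, so the target database assigns `ordering` itself and the tag rows use the freshly returned key instead of the legacy one; it also drops the `asInstanceOf[DBIO[Unit]]` cast in favour of a for-comprehension. Avoiding an explicit insert into the identity column appears to be what the "fix sqlserver insert" bullet refers to. A self-contained sketch of the same Slick pattern on throwaway tables (not the project's real schema):

```scala
import slick.jdbc.H2Profile.api._
import scala.concurrent.ExecutionContext.Implicits.global

object InsertReturningSketch {
  // Stand-ins for the journal and tag tables.
  class Events(t: Tag) extends Table[(Long, String)](t, "events") {
    def ordering = column[Long]("ordering", O.PrimaryKey, O.AutoInc)
    def payload  = column[String]("payload")
    def *        = (ordering, payload)
  }
  class EventTags(t: Tag) extends Table[(Long, String)](t, "event_tags") {
    def eventId = column[Long]("event_id")
    def label   = column[String]("label")
    def *       = (eventId, label)
  }
  val events    = TableQuery[Events]
  val eventTags = TableQuery[EventTags]

  // `+=` lets the database generate the auto-increment key (no explicit value is
  // sent for it), and `returning` hands that key back for the dependent inserts.
  def insertWithTags(payload: String, tags: Set[String]): DBIO[Unit] =
    for {
      id <- events.returning(events.map(_.ordering)) += ((0L, payload))
      _  <- eventTags ++= tags.map(t => (id, t)).toSeq
    } yield ()
}
```

A caller would run it as one action, e.g. `db.run(InsertReturningSketch.insertWithTags("evt-1", Set("blue")).transactionally)`.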
56 changes: 0 additions & 56 deletions migrator/src/test/resources/schema/h2/h2-create-schema.sql

This file was deleted.

5 changes: 0 additions & 5 deletions migrator/src/test/resources/schema/h2/h2-drop-schema.sql

This file was deleted.

