diff --git a/.travis.yml b/.travis.yml index 724c2eba532..9971664b845 100644 --- a/.travis.yml +++ b/.travis.yml @@ -54,6 +54,9 @@ env: - >- BUILD_TYPE=centaurLocal BUILD_MYSQL=5.7 + - >- + BUILD_TYPE=centaurLocal + BUILD_POSTGRESQL=11.3 - >- BUILD_TYPE=centaurPapiV1 BUILD_MYSQL=5.7 diff --git a/CHANGELOG.md b/CHANGELOG.md index 09dd1bd6e54..ddf68b64e27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -55,6 +55,13 @@ YAML](https://cromwell.readthedocs.io/en/stable/Configuring/#yaml) for more info * It is now possible to use `includeKey` and `excludeKey` at the same time. If so, the metadata key must match the `includeKey` **and not** match the `excludeKey` to be included. * It is now possible to use "`calls`" as one of your `excludeKey`s, to request that only workflow metadata gets returned. +### PostgreSQL support + +Cromwell now supports PostgreSQL (version 9.6 or higher, with the Large Object +extension installed) as a database backend. +See [here](https://cromwell.readthedocs.io/en/stable/Configuring/#database) for +instructions for configuring the database connection. + ## 42 Release Notes ### Womtool endpoint diff --git a/core/src/test/resources/application.conf b/core/src/test/resources/application.conf index 6d479c92e23..22ce88ed919 100644 --- a/core/src/test/resources/application.conf +++ b/core/src/test/resources/application.conf @@ -63,13 +63,13 @@ database-test-mariadb { } } -database-test-postgres { +database-test-postgresql { # Run the following to (optionally) drop and (re-)create the database: # psql postgres <<< 'drop database if exists cromwell_test; create database cromwell_test;' profile = "slick.jdbc.PostgresProfile$" db { driver = "org.postgresql.Driver" - url = "jdbc:postgresql://localhost:5432/cromwell_test" + url = "jdbc:postgresql://localhost:5432/cromwell_test?reWriteBatchedInserts=true" url = ${?CROMWELL_BUILD_POSTGRES_JDBC_URL} user = "cromwell" user = ${?CROMWELL_BUILD_POSTGRES_USERNAME} diff --git a/cromwell.example.backends/cromwell.examples.conf b/cromwell.example.backends/cromwell.examples.conf index 4318bce04bd..243b32d4210 100644 --- a/cromwell.example.backends/cromwell.examples.conf +++ b/cromwell.example.backends/cromwell.examples.conf @@ -630,4 +630,17 @@ database { # connectionTimeout = 3000 # } #} + + # Postgresql example + #database { + # profile = "slick.jdbc.PostgresProfile$" + # db { + # driver = "org.postgresql.Driver" + # url = "jdbc:postgresql://localhost:5432/cromwell" + # user = "" + # password = "" + # port = 5432 + # connectionTimeout = 5000 + # } + #} } diff --git a/database/migration/src/main/resources/changelog.xml b/database/migration/src/main/resources/changelog.xml index 5b755a12cba..2accb5c6f94 100644 --- a/database/migration/src/main/resources/changelog.xml +++ b/database/migration/src/main/resources/changelog.xml @@ -77,6 +77,7 @@ + - + - + - + - + Adding some tracking columns for determining eligibility for Call Result Caching. @@ -38,4 +38,4 @@ constraintName="FK_RESULTS_CLONED_FROM" onDelete="SET NULL" /> - \ No newline at end of file + diff --git a/database/migration/src/main/resources/changesets/callcaching.xml b/database/migration/src/main/resources/changesets/callcaching.xml index 69f87ef6554..391c7a6d26e 100644 --- a/database/migration/src/main/resources/changesets/callcaching.xml +++ b/database/migration/src/main/resources/changesets/callcaching.xml @@ -6,7 +6,7 @@ - + One row per cached job result. Stores meta info about which job the result came from. 
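A quick way to confirm that a local server matches what the CHANGELOG entry above requires (PostgreSQL 9.6 or newer with the Large Object extension installed) is a small standalone JDBC check along the following lines. This is only a sketch: the JDBC URL and username mirror the `database-test-postgresql` block above, while the password and the object name are placeholder assumptions and the code is not part of Cromwell.

```scala
import java.sql.DriverManager

// Standalone preflight sketch (not part of Cromwell): verifies the server is
// reachable, prints its version, and checks that the "lo" extension needed for
// Cromwell's Clob/Blob columns is installed.
object PostgresPreflightCheck extends App {
  // URL and user follow core/src/test/resources/application.conf above;
  // the password is a placeholder assumption.
  val connection = DriverManager.getConnection(
    "jdbc:postgresql://localhost:5432/cromwell_test?reWriteBatchedInserts=true",
    "cromwell",
    "changeme")
  try {
    val statement = connection.createStatement()

    val version = statement.executeQuery("SHOW server_version")
    version.next()
    println(s"server_version = ${version.getString(1)}")

    val lo = statement.executeQuery("SELECT count(*) FROM pg_extension WHERE extname = 'lo'")
    lo.next()
    println(s"'lo' extension installed: ${lo.getInt(1) > 0}")
  } finally connection.close()
}
```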
@@ -41,12 +41,12 @@ - + - + One row per hashkey per call cache meta info. Allows us to link hash keys and values to any matching call cache results. @@ -71,13 +71,13 @@ - + - + - + One row per result simpleton in the job result. Simpleton: a single non-complex WDL value. @@ -115,13 +115,13 @@ - + - + - + Change unique constraint for Execution Table to include IDX column. For MySQL this requires first dropping the foreign key constraint, which we then restore after adding back the enhanced diff --git a/database/migration/src/main/resources/changesets/change_max_size_label_entry.xml b/database/migration/src/main/resources/changesets/change_max_size_label_entry.xml index dd82e754bd3..22bf0f6f760 100644 --- a/database/migration/src/main/resources/changesets/change_max_size_label_entry.xml +++ b/database/migration/src/main/resources/changesets/change_max_size_label_entry.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/change_max_size_workflow_url.xml b/database/migration/src/main/resources/changesets/change_max_size_workflow_url.xml index b549371b65d..d9512d4626a 100644 --- a/database/migration/src/main/resources/changesets/change_max_size_workflow_url.xml +++ b/database/migration/src/main/resources/changesets/change_max_size_workflow_url.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/custom_label_entry.xml b/database/migration/src/main/resources/changesets/custom_label_entry.xml index 49f09da8534..a207982f0bc 100644 --- a/database/migration/src/main/resources/changesets/custom_label_entry.xml +++ b/database/migration/src/main/resources/changesets/custom_label_entry.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + @@ -20,7 +20,7 @@ - + - + - + diff --git a/database/migration/src/main/resources/changesets/db_schema.xml b/database/migration/src/main/resources/changesets/db_schema.xml index 6fa0c397eee..1aad77dd3d7 100644 --- a/database/migration/src/main/resources/changesets/db_schema.xml +++ b/database/migration/src/main/resources/changesets/db_schema.xml @@ -2,7 +2,7 @@ - + @@ -104,7 +104,7 @@ - + @@ -131,7 +131,7 @@ - + diff --git a/database/migration/src/main/resources/changesets/docker_hash_store.xml b/database/migration/src/main/resources/changesets/docker_hash_store.xml index 7ec9fbbc4ff..37b3971daf1 100644 --- a/database/migration/src/main/resources/changesets/docker_hash_store.xml +++ b/database/migration/src/main/resources/changesets/docker_hash_store.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Temporary storage area for docker hashes from workflows that are still in progress. 
@@ -26,7 +26,7 @@ - + - + Add a size column corresponding to the sum of all the layers size from the manifest diff --git a/database/migration/src/main/resources/changesets/drop_workflow_uri_and_local_command.xml b/database/migration/src/main/resources/changesets/drop_workflow_uri_and_local_command.xml index 645a496c41b..863c31733ed 100644 --- a/database/migration/src/main/resources/changesets/drop_workflow_uri_and_local_command.xml +++ b/database/migration/src/main/resources/changesets/drop_workflow_uri_and_local_command.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Workflow URI is not needed in the DB. Local jobs don't need to store the command either. diff --git a/database/migration/src/main/resources/changesets/embiggen_detritus_value.xml b/database/migration/src/main/resources/changesets/embiggen_detritus_value.xml index 096f17a4c63..d1c9c397b57 100644 --- a/database/migration/src/main/resources/changesets/embiggen_detritus_value.xml +++ b/database/migration/src/main/resources/changesets/embiggen_detritus_value.xml @@ -6,7 +6,7 @@ - + diff --git a/database/migration/src/main/resources/changesets/embiggen_metadata_value.xml b/database/migration/src/main/resources/changesets/embiggen_metadata_value.xml index e0b1ed38d85..d965a9b9ee1 100644 --- a/database/migration/src/main/resources/changesets/embiggen_metadata_value.xml +++ b/database/migration/src/main/resources/changesets/embiggen_metadata_value.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + @@ -25,7 +25,7 @@ - + @@ -36,7 +36,7 @@ - + diff --git a/database/migration/src/main/resources/changesets/encrypt_and_clear_workflow_options.xml b/database/migration/src/main/resources/changesets/encrypt_and_clear_workflow_options.xml index 2fe2d766100..e44422d1fda 100644 --- a/database/migration/src/main/resources/changesets/encrypt_and_clear_workflow_options.xml +++ b/database/migration/src/main/resources/changesets/encrypt_and_clear_workflow_options.xml @@ -3,11 +3,11 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + - + diff --git a/database/migration/src/main/resources/changesets/enlarge_call_caching_hash_entry_id.xml b/database/migration/src/main/resources/changesets/enlarge_call_caching_hash_entry_id.xml index 9e69c154730..e875f45dc0d 100644 --- a/database/migration/src/main/resources/changesets/enlarge_call_caching_hash_entry_id.xml +++ b/database/migration/src/main/resources/changesets/enlarge_call_caching_hash_entry_id.xml @@ -8,7 +8,7 @@ could both alter the datatype of and add autoincrement to the PK in one shot. The changeset cannot be renamed as the name is part of the Liquibase key that will prevent it from running again in environments that already suffered through the old two-changeset migration. 
--> - + diff --git a/database/migration/src/main/resources/changesets/events_table.xml b/database/migration/src/main/resources/changesets/events_table.xml index 47014b6f8fb..681ba71c452 100644 --- a/database/migration/src/main/resources/changesets/events_table.xml +++ b/database/migration/src/main/resources/changesets/events_table.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/execution_backend_info.xml b/database/migration/src/main/resources/changesets/execution_backend_info.xml index 1368b9d1b4e..551112a6321 100644 --- a/database/migration/src/main/resources/changesets/execution_backend_info.xml +++ b/database/migration/src/main/resources/changesets/execution_backend_info.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + @@ -20,14 +20,14 @@ - + - + @@ -35,7 +35,7 @@ - + insert into EXECUTION_INFO(EXECUTION_ID, INFO_KEY, INFO_VALUE) select EXECUTION_ID, "JES_RUN_ID", JES_RUN_ID from JES_JOB; @@ -51,7 +51,7 @@ - + update EXECUTION e set BACKEND_TYPE = 'JES' where exists (select 1 from JES_JOB jj where jj.EXECUTION_ID = e.EXECUTION_ID); @@ -64,19 +64,19 @@ - + - + - + diff --git a/database/migration/src/main/resources/changesets/failure_table.xml b/database/migration/src/main/resources/changesets/failure_table.xml index 295e8d366ba..c00a73edc5b 100644 --- a/database/migration/src/main/resources/changesets/failure_table.xml +++ b/database/migration/src/main/resources/changesets/failure_table.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/jes_id_update.xml b/database/migration/src/main/resources/changesets/jes_id_update.xml index cbf10fd7f50..b28ec05ac0f 100644 --- a/database/migration/src/main/resources/changesets/jes_id_update.xml +++ b/database/migration/src/main/resources/changesets/jes_id_update.xml @@ -2,7 +2,7 @@ - + - + Temporary storage area for completed jobs which belong to workflows that are still in progress. @@ -49,14 +49,14 @@ - + - + diff --git a/database/migration/src/main/resources/changesets/job_store_simpletons.xml b/database/migration/src/main/resources/changesets/job_store_simpletons.xml index a251021bfb0..d6e60308018 100644 --- a/database/migration/src/main/resources/changesets/job_store_simpletons.xml +++ b/database/migration/src/main/resources/changesets/job_store_simpletons.xml @@ -6,7 +6,7 @@ - + One row per result simpleton in the job result. Simpleton: a single non-complex WDL value. @@ -35,13 +35,13 @@ - + - + - + There is no attempt at migrating the contents of JOB_STORE.JOB_OUTPUTS to simpletons, this just removes the column. 
diff --git a/database/migration/src/main/resources/changesets/job_store_tinyints.xml b/database/migration/src/main/resources/changesets/job_store_tinyints.xml index 1e91a696f09..cfe263d18bc 100644 --- a/database/migration/src/main/resources/changesets/job_store_tinyints.xml +++ b/database/migration/src/main/resources/changesets/job_store_tinyints.xml @@ -3,23 +3,23 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + - + - + - + diff --git a/database/migration/src/main/resources/changesets/lengthen_wdl_value.xml b/database/migration/src/main/resources/changesets/lengthen_wdl_value.xml index a138d58e30c..6595fe7f194 100644 --- a/database/migration/src/main/resources/changesets/lengthen_wdl_value.xml +++ b/database/migration/src/main/resources/changesets/lengthen_wdl_value.xml @@ -4,7 +4,7 @@ xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + WDL_VALUE should accept large strings diff --git a/database/migration/src/main/resources/changesets/local_job_allow_null.xml b/database/migration/src/main/resources/changesets/local_job_allow_null.xml index a4b564f1eeb..81598f88da6 100644 --- a/database/migration/src/main/resources/changesets/local_job_allow_null.xml +++ b/database/migration/src/main/resources/changesets/local_job_allow_null.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + The local jobs don't have process ID and result codes at the start. diff --git a/database/migration/src/main/resources/changesets/metadata_journal.xml b/database/migration/src/main/resources/changesets/metadata_journal.xml index 0eeedc27a5a..99ba1d9382f 100644 --- a/database/migration/src/main/resources/changesets/metadata_journal.xml +++ b/database/migration/src/main/resources/changesets/metadata_journal.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + @@ -36,14 +36,14 @@ - + - + @@ -53,7 +53,7 @@ - + diff --git a/database/migration/src/main/resources/changesets/metadata_journal_subsecond_timestamp.xml b/database/migration/src/main/resources/changesets/metadata_journal_subsecond_timestamp.xml index ae463a19225..d1751af3d9a 100644 --- a/database/migration/src/main/resources/changesets/metadata_journal_subsecond_timestamp.xml +++ b/database/migration/src/main/resources/changesets/metadata_journal_subsecond_timestamp.xml @@ -3,14 +3,14 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + - + - + diff --git a/database/migration/src/main/resources/changesets/move_sql_metadata_changelog.xml b/database/migration/src/main/resources/changesets/move_sql_metadata_changelog.xml index dc6c81b59cc..2edd9137aad 100644 --- a/database/migration/src/main/resources/changesets/move_sql_metadata_changelog.xml +++ b/database/migration/src/main/resources/changesets/move_sql_metadata_changelog.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog 
http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + SELECT COUNT(1) FROM METADATA_ENTRY diff --git a/database/migration/src/main/resources/changesets/nullable_lobs.xml b/database/migration/src/main/resources/changesets/nullable_lobs.xml index 13783585c79..9f977a0aed6 100644 --- a/database/migration/src/main/resources/changesets/nullable_lobs.xml +++ b/database/migration/src/main/resources/changesets/nullable_lobs.xml @@ -2,7 +2,7 @@ - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/database/migration/src/main/resources/changesets/rc.xml b/database/migration/src/main/resources/changesets/rc.xml index 5f03d69ea35..b0425f5e42d 100644 --- a/database/migration/src/main/resources/changesets/rc.xml +++ b/database/migration/src/main/resources/changesets/rc.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Refactor the RC column off LOCAL_JOB up into EXECUTION since it should be usable by all backends. @@ -11,4 +11,4 @@ - \ No newline at end of file + diff --git a/database/migration/src/main/resources/changesets/remove_pre_pbe_tables.xml b/database/migration/src/main/resources/changesets/remove_pre_pbe_tables.xml index e2892b91b93..613eb7bb2ab 100644 --- a/database/migration/src/main/resources/changesets/remove_pre_pbe_tables.xml +++ b/database/migration/src/main/resources/changesets/remove_pre_pbe_tables.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Remove the old pre-pluggable backend tables. 
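For context on how the changesets in this migration module are applied: Cromwell drives these changelogs through Liquibase, which skips any changeset whose `dbms` attribute does not list the connected database, and that is how PostgreSQL-specific entries can coexist with the MySQL/HSQLDB/MariaDB ones. As a rough illustration only (not Cromwell's actual startup code, and with an assumed URL, credentials, and changelog path), applying such a changelog to a PostgreSQL database looks like this:

```scala
import java.sql.DriverManager

import liquibase.{Contexts, Liquibase}
import liquibase.database.jvm.JdbcConnection
import liquibase.resource.ClassLoaderResourceAccessor

// Rough sketch of applying a Liquibase changelog to PostgreSQL; the URL,
// credentials, and changelog path are assumptions for illustration.
object ApplyChangelogSketch extends App {
  val connection = DriverManager.getConnection(
    "jdbc:postgresql://localhost:5432/cromwell", "cromwell", "changeme")
  try {
    val liquibase = new Liquibase(
      "changelog.xml",                     // resolved from the classpath
      new ClassLoaderResourceAccessor(),
      new JdbcConnection(connection))
    // Only changesets whose dbms attribute includes "postgresql" (or that
    // carry no dbms restriction) run against this connection.
    liquibase.update(new Contexts())
  } finally connection.close()
}
```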
diff --git a/database/migration/src/main/resources/changesets/rename_iteration_to_index.xml b/database/migration/src/main/resources/changesets/rename_iteration_to_index.xml index a85483f7908..afd71b2ae83 100644 --- a/database/migration/src/main/resources/changesets/rename_iteration_to_index.xml +++ b/database/migration/src/main/resources/changesets/rename_iteration_to_index.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + - + - \ No newline at end of file + diff --git a/database/migration/src/main/resources/changesets/rename_workflow_options_in_metadata.xml b/database/migration/src/main/resources/changesets/rename_workflow_options_in_metadata.xml index b189e535a8c..7f1cff48ca6 100644 --- a/database/migration/src/main/resources/changesets/rename_workflow_options_in_metadata.xml +++ b/database/migration/src/main/resources/changesets/rename_workflow_options_in_metadata.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/replace_empty_custom_labels.xml b/database/migration/src/main/resources/changesets/replace_empty_custom_labels.xml index 508497a8912..da7b346d3e0 100644 --- a/database/migration/src/main/resources/changesets/replace_empty_custom_labels.xml +++ b/database/migration/src/main/resources/changesets/replace_empty_custom_labels.xml @@ -7,7 +7,7 @@ - + - + Restart/recover migration from 0.19 to 0.21. @@ -31,7 +31,7 @@ - + Restart/recover migration from 0.19 to 0.21. @@ -76,7 +76,7 @@ - + @@ -93,14 +93,14 @@ columnDataType="LONGTEXT"/> - + Restart/recover migration from 0.19 to 0.21. - + Restart/recover migration from 0.19 to 0.21. @@ -130,7 +130,7 @@ - + Restart/recover migration from 0.19 to 0.21. 
diff --git a/database/migration/src/main/resources/changesets/runtime_attributes_table.xml b/database/migration/src/main/resources/changesets/runtime_attributes_table.xml index 4b4cfd368cc..a5ea9bb4611 100644 --- a/database/migration/src/main/resources/changesets/runtime_attributes_table.xml +++ b/database/migration/src/main/resources/changesets/runtime_attributes_table.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/sge.xml b/database/migration/src/main/resources/changesets/sge.xml index a3bca146662..aab8faa4b92 100644 --- a/database/migration/src/main/resources/changesets/sge.xml +++ b/database/migration/src/main/resources/changesets/sge.xml @@ -2,7 +2,7 @@ - + @@ -21,7 +21,7 @@ - + diff --git a/database/migration/src/main/resources/changesets/sge_job_execution_unique_key.xml b/database/migration/src/main/resources/changesets/sge_job_execution_unique_key.xml index 71aa97435db..7a362016bc2 100644 --- a/database/migration/src/main/resources/changesets/sge_job_execution_unique_key.xml +++ b/database/migration/src/main/resources/changesets/sge_job_execution_unique_key.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Adds unique constraints UK_SGE_JOB_EXECUTION_UUID. diff --git a/database/migration/src/main/resources/changesets/standardize_column_names.xml b/database/migration/src/main/resources/changesets/standardize_column_names.xml index a309c8ea28b..50d26c429b4 100644 --- a/database/migration/src/main/resources/changesets/standardize_column_names.xml +++ b/database/migration/src/main/resources/changesets/standardize_column_names.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + Change all Workflow UUID column names to Workflow Execution UUID. @@ -21,7 +21,7 @@ tableName="JOB_STORE"/> - + Choose and implement common call/job identifiers. 
@@ -39,4 +39,4 @@ tableName="METADATA_JOURNAL"/> - \ No newline at end of file + diff --git a/database/migration/src/main/resources/changesets/standardize_column_names_again.xml b/database/migration/src/main/resources/changesets/standardize_column_names_again.xml index 111bcd74fed..a98e35f3448 100644 --- a/database/migration/src/main/resources/changesets/standardize_column_names_again.xml +++ b/database/migration/src/main/resources/changesets/standardize_column_names_again.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/workflow_store_state_widening.xml b/database/migration/src/main/resources/changesets/workflow_store_state_widening.xml index 157fd43bd3b..5d3b37bce87 100644 --- a/database/migration/src/main/resources/changesets/workflow_store_state_widening.xml +++ b/database/migration/src/main/resources/changesets/workflow_store_state_widening.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/workflow_store_type_and_version.xml b/database/migration/src/main/resources/changesets/workflow_store_type_and_version.xml index bdb38ad09ed..59876e1dffa 100644 --- a/database/migration/src/main/resources/changesets/workflow_store_type_and_version.xml +++ b/database/migration/src/main/resources/changesets/workflow_store_type_and_version.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/changesets/workflow_store_workflow_root_column.xml b/database/migration/src/main/resources/changesets/workflow_store_workflow_root_column.xml index dab1df1b71e..7358ffd4347 100644 --- a/database/migration/src/main/resources/changesets/workflow_store_workflow_root_column.xml +++ b/database/migration/src/main/resources/changesets/workflow_store_workflow_root_column.xml @@ -3,7 +3,7 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd"> - + diff --git a/database/migration/src/main/resources/metadata_changesets/postgresql_metadata_schema.xml b/database/migration/src/main/resources/metadata_changesets/postgresql_metadata_schema.xml new file mode 100644 index 00000000000..d1c5835954a --- /dev/null +++ b/database/migration/src/main/resources/metadata_changesets/postgresql_metadata_schema.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/database/migration/src/main/resources/sql_metadata_changelog.xml b/database/migration/src/main/resources/sql_metadata_changelog.xml index 05500b36bc5..809942372cc 100644 --- a/database/migration/src/main/resources/sql_metadata_changelog.xml +++ b/database/migration/src/main/resources/sql_metadata_changelog.xml @@ -11,5 +11,6 @@ + diff --git 
a/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala index 622e62aaf33..d28801e7fb5 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/MetadataSlickDatabase.scala @@ -34,7 +34,7 @@ class MetadataSlickDatabase(originalDatabaseConfig: Config) override def addMetadataEntries(metadataEntries: Iterable[MetadataEntry]) (implicit ec: ExecutionContext): Future[Unit] = { val action = DBIO.seq(metadataEntries.grouped(insertBatchSize).map(dataAccess.metadataEntries ++= _).toSeq:_*) - runAction(action) + runLobAction(action) } override def metadataEntryExists(workflowExecutionUuid: String)(implicit ec: ExecutionContext): Future[Boolean] = { diff --git a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala index 725ee999451..b1d68f5c590 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/SlickDatabase.scala @@ -8,6 +8,7 @@ import com.typesafe.config.{Config, ConfigFactory} import cromwell.database.slick.tables.DataAccessComponent import cromwell.database.sql.SqlDatabase import net.ceedubs.ficus.Ficus._ +import org.postgresql.util.{PSQLException, ServerErrorMessage} import org.slf4j.LoggerFactory import slick.basic.DatabaseConfig import slick.jdbc.{JdbcCapabilities, JdbcProfile, TransactionIsolation} @@ -58,6 +59,7 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend override val urlKey = SlickDatabase.urlKey(originalDatabaseConfig) protected val slickConfig = DatabaseConfig.forConfig[JdbcProfile]("", databaseConfig) + lazy val isPostgresql = databaseConfig.getOrElse("db.driver", "unknown") == "org.postgresql.Driver" /* Not a def because we need to have a "stable identifier" for the imports below. @@ -167,10 +169,22 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend runActionInternal(action.transactionally.withTransactionIsolation(isolationLevel)) } + /* Note that this is only appropriate for actions that do not involve Blob + * or Clob fields in Postgres, since large object support requires running + * transactionally. Use runLobAction instead, which will still run in + * auto-commit mode when using other database engines. 
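The `isPostgresql` flag introduced above is what later routes Clob/Blob work through a transaction instead of a pinned auto-commit session (see `runLobAction` in the next hunk). A self-contained sketch of the same driver detection, written against the plain Typesafe Config API rather than the Ficus `getOrElse` used in the diff, purely for illustration:

```scala
import com.typesafe.config.ConfigFactory

// Standalone sketch of the driver detection behind `isPostgresql`; the inline
// HOCON is illustrative only, Cromwell reads its real `database` block from
// the application configuration.
object DriverDetectionSketch extends App {
  val databaseConfig = ConfigFactory.parseString(
    """|db {
       |  driver = "org.postgresql.Driver"
       |  url = "jdbc:postgresql://localhost:5432/cromwell"
       |}
       |""".stripMargin)

  val isPostgresql =
    databaseConfig.hasPath("db.driver") &&
      databaseConfig.getString("db.driver") == "org.postgresql.Driver"

  // On PostgreSQL, large objects (Clob/Blob) cannot be used in auto-commit
  // mode, so actions touching them must run transactionally.
  println(s"route Clob/Blob actions through a transaction: $isPostgresql")
}
```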
+ */ protected[this] def runAction[R](action: DBIO[R]): Future[R] = { runActionInternal(action.withPinnedSession) } + /* Wrapper for queries where Clob/Blob types are used + * https://stackoverflow.com/questions/3164072/large-objects-may-not-be-used-in-auto-commit-mode#answer-3164352 + */ + protected[this] def runLobAction[R](action: DBIO[R]): Future[R] = { + if (isPostgresql) runTransaction(action) else runAction(action) + } + private def runActionInternal[R](action: DBIO[R]): Future[R] = { //database.run(action) <-- See comment above private val actionThreadPool Future { @@ -186,6 +200,33 @@ abstract class SlickDatabase(override val originalDatabaseConfig: Config) extend case _ => /* keep going */ } throw rollbackException + case pSQLException: PSQLException => + val detailOption = for { + message <- Option(pSQLException.getServerErrorMessage) + detail <- Option(message.getDetail) + } yield detail + + detailOption match { + case None => throw pSQLException + case Some(_) => + /* + The exception may contain possibly sensitive row contents within the DETAIL section. Remove it. + + Tried adjusting this using configuration: + - log_error_verbosity=TERSE + - log_min_messages=PANIC + - client_min_messages=ERROR + + Instead resorting to reflection. + */ + val message = pSQLException.getServerErrorMessage + val field = classOf[ServerErrorMessage].getDeclaredField("m_mesgParts") + field.setAccessible(true) + val parts = field.get(message).asInstanceOf[java.util.Map[Character, String]] + parts.remove('D') + // The original exception has already stored the DETAIL into a string. So we must create a new Exception. + throw new PSQLException(message) + } } }(actionExecutionContext) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala index c36b8047d11..db7c1f3d826 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingDetritusEntryComponent.scala @@ -1,6 +1,6 @@ package cromwell.database.slick.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob import cromwell.database.sql.tables.CallCachingDetritusEntry @@ -16,7 +16,7 @@ trait CallCachingDetritusEntryComponent { def detritusKey = column[String]("DETRITUS_KEY", O.Length(255)) - def detritusValue = column[Option[Clob]]("DETRITUS_VALUE") + def detritusValue = column[Option[SerialClob]]("DETRITUS_VALUE") def callCachingEntryId = column[Int]("CALL_CACHING_ENTRY_ID") diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala index 7170ae9025c..38a095a9682 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CallCachingSimpletonEntryComponent.scala @@ -1,6 +1,6 @@ package cromwell.database.slick.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob import cromwell.database.sql.tables.CallCachingSimpletonEntry @@ -16,7 +16,7 @@ trait CallCachingSimpletonEntryComponent { def simpletonKey = column[String]("SIMPLETON_KEY", O.Length(255)) - def simpletonValue = column[Option[Clob]]("SIMPLETON_VALUE") + def simpletonValue = 
column[Option[SerialClob]]("SIMPLETON_VALUE") def wdlType = column[String]("WDL_TYPE", O.Length(255)) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala index 8121b3705ae..153b6815810 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/CustomLabelEntryComponent.scala @@ -1,12 +1,14 @@ package cromwell.database.slick.tables import cromwell.database.sql.tables.CustomLabelEntry +import shapeless.syntax.std.tuple._ import slick.model.ForeignKeyAction.Cascade trait CustomLabelEntryComponent { this: DriverComponent with WorkflowMetadataSummaryEntryComponent => + import driver.api.TupleMethods._ import driver.api._ class CustomLabelEntries(tag: Tag) @@ -19,8 +21,14 @@ trait CustomLabelEntryComponent { def workflowExecutionUuid = column[String]("WORKFLOW_EXECUTION_UUID", O.Length(100)) - override def * = (customLabelKey, customLabelValue, workflowExecutionUuid, - customLabelEntryId.?) <> (CustomLabelEntry.tupled, CustomLabelEntry.unapply) + def baseProjection = (customLabelKey, customLabelValue, workflowExecutionUuid) + + override def * = baseProjection ~ customLabelEntryId.? <> (CustomLabelEntry.tupled, CustomLabelEntry.unapply) + + def forUpdate = baseProjection.shaped <> ( + tuple => CustomLabelEntry.tupled(tuple :+ None), + CustomLabelEntry.unapply(_: CustomLabelEntry).map(_.reverse.tail.reverse) + ) def fkCustomLabelEntryWorkflowExecutionUuid = foreignKey("FK_CUSTOM_LABEL_ENTRY_WORKFLOW_EXECUTION_UUID", workflowExecutionUuid, workflowMetadataSummaryEntries)(_.workflowExecutionUuid, onDelete = Cascade) @@ -41,7 +49,7 @@ trait CustomLabelEntryComponent { customLabelEntry <- customLabelEntries if customLabelEntry.workflowExecutionUuid === workflowExecutionUuid && customLabelEntry.customLabelKey === labelKey - } yield customLabelEntry) + } yield customLabelEntry.forUpdate) def existsWorkflowIdLabelKeyAndValue(workflowId: Rep[String], labelKey: Rep[String], diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala index d5f78601862..f9343883f4d 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/DriverComponent.scala @@ -1,7 +1,55 @@ package cromwell.database.slick.tables -import slick.jdbc.JdbcProfile +import java.sql.{Blob, Clob} + +import javax.sql.rowset.serial.{SerialBlob, SerialClob} +import org.apache.commons.io.IOUtils +import slick.jdbc.{JdbcProfile, PostgresProfile} trait DriverComponent { val driver: JdbcProfile + + import driver.api._ + + /** Ensure clobs are retrieved inside the transaction, not after */ + implicit val serialClobColumnType = MappedColumnType.base[SerialClob, Clob]( + identity, + { + case serialClob: SerialClob => serialClob + case clob => + /* + PostgreSQL's JDBC driver has issues with non-ascii characters. + https://stackoverflow.com/questions/5043992/postgres-utf-8-clobs-with-jdbc + + It returns bad values for length() and getAsciiStream(), and causes an extra null bytes to be added at the end + of the resultant SerialClob. + + Example via copy_workflow_outputs/unscattered.wdl: + + "... Enfin un peu de francais pour contrer ce raz-de-marée anglais ! ..." 
+ + The 'é' in results in an extra null byte at the end of getAsciiStream(). + */ + val string = IOUtils.toString(clob.getCharacterStream) + new SerialClob(string.toCharArray) + } + ) + + /** Ensure clobs are retrieved inside the transaction, not after */ + implicit val serialBlobColumnType = MappedColumnType.base[SerialBlob, Blob]( + identity, + { + case serialBlob: SerialBlob => serialBlob + case blob => new SerialBlob(blob) + } + ) + + private val shouldQuote = this.driver match { + // https://stackoverflow.com/questions/43111996/why-postgresql-does-not-like-uppercase-table-names#answer-43112096 + case PostgresProfile => true + case _ => false + } + + /** Adds quotes around the string if required by the DBMS. */ + def quoted(string: String) = if (shouldQuote) s""""$string"""" else string } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala index 6422e535ef7..de5bfe57698 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreEntryComponent.scala @@ -1,6 +1,6 @@ package cromwell.database.slick.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob import cromwell.database.sql.tables.JobStoreEntry @@ -26,7 +26,7 @@ trait JobStoreEntryComponent { def returnCode = column[Option[Int]]("RETURN_CODE") // Only set for failure: - def exceptionMessage = column[Option[Clob]]("EXCEPTION_MESSAGE") + def exceptionMessage = column[Option[SerialClob]]("EXCEPTION_MESSAGE") def retryableFailure = column[Option[Boolean]]("RETRYABLE_FAILURE") diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala index 40d3f094ea3..e2e9c83dac9 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/JobStoreSimpletonEntryComponent.scala @@ -1,6 +1,6 @@ package cromwell.database.slick.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob import cromwell.database.sql.tables.JobStoreSimpletonEntry import slick.model.ForeignKeyAction.Cascade @@ -16,7 +16,7 @@ trait JobStoreSimpletonEntryComponent { def simpletonKey = column[String]("SIMPLETON_KEY", O.Length(255)) - def simpletonValue = column[Option[Clob]]("SIMPLETON_VALUE") + def simpletonValue = column[Option[SerialClob]]("SIMPLETON_VALUE") def wdlType = column[String]("WDL_TYPE", O.Length(255)) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala index 9e8106aebd4..6e440fdeebf 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/MetadataEntryComponent.scala @@ -1,6 +1,7 @@ package cromwell.database.slick.tables -import java.sql.{Clob, Timestamp} +import java.sql.Timestamp +import javax.sql.rowset.serial.SerialClob import cromwell.database.sql.tables.MetadataEntry @@ -35,7 +36,7 @@ trait MetadataEntryComponent { def metadataKey = column[String]("METADATA_KEY", O.Length(255)) - def metadataValue = column[Option[Clob]]("METADATA_VALUE") + def metadataValue = 
column[Option[SerialClob]]("METADATA_VALUE") def metadataValueType = column[Option[String]]("METADATA_VALUE_TYPE", O.Length(10)) diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala index 14721104971..d5c5273d245 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowMetadataSummaryEntryComponent.scala @@ -1,11 +1,11 @@ package cromwell.database.slick.tables import java.sql.Timestamp -import java.util.concurrent.atomic.AtomicInteger import cats.data import cats.data.NonEmptyList import cromwell.database.sql.tables.WorkflowMetadataSummaryEntry +import shapeless.syntax.std.tuple._ import slick.jdbc.{GetResult, PositionedParameters, SQLActionBuilder} //noinspection SqlDialectInspection @@ -13,6 +13,7 @@ trait WorkflowMetadataSummaryEntryComponent { this: DriverComponent with CustomLabelEntryComponent with MetadataEntryComponent => + import driver.api.TupleMethods._ import driver.api._ class WorkflowMetadataSummaryEntries(tag: Tag) @@ -35,9 +36,15 @@ trait WorkflowMetadataSummaryEntryComponent { def rootWorkflowExecutionUuid = column[Option[String]]("ROOT_WORKFLOW_EXECUTION_UUID", O.Length(100)) - override def * = (workflowExecutionUuid, workflowName, workflowStatus, startTimestamp, endTimestamp, - submissionTimestamp, parentWorkflowExecutionUuid, rootWorkflowExecutionUuid, - workflowMetadataSummaryEntryId.?) <> (WorkflowMetadataSummaryEntry.tupled, WorkflowMetadataSummaryEntry.unapply) + def baseProjection = (workflowExecutionUuid, workflowName, workflowStatus, startTimestamp, endTimestamp, + submissionTimestamp, parentWorkflowExecutionUuid, rootWorkflowExecutionUuid) + + override def * = baseProjection ~ workflowMetadataSummaryEntryId.? 
<> (WorkflowMetadataSummaryEntry.tupled, WorkflowMetadataSummaryEntry.unapply) + + def forUpdate = baseProjection.shaped <> ( + tuple => WorkflowMetadataSummaryEntry.tupled(tuple :+ None), + WorkflowMetadataSummaryEntry.unapply(_: WorkflowMetadataSummaryEntry).map(_.reverse.tail.reverse) + ) def ucWorkflowMetadataSummaryEntryWeu = index("UC_WORKFLOW_METADATA_SUMMARY_ENTRY_WEU", workflowExecutionUuid, unique = true) @@ -63,7 +70,7 @@ trait WorkflowMetadataSummaryEntryComponent { (workflowExecutionUuid: Rep[String]) => for { workflowMetadataSummaryEntry <- workflowMetadataSummaryEntries if workflowMetadataSummaryEntry.workflowExecutionUuid === workflowExecutionUuid - } yield workflowMetadataSummaryEntry) + } yield workflowMetadataSummaryEntry.forUpdate) val workflowMetadataSummaryEntryExistsForWorkflowExecutionUuid = Compiled( (workflowExecutionUuid: Rep[String]) => (for { @@ -81,8 +88,8 @@ trait WorkflowMetadataSummaryEntryComponent { def concat(a: SQLActionBuilder, b: SQLActionBuilder): SQLActionBuilder = { SQLActionBuilder(a.queryParts ++ b.queryParts, (p: Unit, pp: PositionedParameters) => { - a.unitPConv.apply(p, pp) - b.unitPConv.apply(p, pp) + a.unitPConv.apply(p, pp) + b.unitPConv.apply(p, pp) }) } @@ -118,79 +125,130 @@ trait WorkflowMetadataSummaryEntryComponent { endTimestampOption: Option[Timestamp], includeSubworkflows: Boolean): SQLActionBuilder = { - val summaryTableAlias = "summaryTable" - val labelsOrTableAlias = "labelsOrMixin" - val labelsAndTableAliases = labelAndKeyLabelValues.zipWithIndex.map { case (labelPair, i) => s"labelAndTable$i" -> labelPair }.toMap + val customLabelEntryTable = quoted("CUSTOM_LABEL_ENTRY") + val workflowMetadataSummaryEntryTable = quoted("WORKFLOW_METADATA_SUMMARY_ENTRY") + + val workflowExecutionUuidColumn = quoted("WORKFLOW_EXECUTION_UUID") + val customLabelKeyColumn = quoted("CUSTOM_LABEL_KEY") + val customLabelValueColumn = quoted("CUSTOM_LABEL_VALUE") + val parentWorkflowExecutionUuidColumn = quoted("PARENT_WORKFLOW_EXECUTION_UUID") + + val summaryTableAlias = quoted("summaryTable") + val labelsOrTableAlias = quoted("labelsOrMixin") + val labelsAndTableAliases = labelAndKeyLabelValues.zipWithIndex.map { + case (labelPair, i) => quoted(s"labelAndTable$i") -> labelPair + }.toMap + + val selectColumns = List( + "WORKFLOW_EXECUTION_UUID", + "WORKFLOW_NAME", + "WORKFLOW_STATUS", + "START_TIMESTAMP", + "END_TIMESTAMP", + "SUBMISSION_TIMESTAMP", + "PARENT_WORKFLOW_EXECUTION_UUID", + "ROOT_WORKFLOW_EXECUTION_UUID", + "WORKFLOW_METADATA_SUMMARY_ENTRY_ID", + ) + .map(quoted) + .mkString(s"$summaryTableAlias.", ", ", "") val select = selectOrCount match { case Select => - sql"""|SELECT #$summaryTableAlias.WORKFLOW_EXECUTION_UUID, - | #$summaryTableAlias.WORKFLOW_NAME, - | #$summaryTableAlias.WORKFLOW_STATUS, - | #$summaryTableAlias.START_TIMESTAMP, - | #$summaryTableAlias.END_TIMESTAMP, - | #$summaryTableAlias.SUBMISSION_TIMESTAMP, - | #$summaryTableAlias.PARENT_WORKFLOW_EXECUTION_UUID, - | #$summaryTableAlias.ROOT_WORKFLOW_EXECUTION_UUID, - | #$summaryTableAlias.WORKFLOW_METADATA_SUMMARY_ENTRY_ID - | """.stripMargin + sql"""|SELECT #$selectColumns + |""".stripMargin case Count => - sql"""SELECT COUNT(1) - | """.stripMargin + sql"""|SELECT COUNT(1) + |""".stripMargin } val labelOrJoin = if (labelOrKeyLabelValues.nonEmpty) { Option( - sql""" JOIN CUSTOM_LABEL_ENTRY #$labelsOrTableAlias on #$summaryTableAlias.WORKFLOW_EXECUTION_UUID = #$labelsOrTableAlias.WORKFLOW_EXECUTION_UUID - | """.stripMargin) + sql"""| JOIN #$customLabelEntryTable 
#$labelsOrTableAlias + | ON #$summaryTableAlias.#$workflowExecutionUuidColumn + | = #$labelsOrTableAlias.#$workflowExecutionUuidColumn + |""".stripMargin) } else None val labelAndJoins = labelsAndTableAliases.toList.map { case (labelAndTableAlias, _) => - sql""" JOIN CUSTOM_LABEL_ENTRY #$labelAndTableAlias on #$summaryTableAlias.WORKFLOW_EXECUTION_UUID = #$labelAndTableAlias.WORKFLOW_EXECUTION_UUID - | """.stripMargin + sql"""| JOIN #$customLabelEntryTable #$labelAndTableAlias + | ON #$summaryTableAlias.#$workflowExecutionUuidColumn + | = #$labelAndTableAlias.#$workflowExecutionUuidColumn + |""".stripMargin } val from = concatNel(NonEmptyList.of( - sql"""FROM WORKFLOW_METADATA_SUMMARY_ENTRY #$summaryTableAlias - | """.stripMargin) ++ labelOrJoin.toList ++ labelAndJoins ) + sql"""|FROM #$workflowMetadataSummaryEntryTable #$summaryTableAlias + |""".stripMargin) ++ labelOrJoin.toList ++ labelAndJoins) + + def makeSetConstraint(column: String, elements: Set[String]) = { + val list = elements.toList.map(element => sql"""#$summaryTableAlias.#${quoted(column)} = $element""") + NonEmptyList.fromList(list).map(or).toList + } - val statusConstraint = NonEmptyList.fromList(workflowStatuses.toList.map(status => sql"""#$summaryTableAlias.WORKFLOW_STATUS=$status""")).map(or).toList - val nameConstraint = NonEmptyList.fromList(workflowNames.toList.map(name => sql"""#$summaryTableAlias.WORKFLOW_NAME=$name""")).map(or).toList - val idConstraint = NonEmptyList.fromList(workflowExecutionUuids.toList.map(uuid => sql"""#$summaryTableAlias.WORKFLOW_EXECUTION_UUID=$uuid""")).map(or).toList - val submissionTimeConstraint = submissionTimestampOption.map(ts => sql"""#$summaryTableAlias.SUBMISSION_TIMESTAMP>=$ts""").toList - val startTimeConstraint = startTimestampOption.map(ts => sql"""#$summaryTableAlias.START_TIMESTAMP>=$ts""").toList - val endTimeConstraint = endTimestampOption.map(ts => sql"""#$summaryTableAlias.END_TIMESTAMP<=$ts""").toList + def makeTimeConstraint(column: String, comparison: String, elementOption: Option[Timestamp]) = { + elementOption.map(element => sql"""#$summaryTableAlias.#${quoted(column)} #$comparison $element""").toList + } + + val statusConstraint = makeSetConstraint("WORKFLOW_STATUS", workflowStatuses) + val nameConstraint = makeSetConstraint("WORKFLOW_NAME", workflowNames) + val idConstraint = makeSetConstraint("WORKFLOW_EXECUTION_UUID", workflowExecutionUuids) + + val submissionTimeConstraint = makeTimeConstraint("SUBMISSION_TIMESTAMP", ">=", submissionTimestampOption) + val startTimeConstraint = makeTimeConstraint("START_TIMESTAMP", ">=", startTimestampOption) + val endTimeConstraint = makeTimeConstraint("END_TIMESTAMP", "<=", endTimestampOption) // *ALL* of the labelAnd list of KV pairs must exist: - val labelsAndConstraint = NonEmptyList.fromList(labelsAndTableAliases.toList.map { case (labelsAndTableAlias, (labelKey, labelValue)) => - and(NonEmptyList.of(sql"#$labelsAndTableAlias.custom_label_key=$labelKey") :+ sql"#$labelsAndTableAlias.custom_label_value=$labelValue") + val labelsAndConstraint = NonEmptyList.fromList(labelsAndTableAliases.toList.map { + case (labelsAndTableAlias, (labelKey, labelValue)) => + and(NonEmptyList.of( + sql"""#$labelsAndTableAlias.#$customLabelKeyColumn = $labelKey""", + sql"""#$labelsAndTableAlias.#$customLabelValueColumn = $labelValue""", + )) }).map(and).toList // At least one of the labelOr list of KV pairs must exist: - val labelOrConstraint = NonEmptyList.fromList(labelOrKeyLabelValues.toList.map { case (k, v) => - 
and(NonEmptyList.of(sql"#$labelsOrTableAlias.custom_label_key=$k") :+ sql"#$labelsOrTableAlias.custom_label_value=$v") + val labelOrConstraint = NonEmptyList.fromList(labelOrKeyLabelValues.toList.map { + case (labelKey, labelValue) => + and(NonEmptyList.of( + sql"""#$labelsOrTableAlias.#$customLabelKeyColumn = $labelKey""", + sql"""#$labelsOrTableAlias.#$customLabelValueColumn = $labelValue""", + )) }).map(or).toList - val mixinTableCounter = new AtomicInteger(0) + var mixinTableCounter = 0 def labelExists(labelKey: String, labelValue: String) = { - val tableName = s"labelsMixin" + mixinTableCounter.getAndIncrement() - sql"""EXISTS(SELECT 1 from CUSTOM_LABEL_ENTRY #$tableName WHERE ((#$tableName.WORKFLOW_EXECUTION_UUID = #$summaryTableAlias.WORKFLOW_EXECUTION_UUID) AND (#$tableName.CUSTOM_LABEL_KEY = $labelKey) AND (#$tableName.CUSTOM_LABEL_VALUE = $labelValue)))""" + val tableName = quoted(s"labelsMixin" + mixinTableCounter) + mixinTableCounter += 1 + sql"""|EXISTS ( + | SELECT 1 FROM #$customLabelEntryTable #$tableName + | WHERE ( + | (#$tableName.#$workflowExecutionUuidColumn = #$summaryTableAlias.#$workflowExecutionUuidColumn) + | AND (#$tableName.#$customLabelKeyColumn = $labelKey) + | AND (#$tableName.#$customLabelValueColumn = $labelValue) + | ) + |) + |""".stripMargin } // *ALL* of the excludeLabelOr list of KV pairs must *NOT* exist: - val excludeLabelsOrConstraint = NonEmptyList.fromList(excludeLabelOrValues.toList.map { case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) } ).map(and).toList + val excludeLabelsOrConstraint = NonEmptyList.fromList(excludeLabelOrValues.toList map { + case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) + }).map(and).toList // At least one of the excludeLabelAnd list of KV pairs must *NOT* exist: - val excludeLabelsAndConstraint = NonEmptyList.fromList(excludeLabelAndValues.toList.map { case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) } ).map(or).toList + val excludeLabelsAndConstraint = NonEmptyList.fromList(excludeLabelAndValues.toList.map { + case (labelKey, labelValue) => not(labelExists(labelKey, labelValue)) + }).map(or).toList val includeSubworkflowsConstraint = if (includeSubworkflows) List.empty else { - List(sql"""#$summaryTableAlias.PARENT_WORKFLOW_EXECUTION_UUID IS NULL""".stripMargin) + List(sql"""#$summaryTableAlias.#$parentWorkflowExecutionUuidColumn IS NULL""".stripMargin) } val constraintList = - statusConstraint ++ + statusConstraint ++ nameConstraint ++ idConstraint ++ submissionTimeConstraint ++ @@ -274,18 +332,20 @@ trait WorkflowMetadataSummaryEntryComponent { ) val paginationAddendum: List[SQLActionBuilder] = (page, pageSize) match { - case (Some(p), Some(ps)) => List(sql""" LIMIT #${Integer.max(p-1, 0) * ps},#$ps """) - case (None, Some(ps)) => List(sql""" LIMIT 0,#$ps """) + case (Some(p), Some(ps)) => List(sql""" LIMIT #$ps OFFSET #${ps * ((p - 1) max 0)}""") + case (None, Some(ps)) => List(sql""" LIMIT #$ps OFFSET 0""") case _ => List.empty } - val orderByAddendum = sql""" ORDER BY WORKFLOW_METADATA_SUMMARY_ENTRY_ID DESC - | """.stripMargin + val orderByAddendum = + sql"""| ORDER BY #${quoted("WORKFLOW_METADATA_SUMMARY_ENTRY_ID")} DESC + |""".stripMargin // NB you can preview the prepared statement created here by using, for example: println(result.statements.head) - concatNel((NonEmptyList.of(mainQuery) :+ orderByAddendum) ++ paginationAddendum) - .as[WorkflowMetadataSummaryEntry](rconv = GetResult { r => + val fullQuery = concatNel(NonEmptyList(mainQuery, 
orderByAddendum :: paginationAddendum)) + + fullQuery.as[WorkflowMetadataSummaryEntry](rconv = GetResult { r => WorkflowMetadataSummaryEntry(r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<, r.<<) }) } diff --git a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala index 3d99dafcd0c..d3888c8aa49 100644 --- a/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala +++ b/database/sql/src/main/scala/cromwell/database/slick/tables/WorkflowStoreEntryComponent.scala @@ -1,7 +1,7 @@ package cromwell.database.slick.tables -import java.sql.{Blob, Clob, Timestamp} - +import java.sql.Timestamp +import javax.sql.rowset.serial.{SerialBlob, SerialClob} import cromwell.database.sql.tables.WorkflowStoreEntry trait WorkflowStoreEntryComponent { @@ -21,21 +21,21 @@ trait WorkflowStoreEntryComponent { def workflowTypeVersion = column[Option[String]]("WORKFLOW_TYPE_VERSION", O.Length(255)) - def workflowDefinition = column[Option[Clob]]("WORKFLOW_DEFINITION") + def workflowDefinition = column[Option[SerialClob]]("WORKFLOW_DEFINITION") def workflowUrl = column[Option[String]]("WORKFLOW_URL", O.Length(2000)) - def workflowInputs = column[Option[Clob]]("WORKFLOW_INPUTS") + def workflowInputs = column[Option[SerialClob]]("WORKFLOW_INPUTS") - def workflowOptions = column[Option[Clob]]("WORKFLOW_OPTIONS") + def workflowOptions = column[Option[SerialClob]]("WORKFLOW_OPTIONS") - def customLabels = column[Clob]("CUSTOM_LABELS") + def customLabels = column[SerialClob]("CUSTOM_LABELS") def workflowState = column[String]("WORKFLOW_STATE", O.Length(20)) def submissionTime = column[Timestamp]("SUBMISSION_TIME") - def importsZip = column[Option[Blob]]("IMPORTS_ZIP") + def importsZip = column[Option[SerialBlob]]("IMPORTS_ZIP") def cromwellId = column[Option[String]]("CROMWELL_ID", O.Length(100)) diff --git a/database/sql/src/main/scala/cromwell/database/sql/SqlConverters.scala b/database/sql/src/main/scala/cromwell/database/sql/SqlConverters.scala index 44a7a5e8853..fe1bf5f7dba 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/SqlConverters.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/SqlConverters.scala @@ -32,12 +32,16 @@ object SqlConverters { } implicit class ClobToRawString(val clob: Clob) extends AnyVal { - // yes, it starts at 1 - def toRawString: String = clob.getSubString(1, clob.length.toInt) + def toRawString: String = { + // See notes on empty clob issues in StringToClobOption + val length = clob.length.toInt + // yes, it starts at 1 + if (length == 0) "" else clob.getSubString(1, length) + } } implicit class StringOptionToClobOption(val strOption: Option[String]) extends AnyVal { - def toClobOption: Option[Clob] = strOption.flatMap(_.toClobOption) + def toClobOption: Option[SerialClob] = strOption.flatMap(_.toClobOption) } implicit class StringToClobOption(val str: String) extends AnyVal { @@ -52,17 +56,21 @@ object SqlConverters { import eu.timepit.refined.api.Refined import eu.timepit.refined.collection.NonEmpty - def toClobOption: Option[Clob] = if (str.isEmpty) None else Option(new SerialClob(str.toCharArray)) + def toClobOption: Option[SerialClob] = if (str.isEmpty) None else Option(new SerialClob(str.toCharArray)) - def toClob(default: String Refined NonEmpty): Clob = { + def toClob(default: String Refined NonEmpty): SerialClob = { val nonEmpty = if (str.isEmpty) default.value else str new 
SerialClob(nonEmpty.toCharArray) } } implicit class BlobToBytes(val blob: Blob) extends AnyVal { - // yes, it starts at 1 - def toBytes: Array[Byte] = blob.getBytes(1, blob.length.toInt) + def toBytes: Array[Byte] = { + // See notes on empty blob issues in BytesOptionToBlob + val length = blob.length.toInt + // yes, it starts at 1 + if (length == 0) Array.empty else blob.getBytes(1, length) + } } implicit class BlobOptionToBytes(val blobOption: Option[Blob]) extends AnyVal { @@ -79,11 +87,11 @@ object SqlConverters { https://github.com/apache/derby/blob/10.13/java/engine/org/apache/derby/iapi/types/HarmonySerialBlob.java#L111 OK! -> https://github.com/arteam/hsqldb/blob/2.3.4/src/org/hsqldb/jdbc/JDBCBlob.java#L184 */ - def toBlobOption: Option[Blob] = bytesOption.flatMap(_.toBlobOption) + def toBlobOption: Option[SerialBlob] = bytesOption.flatMap(_.toBlobOption) } implicit class BytesToBlobOption(val bytes: Array[Byte]) extends AnyVal { - def toBlobOption: Option[Blob] = if (bytes.isEmpty) None else Option(new SerialBlob(bytes)) + def toBlobOption: Option[SerialBlob] = if (bytes.isEmpty) None else Option(new SerialBlob(bytes)) } implicit class EnhancedFiniteDuration(val duration: FiniteDuration) extends AnyVal { diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala index 0fd6d87cb15..31e6af183ae 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingDetritusEntry.scala @@ -1,11 +1,11 @@ package cromwell.database.sql.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob case class CallCachingDetritusEntry ( detritusKey: String, - detritusValue: Option[Clob], + detritusValue: Option[SerialClob], callCachingEntryId: Option[Int] = None, callCachingDetritusEntryId: Option[Int] = None ) diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala index fb2627ba3be..626246c7bf2 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/CallCachingSimpletonEntry.scala @@ -1,11 +1,11 @@ package cromwell.database.sql.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob case class CallCachingSimpletonEntry ( simpletonKey: String, - simpletonValue: Option[Clob], + simpletonValue: Option[SerialClob], wdlType: String, callCachingEntryId: Option[Int] = None, callCachingSimpletonEntryId: Option[Int] = None diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala index bf819e63283..c1a904af3be 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreEntry.scala @@ -1,6 +1,6 @@ package cromwell.database.sql.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob case class JobStoreEntry ( @@ -10,7 +10,7 @@ case class JobStoreEntry jobAttempt: Int, jobSuccessful: Boolean, returnCode: Option[Int], - exceptionMessage: Option[Clob], + exceptionMessage: Option[SerialClob], retryableFailure: Option[Boolean], jobStoreEntryId: Option[Int] = None ) diff --git 
a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala index 909dd17fea2..e0c66921973 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/JobStoreSimpletonEntry.scala @@ -1,11 +1,11 @@ package cromwell.database.sql.tables -import java.sql.Clob +import javax.sql.rowset.serial.SerialClob case class JobStoreSimpletonEntry ( simpletonKey: String, - simpletonValue: Option[Clob], + simpletonValue: Option[SerialClob], wdlType: String, jobStoreEntryId: Option[Int] = None, jobStoreSimpletonEntryId: Option[Int] = None diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala index fcc4a40006e..c273c3e47e3 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/MetadataEntry.scala @@ -1,6 +1,8 @@ package cromwell.database.sql.tables -import java.sql.{Clob, Timestamp} +import java.sql.Timestamp + +import javax.sql.rowset.serial.SerialClob case class MetadataEntry ( @@ -9,7 +11,7 @@ case class MetadataEntry jobIndex: Option[Int], jobAttempt: Option[Int], metadataKey: String, - metadataValue: Option[Clob], + metadataValue: Option[SerialClob], metadataValueType: Option[String], metadataTimestamp: Timestamp, metadataEntryId: Option[Long] = None diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala index 66f22ae12b7..b969939d16c 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/WorkflowStoreEntry.scala @@ -1,21 +1,23 @@ package cromwell.database.sql.tables -import java.sql.{Blob, Clob, Timestamp} +import java.sql.Timestamp + +import javax.sql.rowset.serial.{SerialBlob, SerialClob} case class WorkflowStoreEntry ( workflowExecutionUuid: String, - workflowDefinition: Option[Clob], + workflowDefinition: Option[SerialClob], workflowUrl: Option[String], workflowRoot: Option[String], workflowType: Option[String], workflowTypeVersion: Option[String], - workflowInputs: Option[Clob], - workflowOptions: Option[Clob], + workflowInputs: Option[SerialClob], + workflowOptions: Option[SerialClob], workflowState: String, submissionTime: Timestamp, - importsZip: Option[Blob], - customLabels: Clob, + importsZip: Option[SerialBlob], + customLabels: SerialClob, cromwellId: Option[String], heartbeatTimestamp: Option[Timestamp], workflowStoreEntryId: Option[Int] = None diff --git a/database/sql/src/main/scala/cromwell/database/sql/tables/package.scala b/database/sql/src/main/scala/cromwell/database/sql/tables/package.scala index b9421d7b615..9ed1e801abf 100644 --- a/database/sql/src/main/scala/cromwell/database/sql/tables/package.scala +++ b/database/sql/src/main/scala/cromwell/database/sql/tables/package.scala @@ -20,10 +20,11 @@ package cromwell.database.sql * - `Double` * - `Int` * - `Long` - * - `java.sql.Clob` + * - `javax.sql.rowset.serial.SerialClob` * - `java.sql.Timestamp` * - * Nullable columns should be wrapped in an `Option`. + * Nullable columns should be wrapped in an `Option`. 
Note that SerialClob is + * required instead of java.sql.Clob, for Postgres support. * * Primary and foreign key columns are the only columns that should be defaulted, as they are to be filled in by the * database, and cannot and should not be set within the business logic. On the other hand, columns to be filled in by diff --git a/docs/Configuring.md b/docs/Configuring.md index 08764bf3382..60248d0bbfd 100644 --- a/docs/Configuring.md +++ b/docs/Configuring.md @@ -297,6 +297,33 @@ url = "jdbc:mysql://host/cromwell?rewriteBatchedStatements=true&serverTimezone=U Using this option does not alter your database's underlying timezone; rather, it causes Cromwell to "speak UTC" when communicating with the DB, and the DB server performs the conversion for you. +**Using Cromwell with PostgreSQL** + +To use PostgreSQL as the database, you will need to install and enable the +Large Object extension. If the extension is present, setting up the database +requires just these commands: + +``` +$ createdb cromwell +$ psql -d cromwell -c "create extension lo;" +``` + +PostgreSQL configuration in Cromwell is very similar to the MySQL configuration. For example: + +```hocon +database { + profile = "slick.jdbc.PostgresProfile$" + db { + driver = "org.postgresql.Driver" + url = "jdbc:postgresql://localhost:5432/cromwell" + user = "user" + password = "pass" + port = 5432 + connectionTimeout = 5000 + } +} +``` + ### Abort **Control-C (SIGINT) abort handler** diff --git a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala index b28c250490b..b2d73e21fcb 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/lifecycle/execution/callcaching/CallCachingSlickDatabaseSpec.scala @@ -27,6 +27,8 @@ class CallCachingSlickDatabaseSpec extends FlatSpec with Matchers with ScalaFutu "SlickDatabase (mariadb)" should behave like testWith("database-test-mariadb") + "SlickDatabase (postgresql)" should behave like testWith("database-test-postgresql") + def testWith(configPath: String): Unit = { lazy val databaseConfig = ConfigFactory.load.getConfig(configPath) lazy val dataAccess = new EngineSlickDatabase(databaseConfig) diff --git a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala index aab401c9934..62c8b3879b2 100644 --- a/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala +++ b/engine/src/test/scala/cromwell/engine/workflow/workflowstore/SqlWorkflowStoreSpec.scala @@ -31,6 +31,8 @@ class SqlWorkflowStoreSpec extends FlatSpec with Matchers with ScalaFutures with "SqlWorkflowStore (mariadb)" should behave like testWith("database-test-mariadb") + "SqlWorkflowStore (postgresql)" should behave like testWith("database-test-postgresql") + def testWith(configPath: String): Unit = { lazy val databaseConfig = ConfigFactory.load.getConfig(configPath) diff --git a/project/Dependencies.scala b/project/Dependencies.scala index e7c23b5a7de..fcebf3a8f76 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -61,6 +61,7 @@ object Dependencies { private val owlApiV = "5.1.9" private val paradiseV = "2.1.1" private val pegdownV = "1.6.0" + private val postgresV = "42.2.5" private val 
rdf4jV = "2.4.2" private val refinedV = "0.9.8" private val rhinoV = "1.7.10" @@ -321,7 +322,8 @@ object Dependencies { private val dbmsDependencies = List( "org.hsqldb" % "hsqldb" % hsqldbV, - "mysql" % "mysql-connector-java" % mysqlV + "mysql" % "mysql-connector-java" % mysqlV, + "org.postgresql" % "postgresql" % postgresV ) private val refinedTypeDependenciesList = List( @@ -338,8 +340,9 @@ object Dependencies { val cloudSupportDependencies = googleApiClientDependencies ++ googleCloudDependencies ++ betterFilesDependencies ++ awsCloudDependencies - val databaseSqlDependencies = configDependencies ++ catsDependencies ++ slickDependencies ++ dbmsDependencies ++ - refinedTypeDependenciesList + val databaseSqlDependencies = List( + "commons-io" % "commons-io" % commonsIoV, + ) ++ configDependencies ++ catsDependencies ++ slickDependencies ++ dbmsDependencies ++ refinedTypeDependenciesList val statsDDependencies = List( "nl.grons" %% "metrics-scala" % metrics3ScalaV, diff --git a/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala b/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala index 275398a6609..4106a5e8e62 100644 --- a/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala +++ b/services/src/test/scala/cromwell/services/ServicesStoreSpec.scala @@ -253,6 +253,8 @@ class ServicesStoreSpec extends FlatSpec with Matchers with ScalaFutures with St "SlickDatabase (mariadb)" should behave like testWith("database-test-mariadb") + "SlickDatabase (postgresql)" should behave like testWith("database-test-postgresql") + def testWith(configPath: String): Unit = { import ServicesStore.EnhancedSqlDatabase @@ -300,14 +302,14 @@ class ServicesStoreSpec extends FlatSpec with Matchers with ScalaFutures with St val future = for { product <- dataAccess.database.run(getProduct) _ <- product match { - case "HSQL Database Engine" => - // HSQLDB doesn't crash because it calls getCharacterStream instead of getSubString. - dataAccess.addJobStores(jobStoreJoins, 1) case "MySQL" => + // MySQL crashes because it calls SerialClob's getSubString instead of getCharacterStream dataAccess.addJobStores(jobStoreJoins, 1).failed map { exception => exception should be(a[SerialException]) exception.getMessage should be("Invalid position in SerialClob object set") } + case "HSQL Database Engine" | "PostgreSQL" => + dataAccess.addJobStores(jobStoreJoins, 1) } } yield () @@ -343,15 +345,15 @@ class ServicesStoreSpec extends FlatSpec with Matchers with ScalaFutures with St val future = for { product <- dataAccess.database.run(getProduct) _ <- product match { - case "HSQL Database Engine" => - // HSQLDB doesn't crash because it calls getBinaryStream instead of getBytes. 
- dataAccess.addWorkflowStoreEntries(workflowStoreEntries) case "MySQL" => + // MySQL crashes because it calls SerialBlob's getBytes instead of getBinaryStream dataAccess.addWorkflowStoreEntries(workflowStoreEntries).failed map { exception => exception should be(a[SerialException]) exception.getMessage should be("Invalid arguments: position cannot be less than 1 or greater than the length of the SerialBlob") } + case _ => + dataAccess.addWorkflowStoreEntries(workflowStoreEntries) } } yield () diff --git a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala index 95749caddd7..8641d9e39b5 100644 --- a/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala +++ b/services/src/test/scala/cromwell/services/keyvalue/impl/KeyValueDatabaseSpec.scala @@ -9,6 +9,7 @@ import cromwell.database.slick.EngineSlickDatabase import cromwell.database.sql.tables.JobKeyValueEntry import cromwell.services.EngineServicesStore import cromwell.services.ServicesStore.EnhancedSqlDatabase +import org.postgresql.util.PSQLException import org.scalatest.concurrent.ScalaFutures import org.scalatest.time.{Millis, Seconds, Span} import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers, RecoverMethods} @@ -37,6 +38,11 @@ class KeyValueDatabaseSpec extends FlatSpec with Matchers with ScalaFutures with "Column 'STORE_VALUE' cannot be null" ) + "SlickDatabase (postgresql)" should behave like testWith[PSQLException]( + "database-test-postgresql", + """ERROR: null value in column "STORE_VALUE" violates not-null constraint""" + ) + def testWith[E <: Throwable](configPath: String, failureMessage: String)(implicit classTag: ClassTag[E]): Unit = { lazy val databaseConfig = ConfigFactory.load.getConfig(configPath) lazy val dataAccess = new EngineSlickDatabase(databaseConfig) diff --git a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala index 27f112d3e1e..2af41be01fb 100644 --- a/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala +++ b/services/src/test/scala/cromwell/services/metadata/impl/MetadataDatabaseAccessSpec.scala @@ -42,6 +42,8 @@ class MetadataDatabaseAccessSpec extends FlatSpec with Matchers with ScalaFuture "MetadataDatabaseAccess (mariadb)" should behave like testWith("database-test-mariadb") + "MetadataDatabaseAccess (postgresql)" should behave like testWith("database-test-postgresql") + implicit val ec = ExecutionContext.global implicit val defaultPatience = PatienceConfig(scaled(Span(30, Seconds)), scaled(Span(100, Millis))) diff --git a/src/ci/bin/test.inc.sh b/src/ci/bin/test.inc.sh index 9749082c628..bbbad2d60bc 100644 --- a/src/ci/bin/test.inc.sh +++ b/src/ci/bin/test.inc.sh @@ -326,7 +326,7 @@ cromwell::private::create_database_variables() { CROMWELL_BUILD_MARIADB_JDBC_URL="jdbc:mysql://${CROMWELL_BUILD_MARIADB_HOSTNAME}:${CROMWELL_BUILD_MARIADB_PORT}/${CROMWELL_BUILD_MARIADB_SCHEMA}?useSSL=false&rewriteBatchedStatements=true&serverTimezone=UTC" CROMWELL_BUILD_MYSQL_JDBC_URL="jdbc:mysql://${CROMWELL_BUILD_MYSQL_HOSTNAME}:${CROMWELL_BUILD_MYSQL_PORT}/${CROMWELL_BUILD_MYSQL_SCHEMA}?useSSL=false&rewriteBatchedStatements=true&serverTimezone=UTC" - CROMWELL_BUILD_POSTGRESQL_JDBC_URL="jdbc:postgresql://${CROMWELL_BUILD_POSTGRESQL_HOSTNAME}:${CROMWELL_BUILD_POSTGRESQL_PORT}/${CROMWELL_BUILD_POSTGRESQL_SCHEMA}" 
+ CROMWELL_BUILD_POSTGRESQL_JDBC_URL="jdbc:postgresql://${CROMWELL_BUILD_POSTGRESQL_HOSTNAME}:${CROMWELL_BUILD_POSTGRESQL_PORT}/${CROMWELL_BUILD_POSTGRESQL_SCHEMA}?reWriteBatchedInserts=true" export CROMWELL_BUILD_MARIADB_DOCKER_TAG export CROMWELL_BUILD_MARIADB_HOSTNAME
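The CI URL above and the test configuration opt into the PostgreSQL JDBC driver's `reWriteBatchedInserts` option, which lets the driver rewrite batched inserts into multi-row INSERT statements. A user-facing configuration can add the same parameter to the URL shown in the Configuring.md example; this is a sketch with placeholder host and credentials, not a required setting:

```hocon
database {
  profile = "slick.jdbc.PostgresProfile$"
  db {
    driver = "org.postgresql.Driver"
    # Optional: reWriteBatchedInserts turns batched inserts into multi-row statements,
    # reducing round trips to the database.
    url = "jdbc:postgresql://localhost:5432/cromwell?reWriteBatchedInserts=true"
    user = "user"
    password = "pass"
    connectionTimeout = 5000
  }
}
```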
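The Clob/Blob-to-SerialClob/SerialBlob migration above also adds zero-length guards in SqlConverters, because LOB positions are 1-based: reading an empty serial LOB from position 1 throws the SerialException messages the specs assert on. Below is a minimal standalone sketch of that guard pattern (the object name is invented for illustration; this is not Cromwell code):

```scala
import javax.sql.rowset.serial.{SerialBlob, SerialClob}

object EmptyLobGuardSketch extends App {
  // SerialClob.getSubString(1, 0) on a zero-length CLOB throws
  // "Invalid position in SerialClob object set", so only read when non-empty.
  val emptyClob = new SerialClob(Array.emptyCharArray)
  val clobLength = emptyClob.length.toInt
  val text = if (clobLength == 0) "" else emptyClob.getSubString(1, clobLength)

  // SerialBlob.getBytes(1, 0) on a zero-length BLOB throws a SerialException as well,
  // so the byte conversion is guarded the same way.
  val emptyBlob = new SerialBlob(Array.emptyByteArray)
  val blobLength = emptyBlob.length.toInt
  val bytes = if (blobLength == 0) Array.empty[Byte] else emptyBlob.getBytes(1, blobLength)

  println(s"clob -> '$text', blob -> ${bytes.length} bytes")
}
```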