bitcrowd/carbonite

Cannot migrate down with 0.12.0


With 0.11, the following up/down migration

```elixir
def up do
  Carbonite.Migrations.up(1..7)
end

def down do
  Carbonite.Migrations.down(7..1)
end
```

worked fine.

With 0.12.0, the same migration is updated to the 1..8 and 8..1 ranges, as sketched below.
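
For reference, the updated module would look roughly like this (a sketch; the module name is taken from the log below):

```elixir
defmodule TestApp.Repo.Migrations.InstallCarbonite do
  use Ecto.Migration

  def up do
    Carbonite.Migrations.up(1..8)
  end

  def down do
    Carbonite.Migrations.down(8..1)
  end
end
```

Rolling this migration back (e.g. with `mix ecto.rollback`) then fails:

```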

16:02:46.066 [info] == Running 20240513142413 TestApp.Repo.Migrations.InstallCarbonite.down/0 forward

16:02:46.088 [info] execute "CREATE OR REPLACE FUNCTION carbonite_default.capture_changes() RETURNS TRIGGER AS $body$ DECLARE trigger_row carbonite_default.triggers; change_row carbonite_default.changes; pk_source RECORD; col_name VARCHAR; pk_col_val VARCHAR; old_value JSONB; BEGIN /* load trigger config */ SELECT * INTO trigger_row FROM carbonite_default.triggers WHERE table_prefix = TG_TABLE_SCHEMA AND table_name = TG_TABLE_NAME; IF (trigger_row.mode = 'ignore' AND (trigger_row.override_xact_id IS NULL OR trigger_row.override_xact_id != pg_current_xact_id())) OR (trigger_row.mode = 'capture' AND trigger_row.override_xact_id = pg_current_xact_id()) THEN RETURN NULL; END IF; /* instantiate change row */ change_row = ROW( NEXTVAL('carbonite_default.changes_id_seq'), pg_current_xact_id(), LOWER(TG_OP::TEXT), TG_TABLE_SCHEMA::TEXT, TG_TABLE_NAME::TEXT, NULL, NULL, '{}', NULL, NULL ); /* build table_pk */ IF trigger_row.primary_key_columns != '{}' THEN IF (TG_OP IN ('INSERT', 'UPDATE')) THEN pk_source := NEW; ELSIF (TG_OP = 'DELETE') THEN pk_source := OLD; END IF; change_row.table_pk := '{}'; FOREACH col_name IN ARRAY trigger_row.primary_key_columns LOOP EXECUTE 'SELECT $1.' || col_name || '::TEXT' USING pk_source INTO pk_col_val; change_row.table_pk := change_row.table_pk || pk_col_val; END LOOP; END IF; /* fill in changed data */ IF (TG_OP = 'UPDATE') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; change_row.changed_from = '{}'::JSONB; FOR col_name, old_value IN SELECT * FROM jsonb_each(to_jsonb(OLD.*) - trigger_row.excluded_columns) LOOP IF (change_row.data->col_name)::JSONB != old_value THEN change_row.changed_from := jsonb_set(change_row.changed_from, ARRAY[col_name], old_value); END IF; END LOOP; change_row.changed := ARRAY(SELECT jsonb_object_keys(change_row.changed_from)); IF change_row.changed = '{}' THEN /* All changed fields are ignored. Skip this update. */ RETURN NULL; END IF; /* Persisting the old data is opt-in, discard if not configured. */ IF trigger_row.store_changed_from IS FALSE THEN change_row.changed_from := NULL; END IF; ELSIF (TG_OP = 'DELETE') THEN change_row.data = to_jsonb(OLD.*) - trigger_row.excluded_columns; ELSIF (TG_OP = 'INSERT') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; END IF; /* filtered columns */ FOREACH col_name IN ARRAY trigger_row.filtered_columns LOOP change_row.data = jsonb_set(change_row.data, ('{' || col_name || '}')::TEXT[], jsonb('\"[FILTERED]\"')); END LOOP; /* insert, fail gracefully unless transaction record present or NEXTVAL has never been called */ BEGIN change_row.transaction_id = CURRVAL('carbonite_default.transactions_id_seq'); /* verify that xact_id matches */ IF NOT EXISTS( SELECT 1 FROM carbonite_default.transactions WHERE id = change_row.transaction_id AND xact_id = change_row.transaction_xact_id ) THEN RAISE USING ERRCODE = 'foreign_key_violation'; END IF; INSERT INTO carbonite_default.changes VALUES (change_row.*); EXCEPTION WHEN foreign_key_violation OR object_not_in_prerequisite_state THEN RAISE '% on table %.% without prior INSERT into carbonite_default.transactions', TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME USING ERRCODE = 'foreign_key_violation'; END; RETURN NULL; END; $body$ LANGUAGE plpgsql;"

16:02:46.091 [info] drop index carbonite_default.changes_transaction_id_index

16:02:46.092 [info] execute "ALTER INDEX carbonite_default.changes_transaction_xact_id_index RENAME TO changes_transaction_id_index;"

16:02:46.093 [info] execute "CREATE OR REPLACE FUNCTION carbonite_default.capture_changes() RETURNS TRIGGER AS $body$ DECLARE trigger_row carbonite_default.triggers; change_row carbonite_default.changes; pk_source RECORD; col_name VARCHAR; pk_col_val VARCHAR; old_field RECORD; old_field_jsonb JSONB; BEGIN /* load trigger config */ SELECT * INTO trigger_row FROM carbonite_default.triggers WHERE table_prefix = TG_TABLE_SCHEMA AND table_name = TG_TABLE_NAME; IF (trigger_row.mode = 'ignore' AND (trigger_row.override_xact_id IS NULL OR trigger_row.override_xact_id != pg_current_xact_id())) OR (trigger_row.mode = 'capture' AND trigger_row.override_xact_id = pg_current_xact_id()) THEN RETURN NULL; END IF; /* instantiate change row */ change_row = ROW( NEXTVAL('carbonite_default.changes_id_seq'), pg_current_xact_id(), LOWER(TG_OP::TEXT), TG_TABLE_SCHEMA::TEXT, TG_TABLE_NAME::TEXT, NULL, NULL, '{}', NULL, NULL ); /* build table_pk */ IF trigger_row.primary_key_columns != '{}' THEN IF (TG_OP IN ('INSERT', 'UPDATE')) THEN pk_source := NEW; ELSIF (TG_OP = 'DELETE') THEN pk_source := OLD; END IF; change_row.table_pk := '{}'; FOREACH col_name IN ARRAY trigger_row.primary_key_columns LOOP EXECUTE 'SELECT $1.' || col_name || '::TEXT' USING pk_source INTO pk_col_val; change_row.table_pk := change_row.table_pk || pk_col_val; END LOOP; END IF; /* fill in changed data */ IF (TG_OP = 'UPDATE') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; change_row.changed_from = '{}'::JSONB; FOR old_field_jsonb IN SELECT jsonb_build_object(key, value) FROM jsonb_each(to_jsonb(OLD.*) - trigger_row.excluded_columns) LOOP IF NOT change_row.data @> old_field_jsonb THEN change_row.changed_from := change_row.changed_from || old_field_jsonb; END IF; END LOOP; change_row.changed := ARRAY(SELECT jsonb_object_keys(change_row.changed_from)); IF change_row.changed = '{}' THEN /* All changed fields are ignored. Skip this update. */ RETURN NULL; END IF; /* Persisting the old data is opt-in, discard if not configured. */ IF trigger_row.store_changed_from IS FALSE THEN change_row.changed_from := NULL; END IF; ELSIF (TG_OP = 'DELETE') THEN change_row.data = to_jsonb(OLD.*) - trigger_row.excluded_columns; ELSIF (TG_OP = 'INSERT') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; END IF; /* filtered columns */ FOREACH col_name IN ARRAY trigger_row.filtered_columns LOOP change_row.data = jsonb_set(change_row.data, ('{' || col_name || '}')::TEXT[], jsonb('\"[FILTERED]\"')); END LOOP; /* insert, fail gracefully unless transaction record present or NEXTVAL has never been called */ BEGIN change_row.transaction_id = CURRVAL('carbonite_default.transactions_id_seq'); /* verify that xact_id matches */ IF NOT EXISTS( SELECT 1 FROM carbonite_default.transactions WHERE id = change_row.transaction_id AND xact_id = change_row.transaction_xact_id ) THEN RAISE USING ERRCODE = 'foreign_key_violation'; END IF; INSERT INTO carbonite_default.changes VALUES (change_row.*); EXCEPTION WHEN foreign_key_violation OR object_not_in_prerequisite_state THEN RAISE '% on table %.% without prior INSERT into carbonite_default.transactions', TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME USING ERRCODE = 'foreign_key_violation'; END; RETURN NULL; END; $body$ LANGUAGE plpgsql;"

16:02:46.095 [info] execute "LOCK TABLE carbonite_default.changes IN EXCLUSIVE MODE;"

16:02:46.095 [info] alter table carbonite_default.changes

16:02:46.095 [info] alter table carbonite_default.triggers

16:02:46.096 [info] execute "CREATE OR REPLACE FUNCTION carbonite_default.capture_changes() RETURNS TRIGGER AS $body$ DECLARE trigger_row carbonite_default.triggers; change_row carbonite_default.changes; pk_source RECORD; col_name VARCHAR; pk_col_val VARCHAR; old_field RECORD; BEGIN /* load trigger config */ SELECT * INTO trigger_row FROM carbonite_default.triggers WHERE table_prefix = TG_TABLE_SCHEMA AND table_name = TG_TABLE_NAME; IF (trigger_row.mode = 'ignore' AND (trigger_row.override_xact_id IS NULL OR trigger_row.override_xact_id != pg_current_xact_id())) OR (trigger_row.mode = 'capture' AND trigger_row.override_xact_id = pg_current_xact_id()) THEN RETURN NULL; END IF; /* instantiate change row */ change_row = ROW( NEXTVAL('carbonite_default.changes_id_seq'), pg_current_xact_id(), LOWER(TG_OP::TEXT), TG_TABLE_SCHEMA::TEXT, TG_TABLE_NAME::TEXT, NULL, NULL, '{}', NULL ); /* build table_pk */ IF trigger_row.primary_key_columns != '{}' THEN IF (TG_OP IN ('INSERT', 'UPDATE')) THEN pk_source := NEW; ELSIF (TG_OP = 'DELETE') THEN pk_source := OLD; END IF; change_row.table_pk := '{}'; FOREACH col_name IN ARRAY trigger_row.primary_key_columns LOOP EXECUTE 'SELECT $1.' || col_name || '::TEXT' USING pk_source INTO pk_col_val; change_row.table_pk := change_row.table_pk || pk_col_val; END LOOP; END IF; /* fill in changed data */ IF (TG_OP = 'UPDATE') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; FOR old_field IN SELECT * FROM jsonb_each(to_jsonb(OLD.*) - trigger_row.excluded_columns) LOOP IF NOT change_row.data @> jsonb_build_object(old_field.key, old_field.value) THEN change_row.changed := change_row.changed || old_field.key::VARCHAR; END IF; END LOOP; IF change_row.changed = '{}' THEN /* All changed fields are ignored. Skip this update. */ RETURN NULL; END IF; ELSIF (TG_OP = 'DELETE') THEN change_row.data = to_jsonb(OLD.*) - trigger_row.excluded_columns; ELSIF (TG_OP = 'INSERT') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; END IF; /* filtered columns */ FOREACH col_name IN ARRAY trigger_row.filtered_columns LOOP change_row.data = jsonb_set(change_row.data, ('{' || col_name || '}')::TEXT[], jsonb('\"[FILTERED]\"')); END LOOP; /* insert, fail gracefully unless transaction record present or NEXTVAL has never been called */ BEGIN change_row.transaction_id = CURRVAL('carbonite_default.transactions_id_seq'); /* verify that xact_id matches */ IF NOT EXISTS( SELECT 1 FROM carbonite_default.transactions WHERE id = change_row.transaction_id AND xact_id = change_row.transaction_xact_id ) THEN RAISE USING ERRCODE = 'foreign_key_violation'; END IF; INSERT INTO carbonite_default.changes VALUES (change_row.*); EXCEPTION WHEN foreign_key_violation OR object_not_in_prerequisite_state THEN RAISE '% on table %.% without prior INSERT into carbonite_default.transactions', TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME USING ERRCODE = 'foreign_key_violation'; END; RETURN NULL; END; $body$ LANGUAGE plpgsql;"

16:02:46.097 [info] execute "LOCK TABLE carbonite_default.changes IN EXCLUSIVE MODE;"

16:02:46.097 [info] execute "ALTER TABLE carbonite_default.changes DROP CONSTRAINT changes_transaction_id_fkey;"

16:02:46.098 [info] execute "ALTER TABLE carbonite_default.transactions DROP CONSTRAINT transactions_pkey;"

16:02:46.098 [info] execute "ALTER TABLE carbonite_default.transactions ADD PRIMARY KEY (xact_id);"

16:02:46.109 [info] alter table carbonite_default.changes

16:02:46.110 [info] rename column transaction_xact_id to transaction_id on table carbonite_default.changes

16:02:46.110 [info] alter table carbonite_default.transactions

16:02:46.111 [info] rename column xact_id to id on table carbonite_default.transactions

16:02:46.111 [info] execute "ALTER TABLE carbonite_default.changes ADD CONSTRAINT changes_transaction_id_fkey FOREIGN KEY (transaction_id) REFERENCES carbonite_default.transactions ON DELETE CASCADE ON UPDATE CASCADE;"

16:02:46.112 [info] execute "ALTER TABLE carbonite_default.outboxes ALTER COLUMN last_transaction_id DROP DEFAULT;"

16:02:46.112 [info] execute "ALTER TABLE carbonite_default.outboxes ALTER COLUMN last_transaction_id SET DATA TYPE BIGINT USING last_transaction_id::text::BIGINT;"

16:02:46.116 [info] execute "ALTER TABLE carbonite_default.outboxes ALTER COLUMN last_transaction_id SET DEFAULT 0;"

16:02:46.117 [info] rename column override_xact_id to override_transaction_id on table carbonite_default.triggers

16:02:46.117 [info] execute "CREATE OR REPLACE FUNCTION carbonite_default.set_transaction_id() RETURNS TRIGGER AS $body$ BEGIN NEW.id = COALESCE(NEW.id, pg_current_xact_id()); RETURN NEW; END $body$ LANGUAGE plpgsql;"

16:02:46.118 [info] execute "CREATE OR REPLACE FUNCTION carbonite_default.capture_changes() RETURNS TRIGGER AS $body$ DECLARE trigger_row carbonite_default.triggers; change_row carbonite_default.changes; pk_source RECORD; col_name VARCHAR; pk_col_val VARCHAR; old_field RECORD; BEGIN /* load trigger config */ SELECT * INTO trigger_row FROM carbonite_default.triggers WHERE table_prefix = TG_TABLE_SCHEMA AND table_name = TG_TABLE_NAME; IF (trigger_row.mode = 'ignore' AND (trigger_row.override_transaction_id IS NULL OR trigger_row.override_transaction_id != pg_current_xact_id())) OR (trigger_row.mode = 'capture' AND trigger_row.override_transaction_id = pg_current_xact_id()) THEN RETURN NULL; END IF; /* instantiate change row */ change_row = ROW( NEXTVAL('carbonite_default.changes_id_seq'), pg_current_xact_id(), LOWER(TG_OP::TEXT), TG_TABLE_SCHEMA::TEXT, TG_TABLE_NAME::TEXT, NULL, NULL, '{}' ); /* build table_pk */ IF trigger_row.primary_key_columns != '{}' THEN IF (TG_OP IN ('INSERT', 'UPDATE')) THEN pk_source := NEW; ELSIF (TG_OP = 'DELETE') THEN pk_source := OLD; END IF; change_row.table_pk := '{}'; FOREACH col_name IN ARRAY trigger_row.primary_key_columns LOOP EXECUTE 'SELECT $1.' || col_name || '::text' USING pk_source INTO pk_col_val; change_row.table_pk := change_row.table_pk || pk_col_val; END LOOP; END IF; /* fill in changed data */ IF (TG_OP = 'UPDATE') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; FOR old_field IN SELECT * FROM jsonb_each(to_jsonb(OLD.*) - trigger_row.excluded_columns) LOOP IF NOT change_row.data @> jsonb_build_object(old_field.key, old_field.value) THEN change_row.changed := change_row.changed || old_field.key::VARCHAR; END IF; END LOOP; IF change_row.changed = '{}' THEN /* All changed fields are ignored. Skip this update. */ RETURN NULL; END IF; ELSIF (TG_OP = 'DELETE') THEN change_row.data = to_jsonb(OLD.*) - trigger_row.excluded_columns; ELSIF (TG_OP = 'INSERT') THEN change_row.data = to_jsonb(NEW.*) - trigger_row.excluded_columns; END IF; /* filtered columns */ FOREACH col_name IN ARRAY trigger_row.filtered_columns LOOP change_row.data = jsonb_set(change_row.data, ('{' || col_name || '}')::text[], jsonb('\"[FILTERED]\"')); END LOOP; /* insert, fail gracefully unless transaction record present */ BEGIN INSERT INTO carbonite_default.changes VALUES (change_row.*); EXCEPTION WHEN foreign_key_violation THEN RAISE '% on table %.% without prior INSERT into carbonite_default.transactions', TG_OP, TG_TABLE_SCHEMA, TG_TABLE_NAME USING ERRCODE = 'foreign_key_violation'; END; RETURN NULL; END; $body$ LANGUAGE plpgsql;"

16:02:46.119 [info] alter table carbonite_default.transactions

16:02:46.120 [info] create index carbonite_default.transactions_inserted_at_index

16:02:46.122 [info] drop table carbonite_default.outboxes

16:02:46.123 [info] execute "DROP FUNCTION carbonite_default.capture_changes;"

16:02:46.123 [info] drop table carbonite_default.triggers

16:02:46.124 [info] execute "DROP TYPE carbonite_default.trigger_mode;"

16:02:46.124 [info] drop table carbonite_default.changes

16:02:46.125 [info] execute "DROP TYPE carbonite_default.change_op;"

16:02:46.126 [info] drop table carbonite_default.transactions

16:02:46.126 [info] execute "DROP FUNCTION carbonite_default.set_transaction_id;"

16:02:46.127 [info] execute "DROP SCHEMA carbonite_default;"
** (Postgrex.Error) ERROR 2BP01 (dependent_objects_still_exist) cannot drop schema carbonite_default because other objects depend on it

    hint: Use DROP ... CASCADE to drop the dependent objects too.

function carbonite_default.record_dynamic_varchar(record,character varying) depends on schema carbonite_default
function carbonite_default.record_dynamic_varchar_agg(record,character varying[]) depends on schema carbonite_default
function carbonite_default.jsonb_redact_keys(jsonb,character varying[]) depends on schema carbonite_default
    (ecto_sql 3.11.2) lib/ecto/adapters/sql.ex:1054: Ecto.Adapters.SQL.raise_sql_call_error/1
    (elixir 1.14.5) lib/enum.ex:1658: Enum."-map/2-lists^map/1-0-"/2
    (ecto_sql 3.11.2) lib/ecto/adapters/sql.ex:1161: Ecto.Adapters.SQL.execute_ddl/4
    (ecto_sql 3.11.2) lib/ecto/migration/runner.ex:348: Ecto.Migration.Runner.log_and_execute_ddl/3
    (ecto_sql 3.11.2) lib/ecto/migration/runner.ex:117: anonymous fn/6 in Ecto.Migration.Runner.flush/0
    (elixir 1.14.5) lib/enum.ex:2468: Enum."-reduce/3-lists^foldl/2-0-"/3
    (ecto_sql 3.11.2) lib/ecto/migration/runner.ex:116: Ecto.Migration.Runner.flush/0
    (ecto_sql 3.11.2) lib/ecto/migration/runner.ex:311: Ecto.Migration.Runner.perform_operation/3
    ```

Thanks for reporting, I forgot about the rollback when refactoring the functions. Hope this didn't hit you in prod.
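
To flesh that out: the rollback reaches `DROP SCHEMA carbonite_default;` while the helper functions listed in the error still exist, because none of the earlier down steps drops them. A hypothetical sketch of the missing cleanup, to be run inside a migration's `down/0` before the schema drop (signatures are copied from the error message; `DROP ROUTINE`, available since PostgreSQL 11, covers plain functions and aggregates alike):

```elixir
# Hypothetical sketch of the missing cleanup, not necessarily how the released
# fix does it: drop the helper routines before "DROP SCHEMA carbonite_default;"
# runs. Signatures come from the dependent_objects_still_exist error above.
execute("DROP ROUTINE IF EXISTS carbonite_default.record_dynamic_varchar(record, varchar);")
execute("DROP ROUTINE IF EXISTS carbonite_default.record_dynamic_varchar_agg(record, varchar[]);")
execute("DROP ROUTINE IF EXISTS carbonite_default.jsonb_redact_keys(jsonb, varchar[]);")
```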

Np, still playing around with the lib. In any case, this is a great lib, very useful ;)

Thank you!

Released as v0.12.1.
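
To pick up the fix, bumping the dependency should be enough; a sketch of the `mix.exs` entry:

```elixir
# mix.exs: require the patched release
defp deps do
  [
    {:carbonite, "~> 0.12.1"}
  ]
end
```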