I’m sharing this bash script that I had to create to export/import the Mautic 5 MySQL database to a new server. Feel free to use it to save yourself some time. It will export all tables from a local source database, fix issues which cause import to fail, and import into a destination database (could be on a different server, assuming you have opened the remote connection for your server IP).
What I thought would be a 30-minute task took over 8 hours, because of foreign-key constraint errors and import failures caused by SQL generated-column values that blocked the import.
If this is useful to you, leave a reply so I know it helped.
#!/bin/bash
####################################################################################################
# MAUTIC 5 DATABASE EXPORT/IMPORT SCRIPT
#
# This script will export a Mautic 5 database to a new MySQL database (on the same or different server).
# It was created after 8 hours of struggle with the Mautic 5 database export/import process, as a result
# of foreign-key constraint errors and other issues that arose during the process.
#
# HOW IT WORKS:
#
# 1. It exports all database tables, one table at a time.
# 2. It fixes the mt_email_stats and mt_leads tables to remove generated columns from INSERT statements,
# which without being fixed, caused errors that cause the import to fail.
# 3. It batches the INSERT statements into groups of 250 rows for mt_email_stats and mt_leads tables.
# 4. It imports the tables into the new database in a specific order to avoid foreign-key constraint errors.
# 5. It verifies the schema and sample data for mt_email_stats and mt_leads tables after import.
# 6. It cleans up temporary files and directories.
#
# HOW TO USE:
#
# 1. Set the Source and Destination database configuration variables.
# 2. On your source server, go into a non-public directory where the backups will be created.
# 3. Create a file named "movedb.sh", then "sudo chmod +x movedb.sh" to make it executable.
# 4. Run the script with "sudo ./movedb.sh" to start the export/import process.
#
# SUGGESTED USAGE:
#
# 1. Create a test database on the destination server (e.g., "mautic5_test").
# 2. Run the script to your test database and verify the import worked without errors.
# 3. Verify all tables were created, and that table sizes match the source database.
# 4. If everything looks good:
# 4.a. Create a new database on the destination server (e.g., "mautic5_production").
# 4.b. Setup your "movedb.sh" script with configuration to be ready to run.
# 4.c. Create a duplicate of your Mautic "config/local.php" file with the new db settings, name
# it something like "local.php.ready" so it is ready for the switch.
# 4.d. Take the website offline/into maintenance mode for ~5 minutes.
# 4.e. Pause all Campaigns in Mautic.
# 4.f. Run "sudo ./movedb.sh" to begin transfer of the production db.
# 4.g. When transfer is complete, switch "local.php.ready" to "local.php".
# 4.h. Bring website out of maintenance mode.
# 4.i. Resume all Campaigns in Mautic.
# 5. Delete the test database, "movedb.sh" script, and any remaining backup files.
#
####################################################################################################
# Source database configuration (the existing Mautic 5 database to export).
SRC_HOST="YOUR_MYSQL_SERVER_IP"
SRC_PORT=3306
SRC_USER="YOUR_DB_USER"
SRC_PASS="YOUR_DB_PASSWORD"
SRC_DB="YOUR_DB_NAME"
# Destination database configuration (database must already exist; if it is
# on a different server, the remote MySQL port must be open to this host).
DST_HOST="DESTINATION_MYSQL_SERVER_IP"
DST_PORT=3306
DST_USER="DESTINATION_DB_USER"
DST_PASS="DESTINATION_DB_PASSWORD"
DST_DB="DESTINATION_DB_NAME"
# Working directories created next to this script on the source server:
# DUMP_DIR holds raw per-table dumps, FIXED_DIR the import-ready files,
# and TEMP_DIR the intermediate files produced while fixing/batching.
DUMP_DIR="dump_tables"
FIXED_DIR="fixed_tables"
TEMP_DIR="$FIXED_DIR/temp"
# Step 1: Export every table from the source database.
echo "Exporting tables from $SRC_DB on $SRC_HOST..."
# Create the working directories (no-op when they already exist).
mkdir -p "$DUMP_DIR" "$FIXED_DIR" "$TEMP_DIR"
# The two big tables are dumped with one INSERT per row so the
# generated-column fix-ups in Step 2 can operate line by line.
for big_table in mt_email_stats mt_leads; do
  echo "Exporting $big_table with one INSERT per line..."
  if ! mysqldump -h "$SRC_HOST" -P "$SRC_PORT" -u "$SRC_USER" -p"$SRC_PASS" \
      --opt --single-transaction --no-create-db --skip-extended-insert \
      "$SRC_DB" "$big_table" > "$DUMP_DIR/$big_table.sql" \
      || [ ! -s "$DUMP_DIR/$big_table.sql" ]; then
    echo "Error: Failed to export $big_table."
    exit 1
  fi
done
# Step 1b: Export all remaining tables with regular extended INSERTs.
#
# BUG FIX: the original piped the table list into `while read`, which runs
# the loop body in a subshell -- `exit 1` there only terminated the subshell
# and the script carried on after a failed export. Reading the list into an
# array first keeps the loop (and its `exit`) in the main shell, and also
# lets us detect a failed or empty table listing.
echo "Exporting all other tables with extended INSERT..."
mapfile -t other_tables < <(mysql -h "$SRC_HOST" -P "$SRC_PORT" -u "$SRC_USER" -p"$SRC_PASS" -N -e "SHOW TABLES FROM $SRC_DB WHERE Tables_in_$SRC_DB NOT IN ('mt_email_stats', 'mt_leads')")
if [ "${#other_tables[@]}" -eq 0 ]; then
  echo "Error: Could not list tables in $SRC_DB (or no tables found)."
  exit 1
fi
for table in "${other_tables[@]}"; do
  echo "Exporting table $table..."
  if ! mysqldump -h "$SRC_HOST" -P "$SRC_PORT" -u "$SRC_USER" -p"$SRC_PASS" \
      --opt --single-transaction --no-create-db \
      "$SRC_DB" "$table" > "$DUMP_DIR/$table.sql" \
      || [ ! -s "$DUMP_DIR/$table.sql" ]; then
    echo "Error: Failed to export table $table."
    exit 1
  fi
done
# Sanity check: at least one dump file must have been produced.
if [ -z "$(ls -A "$DUMP_DIR")" ]; then
  echo "Error: No tables exported into $DUMP_DIR."
  exit 1
fi
# Step 2: Fix mt_email_stats and mt_leads, then batch their INSERTs.
#
# MySQL rejects INSERTs that supply an explicit value for a GENERATED
# column, so the dumped value for each generated column is replaced with
# DEFAULT before import.
for table in "mt_email_stats" "mt_leads"; do
  TEMP_FILE="$TEMP_DIR/${table}_temp.sql"
  BATCHED_FILE="$TEMP_DIR/${table}_batched.sql"
  if [ "$table" = "mt_email_stats" ]; then
    echo "Fixing generated_sent_date in $DUMP_DIR/$table.sql (removing INSERT values)..."
    # 1st expression: rewrite the generated-column definition without the
    #   _utf8mb4 introducers that break the import.
    # 2nd expression: the generated date is the last value of each row;
    #   replace it with DEFAULT.
    if ! sed -E \
      -e '/`generated_sent_date`/ s/`generated_sent_date`.*GENERATED ALWAYS AS (concat(year(`date_sent`),_utf8mb4.*))/`generated_sent_date` date GENERATED ALWAYS AS (concat(year(`date_sent`),'"'"'-'"'"',lpad(month(`date_sent`),2,'"'"'0'"'"'),'"'"'-'"'"',lpad(dayofmonth(`date_sent`),2,'"'"'0'"'"'))) VIRTUAL/' \
      -e '/INSERT INTO `mt_email_stats`/ s/('"'"'[0-9]{4}-[0-9]{2}-[0-9]{2}'"'"'|\(NULL\))\);$/DEFAULT);/' "$DUMP_DIR/$table.sql" > "$TEMP_FILE"; then
      echo "Error: Failed to fix $table.sql."
      exit 1
    fi
  elif [ "$table" = "mt_leads" ]; then
    echo "Fixing generated_email_domain in $DUMP_DIR/$table.sql (removing INSERT values)..."
    # generated_email_domain is the 46th comma-separated field of each row
    # (counting the "INSERT INTO ... VALUES (id" prefix as field 1).
    # NOTE(review): this assumes none of the first 45 fields contains a
    # comma inside a quoted string -- confirm against your data.
    if ! sed -E \
      -e "s/(([^,]*,){45})(NULL|'[^']*')/\1DEFAULT/" "$DUMP_DIR/mt_leads.sql" > "$TEMP_FILE"; then
      echo "Error: Failed to fix $table.sql."
      exit 1
    fi
  fi
  if [ ! -s "$TEMP_FILE" ]; then
    echo "Error: Failed to fix $table.sql."
    exit 1
  fi
  # The schema rewrite must have removed every _utf8mb4 introducer.
  if grep -q "_utf8mb4" "$TEMP_FILE"; then
    echo "Error: '_utf8mb4' still present in $table.sql."
    exit 1
  fi
  if [ "$table" = "mt_email_stats" ] && grep -q "'[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}'" "$TEMP_FILE"; then
    echo "Warning: Possible date values remain in $table.sql. Checking INSERT statements..."
    # Dates in date_sent/date_read/last_opened are legitimate; only a date
    # that survives those filters indicates the generated value remains.
    if grep "'[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}'" "$TEMP_FILE" | grep -B 1 "INSERT INTO \`$table\`" | grep -v "date_sent" | grep -v "date_read" | grep -v "last_opened" | grep -q "'[0-9]\{4\}-[0-9]\{2\}-[0-9]\{2\}'"; then
      echo "Error: Date values still present in INSERT statements for generated_sent_date."
      exit 1
    fi
  fi
  # BUG FIX: the original domain check used plain grep (BRE), where '+' is
  # a literal character, so it could never match -- and its ERE form would
  # false-positive on legitimate domain-like data (emails, website URLs).
  # Instead verify the invariant the sed above establishes: the 46th
  # comma-separated field of every mt_leads row reads DEFAULT.
  if [ "$table" = "mt_leads" ]; then
    if grep "INSERT INTO \`mt_leads\`" "$TEMP_FILE" | grep -E -q -v "^([^,]*,){45}DEFAULT"; then
      echo "Error: Domain values still present in INSERT statements for generated_email_domain."
      exit 1
    fi
  fi
  # Batch single-row INSERTs into groups of 250 rows per statement.
  echo "Batching $table INSERT statements into groups of 250..."
  # Portable awk (the original relied on gawk-only 3-argument match()).
  # BUG FIX: a pending batch is flushed before any non-INSERT line, so
  # batched INSERTs are never emitted after the dump footer (UNLOCK
  # TABLES, ENABLE KEYS, ...) as the original END-only flush could do.
  awk -v tbl="$table" '
    function flush() {
      if (count > 0) {
        print prefix buf ";"
        count = 0
        buf = ""
      }
    }
    BEGIN {
      prefix = "INSERT INTO `" tbl "` VALUES "
      count = 0
      buf = ""
    }
    index($0, prefix) == 1 {
      row = substr($0, length(prefix) + 1)  # "(...);" -- one row per line
      sub(/;$/, "", row)                    # -> "(...)"
      buf = (count == 0) ? row : buf "," row
      if (++count == 250)
        flush()
      next
    }
    { flush(); print }
    END { flush() }
  ' "$TEMP_FILE" > "$BATCHED_FILE"
  if [ $? -ne 0 ] || [ ! -s "$BATCHED_FILE" ]; then
    echo "Error: Failed to batch $table.sql into $BATCHED_FILE."
    exit 1
  fi
  # Move the batched file into place and drop the intermediate file.
  mv "$BATCHED_FILE" "$FIXED_DIR/$table.sql"
  rm -f "$TEMP_FILE"
  echo "Successfully fixed and batched $table.sql"
done
# Step 3: Carry the untouched dumps over to the import directory.
echo "Moving other tables from $DUMP_DIR to $FIXED_DIR..."
for dump_file in "$DUMP_DIR"/*.sql; do
  dump_name=$(basename "$dump_file" .sql)
  case "$dump_name" in
    mt_email_stats|mt_leads) ;;  # already fixed and placed in FIXED_DIR
    *) cp "$dump_file" "$FIXED_DIR/$dump_name.sql" ;;
  esac
done
# Step 4: Import tables into the destination in dependency order, so
# foreign-key constraints are satisfied without disabling FK checks.
echo "Importing tables into $DST_DB on $DST_HOST in dependency order..."
# Import order based on Mautic foreign-key dependencies.
TABLE_ORDER=(
  "mt_ip_addresses" # No FKs
  "mt_leads"        # Referenced by mt_email_stats (lead_id)
  "mt_emails"       # Referenced by mt_email_stats (email_id)
  "mt_lead_lists"   # Referenced by mt_email_stats (list_id)
  "mt_email_copies" # Referenced by mt_email_stats (copy_id)
  "mt_email_stats"  # Depends on above tables
)
for table in "${TABLE_ORDER[@]}"; do
  if [ -f "$FIXED_DIR/$table.sql" ]; then
    echo "Importing table $table..."
    if ! mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" "$DST_DB" < "$FIXED_DIR/$table.sql"; then
      echo "Error: Failed to import table $table."
      exit 1
    fi
    # BUG FIX: the original appended `|| echo "0"` to the count query and
    # then tested `[ "$ROW_COUNT" ]`, which is true for ANY non-empty
    # string -- including the "0" fallback -- so verification could never
    # fail. Fetch the bare count (-N -s suppresses the header) and require
    # it to be numeric.
    ROW_COUNT=$(mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" -N -s -e "SELECT COUNT(*) FROM \`$table\`" "$DST_DB" 2>/dev/null)
    if [[ "$ROW_COUNT" =~ ^[0-9]+$ ]]; then
      echo "Imported $table with $ROW_COUNT rows."
    else
      echo "Error: Could not verify table $table after import."
      exit 1
    fi
    # Remove the file so the catch-all loop does not re-import this table.
    rm "$FIXED_DIR/$table.sql"
  fi
done
# Import all remaining tables (alphabetical glob order). Tables not in
# the dependency list have no FK ordering requirements among themselves.
for table_file in "$FIXED_DIR"/*.sql; do
  if [ -f "$table_file" ]; then
    table_name=$(basename "$table_file" .sql)
    echo "Importing table $table_name..."
    if ! mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" "$DST_DB" < "$table_file"; then
      echo "Error: Failed to import table $table_name."
      exit 1
    fi
    # BUG FIX: the original appended `|| echo "0"` to the count query and
    # then tested `[ "$ROW_COUNT" ]`, which is true for ANY non-empty
    # string -- including the "0" fallback -- so verification could never
    # fail. Fetch the bare count (-N -s suppresses the header) and require
    # it to be numeric.
    ROW_COUNT=$(mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" -N -s -e "SELECT COUNT(*) FROM \`$table_name\`" "$DST_DB" 2>/dev/null)
    if [[ "$ROW_COUNT" =~ ^[0-9]+$ ]]; then
      echo "Imported $table_name with $ROW_COUNT rows."
    else
      echo "Error: Could not verify table $table_name after import."
      exit 1
    fi
  fi
done
# Step 5: Spot-check the generated columns on the destination: the
# column expression must match the expected definition, and one sample
# row is printed for a quick eyeball check.
for table in "mt_email_stats" "mt_leads"; do
  echo "Verifying $table schema and sample data..."
  # Pick the generated column, its expected expression, and a sample query.
  if [ "$table" = "mt_email_stats" ]; then
    gen_column="generated_sent_date"
    expected_expr="concat(year(\`date_sent\`),'-',lpad(month(\`date_sent\`),2,'0'),'-',lpad(dayofmonth(\`date_sent\`),2,'0'))"
    sample_query="SELECT date_sent, generated_sent_date FROM $table LIMIT 1"
  elif [ "$table" = "mt_leads" ]; then
    gen_column="generated_email_domain"
    expected_expr="substr(\`email\`,locate('@',\`email\`) + 1)"
    sample_query="SELECT email, generated_email_domain FROM $table LIMIT 1"
  fi
  SCHEMA=$(mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" -e "SHOW CREATE TABLE $table\G" "$DST_DB" 2>/dev/null | grep "$gen_column")
  if echo "$SCHEMA" | grep -q "$expected_expr"; then
    echo "Schema for $gen_column looks correct."
  else
    echo "Error: $gen_column schema does not match expected expression."
    exit 1
  fi
  SAMPLE=$(mysql -h "$DST_HOST" -P "$DST_PORT" -u "$DST_USER" -p"$DST_PASS" -e "$sample_query" "$DST_DB" 2>/dev/null)
  echo "Sample data for $table: $SAMPLE"
done
# Step 6: Remove the scratch directory used for intermediate files.
echo "Cleaning up temporary files and directory..."
rm -rf -- "$TEMP_DIR"
echo "Export, fix, batch, and import of all tables completed successfully."