From 1298659114d533fd48241aaed28ae6a05c804364 Mon Sep 17 00:00:00 2001 From: Charaf Rezrazi Date: Thu, 17 Mar 2022 08:49:40 +0100 Subject: [PATCH 01/20] added support for Laravel 9 --- composer.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/composer.json b/composer.json index 4ae9ea7..b544f83 100644 --- a/composer.json +++ b/composer.json @@ -19,8 +19,8 @@ "php": "^7.3 || ^8.0", "doctrine/dbal": "^2.0|^3.0", "fakerphp/faker": "^1.13", - "illuminate/console": "^7.0|^8.0", - "illuminate/support": "^7.0|^8.0" + "illuminate/console": "^7.0|^8.0|^9.0", + "illuminate/support": "^7.0|^8.0|^9.0" }, "require-dev": { "orchestra/testbench": "^6.12", From 3b42f68b9406ae19e75a54b8b814c9c7eb9f8983 Mon Sep 17 00:00:00 2001 From: Charaf Rezrazi Date: Thu, 17 Mar 2022 08:54:27 +0100 Subject: [PATCH 02/20] Bump testbench and phpunit versions --- composer.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/composer.json b/composer.json index b544f83..3083101 100644 --- a/composer.json +++ b/composer.json @@ -23,8 +23,8 @@ "illuminate/support": "^7.0|^8.0|^9.0" }, "require-dev": { - "orchestra/testbench": "^6.12", - "phpunit/phpunit": "^8.0", + "orchestra/testbench": "^6.12|^7.1", + "phpunit/phpunit": "^8.0|^9.5", "spatie/phpunit-snapshot-assertions": "^4.2" }, "autoload": { From 216f78933d0ae55b719434726816234227acf5ae Mon Sep 17 00:00:00 2001 From: kswilliames Date: Mon, 29 Aug 2022 13:36:32 +1000 Subject: [PATCH 03/20] feat: allow callable as definition for config caching --- src/Console/DumpDatabaseCommand.php | 1 + 1 file changed, 1 insertion(+) diff --git a/src/Console/DumpDatabaseCommand.php b/src/Console/DumpDatabaseCommand.php index cef7377..2ab4b5a 100644 --- a/src/Console/DumpDatabaseCommand.php +++ b/src/Console/DumpDatabaseCommand.php @@ -14,6 +14,7 @@ class DumpDatabaseCommand extends Command public function handle() { $definition = config('masked-dump.' . 
$this->option('definition')); + $definition = is_callable($definition) ? call_user_func($definition) : $definition; $definition->load(); $this->info('Starting Database dump'); From 20827e2c821a198a034ede6286ed1599e136f84a Mon Sep 17 00:00:00 2001 From: mechelon Date: Mon, 2 Oct 2023 13:12:14 +0200 Subject: [PATCH 04/20] Deprecated string handling --- src/DumpSchema.php | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/DumpSchema.php b/src/DumpSchema.php index d45d4c6..523de85 100644 --- a/src/DumpSchema.php +++ b/src/DumpSchema.php @@ -62,7 +62,7 @@ protected function getTable(string $tableName) }); if (is_null($table)) { - throw new \Exception("Invalid table name ${tableName}"); + throw new \Exception("Invalid table name {$tableName}"); } return $table; From 90207ba0cdddd8e741d77b3171041c8832ef2d0b Mon Sep 17 00:00:00 2001 From: Shift Date: Tue, 27 Feb 2024 19:42:11 +0000 Subject: [PATCH 05/20] Bump dependencies for Laravel 11 --- composer.json | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/composer.json b/composer.json index f7f13be..ca05b35 100644 --- a/composer.json +++ b/composer.json @@ -19,13 +19,13 @@ "php": "^7.3 || ^8.0", "doctrine/dbal": "^2.0|^3.0", "fakerphp/faker": "^1.13", - "illuminate/console": "^7.0|^8.0|^9.0|^10.0", - "illuminate/support": "^7.0|^8.0|^9.0|^10.0" + "illuminate/console": "^7.0|^8.0|^9.0|^10.0 || ^11.0", + "illuminate/support": "^7.0|^8.0|^9.0|^10.0 || ^11.0" }, "require-dev": { - "orchestra/testbench": "^6.12|^7.0|^8.0", - "phpunit/phpunit": "^8.0 || ^9.0", - "spatie/phpunit-snapshot-assertions": "^4.2" + "orchestra/testbench": "^6.12|^7.0|^8.0 || ^9.0", + "phpunit/phpunit": "^8.0 || ^9.0 || ^10.5", + "spatie/phpunit-snapshot-assertions": "^4.2 || ^5.1" }, "autoload": { "psr-4": { @@ -40,7 +40,6 @@ "scripts": { "test": "vendor/bin/phpunit", "test-coverage": "vendor/bin/phpunit --coverage-html coverage" - }, "config": { "sort-packages": true From 
e06094fe4aed0b2818f38c26f6f73ccae2a224fe Mon Sep 17 00:00:00 2001 From: Di Date: Thu, 26 Sep 2024 10:01:17 +0200 Subject: [PATCH 06/20] PHP Unit config --- phpunit.xml.dist | 18 +----------------- 1 file changed, 1 insertion(+), 17 deletions(-) diff --git a/phpunit.xml.dist b/phpunit.xml.dist index 1eef57c..4e91cd1 100644 --- a/phpunit.xml.dist +++ b/phpunit.xml.dist @@ -1,12 +1,8 @@ @@ -14,16 +10,4 @@ tests - - - src/ - - - - - - - - - From e4e4354281c7da171a55ebaf8c373a7404648692 Mon Sep 17 00:00:00 2001 From: Di Date: Thu, 26 Sep 2024 10:04:14 +0200 Subject: [PATCH 07/20] Laravel 11 Support --- src/DumpSchema.php | 76 ++++++++++++++++++- src/LaravelMaskedDump.php | 38 ++++++++-- ...mp_all_tables_without_modifications__1.txt | 52 ++++++++++--- ..._dump_certain_tables_as_schema_only__1.txt | 46 +++++++++-- .../DumperTest__it_can_mask_user_names__1.txt | 52 ++++++++++--- ...n_replace_columns_with_faker_values__1.txt | 54 ++++++++++--- ..._replace_columns_with_static_values__1.txt | 52 ++++++++++--- 7 files changed, 305 insertions(+), 65 deletions(-) diff --git a/src/DumpSchema.php b/src/DumpSchema.php index 523de85..e8eabdd 100644 --- a/src/DumpSchema.php +++ b/src/DumpSchema.php @@ -5,7 +5,8 @@ use Faker\Factory; use Doctrine\DBAL\Schema\Table; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; -use Illuminate\Support\Facades\DB; +use Doctrine\DBAL\Types\Types; +use Illuminate\Support\Facades\Schema; class DumpSchema { @@ -48,11 +49,19 @@ public function allTables() } /** - * @return \Illuminate\Database\ConnectionInterface + * @return \Illuminate\Database\Schema\Builder + */ + public function getBuilder() + { + return Schema::connection($this->connectionName); + } + + /** + * @return \Illuminate\Database\Connection */ public function getConnection() { - return DB::connection($this->connectionName); + return Schema::connection($this->connectionName)->getConnection(); } protected function getTable(string $tableName) @@ -82,9 +91,68 @@ protected 
function loadAvailableTables() return; } - $this->availableTables = $this->getConnection()->getDoctrineSchemaManager()->listTables(); + $this->availableTables = $this->createDoctrineTables($this->getBuilder()->getTables()); } + protected function createDoctrineTables(array $tables): array + { + $doctrineTables = []; + + foreach ($tables as $table) { + $columns = $this->getBuilder()->getColumns($table['name']); + + $table = new Table($table['name']); + + foreach ($columns as $column) { + $type = $this->mapType($column['type_name']); + $table->addColumn( + $column['name'], + $type + ); + } + + $doctrineTables[] = $table; + } + + return $doctrineTables; + } + + protected function mapType(string $typeName): string + { + switch ($typeName) { + case 'char': + case 'varchar': + return Types::STRING; + case 'int': + case 'integer': + return Types::INTEGER; + case 'text': + case 'longtext': + case 'mediumtext': + return Types::TEXT; + case 'date': + return Types::DATE_MUTABLE; + case 'datetime': + case 'timestamp': + return Types::DATETIME_MUTABLE; + case 'bigint': + case 'mediumint': + return Types::BIGINT; + case 'tinyint': + case 'smallint': + return Types::SMALLINT; + case 'binary': + return Types::BINARY; + case 'json': + return Types::JSON; + case 'decimal': + return Types::DECIMAL; + default: + return Types::TEXT; + } + } + + public function load() { $this->loadAvailableTables(); diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index 49309f3..66ea47b 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -6,6 +6,13 @@ use Doctrine\DBAL\Schema\Schema; use Illuminate\Console\OutputStyle; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; +use Doctrine\DBAL\Platforms\MariaDBPlatform; +use Doctrine\DBAL\Platforms\MySQLPlatform; +use Doctrine\DBAL\Platforms\PostgreSQLPlatform; +use Doctrine\DBAL\Platforms\SqlitePlatform; +use Doctrine\DBAL\Platforms\SQLServerPlatform; +use Illuminate\Database\Connection as 
DatabaseConnection; +use Doctrine\DBAL\Platforms\AbstractPlatform; class LaravelMaskedDump { @@ -15,10 +22,14 @@ class LaravelMaskedDump /** @var OutputStyle */ protected $output; + /** @var AbstractPlatform */ + protected $platform; + public function __construct(DumpSchema $definition, OutputStyle $output) { $this->definition = $definition; $this->output = $output; + $this->platform = $this->getPlatform($this->definition->getConnection()); } public function dump() @@ -49,10 +60,7 @@ public function dump() protected function transformResultForInsert($row, TableDefinition $table) { - /** @var Connection $connection */ - $connection = $this->definition->getConnection()->getDoctrineConnection(); - - return collect($row)->map(function ($value, $column) use ($connection, $table) { + return collect($row)->map(function ($value, $column) use ($table) { if ($columnDefinition = $table->findColumn($column)) { $value = $columnDefinition->modifyValue($value); } @@ -64,17 +72,31 @@ protected function transformResultForInsert($row, TableDefinition $table) return '""'; } - return $connection->quote($value); + return $this->platform->quoteStringLiteral($value); })->toArray(); } protected function dumpSchema(TableDefinition $table) { - $platform = $this->definition->getConnection()->getDoctrineSchemaManager()->getDatabasePlatform(); - $schema = new Schema([$table->getDoctrineTable()]); - return implode(";", $schema->toSql($platform)) . ";" . PHP_EOL; + return implode(";", $schema->toSql($this->platform)) . ";" . 
PHP_EOL; + } + + protected function getPlatform(DatabaseConnection $connection) + { + switch ($connection->getDriverName()) { + case 'mysql': + return new MySQLPlatform; + case 'mariadb': + return new MariaDBPlatform; + case 'pgsql': + return new PostgreSQLPlatform; + case 'sqlite': + return new SqlitePlatform; + default: + throw new \RuntimeException("Unsupported platform: {$connection->getDriverName()}"); + } } protected function lockTable(string $tableName) diff --git a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt index f333155..f75cd65 100644 --- a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt @@ -1,26 +1,56 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception 
CLOB NOT NULL, failed_at DATETIME NOT NULL); LOCK TABLES `failed_jobs` WRITE; ALTER TABLE `failed_jobs` DISABLE KEYS; ALTER TABLE `failed_jobs` ENABLE KEYS; UNLOCK TABLES; +DROP TABLE IF EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); LOCK TABLES `migrations` WRITE; ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', 
'1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); ALTER TABLE `migrations` ENABLE KEYS; UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; UNLOCK TABLES; DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); LOCK TABLES 
`users` WRITE; ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt index 783facb..56aa9c9 100644 --- a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt @@ -1,16 +1,46 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); LOCK TABLES `failed_jobs` WRITE; ALTER TABLE `failed_jobs` DISABLE KEYS; ALTER TABLE `failed_jobs` ENABLE KEYS; UNLOCK TABLES; +DROP TABLE IF 
EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT 
NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; UNLOCK TABLES; DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); diff --git a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt index 8c3c43c..5049efb 100644 --- a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt @@ -1,26 +1,56 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", 
payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); LOCK TABLES `failed_jobs` WRITE; ALTER TABLE `failed_jobs` DISABLE KEYS; ALTER TABLE `failed_jobs` ENABLE KEYS; UNLOCK TABLES; +DROP TABLE IF EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); LOCK TABLES `migrations` WRITE; ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO 
`migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); ALTER TABLE `migrations` ENABLE KEYS; UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; UNLOCK TABLES; DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at 
DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); LOCK TABLES `users` WRITE; ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'xxxxxx', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt index 5def904..1078ade 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt @@ -1,28 +1,58 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs 
(uuid); +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); LOCK TABLES `failed_jobs` WRITE; ALTER TABLE `failed_jobs` DISABLE KEYS; ALTER TABLE `failed_jobs` ENABLE KEYS; UNLOCK TABLES; +DROP TABLE IF EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); LOCK TABLES `migrations` WRITE; ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', 
'0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); ALTER TABLE `migrations` ENABLE KEYS; UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; UNLOCK TABLES; DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, 
email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); LOCK TABLES `users` WRITE; ALTER TABLE `users` DISABLE KEYS; -INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'morgan93@example.net', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'joy.schultz@example.org', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); ALTER TABLE `users` ENABLE KEYS; UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt index f333155..f75cd65 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt @@ -1,26 +1,56 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE 
"BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); LOCK TABLES `failed_jobs` WRITE; ALTER TABLE `failed_jobs` DISABLE KEYS; ALTER TABLE `failed_jobs` ENABLE KEYS; UNLOCK TABLES; +DROP TABLE IF EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); LOCK TABLES `migrations` WRITE; ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', 
'2019_08_19_000000_testbench_create_failed_jobs_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); ALTER TABLE `migrations` ENABLE KEYS; UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; UNLOCK TABLES; DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX 
users_email_unique ON users (email); +CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); LOCK TABLES `users` WRITE; ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); From 0229cd29320f514528f14b3d50875e6a6930eb81 Mon Sep 17 00:00:00 2001 From: Di Date: Thu, 26 Sep 2024 10:05:33 +0200 Subject: [PATCH 08/20] Exclude tables by @k2idev #9 --- src/DumpSchema.php | 18 ++++++- src/LaravelMaskedDump.php | 3 +- tests/DumperTest.php | 27 ++++++++++ ...move_excluded_tables_from_allTables__1.txt | 51 +++++++++++++++++++ 4 files changed, 95 insertions(+), 4 deletions(-) create mode 100644 tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt diff --git a/src/DumpSchema.php b/src/DumpSchema.php index e8eabdd..792ead6 100644 --- a/src/DumpSchema.php +++ b/src/DumpSchema.php @@ -16,6 +16,7 @@ class DumpSchema protected $loadAllTables = false; protected $customizedTables = []; + protected $excludedTables = []; public function __construct($connectionName = null) { @@ -48,6 +49,13 @@ public function allTables() return $this; } + public function exclude(string $tableName) + { + $this->excludedTables[] = $tableName; + + return $this; + } + /** * @return \Illuminate\Database\Schema\Builder */ @@ -158,9 +166,15 @@ public function load() $this->loadAvailableTables(); if ($this->loadAllTables) { - $this->dumpTables = collect($this->availableTables)->mapWithKeys(function (Table $table) { + $dumpTables = collect($this->availableTables)->mapWithKeys(function (Table $table) { return [$table->getName() => new TableDefinition($table)]; - 
})->toArray(); + }); + + $excluded = $this->excludedTables; + $this->dumpTables = $dumpTables + ->filter(function ($table, $tableName) use ($excluded) { + return !in_array($tableName, $excluded); + })->toArray(); } foreach ($this->customizedTables as $tableName => $tableDefinition) { diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index 66ea47b..10f0024 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -2,7 +2,6 @@ namespace BeyondCode\LaravelMaskedDumper; -use Doctrine\DBAL\Connection; use Doctrine\DBAL\Schema\Schema; use Illuminate\Console\OutputStyle; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; @@ -125,7 +124,7 @@ protected function dumpTableData(TableDefinition $table) $row = $this->transformResultForInsert((array)$row, $table); $tableName = $table->getDoctrineTable()->getName(); - $query .= "INSERT INTO `${tableName}` (`" . implode('`, `', array_keys($row)) . '`) VALUES '; + $query .= "INSERT INTO `$tableName` (`" . implode('`, `', array_keys($row)) . 
'`) VALUES '; $query .= "("; $firstColumn = true; diff --git a/tests/DumperTest.php b/tests/DumperTest.php index 7dd7cb9..aef176d 100644 --- a/tests/DumperTest.php +++ b/tests/DumperTest.php @@ -171,4 +171,31 @@ public function it_can_dump_certain_tables_as_schema_only() $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); } + + /** @test */ + public function it_does_remove_excluded_tables_from_allTables() + { + $this->loadLaravelMigrations(); + + DB::table('users') + ->insert([ + 'name' => 'Marcel', + 'email' => 'marcel@beyondco.de', + 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', + 'updated_at' => '2021-01-01 00:00:00', + ]); + + $outputFile = base_path('test.sql'); + + $this->app['config']['masked-dump.default'] = DumpSchema::define() + ->allTables() + ->exclude('users'); + + $this->artisan('db:masked-dump', [ + 'output' => $outputFile + ]); + + $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); + } } diff --git a/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt new file mode 100644 index 0000000..0b7e0ff --- /dev/null +++ b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt @@ -0,0 +1,51 @@ +DROP TABLE IF EXISTS `cache`; +CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache` WRITE; +ALTER TABLE `cache` DISABLE KEYS; +ALTER TABLE `cache` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `cache_locks`; +CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); +LOCK TABLES `cache_locks` WRITE; +ALTER TABLE `cache_locks` DISABLE KEYS; +ALTER TABLE `cache_locks` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `failed_jobs`; +CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT 
NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); +LOCK TABLES `failed_jobs` WRITE; +ALTER TABLE `failed_jobs` DISABLE KEYS; +ALTER TABLE `failed_jobs` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `job_batches`; +CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); +LOCK TABLES `job_batches` WRITE; +ALTER TABLE `job_batches` DISABLE KEYS; +ALTER TABLE `job_batches` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `jobs`; +CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); +LOCK TABLES `jobs` WRITE; +ALTER TABLE `jobs` DISABLE KEYS; +ALTER TABLE `jobs` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `migrations`; +CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); +LOCK TABLES `migrations` WRITE; +ALTER TABLE `migrations` DISABLE KEYS; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +ALTER TABLE `migrations` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `password_reset_tokens`; +CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); +LOCK TABLES `password_reset_tokens` WRITE; +ALTER TABLE `password_reset_tokens` DISABLE KEYS; +ALTER TABLE `password_reset_tokens` 
ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `sessions`; +CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); +LOCK TABLES `sessions` WRITE; +ALTER TABLE `sessions` DISABLE KEYS; +ALTER TABLE `sessions` ENABLE KEYS; +UNLOCK TABLES; From b253c6f8b3d4faddfd1347b246038f28d84e24cd Mon Sep 17 00:00:00 2001 From: Di Date: Thu, 26 Sep 2024 10:28:28 +0200 Subject: [PATCH 09/20] Update supported platforms --- src/LaravelMaskedDump.php | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index 10f0024..062d6b8 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -7,9 +7,7 @@ use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; use Doctrine\DBAL\Platforms\MariaDBPlatform; use Doctrine\DBAL\Platforms\MySQLPlatform; -use Doctrine\DBAL\Platforms\PostgreSQLPlatform; use Doctrine\DBAL\Platforms\SqlitePlatform; -use Doctrine\DBAL\Platforms\SQLServerPlatform; use Illuminate\Database\Connection as DatabaseConnection; use Doctrine\DBAL\Platforms\AbstractPlatform; @@ -89,12 +87,11 @@ protected function getPlatform(DatabaseConnection $connection) return new MySQLPlatform; case 'mariadb': return new MariaDBPlatform; - case 'pgsql': - return new PostgreSQLPlatform; - case 'sqlite': - return new SqlitePlatform; default: - throw new \RuntimeException("Unsupported platform: {$connection->getDriverName()}"); + if ($connection->getDriverName() === 'sqlite' && $this->isTesting()) { + return new SqlitePlatform; + } + throw new \RuntimeException("Unsupported platform: {$connection->getDriverName()}. 
Please check the documentation for more information."); } } @@ -141,4 +138,8 @@ protected function dumpTableData(TableDefinition $table) return $query; } + + protected function isTesting(): bool { + return config('app.env') === 'workbench' || config('app.env') === 'ci'; + } } From 4f6a45219775fe78185d5c9fce70e634d5b0b1f5 Mon Sep 17 00:00:00 2001 From: Alex Staenke Date: Fri, 5 Mar 2021 09:15:48 +0100 Subject: [PATCH 10/20] Added TableDefinition::outputInChunksOf(int $chunkSize) to allow for generation of chunked INSERT statements Added test for outputInChunksOf() Added snapshot Modified LaravelMaskedDump::dumpTableData() to check for chunked output generation --- src/LaravelMaskedDump.php | 80 +++++++++++++++---- src/TableDefinitions/TableDefinition.php | 13 +++ tests/DumperTest.php | 41 ++++++++++ ...unked_insert_statements_for_a_table__1.txt | 29 +++++++ 4 files changed, 148 insertions(+), 15 deletions(-) create mode 100644 tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index 062d6b8..b41f32e 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -111,30 +111,80 @@ protected function dumpTableData(TableDefinition $table) { $query = ''; - $queryBuilder = $this->definition->getConnection() - ->table($table->getDoctrineTable()->getName()); + $queryBuilder = $this->definition->getConnection()->table($table->getDoctrineTable()->getName()); $table->modifyQuery($queryBuilder); - $queryBuilder->get() - ->each(function ($row, $index) use ($table, &$query) { - $row = $this->transformResultForInsert((array)$row, $table); - $tableName = $table->getDoctrineTable()->getName(); + + if($table->getChunkSize() > 0) { + + $data = $queryBuilder->get(); + + if($data->isEmpty()) { + return ""; + } + + $tableName = $table->getDoctrineTable()->getName(); + $columns = array_keys((array)$data->first()); + $column_names = "(`" . join('`, `', $columns) . 
"`)"; + + // When tables have 1000+ rows we must split them in reasonably sized chunks of e.g. 100 + // otherwise the INSERT statement will fail + // this returns a collection of value tuples + + $valuesChunks = $data + ->chunk($table->getChunkSize()) + ->map(function($chunk) use($table) { + // for each chunk we generate a list of VALUES for the INSERT statement + // (1, 'some 1', 'data A'), + // (2, 'some 2', 'data B'), + // (3, 'some 3', 'data C'), + // ... etc + + $values = $chunk->map(function($row) use($table) { + $row = $this->transformResultForInsert((array)$row, $table); + $query = '(' . join(', ', $row) . ')'; + return $query; + })->join(', '); + + return $values; + }); + + // Now we generate the INSERT statements for each chunk of values + // INSERT INTO table VALUES (1, 'some 1', 'data A'), (2, 'some 2', 'data B'), (3, 'some 3', 'data C')... + $insert_statement = $valuesChunks->map( + + function($values) use($table, $tableName, $column_names) { + + return "INSERT INTO `${tableName}` $column_names VALUES " . $values .';'; + + }) + ->join(PHP_EOL); + + return $insert_statement . PHP_EOL; + + } else { + + // orig + $queryBuilder->get() + ->each(function ($row, $index) use ($table, &$query) { + $row = $this->transformResultForInsert((array)$row, $table); + $tableName = $table->getDoctrineTable()->getName(); $query .= "INSERT INTO `$tableName` (`" . implode('`, `', array_keys($row)) . '`) VALUES '; $query .= "("; - $firstColumn = true; - foreach ($row as $value) { - if (!$firstColumn) { - $query .= ", "; + $firstColumn = true; + foreach ($row as $value) { + if (!$firstColumn) { + $query .= ", "; + } + $query .= $value; + $firstColumn = false; } - $query .= $value; - $firstColumn = false; - } - $query .= ");" . PHP_EOL; - }); + $query .= ");" . 
PHP_EOL; + }); return $query; } diff --git a/src/TableDefinitions/TableDefinition.php b/src/TableDefinitions/TableDefinition.php index c8416ea..b7b32c0 100644 --- a/src/TableDefinitions/TableDefinition.php +++ b/src/TableDefinitions/TableDefinition.php @@ -15,6 +15,7 @@ class TableDefinition protected $dumpType; protected $query; protected $columns = []; + protected $chunkSize = 0; public function __construct(Table $table) { @@ -36,6 +37,13 @@ public function fullDump() return $this; } + public function outputInChunksOf(int $chunkSize) + { + $this->chunkSize = $chunkSize; + + return $this; + } + public function query(callable $callable) { $this->query = $callable; @@ -68,6 +76,11 @@ public function findColumn(string $column) return false; } + public function getChunkSize() + { + return $this->chunkSize; + } + public function getDoctrineTable() { return $this->table; diff --git a/tests/DumperTest.php b/tests/DumperTest.php index aef176d..be73046 100644 --- a/tests/DumperTest.php +++ b/tests/DumperTest.php @@ -198,4 +198,45 @@ public function it_does_remove_excluded_tables_from_allTables() $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); } + + /** @test */ + public function it_creates_chunked_insert_statements_for_a_table() + { + $this->loadLaravelMigrations(); + + DB::table('users') + ->insert(['name' => 'Marcel1', 'email' => 'marcel1@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel2', 'email' => 'marcel2@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel3', 'email' => 'marcel3@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel4', 'email' => 'marcel4@beyondco.de', 'password' => 'test', + 
'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel5', 'email' => 'marcel5@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + + $outputFile = base_path('test.sql'); + + $this->app['config']['masked-dump.default'] = DumpSchema::define() + ->allTables() + ->table('users', function($table) { + return $table->outputInChunksOf(3); + }); + + $this->artisan('db:masked-dump', [ + 'output' => $outputFile + ]); + + $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); + } } diff --git a/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt new file mode 100644 index 0000000..47e7f81 --- /dev/null +++ b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt @@ -0,0 +1,29 @@ +DROP TABLE IF EXISTS `failed_jobs`; +CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE BINARY, connection CLOB NOT NULL COLLATE BINARY, queue CLOB NOT NULL COLLATE BINARY, payload CLOB NOT NULL COLLATE BINARY, exception CLOB NOT NULL COLLATE BINARY, failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); +LOCK TABLES `failed_jobs` WRITE; +ALTER TABLE `failed_jobs` DISABLE KEYS; +ALTER TABLE `failed_jobs` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `migrations`; +CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE BINARY, batch INTEGER NOT NULL); +LOCK TABLES `migrations` WRITE; +ALTER TABLE `migrations` DISABLE KEYS; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES 
('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); +ALTER TABLE `migrations` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `password_resets`; +CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE BINARY, token VARCHAR(255) NOT NULL COLLATE BINARY, created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); +LOCK TABLES `password_resets` WRITE; +ALTER TABLE `password_resets` DISABLE KEYS; +ALTER TABLE `password_resets` ENABLE KEYS; +UNLOCK TABLES; +DROP TABLE IF EXISTS `users`; +CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE BINARY, email VARCHAR(255) NOT NULL COLLATE BINARY, email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE BINARY, remember_token VARCHAR(255) DEFAULT NULL COLLATE BINARY, created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +LOCK TABLES `users` WRITE; +ALTER TABLE `users` DISABLE KEYS; +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel1', 'marcel1@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('2', 'Marcel2', 'marcel2@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('3', 'Marcel3', 'marcel3@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('4', 'Marcel4', 'marcel4@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('5', 'Marcel5', 'marcel5@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); +ALTER TABLE `users` 
ENABLE KEYS; +UNLOCK TABLES; From d6a296695ed07fc719d7742c4fd8890ecf741bab Mon Sep 17 00:00:00 2001 From: Di Date: Thu, 26 Sep 2024 13:54:34 +0200 Subject: [PATCH 11/20] Remove schema from dump output; support for SQLite and PostgreSQL; outputInChunksOf (by @k2idev, #9) --- config/masked-dump.php | 2 - src/DumpSchema.php | 54 +-------- src/LaravelMaskedDump.php | 110 ++++++------------ src/TableDefinitions/TableDefinition.php | 7 -- tests/DumperTest.php | 4 +- ...mp_all_tables_without_modifications__1.txt | 54 --------- ..._dump_certain_tables_as_schema_only__1.txt | 50 +------- .../DumperTest__it_can_mask_user_names__1.txt | 54 --------- ...n_replace_columns_with_faker_values__1.txt | 54 --------- ..._replace_columns_with_static_values__1.txt | 54 --------- ...unked_insert_statements_for_a_table__1.txt | 34 +----- ...move_excluded_tables_from_allTables__1.txt | 48 -------- 12 files changed, 51 insertions(+), 474 deletions(-) diff --git a/config/masked-dump.php b/config/masked-dump.php index d26d9e8..6d5459a 100644 --- a/config/masked-dump.php +++ b/config/masked-dump.php @@ -19,6 +19,4 @@ }); $table->mask('password'); }) - ->schemaOnly('failed_jobs') - ->schemaOnly('password_reset_tokens'), ]; diff --git a/src/DumpSchema.php b/src/DumpSchema.php index 792ead6..5a7c623 100644 --- a/src/DumpSchema.php +++ b/src/DumpSchema.php @@ -28,13 +28,6 @@ public static function define($connectionName = null) return new static($connectionName); } - public function schemaOnly(string $tableName) - { - return $this->table($tableName, function (TableDefinition $table) { - $table->schemaOnly(); - }); - } - public function table(string $tableName, callable $tableDefinition) { $this->customizedTables[$tableName] = $tableDefinition; @@ -109,58 +102,21 @@ protected function createDoctrineTables(array $tables): array foreach ($tables as $table) { $columns = $this->getBuilder()->getColumns($table['name']); - $table = new Table($table['name']); - + $doctrineTable = new 
Table($table['name']); foreach ($columns as $column) { - $type = $this->mapType($column['type_name']); - $table->addColumn( + + $doctrineTable->addColumn( $column['name'], - $type + Types::STRING, // doesn't matter, but is required ); } - $doctrineTables[] = $table; + $doctrineTables[] = $doctrineTable; } return $doctrineTables; } - protected function mapType(string $typeName): string - { - switch ($typeName) { - case 'char': - case 'varchar': - return Types::STRING; - case 'int': - case 'integer': - return Types::INTEGER; - case 'text': - case 'longtext': - case 'mediumtext': - return Types::TEXT; - case 'date': - return Types::DATE_MUTABLE; - case 'datetime': - case 'timestamp': - return Types::DATETIME_MUTABLE; - case 'bigint': - case 'mediumint': - return Types::BIGINT; - case 'tinyint': - case 'smallint': - return Types::SMALLINT; - case 'binary': - return Types::BINARY; - case 'json': - return Types::JSON; - case 'decimal': - return Types::DECIMAL; - default: - return Types::TEXT; - } - } - - public function load() { $this->loadAvailableTables(); diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index b41f32e..6ea4063 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -2,7 +2,6 @@ namespace BeyondCode\LaravelMaskedDumper; -use Doctrine\DBAL\Schema\Schema; use Illuminate\Console\OutputStyle; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; use Doctrine\DBAL\Platforms\MariaDBPlatform; @@ -10,6 +9,7 @@ use Doctrine\DBAL\Platforms\SqlitePlatform; use Illuminate\Database\Connection as DatabaseConnection; use Doctrine\DBAL\Platforms\AbstractPlatform; +use Doctrine\DBAL\Platforms\PostgreSQLPlatform; class LaravelMaskedDump { @@ -22,11 +22,18 @@ class LaravelMaskedDump /** @var AbstractPlatform */ protected $platform; + /** @var string */ + protected $escapeString = "`"; + public function __construct(DumpSchema $definition, OutputStyle $output) { $this->definition = $definition; $this->output = $output; 
$this->platform = $this->getPlatform($this->definition->getConnection()); + + if($this->platform instanceof PostgreSQLPlatform) { + $this->escapeString = '"'; + } } public function dump() @@ -38,15 +45,8 @@ public function dump() $overallTableProgress = $this->output->createProgressBar(count($tables)); foreach ($tables as $tableName => $table) { - $query .= "DROP TABLE IF EXISTS `$tableName`;" . PHP_EOL; - $query .= $this->dumpSchema($table); - if ($table->shouldDumpData()) { - $query .= $this->lockTable($tableName); - $query .= $this->dumpTableData($table); - - $query .= $this->unlockTable($tableName); } $overallTableProgress->advance(); @@ -73,40 +73,22 @@ protected function transformResultForInsert($row, TableDefinition $table) })->toArray(); } - protected function dumpSchema(TableDefinition $table) - { - $schema = new Schema([$table->getDoctrineTable()]); - - return implode(";", $schema->toSql($this->platform)) . ";" . PHP_EOL; - } - protected function getPlatform(DatabaseConnection $connection) { switch ($connection->getDriverName()) { case 'mysql': return new MySQLPlatform; + case 'pgsql': + return new PostgreSQLPlatform; + case 'sqlite': + return new SqlitePlatform; case 'mariadb': return new MariaDBPlatform; default: - if ($connection->getDriverName() === 'sqlite' && $this->isTesting()) { - return new SqlitePlatform; - } throw new \RuntimeException("Unsupported platform: {$connection->getDriverName()}. Please check the documentation for more information."); } } - protected function lockTable(string $tableName) - { - return "LOCK TABLES `$tableName` WRITE;" . PHP_EOL . - "ALTER TABLE `$tableName` DISABLE KEYS;" . PHP_EOL; - } - - protected function unlockTable(string $tableName) - { - return "ALTER TABLE `$tableName` ENABLE KEYS;" . PHP_EOL . - "UNLOCK TABLES;" . 
PHP_EOL; - } - protected function dumpTableData(TableDefinition $table) { $query = ''; @@ -115,64 +97,47 @@ protected function dumpTableData(TableDefinition $table) $table->modifyQuery($queryBuilder); + $tableName = $table->getDoctrineTable()->getName(); + $tableName = "$this->escapeString$tableName$this->escapeString"; - if($table->getChunkSize() > 0) { + if ($table->getChunkSize() > 0) { $data = $queryBuilder->get(); - if($data->isEmpty()) { + if ($data->isEmpty()) { return ""; } $tableName = $table->getDoctrineTable()->getName(); $columns = array_keys((array)$data->first()); - $column_names = "(`" . join('`, `', $columns) . "`)"; - - // When tables have 1000+ rows we must split them in reasonably sized chunks of e.g. 100 - // otherwise the INSERT statement will fail - // this returns a collection of value tuples + $column_names = "($this->escapeString" . join("$this->escapeString, $this->escapeString", $columns) . "$this->escapeString)"; $valuesChunks = $data - ->chunk($table->getChunkSize()) - ->map(function($chunk) use($table) { - // for each chunk we generate a list of VALUES for the INSERT statement - // (1, 'some 1', 'data A'), - // (2, 'some 2', 'data B'), - // (3, 'some 3', 'data C'), - // ... etc - - $values = $chunk->map(function($row) use($table) { - $row = $this->transformResultForInsert((array)$row, $table); - $query = '(' . join(', ', $row) . ')'; - return $query; - })->join(', '); - - return $values; - }); - - // Now we generate the INSERT statements for each chunk of values - // INSERT INTO table VALUES (1, 'some 1', 'data A'), (2, 'some 2', 'data B'), (3, 'some 3', 'data C')... - $insert_statement = $valuesChunks->map( - - function($values) use($table, $tableName, $column_names) { - - return "INSERT INTO `${tableName}` $column_names VALUES " . 
$values .';'; + ->chunk($table->getChunkSize()) + ->map(function ($chunk) use ($table) { + $values = $chunk->map(function ($row) use ($table) { + $row = $this->transformResultForInsert((array)$row, $table); + $query = '(' . join(', ', $row) . ')'; + return $query; + })->join(', '); + + return $values; + }); - }) - ->join(PHP_EOL); + $insert_statement = $valuesChunks->map(function ($values) use ($table, $tableName, $column_names) { + return "INSERT INTO $tableName $column_names VALUES " . $values . ';'; + }) + ->join(PHP_EOL); return $insert_statement . PHP_EOL; - } else { - - // orig $queryBuilder->get() - ->each(function ($row, $index) use ($table, &$query) { + ->each(function ($row, $index) use ($table, &$query, $tableName) { $row = $this->transformResultForInsert((array)$row, $table); - $tableName = $table->getDoctrineTable()->getName(); - $query .= "INSERT INTO `$tableName` (`" . implode('`, `', array_keys($row)) . '`) VALUES '; - $query .= "("; + $query .= "INSERT INTO $tableName ($this->escapeString" . implode("$this->escapeString, $this->escapeString", array_keys($row)) . "$this->escapeString) VALUES "; + + $query .= "("; $firstColumn = true; foreach ($row as $value) { @@ -185,11 +150,8 @@ function($values) use($table, $tableName, $column_names) { $query .= ");" . 
PHP_EOL; }); + } return $query; } - - protected function isTesting(): bool { - return config('app.env') === 'workbench' || config('app.env') === 'ci'; - } } diff --git a/src/TableDefinitions/TableDefinition.php b/src/TableDefinitions/TableDefinition.php index b7b32c0..dfefd88 100644 --- a/src/TableDefinitions/TableDefinition.php +++ b/src/TableDefinitions/TableDefinition.php @@ -23,13 +23,6 @@ public function __construct(Table $table) $this->dumpType = static::DUMP_FULL; } - public function schemaOnly() - { - $this->dumpType = static::DUMP_SCHEMA; - - return $this; - } - public function fullDump() { $this->dumpType = static::DUMP_FULL; diff --git a/tests/DumperTest.php b/tests/DumperTest.php index be73046..ca20e52 100644 --- a/tests/DumperTest.php +++ b/tests/DumperTest.php @@ -161,9 +161,7 @@ public function it_can_dump_certain_tables_as_schema_only() $outputFile = base_path('test.sql'); $this->app['config']['masked-dump.default'] = DumpSchema::define() - ->allTables() - ->schemaOnly('migrations') - ->schemaOnly('users'); + ->allTables(); $this->artisan('db:masked-dump', [ 'output' => $outputFile diff --git a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt index f75cd65..4f6eb6d 100644 --- a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt @@ -1,58 +1,4 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; 
-ALTER TABLE `cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) 
NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt index 56aa9c9..4f6eb6d 100644 --- a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt @@ -1,46 +1,4 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, 
expiration INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; -ALTER TABLE `cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -DROP TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB 
NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt index 5049efb..c6b5539 100644 --- a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt @@ -1,58 +1,4 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; -ALTER TABLE `cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE 
failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` 
WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'xxxxxx', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt index 1078ade..0675a18 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt @@ -1,58 +1,4 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; -ALTER TABLE 
`cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, 
token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'joy.schultz@example.org', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt index f75cd65..4f6eb6d 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt @@ -1,58 +1,4 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration 
INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; -ALTER TABLE `cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP 
TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER NOT NULL, name VARCHAR(255) NOT NULL, email VARCHAR(255) NOT NULL, email_verified_at DATETIME NOT NULL, password VARCHAR(255) NOT NULL, remember_token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL, updated_at DATETIME NOT NULL); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt index 47e7f81..ea2ed5a 100644 --- a/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt +++ b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt @@ -1,29 +1,5 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE BINARY, connection CLOB NOT NULL COLLATE BINARY, queue CLOB NOT NULL COLLATE BINARY, payload CLOB NOT 
NULL COLLATE BINARY, exception CLOB NOT NULL COLLATE BINARY, failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE BINARY, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE BINARY, token VARCHAR(255) NOT NULL COLLATE BINARY, created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE BINARY, email VARCHAR(255) NOT NULL COLLATE BINARY, email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE BINARY, remember_token VARCHAR(255) DEFAULT NULL COLLATE BINARY, created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; -INSERT INTO `users` (`id`, `name`, `email`, 
`email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel1', 'marcel1@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('2', 'Marcel2', 'marcel2@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('3', 'Marcel3', 'marcel3@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('4', 'Marcel4', 'marcel4@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('5', 'Marcel5', 'marcel5@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +INSERT INTO users (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel1', 'marcel1@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('2', 'Marcel2', 'marcel2@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('3', 'Marcel3', 'marcel3@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); +INSERT INTO users (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('4', 'Marcel4', 'marcel4@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('5', 'Marcel5', 'marcel5@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git 
a/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt index 0b7e0ff..e1088c5 100644 --- a/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt +++ b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt @@ -1,51 +1,3 @@ -DROP TABLE IF EXISTS `cache`; -CREATE TABLE cache ("key" VARCHAR(255) NOT NULL, value CLOB NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache` WRITE; -ALTER TABLE `cache` DISABLE KEYS; -ALTER TABLE `cache` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `cache_locks`; -CREATE TABLE cache_locks ("key" VARCHAR(255) NOT NULL, owner VARCHAR(255) NOT NULL, expiration INTEGER NOT NULL); -LOCK TABLES `cache_locks` WRITE; -ALTER TABLE `cache_locks` DISABLE KEYS; -ALTER TABLE `cache_locks` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER NOT NULL, uuid VARCHAR(255) NOT NULL, connection CLOB NOT NULL, queue CLOB NOT NULL, payload CLOB NOT NULL, exception CLOB NOT NULL, failed_at DATETIME NOT NULL); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `job_batches`; -CREATE TABLE job_batches (id VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, total_jobs INTEGER NOT NULL, pending_jobs INTEGER NOT NULL, failed_jobs INTEGER NOT NULL, failed_job_ids CLOB NOT NULL, options CLOB NOT NULL, cancelled_at INTEGER NOT NULL, created_at INTEGER NOT NULL, finished_at INTEGER NOT NULL); -LOCK TABLES `job_batches` WRITE; -ALTER TABLE `job_batches` DISABLE KEYS; -ALTER TABLE `job_batches` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `jobs`; -CREATE TABLE jobs (id INTEGER NOT NULL, queue VARCHAR(255) NOT NULL, payload CLOB NOT NULL, attempts INTEGER NOT NULL, reserved_at INTEGER NOT NULL, available_at INTEGER NOT NULL, 
created_at INTEGER NOT NULL); -LOCK TABLES `jobs` WRITE; -ALTER TABLE `jobs` DISABLE KEYS; -ALTER TABLE `jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER NOT NULL, migration VARCHAR(255) NOT NULL, batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_reset_tokens`; -CREATE TABLE password_reset_tokens (email VARCHAR(255) NOT NULL, token VARCHAR(255) NOT NULL, created_at DATETIME NOT NULL); -LOCK TABLES `password_reset_tokens` WRITE; -ALTER TABLE `password_reset_tokens` DISABLE KEYS; -ALTER TABLE `password_reset_tokens` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `sessions`; -CREATE TABLE sessions (id VARCHAR(255) NOT NULL, user_id INTEGER NOT NULL, ip_address VARCHAR(255) NOT NULL, user_agent CLOB NOT NULL, payload CLOB NOT NULL, last_activity INTEGER NOT NULL); -LOCK TABLES `sessions` WRITE; -ALTER TABLE `sessions` DISABLE KEYS; -ALTER TABLE `sessions` ENABLE KEYS; -UNLOCK TABLES; From 3c33ca06cc715bde58c86d265ac4949f76425deb Mon Sep 17 00:00:00 2001 From: Di Date: Fri, 6 Dec 2024 11:09:34 +0100 Subject: [PATCH 12/20] Added docs folder --- .gitignore | 1 - docs/_index.md | 4 + docs/dumping-the-database.md | 32 ++++++++ docs/installation.md | 19 +++++ docs/schema-definition.md | 147 +++++++++++++++++++++++++++++++++++ 5 files changed, 202 insertions(+), 1 deletion(-) create mode 100644 docs/_index.md create mode 100644 docs/dumping-the-database.md create mode 100644 docs/installation.md create mode 100644 
docs/schema-definition.md diff --git a/.gitignore b/.gitignore index 896e906..7e3a05e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ build composer.lock -docs vendor coverage .phpunit.result.cache diff --git a/docs/_index.md b/docs/_index.md new file mode 100644 index 0000000..62bb162 --- /dev/null +++ b/docs/_index.md @@ -0,0 +1,4 @@ +--- +packageName: Laravel Masked DB Dump +githubUrl: https://github.com/beyondcode/laravel-masked-db-dump +--- \ No newline at end of file diff --git a/docs/dumping-the-database.md b/docs/dumping-the-database.md new file mode 100644 index 0000000..e718494 --- /dev/null +++ b/docs/dumping-the-database.md @@ -0,0 +1,32 @@ +--- +title: Dumping the Database +order: 3 +--- +# Dumping the Database + +After you have configured your dump schema, it's time to dump your tables. This can be done using the `db:masked-dump` artisan command. +The command expects one argument, which is the name of the output file to use. + +``` +php artisan db:masked-dump output.sql +``` + +Running this command, will use the `default` dump schema definition and write the resulting dump to a file called `output.sql`. + +## Changing Definitions + +In case that your configuration file contains multiple dump schema definitions, you can pass the definition to use to the command like this: + +``` +php artisan db:masked-dump output.sql --definition=sqlite +``` + +## GZip compression + +The default output is a plain text file - depending on the size of your dump, you might want to enable GZip compression. This can be done by passing the `--gzip` flag to the command: + +``` +php artisan db:masked-dump output.sql --gzip +``` + +This will write the compressed output to a file called `output.sql.gz`. 
\ No newline at end of file diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 0000000..ff61a36 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,19 @@ +--- +title: Installation +order: 1 +--- +# Installation + +To install the Laravel Masked DB Dump package, you can use composer: + +``` +composer require beyondcode/laravel-masked-db-dump +``` + +Next, you should publish the package configuration file, so that you can configure your dump schema: + +``` +php artisan vendor:publish --provider=BeyondCode\\LaravelMaskedDumper\\LaravelMaskedDumpServiceProvider +``` + +This will create a new file called `masked-dump.php` in your config folder. \ No newline at end of file diff --git a/docs/schema-definition.md b/docs/schema-definition.md new file mode 100644 index 0000000..4d65d96 --- /dev/null +++ b/docs/schema-definition.md @@ -0,0 +1,147 @@ +--- +title: Dump Schema Definition +order: 2 +--- +# Dump Schema Definition + +Your database dump configuration takes place in the `config/masked-dump.php` file. + +You can use the package's fluent API to define which tables should be dumped and which information should be replaced or masked during the dump process. + +This is the basic configuration that you'll receive after installing the package: + +```php + +use BeyondCode\LaravelMaskedDumper\DumpSchema; +use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; +use Faker\Generator as Faker; + +return [ + /** + * Use this dump schema definition to remove, replace or mask certain parts of your database tables. 
+ */ + 'default' => DumpSchema::define() + ->allTables() + ->table('users', function (TableDefinition $table) { + $table->replace('name', function (Faker $faker) { + return $faker->name; + }); + $table->replace('email', function (Faker $faker) { + return $faker->safeEmail; + }); + $table->mask('password'); + }) + ->schemaOnly('failed_jobs') + ->schemaOnly('password_resets'), +]; +``` + +## Defining which tables to dump + +The dump configuration allows you to specify which tables you want to dump. The simplest form of dumping your database can be achieved by using the `allTables()` method. +This ensures that all of your database tables will be represented in the dump. You can then go and customize how certain tables should be dumped: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables(), +]; +``` + +## Dumping table schemas only + +For certain tables, you do not need to dump the data, but only need the structure of the table itself - like a `password_reset` table. To instruct the masked dumper to only dump the schema, you may use the `schemaOnly` method: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables() + ->schemaOnly('password_resets'), +]; +``` + +This configuration will dump all of your tables - but for the `password_resets` table, it will not create any `INSERT` statements and only dumps the schema of this table. + +## Masking table column content + +To mask the content of a given table column, you can use the `mask` method on a custom table definition.
For example, let's mask the `password` column on our `users` table: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->mask('password'); + }) +]; +``` + +By default, the data will be masked using the `x` character, but you can also specify your own custom masking character as a second parameter: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->mask('password', '-'); + }) +]; +``` + +## Replacing table column content + +Instead of completely masking the content of a column, you can also replace the column content. The content can either be replaced with a static string, or you can make use of a callable and replace it with custom content - for example faker data. + +To replace a column with a static string, you can use the `replace` method and pass the string to use as a replacement as the second argument: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->replace('name', 'John Doe'); + }) +]; +``` + +This configuration will dump all users and replace their name with "John Doe". + +To gain more flexibility over the replacement, you can pass a function as the second argument. This function receives a Faker instance, as well as the original value of the column: + +```php + +return [ + 'default' => DumpSchema::define() + ->table('users', function (TableDefinition $table) { + $table->replace('email', function (Faker $faker, $value) { + return $faker->safeEmail; + }); + }) +]; +``` + +When dumping your data, the dump will now contain a safe, randomly generated email address for every user. + +## Specifying the database connection to use + +By default, this package will use your `default` database connection when dumping the tables. 
+You can pass the connection to the `DumpSchema::define` method, in order to specify your own database connection string: + +```php +return [ + 'default' => DumpSchema::define('sqlite') + ->allTables() +]; +``` + +## Multiple dump schemas + +You can define multiple database dump schemas in the `masked-dump.php` configuration file. +The key in the configuration array is the identifier that will be used when you dump your tables: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables(), + + 'sqlite' => DumpSchema::define('sqlite') + ->schemaOnly('custom_table'), +]; +``` \ No newline at end of file From 921c7b0091e5a37f74361ffc0ea206ee15612384 Mon Sep 17 00:00:00 2001 From: Di Date: Mon, 9 Dec 2024 17:12:27 +0100 Subject: [PATCH 13/20] Updated docs --- docs/schema-definition.md | 32 +++++++++++++++++++++----------- 1 file changed, 21 insertions(+), 11 deletions(-) diff --git a/docs/schema-definition.md b/docs/schema-definition.md index 4d65d96..d12b6fd 100644 --- a/docs/schema-definition.md +++ b/docs/schema-definition.md @@ -30,9 +30,7 @@ return [ return $faker->safeEmail; }); $table->mask('password'); - }) - ->schemaOnly('failed_jobs') - ->schemaOnly('password_resets'), + }), ]; ``` @@ -48,20 +46,18 @@ return [ ]; ``` -## Dumping table schemas only +## Exclude specific tables from dumps -For certain tables, you do not need to dump the data, but only need the structure of the table itself - like a `password_reset` table. To instruct the masked dumper to only dump the schema, you may use the `schemaOnly` method: +The `exclude()` method allows you to exclude specific tables from the dump. 
This can be useful if you want to exclude certain tables from the dump: ```php return [ 'default' => DumpSchema::define() - ->allTables() - ->schemaOnly('password_resets'), + ->allTables() + ->exclude('password_resets'), ]; ``` -This configuration will dump all of your tables - but for the `password_resets` table, it will not create any `INSERT` statements and only dumps the schema of this table. - ## Masking table column content To mask the content of a given table column, you can use the `mask` method on a custom table definition. For example, let's mask the `password` column on our `users` table: @@ -106,7 +102,6 @@ This configuration will dump all users and replace their name with "John Doe". To gain more flexibility over the replacement, you can pass a function as the second argument. This function receives a Faker instance, as well as the original value of the column: ```php - return [ 'default' => DumpSchema::define() ->table('users', function (TableDefinition $table) { @@ -119,6 +114,21 @@ return [ When dumping your data, the dump will now contain a safe, randomly generated email address for every user. +## Optimizing large datasets + +The method TableDefinition::outputInChunksOf(int $chunkSize) allows for chunked inserts for large datasets, +improving performance and reducing memory consumption during the dump process. + +```php +return [ + 'default' => DumpSchema::define() + ->allTables() + ->table('users', function($table) { + return $table->outputInChunksOf(3); + }); +]; +``` + ## Specifying the database connection to use By default, this package will use your `default` database connection when dumping the tables. 
@@ -144,4 +154,4 @@ return [ 'sqlite' => DumpSchema::define('sqlite') ->schemaOnly('custom_table'), ]; -``` \ No newline at end of file +``` From 2afa02a61b2045f17bb56faff99d9d39df171f9b Mon Sep 17 00:00:00 2001 From: Di Date: Mon, 9 Dec 2024 17:19:31 +0100 Subject: [PATCH 14/20] Updated README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e8151ef..ff0106a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Laravel Masked DB Dump -A database dumping package that allows you to replace and mask columns while dumping your database. +A database dumping package that allows you to replace and mask columns while dumping your MySQL database. [![Latest Version on Packagist](https://img.shields.io/packagist/v/beyondcode/laravel-masked-db-dump.svg?style=flat-square)](https://packagist.org/packages/beyondcode/laravel-masked-db-dump) [![Total Downloads](https://img.shields.io/packagist/dt/beyondcode/laravel-masked-db-dump.svg?style=flat-square)](https://packagist.org/packages/beyondcode/laravel-masked-db-dump) From 549048b790e89b3cd0e77ac47bcbc8b14bcf03bc Mon Sep 17 00:00:00 2001 From: Shift Date: Sun, 16 Feb 2025 19:08:53 +0000 Subject: [PATCH 15/20] Bump dependencies for Laravel 12 --- composer.json | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/composer.json b/composer.json index ca05b35..cd0ccb0 100644 --- a/composer.json +++ b/composer.json @@ -17,14 +17,14 @@ ], "require": { "php": "^7.3 || ^8.0", - "doctrine/dbal": "^2.0|^3.0", + "doctrine/dbal": "^2.0|^3.0 || ^4.2", "fakerphp/faker": "^1.13", - "illuminate/console": "^7.0|^8.0|^9.0|^10.0 || ^11.0", - "illuminate/support": "^7.0|^8.0|^9.0|^10.0 || ^11.0" + "illuminate/console": "^7.0|^8.0|^9.0|^10.0 || ^11.0 || ^12.0", + "illuminate/support": "^7.0|^8.0|^9.0|^10.0 || ^11.0 || ^12.0" }, "require-dev": { - "orchestra/testbench": "^6.12|^7.0|^8.0 || ^9.0", - "phpunit/phpunit": "^8.0 || ^9.0 || ^10.5", + "orchestra/testbench": 
"^6.12|^7.0|^8.0 || ^9.0 || ^10.0", + "phpunit/phpunit": "^8.0 || ^9.0 || ^10.5 || ^11.5.3", "spatie/phpunit-snapshot-assertions": "^4.2 || ^5.1" }, "autoload": { From ee9adae21847144f4ddbc78e15e8f9b4ccd37769 Mon Sep 17 00:00:00 2001 From: Diana Scharf Date: Fri, 7 Mar 2025 11:02:26 +0100 Subject: [PATCH 16/20] wip --- .github/workflows/tests.yml | 70 +++++++++++++++++++++++++++++++++++++ composer.json | 6 ++-- 2 files changed, 73 insertions(+), 3 deletions(-) create mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..3abd57b --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,70 @@ +name: run-tests + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + php-tests: + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + php: ['8.4', '8.3', '8.2', '8.1', '8.0'] + laravel: ['8.*', '9.*', '10.*', '11.*', '12.*'] + dependency-version: [prefer-stable] + exclude: + - php: 8.0 + laravel: 10.* + - php: 8.0 + laravel: 11.* + - php: 8.0 + laravel: 12.* + - php: 8.1 + laravel: 11.* + - php: 8.1 + laravel: 12.* + - php: 8.2 + laravel: 8.* + - php: 8.3 + laravel: 8.* + - php: 8.4 + laravel: 8.* + include: + - laravel: 8.* + testbench: 6.23 + - laravel: 9.* + testbench: 7.* + - laravel: 10.* + testbench: 8.* + - laravel: 11.* + testbench: 9.* + - laravel: 12.* + testbench: 10.* + + + name: P${{ matrix.php }} - L${{ matrix.laravel }} - ${{ matrix.dependency-version }} - ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Setup PHP + uses: shivammathur/setup-php@v2 + with: + php-version: ${{ matrix.php }} + extensions: dom, curl, libxml, mbstring, zip, pcntl, pdo, sqlite, pdo_sqlite, bcmath, soap, intl, gd, exif, iconv, imagick + coverage: none + + - name: Install dependencies + run: | + composer require "laravel/framework:${{ matrix.laravel }}" "orchestra/testbench:${{ matrix.testbench }}" 
--no-interaction --no-update + composer update --${{ matrix.dependency-version }} --prefer-dist --no-interaction + + - name: Execute tests + run: vendor/bin/phpunit \ No newline at end of file diff --git a/composer.json b/composer.json index ca05b35..862d2a6 100644 --- a/composer.json +++ b/composer.json @@ -16,11 +16,11 @@ } ], "require": { - "php": "^7.3 || ^8.0", + "php": " ^8.0", "doctrine/dbal": "^2.0|^3.0", "fakerphp/faker": "^1.13", - "illuminate/console": "^7.0|^8.0|^9.0|^10.0 || ^11.0", - "illuminate/support": "^7.0|^8.0|^9.0|^10.0 || ^11.0" + "illuminate/console": "^8.0|^9.0|^10.0 || ^11.0", + "illuminate/support": "^8.0|^9.0|^10.0 || ^11.0" }, "require-dev": { "orchestra/testbench": "^6.12|^7.0|^8.0 || ^9.0", From d2e92fa243b2ab1a9ead9fee8f08711e1ddcb6ac Mon Sep 17 00:00:00 2001 From: Diana Scharf Date: Fri, 7 Mar 2025 11:05:23 +0100 Subject: [PATCH 17/20] wip --- .github/workflows/tests.yml | 70 ------------------------------------- 1 file changed, 70 deletions(-) delete mode 100644 .github/workflows/tests.yml diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 3abd57b..0000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,70 +0,0 @@ -name: run-tests - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - php-tests: - runs-on: ubuntu-latest - - strategy: - fail-fast: false - matrix: - php: ['8.4', '8.3', '8.2', '8.1', '8.0'] - laravel: ['8.*', '9.*', '10.*', '11.*', '12.*'] - dependency-version: [prefer-stable] - exclude: - - php: 8.0 - laravel: 10.* - - php: 8.0 - laravel: 11.* - - php: 8.0 - laravel: 12.* - - php: 8.1 - laravel: 11.* - - php: 8.1 - laravel: 12.* - - php: 8.2 - laravel: 8.* - - php: 8.3 - laravel: 8.* - - php: 8.4 - laravel: 8.* - include: - - laravel: 8.* - testbench: 6.23 - - laravel: 9.* - testbench: 7.* - - laravel: 10.* - testbench: 8.* - - laravel: 11.* - testbench: 9.* - - laravel: 12.* - testbench: 10.* - - - name: P${{ matrix.php }} - L${{ 
matrix.laravel }} - ${{ matrix.dependency-version }} - ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - name: Setup PHP - uses: shivammathur/setup-php@v2 - with: - php-version: ${{ matrix.php }} - extensions: dom, curl, libxml, mbstring, zip, pcntl, pdo, sqlite, pdo_sqlite, bcmath, soap, intl, gd, exif, iconv, imagick - coverage: none - - - name: Install dependencies - run: | - composer require "laravel/framework:${{ matrix.laravel }}" "orchestra/testbench:${{ matrix.testbench }}" --no-interaction --no-update - composer update --${{ matrix.dependency-version }} --prefer-dist --no-interaction - - - name: Execute tests - run: vendor/bin/phpunit \ No newline at end of file From dc3df4d2d047fed9c9ff43bb60260f708c1c21b9 Mon Sep 17 00:00:00 2001 From: Di Date: Fri, 7 Mar 2025 11:07:13 +0100 Subject: [PATCH 18/20] Update README.md --- README.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/README.md b/README.md index ff0106a..0cb5b4d 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,15 @@ composer require beyondcode/laravel-masked-db-dump The documentation can be found on [our website](https://beyondco.de/docs/laravel-masked-db-dump). +## Databases at your fingertips +Herd is the control panel for your local environment, making it easy to set up and run complementary services to your Laravel applications. +From databases to storage systems, we've got you covered with MySQL, PostgreSQL, Redis, Typesense, Meilisearch, MinIO, and even Laravel Reverb. + +[herd.laravel.com](https://herd.laravel.com/) + +![image](https://github.com/user-attachments/assets/7fee3bdf-a521-47e9-9023-eb973452209e) + + ### Changelog Please see [CHANGELOG](CHANGELOG.md) for more information on what has changed recently. 
From ddc0f72821ac499407b04e4842d60ddc05af46f4 Mon Sep 17 00:00:00 2001 From: Kieran Williames Date: Sat, 29 Mar 2025 11:47:54 +1100 Subject: [PATCH 19/20] docs: updated docs to include php callables config as preferred method --- docs/schema-definition.md | 84 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 78 insertions(+), 6 deletions(-) diff --git a/docs/schema-definition.md b/docs/schema-definition.md index d12b6fd..e1182e4 100644 --- a/docs/schema-definition.md +++ b/docs/schema-definition.md @@ -2,16 +2,73 @@ title: Dump Schema Definition order: 2 --- + # Dump Schema Definition Your database dump configuration takes place in the `config/masked-dump.php` file. You can use the package's fluent API to define which tables should be dumped and which information should be replaced or masked during the dump process. -This is the basic configuration that you'll receive after installing the package: +## Configuration Methods + +There are two ways to configure your dump schema. For production applications using Laravel's config caching, the callable method is strongly recommended. + +### Method 1: Using PHP Callables (Recommended for Production) + +When using Laravel's config caching feature, the default inline configuration approach may cause serialization errors. To avoid this issue, use PHP callables in your configuration: ```php + [MaskedDump::class, 'define'], +]; +``` + +Then create the referenced class: + +```php +allTables() + ->table('users', function (TableDefinition $table) { + $table->replace('name', function (Faker $faker) { + return $faker->name; + }); + $table->replace('email', function (Faker $faker) { + return $faker->safeEmail; + }); + $table->mask('password'); + }) + ->schemaOnly('failed_jobs') + ->schemaOnly('password_reset_tokens'); + } +} +``` + +### Method 2: Inline Definition + +This is the basic configuration that you'll receive after installing the package. 
While simpler for development, this method is not compatible with Laravel's config caching: + +```php use BeyondCode\LaravelMaskedDumper\DumpSchema; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; use Faker\Generator as Faker; @@ -19,6 +76,7 @@ use Faker\Generator as Faker; return [ /** * Use this dump schema definition to remove, replace or mask certain parts of your database tables. + * NOTE: This approach is not compatible with Laravel's config caching. */ 'default' => DumpSchema::define() ->allTables() @@ -34,7 +92,7 @@ return [ ]; ``` -## Definiting which tables to dump +## Defining which tables to dump The dump configuration allows you to specify which tables you want to dump. The simplest form of dumping your database can be achieved by using the `allTables()` method. This ensures that all of your database tables will be represented in the dump. You can then go and customize how certain tables should be dumped: @@ -116,22 +174,22 @@ When dumping your data, the dump will now contain a safe, randomly generated ema ## Optimizing large datasets -The method TableDefinition::outputInChunksOf(int $chunkSize) allows for chunked inserts for large datasets, +The method TableDefinition::outputInChunksOf(int $chunkSize) allows for chunked inserts for large datasets, improving performance and reducing memory consumption during the dump process. ```php return [ 'default' => DumpSchema::define() ->allTables() - ->table('users', function($table) { - return $table->outputInChunksOf(3); + ->table('users', function($table) { + return $table->outputInChunksOf(3); }); ]; ``` ## Specifying the database connection to use -By default, this package will use your `default` database connection when dumping the tables. +By default, this package will use your `default` database connection when dumping the tables. 
You can pass the connection to the `DumpSchema::define` method, in order to specify your own database connection string: ```php @@ -155,3 +213,17 @@ return [ ->schemaOnly('custom_table'), ]; ``` + +When using the callable approach with multiple schemas, you can define separate classes for each schema: + +```php + [DefaultMaskedDump::class, 'define'], + 'sqlite' => [SqliteMaskedDump::class, 'define'], +]; +``` From 87d42f50df7707b74c43e0c92bea901d65deb73d Mon Sep 17 00:00:00 2001 From: Di Date: Fri, 4 Apr 2025 11:00:52 +0200 Subject: [PATCH 20/20] Removed PHP tags --- docs/schema-definition.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/docs/schema-definition.md b/docs/schema-definition.md index e1182e4..89f250b 100644 --- a/docs/schema-definition.md +++ b/docs/schema-definition.md @@ -18,8 +18,6 @@ There are two ways to configure your dump schema. For production applications us When using Laravel's config caching feature, the default inline configuration approach may cause serialization errors. To avoid this issue, use PHP callables in your configuration: ```php -