diff --git a/.gitignore b/.gitignore index 896e906..7e3a05e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,5 @@ build composer.lock -docs vendor coverage .phpunit.result.cache diff --git a/README.md b/README.md index e8151ef..0cb5b4d 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Laravel Masked DB Dump -A database dumping package that allows you to replace and mask columns while dumping your database. +A database dumping package that allows you to replace and mask columns while dumping your MySQL database. [![Latest Version on Packagist](https://img.shields.io/packagist/v/beyondcode/laravel-masked-db-dump.svg?style=flat-square)](https://packagist.org/packages/beyondcode/laravel-masked-db-dump) [![Total Downloads](https://img.shields.io/packagist/dt/beyondcode/laravel-masked-db-dump.svg?style=flat-square)](https://packagist.org/packages/beyondcode/laravel-masked-db-dump) @@ -17,6 +17,15 @@ composer require beyondcode/laravel-masked-db-dump The documentation can be found on [our website](https://beyondco.de/docs/laravel-masked-db-dump). +## Databases at your fingertips +Herd is the control panel for your local environment, making it easy to set up and run complementary services to your Laravel applications. +From databases to storage systems, we got you covered with MySQL, PostgreSQL, Redis, Typesense, Meilisearch, MinIO, and even Laravel Reverb. + +[herd.laravel.com](https://herd.laravel.com/) + +![image](https://github.com/user-attachments/assets/7fee3bdf-a521-47e9-9023-eb973452209e) + + ### Changelog Please see [CHANGELOG](CHANGELOG.md) for more information on what has changed recently. diff --git a/composer.json b/composer.json index f7f13be..c8fcf6b 100644 --- a/composer.json +++ b/composer.json @@ -16,16 +16,16 @@ } ], "require": { - "php": "^7.3 || ^8.0", - "doctrine/dbal": "^2.0|^3.0", + "php": "^8.0", + "doctrine/dbal": "^2.0|^3.0 || ^4.2", "fakerphp/faker": "^1.13", - "illuminate/console": "^7.0|^8.0|^9.0|^10.0", - "illuminate/support": "^7.0|^8.0|^9.0|^10.0" + "illuminate/console": "^8.0|^9.0|^10.0 || ^11.0 || ^12.0", + "illuminate/support": "^8.0|^9.0|^10.0 || ^11.0 || ^12.0" }, "require-dev": { - "orchestra/testbench": "^6.12|^7.0|^8.0", - "phpunit/phpunit": "^8.0 || ^9.0", - "spatie/phpunit-snapshot-assertions": "^4.2" + "orchestra/testbench": "^6.12|^7.0|^8.0 || ^9.0 || ^10.0", + "phpunit/phpunit": "^8.0 || ^9.0 || ^10.5 || ^11.5.3", + "spatie/phpunit-snapshot-assertions": "^4.2 || ^5.1" }, "autoload": { "psr-4": { @@ -40,7 +40,6 @@ "scripts": { "test": "vendor/bin/phpunit", "test-coverage": "vendor/bin/phpunit --coverage-html coverage" - }, "config": { "sort-packages": true diff --git a/config/masked-dump.php b/config/masked-dump.php index d26d9e8..6d5459a 100644 --- a/config/masked-dump.php +++ b/config/masked-dump.php @@ -19,6 +19,4 @@ }); $table->mask('password'); }) - ->schemaOnly('failed_jobs') - ->schemaOnly('password_reset_tokens'), ]; diff --git a/docs/_index.md b/docs/_index.md new file mode 100644 index 0000000..62bb162 --- /dev/null +++ b/docs/_index.md @@ -0,0 +1,4 @@ +--- +packageName: Laravel Masked DB Dump +githubUrl: https://github.com/beyondcode/laravel-masked-db-dump +--- \ No newline at end of file diff --git a/docs/dumping-the-database.md b/docs/dumping-the-database.md new file mode 100644 index 0000000..e718494 --- /dev/null +++ b/docs/dumping-the-database.md @@ -0,0 +1,32 @@ +--- +title: Dumping the Database +order: 3 +--- +# Dumping the Database + +After you have configured your dump schema, it's time to dump your tables. 
This can be done using the `db:masked-dump` Artisan command. +The command expects one argument, which is the name of the output file to use. + +``` +php artisan db:masked-dump output.sql +``` + +Running this command will use the `default` dump schema definition and write the resulting dump to a file called `output.sql`. + +## Changing Definitions + +If your configuration file contains multiple dump schema definitions, you can tell the command which definition to use like this: + +``` +php artisan db:masked-dump output.sql --definition=sqlite +``` + +## GZip compression + +The default output is a plain text file. Depending on the size of your dump, you might want to enable GZip compression. This can be done by passing the `--gzip` flag to the command: + +``` +php artisan db:masked-dump output.sql --gzip +``` + +This will write the compressed output to a file called `output.sql.gz`. \ No newline at end of file diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 0000000..ff61a36 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,19 @@ +--- +title: Installation +order: 1 +--- +# Installation + +To install the Laravel Masked DB Dump package, you can use Composer: + +``` +composer require beyondcode/laravel-masked-db-dump +``` + +Next, you should publish the package configuration file so that you can configure your dump schema: + +``` +php artisan vendor:publish --provider=BeyondCode\\LaravelMaskedDumper\\LaravelMaskedDumpServiceProvider +``` + +This will create a new file called `masked-dump.php` in your config folder. \ No newline at end of file diff --git a/docs/schema-definition.md b/docs/schema-definition.md new file mode 100644 index 0000000..89f250b --- /dev/null +++ b/docs/schema-definition.md @@ -0,0 +1,223 @@ +--- +title: Dump Schema Definition +order: 2 +--- + +# Dump Schema Definition + +Your database dump configuration takes place in the `config/masked-dump.php` file. + +You can use the package's fluent API to define which tables should be dumped and which information should be replaced or masked during the dump process. + +## Configuration Methods + +There are two ways to configure your dump schema. For production applications using Laravel's config caching, the callable method is strongly recommended. + +### Method 1: Using PHP Callables (Recommended for Production) + +When using Laravel's config caching feature, the default inline configuration approach may cause serialization errors, because closures cannot be serialized when the configuration is cached.
To avoid this issue, use PHP callables in your configuration: + +```php +use BeyondCode\LaravelMaskedDumper\DumpSchema; +use App\Support\MaskedDump; + +return [ + /** + * Use a callable class to define your dump schema + * This method is compatible with Laravel's config caching + */ + 'default' => [MaskedDump::class, 'define'], +]; +``` + +Then create the referenced class: + +```php +namespace App\Support; + +use BeyondCode\LaravelMaskedDumper\DumpSchema; +use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; +use Faker\Generator as Faker; + +class MaskedDump +{ + public static function define() + { + return DumpSchema::define() + ->allTables() + ->table('users', function (TableDefinition $table) { + $table->replace('name', function (Faker $faker) { + return $faker->name; + }); + $table->replace('email', function (Faker $faker) { + return $faker->safeEmail; + }); + $table->mask('password'); + }) + ->schemaOnly('failed_jobs') + ->schemaOnly('password_reset_tokens'); + } +} +``` + +### Method 2: Inline Definition + +This is the basic configuration that you'll receive after installing the package. While simpler for development, this method is not compatible with Laravel's config caching: + +```php +use BeyondCode\LaravelMaskedDumper\DumpSchema; +use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; +use Faker\Generator as Faker; + +return [ + /** + * Use this dump schema definition to remove, replace or mask certain parts of your database tables. + * NOTE: This approach is not compatible with Laravel's config caching. + */ + 'default' => DumpSchema::define() + ->allTables() + ->table('users', function (TableDefinition $table) { + $table->replace('name', function (Faker $faker) { + return $faker->name; + }); + $table->replace('email', function (Faker $faker) { + return $faker->safeEmail; + }); + $table->mask('password'); + }), +]; +``` + +## Defining which tables to dump + +The dump configuration allows you to specify which tables you want to dump. The simplest form of dumping your database can be achieved by using the `allTables()` method. +This ensures that all of your database tables will be represented in the dump. You can then go and customize how certain tables should be dumped: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables(), +]; +``` + +## Exclude specific tables from dumps + +The `exclude()` method allows you to exclude specific tables from the dump. This can be useful if you want to exclude certain tables from the dump: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables() + ->exclude('password_resets'), +]; +``` + +## Masking table column content + +To mask the content of a given table column, you can use the `mask` method on a custom table definition. For example, let's mask the `password` column on our `users` table: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->mask('password'); + }) +]; +``` + +By default, the data will be masked using the `x` character, but you can also specify your own custom masking character as a second parameter: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->mask('password', '-'); + }) +]; +``` + +## Replacing table column content + +Instead of completely masking the content of a column, you can also replace the column content. 
The content can either be replaced with a static string, or you can use a callable to generate custom replacement content, such as Faker data. + +To replace a column with a static string, you can use the `replace` method and pass the string to use as a replacement as the second argument: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function ($table) { + $table->replace('name', 'John Doe'); + }) +]; +``` + +This configuration will dump all users and replace their names with "John Doe". + +To gain more flexibility over the replacement, you can pass a function as the second argument. This function receives a Faker instance, as well as the original value of the column: + +```php +return [ + 'default' => DumpSchema::define() + ->table('users', function (TableDefinition $table) { + $table->replace('email', function (Faker $faker, $value) { + return $faker->safeEmail; + }); + }) +]; +``` + +The resulting dump will contain a safe, randomly generated email address for every user. + +## Optimizing large datasets + +The `TableDefinition::outputInChunksOf(int $chunkSize)` method enables chunked INSERT statements for large datasets, +improving performance and reducing memory consumption during the dump process. + +```php +return [ + 'default' => DumpSchema::define() + ->allTables() + ->table('users', function($table) { + return $table->outputInChunksOf(3); + }) +]; +``` + +## Specifying the database connection to use + +By default, this package will use your `default` database connection when dumping the tables. +You can pass a connection name to the `DumpSchema::define` method in order to dump a different database connection: + +```php +return [ + 'default' => DumpSchema::define('sqlite') + ->allTables() +]; +``` + +## Multiple dump schemas + +You can define multiple database dump schemas in the `masked-dump.php` configuration file.
+The key in the configuration array is the identifier that will be used when you dump your tables: + +```php +return [ + 'default' => DumpSchema::define() + ->allTables(), + + 'sqlite' => DumpSchema::define('sqlite') + ->schemaOnly('custom_table'), +]; +``` + +When using the callable approach with multiple schemas, you can define separate classes for each schema: + +```php +use App\Support\DefaultMaskedDump; +use App\Support\SqliteMaskedDump; + +return [ + 'default' => [DefaultMaskedDump::class, 'define'], + 'sqlite' => [SqliteMaskedDump::class, 'define'], +]; +``` diff --git a/phpunit.xml.dist b/phpunit.xml.dist index 1eef57c..4e91cd1 100644 --- a/phpunit.xml.dist +++ b/phpunit.xml.dist @@ -1,12 +1,8 @@ @@ -14,16 +10,4 @@ tests - - - src/ - - - - - - - - - diff --git a/src/DumpSchema.php b/src/DumpSchema.php index 523de85..5a7c623 100644 --- a/src/DumpSchema.php +++ b/src/DumpSchema.php @@ -5,7 +5,8 @@ use Faker\Factory; use Doctrine\DBAL\Schema\Table; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; -use Illuminate\Support\Facades\DB; +use Doctrine\DBAL\Types\Types; +use Illuminate\Support\Facades\Schema; class DumpSchema { @@ -15,6 +16,7 @@ class DumpSchema protected $loadAllTables = false; protected $customizedTables = []; + protected $excludedTables = []; public function __construct($connectionName = null) { @@ -26,13 +28,6 @@ public static function define($connectionName = null) return new static($connectionName); } - public function schemaOnly(string $tableName) - { - return $this->table($tableName, function (TableDefinition $table) { - $table->schemaOnly(); - }); - } - public function table(string $tableName, callable $tableDefinition) { $this->customizedTables[$tableName] = $tableDefinition; @@ -47,12 +42,27 @@ public function allTables() return $this; } + public function exclude(string $tableName) + { + $this->excludedTables[] = $tableName; + + return $this; + } + + /** + * @return \Illuminate\Database\Schema\Builder + */ + public function getBuilder() + { + return Schema::connection($this->connectionName); + } + /** - * @return \Illuminate\Database\ConnectionInterface + * @return \Illuminate\Database\Connection */ public function getConnection() { - return DB::connection($this->connectionName); + return Schema::connection($this->connectionName)->getConnection(); } protected function getTable(string $tableName) @@ -82,7 +92,29 @@ protected function loadAvailableTables() return; } - $this->availableTables = $this->getConnection()->getDoctrineSchemaManager()->listTables(); + $this->availableTables = $this->createDoctrineTables($this->getBuilder()->getTables()); + } + + protected function createDoctrineTables(array $tables): array + { + $doctrineTables = []; + + foreach ($tables as $table) { + $columns = $this->getBuilder()->getColumns($table['name']); + + $doctrineTable = new Table($table['name']); + foreach ($columns as $column) { + + $doctrineTable->addColumn( + $column['name'], + Types::STRING, // doesn't matter, but is required + ); + } + + $doctrineTables[] = $doctrineTable; + } + + return $doctrineTables; } public function load() @@ -90,9 +122,15 @@ public function load() $this->loadAvailableTables(); if ($this->loadAllTables) { - $this->dumpTables = collect($this->availableTables)->mapWithKeys(function (Table $table) { + $dumpTables = collect($this->availableTables)->mapWithKeys(function (Table $table) { return [$table->getName() => new TableDefinition($table)]; - })->toArray(); + }); + + $excluded = $this->excludedTables; + $this->dumpTables = 
$dumpTables + ->filter(function ($table, $tableName) use ($excluded) { + return !in_array($tableName, $excluded); + })->toArray(); } foreach ($this->customizedTables as $tableName => $tableDefinition) { diff --git a/src/LaravelMaskedDump.php b/src/LaravelMaskedDump.php index 49309f3..6ea4063 100755 --- a/src/LaravelMaskedDump.php +++ b/src/LaravelMaskedDump.php @@ -2,10 +2,14 @@ namespace BeyondCode\LaravelMaskedDumper; -use Doctrine\DBAL\Connection; -use Doctrine\DBAL\Schema\Schema; use Illuminate\Console\OutputStyle; use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition; +use Doctrine\DBAL\Platforms\MariaDBPlatform; +use Doctrine\DBAL\Platforms\MySQLPlatform; +use Doctrine\DBAL\Platforms\SqlitePlatform; +use Illuminate\Database\Connection as DatabaseConnection; +use Doctrine\DBAL\Platforms\AbstractPlatform; +use Doctrine\DBAL\Platforms\PostgreSQLPlatform; class LaravelMaskedDump { @@ -15,10 +19,21 @@ class LaravelMaskedDump /** @var OutputStyle */ protected $output; + /** @var AbstractPlatform */ + protected $platform; + + /** @var string */ + protected $escapeString = "`"; + public function __construct(DumpSchema $definition, OutputStyle $output) { $this->definition = $definition; $this->output = $output; + $this->platform = $this->getPlatform($this->definition->getConnection()); + + if($this->platform instanceof PostgreSQLPlatform) { + $this->escapeString = '"'; + } } public function dump() @@ -30,15 +45,8 @@ public function dump() $overallTableProgress = $this->output->createProgressBar(count($tables)); foreach ($tables as $tableName => $table) { - $query .= "DROP TABLE IF EXISTS `$tableName`;" . PHP_EOL; - $query .= $this->dumpSchema($table); - if ($table->shouldDumpData()) { - $query .= $this->lockTable($tableName); - $query .= $this->dumpTableData($table); - - $query .= $this->unlockTable($tableName); } $overallTableProgress->advance(); @@ -49,10 +57,7 @@ public function dump() protected function transformResultForInsert($row, TableDefinition $table) { - /** @var Connection $connection */ - $connection = $this->definition->getConnection()->getDoctrineConnection(); - - return collect($row)->map(function ($value, $column) use ($connection, $table) { + return collect($row)->map(function ($value, $column) use ($table) { if ($columnDefinition = $table->findColumn($column)) { $value = $columnDefinition->modifyValue($value); } @@ -64,59 +69,88 @@ protected function transformResultForInsert($row, TableDefinition $table) return '""'; } - return $connection->quote($value); + return $this->platform->quoteStringLiteral($value); })->toArray(); } - protected function dumpSchema(TableDefinition $table) + protected function getPlatform(DatabaseConnection $connection) { - $platform = $this->definition->getConnection()->getDoctrineSchemaManager()->getDatabasePlatform(); - - $schema = new Schema([$table->getDoctrineTable()]); - - return implode(";", $schema->toSql($platform)) . ";" . PHP_EOL; - } - - protected function lockTable(string $tableName) - { - return "LOCK TABLES `$tableName` WRITE;" . PHP_EOL . - "ALTER TABLE `$tableName` DISABLE KEYS;" . PHP_EOL; - } - - protected function unlockTable(string $tableName) - { - return "ALTER TABLE `$tableName` ENABLE KEYS;" . PHP_EOL . - "UNLOCK TABLES;" . 
PHP_EOL; + switch ($connection->getDriverName()) { + case 'mysql': + return new MySQLPlatform; + case 'pgsql': + return new PostgreSQLPlatform; + case 'sqlite': + return new SqlitePlatform; + case 'mariadb': + return new MariaDBPlatform; + default: + throw new \RuntimeException("Unsupported platform: {$connection->getDriverName()}. Please check the documentation for more information."); + } } protected function dumpTableData(TableDefinition $table) { $query = ''; - $queryBuilder = $this->definition->getConnection() - ->table($table->getDoctrineTable()->getName()); + $queryBuilder = $this->definition->getConnection()->table($table->getDoctrineTable()->getName()); $table->modifyQuery($queryBuilder); - $queryBuilder->get() - ->each(function ($row, $index) use ($table, &$query) { - $row = $this->transformResultForInsert((array)$row, $table); - $tableName = $table->getDoctrineTable()->getName(); + $tableName = $table->getDoctrineTable()->getName(); + $tableName = "$this->escapeString$tableName$this->escapeString"; + + if ($table->getChunkSize() > 0) { - $query .= "INSERT INTO `${tableName}` (`" . implode('`, `', array_keys($row)) . '`) VALUES '; - $query .= "("; + $data = $queryBuilder->get(); - $firstColumn = true; - foreach ($row as $value) { - if (!$firstColumn) { - $query .= ", "; + if ($data->isEmpty()) { + return ""; + } + + $tableName = $table->getDoctrineTable()->getName(); + $columns = array_keys((array)$data->first()); + $column_names = "($this->escapeString" . join("$this->escapeString, $this->escapeString", $columns) . "$this->escapeString)"; + + $valuesChunks = $data + ->chunk($table->getChunkSize()) + ->map(function ($chunk) use ($table) { + $values = $chunk->map(function ($row) use ($table) { + $row = $this->transformResultForInsert((array)$row, $table); + $query = '(' . join(', ', $row) . ')'; + return $query; + })->join(', '); + + return $values; + }); + + $insert_statement = $valuesChunks->map(function ($values) use ($table, $tableName, $column_names) { + return "INSERT INTO $tableName $column_names VALUES " . $values . ';'; + }) + ->join(PHP_EOL); + + return $insert_statement . PHP_EOL; + } else { + $queryBuilder->get() + ->each(function ($row, $index) use ($table, &$query, $tableName) { + $row = $this->transformResultForInsert((array)$row, $table); + + $query .= "INSERT INTO $tableName ($this->escapeString" . implode("$this->escapeString, $this->escapeString", array_keys($row)) . "$this->escapeString) VALUES "; + + $query .= "("; + + $firstColumn = true; + foreach ($row as $value) { + if (!$firstColumn) { + $query .= ", "; + } + $query .= $value; + $firstColumn = false; } - $query .= $value; - $firstColumn = false; - } - $query .= ");" . PHP_EOL; - }); + $query .= ");" . 
PHP_EOL; + }); + } return $query; } diff --git a/src/TableDefinitions/TableDefinition.php b/src/TableDefinitions/TableDefinition.php index c8416ea..dfefd88 100644 --- a/src/TableDefinitions/TableDefinition.php +++ b/src/TableDefinitions/TableDefinition.php @@ -15,6 +15,7 @@ class TableDefinition protected $dumpType; protected $query; protected $columns = []; + protected $chunkSize = 0; public function __construct(Table $table) { @@ -22,16 +23,16 @@ public function __construct(Table $table) $this->dumpType = static::DUMP_FULL; } - public function schemaOnly() + public function fullDump() { - $this->dumpType = static::DUMP_SCHEMA; + $this->dumpType = static::DUMP_FULL; return $this; } - public function fullDump() + public function outputInChunksOf(int $chunkSize) { - $this->dumpType = static::DUMP_FULL; + $this->chunkSize = $chunkSize; return $this; } @@ -68,6 +69,11 @@ public function findColumn(string $column) return false; } + public function getChunkSize() + { + return $this->chunkSize; + } + public function getDoctrineTable() { return $this->table; diff --git a/tests/DumperTest.php b/tests/DumperTest.php index 7dd7cb9..ca20e52 100644 --- a/tests/DumperTest.php +++ b/tests/DumperTest.php @@ -160,10 +160,76 @@ public function it_can_dump_certain_tables_as_schema_only() $outputFile = base_path('test.sql'); + $this->app['config']['masked-dump.default'] = DumpSchema::define() + ->allTables(); + + $this->artisan('db:masked-dump', [ + 'output' => $outputFile + ]); + + $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); + } + + /** @test */ + public function it_does_remove_excluded_tables_from_allTables() + { + $this->loadLaravelMigrations(); + + DB::table('users') + ->insert([ + 'name' => 'Marcel', + 'email' => 'marcel@beyondco.de', + 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', + 'updated_at' => '2021-01-01 00:00:00', + ]); + + $outputFile = base_path('test.sql'); + $this->app['config']['masked-dump.default'] = DumpSchema::define() ->allTables() - ->schemaOnly('migrations') - ->schemaOnly('users'); + ->exclude('users'); + + $this->artisan('db:masked-dump', [ + 'output' => $outputFile + ]); + + $this->assertMatchesTextSnapshot(file_get_contents($outputFile)); + } + + /** @test */ + public function it_creates_chunked_insert_statements_for_a_table() + { + $this->loadLaravelMigrations(); + + DB::table('users') + ->insert(['name' => 'Marcel1', 'email' => 'marcel1@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel2', 'email' => 'marcel2@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel3', 'email' => 'marcel3@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel4', 'email' => 'marcel4@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + DB::table('users') + ->insert(['name' => 'Marcel5', 'email' => 'marcel5@beyondco.de', 'password' => 'test', + 'created_at' => '2021-01-01 00:00:00', 'updated_at' => '2021-01-01 00:00:00', + ]); + + $outputFile = base_path('test.sql'); + + $this->app['config']['masked-dump.default'] = DumpSchema::define() + ->allTables() + ->table('users', function($table) { + return $table->outputInChunksOf(3); + }); 
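+ // With five users and a chunk size of 3, the users table should be dumped as two multi-row INSERT statements (3 rows and 2 rows), matching this test's snapshot.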
$this->artisan('db:masked-dump', [ 'output' => $outputFile diff --git a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt index f333155..4f6eb6d 100644 --- a/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_all_tables_without_modifications__1.txt @@ -1,28 +1,4 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt index 783facb..4f6eb6d 100644 --- 
a/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_dump_certain_tables_as_schema_only__1.txt @@ -1,16 +1,4 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt index 8c3c43c..c6b5539 100644 --- a/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_mask_user_names__1.txt @@ -1,28 +1,4 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE 
`migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'xxxxxx', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt index 5def904..0675a18 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_faker_values__1.txt @@ -1,28 +1,4 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT 
INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; -INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'morgan93@example.net', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'joy.schultz@example.org', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt index f333155..4f6eb6d 100644 --- a/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt +++ b/tests/__snapshots__/DumperTest__it_can_replace_columns_with_static_values__1.txt @@ -1,28 +1,4 @@ -DROP TABLE IF EXISTS `failed_jobs`; -CREATE TABLE failed_jobs (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, uuid VARCHAR(255) NOT NULL COLLATE "BINARY", connection CLOB NOT NULL COLLATE "BINARY", queue CLOB NOT NULL COLLATE "BINARY", payload CLOB NOT NULL COLLATE "BINARY", exception CLOB NOT NULL COLLATE "BINARY", failed_at DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL);CREATE UNIQUE INDEX failed_jobs_uuid_unique ON failed_jobs (uuid); -LOCK TABLES `failed_jobs` WRITE; -ALTER TABLE `failed_jobs` DISABLE KEYS; -ALTER TABLE `failed_jobs` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `migrations`; -CREATE TABLE migrations (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, migration VARCHAR(255) NOT NULL COLLATE "BINARY", batch INTEGER NOT NULL); -LOCK TABLES `migrations` WRITE; -ALTER TABLE `migrations` DISABLE KEYS; -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '2014_10_12_000000_testbench_create_users_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '2014_10_12_100000_testbench_create_password_resets_table', '1'); -INSERT INTO `migrations` (`id`, `migration`, `batch`) 
VALUES ('3', '2019_08_19_000000_testbench_create_failed_jobs_table', '1'); -ALTER TABLE `migrations` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `password_resets`; -CREATE TABLE password_resets (email VARCHAR(255) NOT NULL COLLATE "BINARY", token VARCHAR(255) NOT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL);CREATE INDEX password_resets_email_index ON password_resets (email); -LOCK TABLES `password_resets` WRITE; -ALTER TABLE `password_resets` DISABLE KEYS; -ALTER TABLE `password_resets` ENABLE KEYS; -UNLOCK TABLES; -DROP TABLE IF EXISTS `users`; -CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name VARCHAR(255) NOT NULL COLLATE "BINARY", email VARCHAR(255) NOT NULL COLLATE "BINARY", email_verified_at DATETIME DEFAULT NULL, password VARCHAR(255) NOT NULL COLLATE "BINARY", remember_token VARCHAR(255) DEFAULT NULL COLLATE "BINARY", created_at DATETIME DEFAULT NULL, updated_at DATETIME DEFAULT NULL);CREATE UNIQUE INDEX users_email_unique ON users (email); -LOCK TABLES `users` WRITE; -ALTER TABLE `users` DISABLE KEYS; +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); INSERT INTO `users` (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel', 'marcel@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); -ALTER TABLE `users` ENABLE KEYS; -UNLOCK TABLES; diff --git a/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt new file mode 100644 index 0000000..ea2ed5a --- /dev/null +++ b/tests/__snapshots__/DumperTest__it_creates_chunked_insert_statements_for_a_table__1.txt @@ -0,0 +1,5 @@ +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1'); +INSERT INTO users (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('1', 'Marcel1', 'marcel1@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('2', 'Marcel2', 'marcel2@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('3', 'Marcel3', 'marcel3@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); +INSERT INTO users (`id`, `name`, `email`, `email_verified_at`, `password`, `remember_token`, `created_at`, `updated_at`) VALUES ('4', 'Marcel4', 'marcel4@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'), ('5', 'Marcel5', 'marcel5@beyondco.de', NULL, 'test', NULL, '2021-01-01 00:00:00', '2021-01-01 00:00:00'); diff --git a/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt new file mode 100644 index 0000000..e1088c5 --- /dev/null +++ 
b/tests/__snapshots__/DumperTest__it_does_remove_excluded_tables_from_allTables__1.txt @@ -0,0 +1,3 @@ +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('1', '0001_01_01_000000_testbench_create_users_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('2', '0001_01_01_000001_testbench_create_cache_table', '1'); +INSERT INTO `migrations` (`id`, `migration`, `batch`) VALUES ('3', '0001_01_01_000002_testbench_create_jobs_table', '1');
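Reviewer note: the pieces this patch introduces (the callable config entry, `exclude()`, and `outputInChunksOf()`) are intended to be combined along the following lines. This is an illustrative sketch rather than part of the patch; the `App\Support\MaskedDump` class name follows the example in `docs/schema-definition.md`, and the chunk size of 500 is an arbitrary assumption.

```php
<?php

namespace App\Support;

use BeyondCode\LaravelMaskedDumper\DumpSchema;
use BeyondCode\LaravelMaskedDumper\TableDefinitions\TableDefinition;
use Faker\Generator as Faker;

class MaskedDump
{
    public static function define()
    {
        return DumpSchema::define()
            // Dump every table, but leave sensitive tables out entirely.
            ->allTables()
            ->exclude('password_reset_tokens')
            ->table('users', function (TableDefinition $table) {
                // Replace personal data with Faker values and mask the password hash.
                $table->replace('email', function (Faker $faker) {
                    return $faker->safeEmail;
                });
                $table->mask('password');

                // Emit multi-row INSERTs in chunks instead of one INSERT per row.
                return $table->outputInChunksOf(500);
            });
    }
}
```

With `config/masked-dump.php` pointing at `[MaskedDump::class, 'define']`, this stays compatible with `php artisan config:cache`; chunked output only changes how the INSERT statements are grouped, while masking and replacement are still applied per row exactly as in the unchunked path.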