Mirror of https://github.com/pocket-id/pocket-id.git (synced 2026-02-14 16:27:29 +00:00)
feat: add CLI command for importing and exporting Pocket ID data (#998)
Co-authored-by: Alessandro (Ale) Segala <43508+ItalyPaleAle@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
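For orientation: the new spec below drives the CLI inside the containers via Docker Compose, and the invocations it exercises reduce to the following sketch (assembled from the test code; `--path -` streams the archive over stdout/stdin, and `--yes` appears to skip an interactive confirmation on import):

  /app/pocket-id export --path /app/data/pocket-id-export.zip
  /app/pocket-id export --path -
  /app/pocket-id import --path /app/data/pocket-id-export.zip --yes
  /app/pocket-id import --path - --yes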
@@ -119,11 +119,11 @@ test('Update email configuration', async ({ page }) => {
 test('Update application images', async ({ page }) => {
   await page.getByRole('button', { name: 'Expand card' }).nth(4).click();
 
-  await page.getByLabel('Favicon').setInputFiles('assets/w3-schools-favicon.ico');
-  await page.getByLabel('Light Mode Logo').setInputFiles('assets/pingvin-share-logo.png');
-  await page.getByLabel('Dark Mode Logo').setInputFiles('assets/nextcloud-logo.png');
-  await page.getByLabel('Default Profile Picture').setInputFiles('assets/pingvin-share-logo.png');
-  await page.getByLabel('Background Image').setInputFiles('assets/clouds.jpg');
+  await page.getByLabel('Favicon').setInputFiles('resources/images/w3-schools-favicon.ico');
+  await page.getByLabel('Light Mode Logo').setInputFiles('resources/images/pingvin-share-logo.png');
+  await page.getByLabel('Dark Mode Logo').setInputFiles('resources/images/nextcloud-logo.png');
+  await page.getByLabel('Default Profile Picture').setInputFiles('resources/images/pingvin-share-logo.png');
+  await page.getByLabel('Background Image').setInputFiles('resources/images/clouds.jpg');
 
   await page.getByRole('button', { name: 'Save' }).last().click();
 
   await expect(page.locator('[data-type="success"]')).toHaveText(
tests/specs/cli.spec.ts (new file, 366 lines)
@@ -0,0 +1,366 @@
import { expect, test } from '@playwright/test';
import AdmZip from 'adm-zip';
import { execFileSync, ExecFileSyncOptions } from 'child_process';
import crypto from 'crypto';
import { users } from 'data';
import fs from 'fs';
import path from 'path';
import { cleanupBackend } from 'utils/cleanup.util';
import { pathFromRoot, tmpDir } from 'utils/fs.util';

const containerName = 'pocket-id';
const setupDir = pathFromRoot('setup');
const exampleExportPath = pathFromRoot('resources/export');
const dockerCommandMaxBuffer = 100 * 1024 * 1024;
let mode: 'sqlite' | 'postgres' | 's3' = 'sqlite';

test.beforeAll(() => {
  const dockerComposeLs = runDockerCommand(['compose', 'ls', '--format', 'json']);
  if (dockerComposeLs.includes('postgres')) {
    mode = 'postgres';
  } else if (dockerComposeLs.includes('s3')) {
    mode = 's3';
  }
  console.log(`Running CLI tests in ${mode.toUpperCase()} mode`);
});
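// For reference, `docker compose ls --format json` lists the running Compose
// projects; a hypothetical entry for the Postgres setup could look like
//   [{"Name":"pocket-id-postgres","Status":"running(2)","ConfigFiles":"..."}]
// The detection above relies only on the substring 'postgres' or 's3'
// appearing somewhere in that JSON.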

test('Export', async ({ baseURL }) => {
  // Reset the backend, but skip the LDAP setup because the example export has no LDAP data
  await cleanupBackend({ skipLdapSetup: true });

  // Fetch the profile pictures because they get generated on demand
  await Promise.all([
    fetch(`${baseURL}/api/users/${users.craig.id}/profile-picture.png`),
    fetch(`${baseURL}/api/users/${users.tim.id}/profile-picture.png`)
  ]);

  // Export the data from the seeded container
  const exportPath = path.join(tmpDir, 'export.zip');
  const extractPath = path.join(tmpDir, 'export-extracted');

  runExport(exportPath);
  unzipExport(exportPath, extractPath);

  compareExports(exampleExportPath, extractPath);
});

test('Export via stdout', async ({ baseURL }) => {
  await cleanupBackend({ skipLdapSetup: true });

  await Promise.all([
    fetch(`${baseURL}/api/users/${users.craig.id}/profile-picture.png`),
    fetch(`${baseURL}/api/users/${users.tim.id}/profile-picture.png`)
  ]);

  const stdoutBuffer = runExportToStdout();
  const stdoutExtractPath = path.join(tmpDir, 'export-stdout-extracted');
  unzipExportBuffer(stdoutBuffer, stdoutExtractPath);

  compareExports(exampleExportPath, stdoutExtractPath);
});

test('Import', async () => {
  // Reset the backend without seeding
  await cleanupBackend({ skipSeed: true });

  // Run the import with the example export data
  const exampleExportArchivePath = path.join(tmpDir, 'example-export.zip');
  archiveExampleExport(exampleExportArchivePath);

  try {
    runDockerComposeCommand(['stop', containerName]);
    runImport(exampleExportArchivePath);
  } finally {
    runDockerComposeCommand(['up', '-d', containerName]);
  }

  // Export again from the imported instance
  const exportPath = path.join(tmpDir, 'export.zip');
  const exportExtracted = path.join(tmpDir, 'export-extracted');
  runExport(exportPath);
  unzipExport(exportPath, exportExtracted);

  compareExports(exampleExportPath, exportExtracted);
});

test('Import via stdin', async () => {
  await cleanupBackend({ skipSeed: true });

  const exampleExportArchivePath = path.join(tmpDir, 'example-export-stdin.zip');
  const exampleExportBuffer = archiveExampleExport(exampleExportArchivePath);

  try {
    runDockerComposeCommand(['stop', containerName]);
    runImportFromStdin(exampleExportBuffer);
  } finally {
    runDockerComposeCommand(['up', '-d', containerName]);
  }

  const exportPath = path.join(tmpDir, 'export-from-stdin.zip');
  const exportExtracted = path.join(tmpDir, 'export-from-stdin-extracted');
  runExport(exportPath);
  unzipExport(exportPath, exportExtracted);

  compareExports(exampleExportPath, exportExtracted);
});

function compareExports(dir1: string, dir2: string): void {
  const hashes1 = hashAllFiles(dir1);
  const hashes2 = hashAllFiles(dir2);

  const files1 = Object.keys(hashes1).sort();
  const files2 = Object.keys(hashes2).sort();
  expect(files2).toEqual(files1);

  for (const file of files1) {
    expect(hashes2[file], `${file} hash should match`).toEqual(hashes1[file]);
  }

  // Compare database.json contents
  const expectedData = loadJSON(path.join(dir1, 'database.json'));
  const actualData = loadJSON(path.join(dir2, 'database.json'));

  // Check special fields
  validateSpecialFields(actualData);

  // Normalize and compare
  const normalizedExpected = normalizeJSON(expectedData);
  const normalizedActual = normalizeJSON(actualData);
  expect(normalizedActual).toEqual(normalizedExpected);
}

function archiveExampleExport(outputPath: string): Buffer {
  fs.rmSync(outputPath, { force: true });

  const zip = new AdmZip();
  const files = fs.readdirSync(exampleExportPath);
  for (const file of files) {
    const filePath = path.join(exampleExportPath, file);
    if (fs.statSync(filePath).isFile()) {
      zip.addLocalFile(filePath);
    } else if (fs.statSync(filePath).isDirectory()) {
      zip.addLocalFolder(filePath, file);
    }
  }

  const buffer = zip.toBuffer();
  fs.writeFileSync(outputPath, buffer);
  return buffer;
}

// Helper to load JSON files
function loadJSON(path: string) {
  return JSON.parse(fs.readFileSync(path, 'utf-8'));
}

function normalizeJSON(obj: any): any {
  if (typeof obj === 'string') {
    try {
      // Decode and normalize base64-encoded JSON strings
      const parsed = JSON.parse(atob(obj));
      return JSON.stringify(normalizeJSON(parsed));
    } catch {
      return obj;
    }
  }

  if (Array.isArray(obj)) {
    // Sort arrays to make order irrelevant
    return obj
      .map(normalizeJSON)
      .sort((a, b) => JSON.stringify(a).localeCompare(JSON.stringify(b)));
  } else if (obj && typeof obj === 'object') {
    const ignoredKeys = ['id', 'created_at', 'expires_at', 'credentials', 'provider', 'version'];

    // Sort and normalize object keys, skipping ignored ones
    return Object.keys(obj)
      .filter((key) => !ignoredKeys.includes(key))
      .sort()
      .reduce(
        (acc, key) => {
          acc[key] = normalizeJSON(obj[key]);
          return acc;
        },
        {} as Record<string, any>
      );
  }

  return obj;
}
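// A quick illustration with made-up data: ignored keys (like 'id') are dropped,
// object keys are sorted, and arrays are ordered by their serialized form, so
//   normalizeJSON({ tags: ['b', 'a'], name: 'x', id: 'f81d4fae-0000-0000-0000-000000000000' })
// and
//   normalizeJSON({ id: '16fd2706-0000-0000-0000-000000000000', name: 'x', tags: ['a', 'b'] })
// both yield { name: 'x', tags: ['a', 'b'] } and therefore compare as equal.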

function validateSpecialFields(obj: any): void {
  if (Array.isArray(obj)) {
    for (const item of obj) validateSpecialFields(item);
  } else if (obj && typeof obj === 'object') {
    for (const [key, value] of Object.entries(obj)) {
      if (key === 'id') {
        expect(isUUID(value), `Expected '${value}' to be a valid UUID`).toBe(true);
      } else if (key === 'created_at' || key === 'expires_at') {
        expect(
          isValidISODate(value),
          `Expected '${key}' = ${value} to be a valid ISO 8601 date string`
        ).toBe(true);
      } else if (key === 'provider') {
        expect(
          ['postgres', 'sqlite'].includes(value as string),
          `Expected 'provider' to be either 'postgres' or 'sqlite', got '${value}'`
        ).toBe(true);
      } else if (key === 'version') {
        expect(value).toBeGreaterThanOrEqual(20251001000000);
      } else {
        validateSpecialFields(value);
      }
    }
  }
}

function isUUID(value: any): boolean {
  if (typeof value !== 'string') return false;
  const uuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;
  return uuidRegex.test(value);
}

function isValidISODate(value: any): boolean {
  const isoRegex = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z$/;
  if (!isoRegex.test(value)) return false;
  const date = new Date(value);
  return !isNaN(date.getTime());
}

function runImport(pathToFile: string) {
  const importContainerId = runDockerComposeCommand([
    'run',
    '-d',
    '-v',
    `${pathToFile}:/app/data/pocket-id-export.zip`,
    containerName,
    '/app/pocket-id',
    'import',
    '--path',
    '/app/data/pocket-id-export.zip',
    '--yes'
  ]);
  try {
    runDockerCommand(['wait', importContainerId]);
  } finally {
    runDockerCommand(['rm', '-f', importContainerId]);
  }
}

function runImportFromStdin(archive: Buffer): void {
  runDockerComposeCommandRaw(
    ['run', '--rm', '-T', containerName, '/app/pocket-id', 'import', '--yes', '--path', '-'],
    { input: archive }
  );
}

function runExport(outputFile: string): void {
  const containerId = runDockerComposeCommand([
    'run',
    '-d',
    containerName,
    '/app/pocket-id',
    'export',
    '--path',
    '/app/data/pocket-id-export.zip'
  ]);

  try {
    // Wait until the export finishes
    runDockerCommand(['wait', containerId]);
    runDockerCommand(['cp', `${containerId}:/app/data/pocket-id-export.zip`, outputFile]);
  } finally {
    runDockerCommand(['rm', '-f', containerId]);
  }

  expect(fs.existsSync(outputFile)).toBe(true);
}

function runExportToStdout(): Buffer {
  const res = runDockerComposeCommandRaw([
    'run',
    '--rm',
    '-T',
    containerName,
    '/app/pocket-id',
    'export',
    '--path',
    '-'
  ]);
  fs.writeFileSync('export-stdout.txt', res);
  return res;
}

function unzipExport(zipFile: string, destDir: string): void {
  fs.rmSync(destDir, { recursive: true, force: true });
  const zip = new AdmZip(zipFile);
  zip.extractAllTo(destDir, true);
}

function unzipExportBuffer(zipBuffer: Buffer, destDir: string): void {
  fs.rmSync(destDir, { recursive: true, force: true });
  const zip = new AdmZip(zipBuffer);
  zip.extractAllTo(destDir, true);
}

function hashFile(filePath: string): string {
  const buffer = fs.readFileSync(filePath);
  return crypto.createHash('sha256').update(buffer).digest('hex');
}

function getAllFiles(dir: string, root = dir): string[] {
  return fs.readdirSync(dir).flatMap((entry) => {
    if (['.DS_Store', 'database.json'].includes(entry)) return [];

    const fullPath = path.join(dir, entry);
    const stat = fs.statSync(fullPath);
    return stat.isDirectory() ? getAllFiles(fullPath, root) : [path.relative(root, fullPath)];
  });
}

function hashAllFiles(dir: string): Record<string, string> {
  const files = getAllFiles(dir);
  const hashes: Record<string, string> = {};
  for (const relativePath of files) {
    const fullPath = path.join(dir, relativePath);
    hashes[relativePath] = hashFile(fullPath);
  }
  return hashes;
}

function runDockerCommand(args: string[], options?: ExecFileSyncOptions): string {
  return execFileSync('docker', args, {
    cwd: setupDir,
    stdio: 'pipe',
    maxBuffer: dockerCommandMaxBuffer,
    ...options
  })
    .toString()
    .trim();
}

function runDockerComposeCommand(args: string[]): string {
  return runDockerComposeCommandRaw(args).toString().trim();
}

function runDockerComposeCommandRaw(args: string[], options?: ExecFileSyncOptions): Buffer {
  return execFileSync('docker', dockerComposeArgs(args), {
    cwd: setupDir,
    stdio: 'pipe',
    maxBuffer: dockerCommandMaxBuffer,
    ...options
  }) as Buffer;
}

function dockerComposeArgs(args: string[]): string[] {
  let dockerComposeFile = 'docker-compose.yml';
  switch (mode) {
    case 'postgres':
      dockerComposeFile = 'docker-compose-postgres.yml';
      break;
    case 's3':
      dockerComposeFile = 'docker-compose-s3.yml';
      break;
  }
  return ['compose', '-f', dockerComposeFile, ...args];
}
@@ -1,8 +1,9 @@
 import { test as setup } from '@playwright/test';
-import authUtil from '../utils/auth.util';
-import { cleanupBackend } from '../utils/cleanup.util';
+import { pathFromRoot } from 'utils/fs.util';
+import authUtil from '../../utils/auth.util';
+import { cleanupBackend } from '../../utils/cleanup.util';
 
-const authFile = './.auth/user.json';
+const authFile = pathFromRoot('.tmp/auth/user.json');
 
 setup('authenticate', async ({ page }) => {
   await cleanupBackend();
tests/specs/fixtures/global.setup.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import fs from 'fs';
import { tmpDir } from 'utils/fs.util';

async function globalSetup() {
  await fs.promises.mkdir(tmpDir, { recursive: true });
}

export default globalSetup;
tests/specs/fixtures/global.teardown.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
import fs from 'fs';
import { tmpDir } from 'utils/fs.util';

async function globalTeardown() {
  await fs.promises.rm(tmpDir, { recursive: true, force: true });
}

export default globalTeardown;
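These two fixtures create and remove the shared temporary directory around the whole run. They presumably get registered in the Playwright config, which is not part of this excerpt; a minimal sketch, assuming the fixture paths above:

import { defineConfig } from '@playwright/test';

// Sketch only: the actual config file and relative paths are not shown in this diff.
export default defineConfig({
  globalSetup: './specs/fixtures/global.setup.ts',
  globalTeardown: './specs/fixtures/global.teardown.ts'
});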
@@ -20,9 +20,9 @@ test.describe('Create OIDC client', () => {
   await page.getByTestId('callback-url-2').fill(oidcClient.secondCallbackUrl);
 
   await page.locator('[role="tab"][data-value="light-logo"]').first().click();
-  await page.setInputFiles('#oidc-client-logo-light', 'assets/pingvin-share-logo.png');
+  await page.setInputFiles('#oidc-client-logo-light', 'resources/images/pingvin-share-logo.png');
   await page.locator('[role="tab"][data-value="dark-logo"]').first().click();
-  await page.setInputFiles('#oidc-client-logo-dark', 'assets/pingvin-share-logo.png');
+  await page.setInputFiles('#oidc-client-logo-dark', 'resources/images/pingvin-share-logo.png');
 
   if (clientId) {
     await page.getByRole('button', { name: 'Show Advanced Options' }).click();
@@ -71,9 +71,9 @@ test('Edit OIDC client', async ({ page }) => {
   await page.getByLabel('Name').fill('Nextcloud updated');
   await page.getByTestId('callback-url-1').first().fill('http://nextcloud-updated/auth/callback');
   await page.locator('[role="tab"][data-value="light-logo"]').first().click();
-  await page.setInputFiles('#oidc-client-logo-light', 'assets/nextcloud-logo.png');
+  await page.setInputFiles('#oidc-client-logo-light', 'resources/images/nextcloud-logo.png');
   await page.locator('[role="tab"][data-value="dark-logo"]').first().click();
-  await page.setInputFiles('#oidc-client-logo-dark', 'assets/nextcloud-logo.png');
+  await page.setInputFiles('#oidc-client-logo-dark', 'resources/images/nextcloud-logo.png');
   await page.getByLabel('Client Launch URL').fill(oidcClient.launchURL);
   await page.getByRole('button', { name: 'Save' }).click();
@@ -25,7 +25,7 @@ test.describe('Initial User Signup', () => {
 });
 
 test('Initial Signup - success flow', async ({ page }) => {
-  await cleanupBackend(true);
+  await cleanupBackend({ skipSeed: true });
   await page.goto('/setup');
   await page.getByLabel('First name').fill('Jane');
   await page.getByLabel('Last name').fill('Smith');