mirror of
https://github.com/strapi/strapi.git
synced 2025-12-05 19:42:05 +00:00
Merge pull request #14972 from strapi/deits/export-archive
[DEITS] Add tests for local file destination provider
This commit is contained in:
commit
589a06ab58
@ -5,8 +5,10 @@ const pkg = require('./package.json');
|
||||
|
||||
// Jest configuration for this package, layered on top of the repo-wide base config.
module.exports = {
  ...baseConfig,
  // NOTE(review): the explicit @swc/jest transform below overrides the transform
  // that the 'ts-jest' preset installs, so the preset only contributes its other
  // defaults — confirm both are intentionally kept.
  preset: 'ts-jest',
  // Prefer the name declared under pkg.strapi, falling back to the package name.
  displayName: (pkg.strapi && pkg.strapi.name) || pkg.name,
  // Limit test discovery to this package's directory.
  roots: [__dirname],
  testMatch: ['**/__tests__/**/*.test.ts'],
  transform: {
    '^.+\\.(t|j)sx?$': ['@swc/jest'],
  },
};
|
||||
|
||||
@ -0,0 +1,219 @@
|
||||
import stream from 'stream';
|
||||
|
||||
import { createLocalFileDestinationProvider, ILocalFileDestinationProviderOptions } from '../';
|
||||
import * as encryption from '../../../encryption/encrypt';
|
||||
import {
|
||||
createFilePathFactory,
|
||||
createTarEntryStream,
|
||||
} from '../../local-file-destination-provider/utils';
|
||||
|
||||
const filePath = './test-file';
|
||||
|
||||
jest.mock('../../../encryption/encrypt', () => {
|
||||
return {
|
||||
__esModule: true,
|
||||
createEncryptionCipher: (key: string) => {},
|
||||
};
|
||||
});
|
||||
|
||||
jest.mock('../../local-file-destination-provider/utils');
|
||||
|
||||
describe('Local File Destination Provider', () => {
|
||||
(createFilePathFactory as jest.Mock).mockImplementation(jest.fn());
|
||||
|
||||
afterEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
describe('Bootstrap', () => {
|
||||
it('Throws an error if encryption is enabled and the key is not provided', () => {
|
||||
const providerOptions = {
|
||||
encryption: { enabled: true },
|
||||
compression: { enabled: false },
|
||||
file: { path: './test-file' },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
expect(() => provider.bootstrap()).toThrowError("Can't encrypt without a key");
|
||||
});
|
||||
|
||||
it('Adds .gz extension to the archive path when compression is enabled', async () => {
|
||||
const providerOptions = {
|
||||
encryption: { enabled: false },
|
||||
compression: { enabled: true },
|
||||
file: { path: filePath },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
|
||||
expect(provider.results.file!.path).toEqual(`${filePath}.tar.gz`);
|
||||
});
|
||||
|
||||
it('Adds .enc extension to the archive path when encryption is enabled', async () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: true, key: 'key' },
|
||||
compression: { enabled: false },
|
||||
file: { path: filePath },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
|
||||
expect(provider.results.file!.path).toEqual(`${filePath}.tar.enc`);
|
||||
});
|
||||
|
||||
it('Adds .gz.enc extension to the archive path when encryption and compression are enabled', async () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: true, key: 'key' },
|
||||
compression: { enabled: true },
|
||||
file: { path: filePath },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
|
||||
expect(provider.results.file!.path).toEqual(`${filePath}.tar.gz.enc`);
|
||||
});
|
||||
|
||||
it('Adds the compression step to the stream chain when compression is enabled', async () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: true, key: 'key' },
|
||||
compression: { enabled: true },
|
||||
file: { path: filePath },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
jest.spyOn(provider, 'createGzip');
|
||||
|
||||
await provider.bootstrap();
|
||||
|
||||
expect(provider.createGzip).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('Adds the encryption step to the stream chain when encryption is enabled', async () => {
|
||||
jest.spyOn(encryption, 'createEncryptionCipher');
|
||||
const key = 'key';
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: true, key },
|
||||
compression: { enabled: true },
|
||||
file: { path: filePath },
|
||||
};
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
|
||||
expect(encryption.createEncryptionCipher).toHaveBeenCalledWith(key);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Streaming entities', () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: false },
|
||||
compression: { enabled: false },
|
||||
file: { path: filePath },
|
||||
};
|
||||
(createTarEntryStream as jest.Mock).mockImplementation(jest.fn());
|
||||
|
||||
it('Creates a tar entry stream', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
provider.getEntitiesStream();
|
||||
|
||||
expect(createTarEntryStream).toHaveBeenCalled();
|
||||
expect(createFilePathFactory).toHaveBeenCalledWith('entities');
|
||||
});
|
||||
it('Returns a stream', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
const entitiesStream = provider.getEntitiesStream();
|
||||
|
||||
expect(entitiesStream instanceof stream.Writable).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Streaming schemas', () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: false },
|
||||
compression: { enabled: false },
|
||||
file: { path: filePath },
|
||||
};
|
||||
(createTarEntryStream as jest.Mock).mockImplementation(jest.fn());
|
||||
|
||||
it('Creates a tar entry stream for schemas', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
provider.getSchemasStream();
|
||||
|
||||
expect(createTarEntryStream).toHaveBeenCalled();
|
||||
expect(createFilePathFactory).toHaveBeenCalledWith('schemas');
|
||||
});
|
||||
|
||||
it('Returns a stream', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
const schemasStream = provider.getSchemasStream();
|
||||
|
||||
expect(schemasStream instanceof stream.Writable).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Streaming links', () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: false },
|
||||
compression: { enabled: false },
|
||||
file: { path: filePath },
|
||||
};
|
||||
(createTarEntryStream as jest.Mock).mockImplementation(jest.fn());
|
||||
|
||||
it('Creates a tar entry stream for links', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
provider.getLinksStream();
|
||||
|
||||
expect(createTarEntryStream).toHaveBeenCalled();
|
||||
expect(createFilePathFactory).toHaveBeenCalledWith('links');
|
||||
});
|
||||
|
||||
it('Returns a stream', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
const linksStream = provider.getLinksStream();
|
||||
|
||||
expect(linksStream instanceof stream.Writable).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Streaming configuration', () => {
|
||||
const providerOptions: ILocalFileDestinationProviderOptions = {
|
||||
encryption: { enabled: false },
|
||||
compression: { enabled: false },
|
||||
file: { path: filePath },
|
||||
};
|
||||
(createTarEntryStream as jest.Mock).mockImplementation(jest.fn());
|
||||
|
||||
it('Creates a tar entry stream for configuration', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
provider.getConfigurationStream();
|
||||
|
||||
expect(createTarEntryStream).toHaveBeenCalled();
|
||||
expect(createFilePathFactory).toHaveBeenCalledWith('configuration');
|
||||
});
|
||||
|
||||
it('Returns a stream', async () => {
|
||||
const provider = createLocalFileDestinationProvider(providerOptions);
|
||||
|
||||
await provider.bootstrap();
|
||||
const configurationStream = provider.getConfigurationStream();
|
||||
|
||||
expect(configurationStream instanceof stream.Writable).toBeTruthy();
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -0,0 +1,76 @@
|
||||
import tar from 'tar-stream';
|
||||
import { createFilePathFactory, createTarEntryStream } from '../utils';
|
||||
|
||||
describe('Local File Destination Provider - Utils', () => {
|
||||
describe('Create File Path Factory', () => {
|
||||
it('returns a function', () => {
|
||||
const filePathFactory = createFilePathFactory('entities');
|
||||
expect(typeof filePathFactory).toBe('function');
|
||||
});
|
||||
it('returns a file path when calling a function', () => {
|
||||
const type = 'entities';
|
||||
const fileIndex = 0;
|
||||
const filePathFactory = createFilePathFactory(type);
|
||||
|
||||
const path = filePathFactory(fileIndex);
|
||||
|
||||
expect(path).toBe(`${type}/${type}_0000${fileIndex}.jsonl`);
|
||||
});
|
||||
|
||||
describe('returns file paths when calling the factory', () => {
|
||||
const cases = [
|
||||
['schemas', 0, 'schemas/schemas_00000.jsonl'],
|
||||
['entities', 5, 'entities/entities_00005.jsonl'],
|
||||
['links', 11, 'links/links_00011.jsonl'],
|
||||
['schemas', 543, 'schemas/schemas_00543.jsonl'],
|
||||
['entities', 5213, 'entities/entities_05213.jsonl'],
|
||||
['links', 33231, 'links/links_33231.jsonl'],
|
||||
];
|
||||
|
||||
test.each(cases)(
|
||||
'Given type: %s and fileIndex: %d, returns the right file path: %s',
|
||||
(type: any, fileIndex: any, filePath: any) => {
|
||||
const filePathFactory = createFilePathFactory(type);
|
||||
|
||||
const path = filePathFactory(fileIndex);
|
||||
|
||||
expect(path).toBe(filePath);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
describe('Create Tar Entry Stream', () => {
|
||||
it('Throws an error when the payload is too large', async () => {
|
||||
const maxSize = 3;
|
||||
const chunk = 'test';
|
||||
const archive = tar.pack();
|
||||
const pathFactory = createFilePathFactory('entries');
|
||||
const tarEntryStream = createTarEntryStream(archive, pathFactory, maxSize);
|
||||
|
||||
const write = async () =>
|
||||
await new Promise((resolve, reject) => {
|
||||
tarEntryStream.on('finish', resolve);
|
||||
tarEntryStream.on('error', reject);
|
||||
tarEntryStream.write(chunk);
|
||||
});
|
||||
|
||||
await expect(write).rejects.toThrow(`payload too large: ${chunk.length}>${maxSize}`);
|
||||
});
|
||||
it('Resolves when the payload is smaller than the max size', async () => {
|
||||
const maxSize = 30;
|
||||
const chunk = 'test';
|
||||
const archive = tar.pack();
|
||||
const pathFactory = createFilePathFactory('entries');
|
||||
const tarEntryStream = createTarEntryStream(archive, pathFactory, maxSize);
|
||||
|
||||
const write = async () =>
|
||||
await new Promise((resolve, reject) => {
|
||||
tarEntryStream.on('finish', resolve);
|
||||
tarEntryStream.on('error', reject);
|
||||
tarEntryStream.write(chunk);
|
||||
});
|
||||
|
||||
expect(write).resolves;
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -4,18 +4,18 @@ import type {
|
||||
IMetadata,
|
||||
ProviderType,
|
||||
Stream,
|
||||
} from '../../types';
|
||||
} from '../../../types';
|
||||
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import tar from 'tar-stream';
|
||||
import path from 'path';
|
||||
import zlib from 'zlib';
|
||||
import { Writable, Readable } from 'stream';
|
||||
import { Readable } from 'stream';
|
||||
import { stringer } from 'stream-json/jsonl/Stringer';
|
||||
import { chain } from 'stream-chain';
|
||||
|
||||
import { createEncryptionCipher } from '../encryption/encrypt';
|
||||
import { chain, Writable } from 'stream-chain';
|
||||
|
||||
import { createEncryptionCipher } from '../../encryption/encrypt';
|
||||
import { createFilePathFactory, createTarEntryStream } from './utils';
|
||||
export interface ILocalFileDestinationProviderOptions {
|
||||
// Encryption
|
||||
encryption: {
|
||||
@ -96,6 +96,10 @@ class LocalFileDestinationProvider implements IDestinationProvider {
|
||||
return transforms;
|
||||
}
|
||||
|
||||
// Factory wrapper around zlib.createGzip, extracted as a method so the
// compression step can be spied on in tests.
createGzip(): zlib.Gzip {
  return zlib.createGzip();
}
|
||||
|
||||
bootstrap(): void | Promise<void> {
|
||||
const { compression, encryption } = this.options;
|
||||
|
||||
@ -110,7 +114,7 @@ class LocalFileDestinationProvider implements IDestinationProvider {
|
||||
const archiveTransforms: Stream[] = [];
|
||||
|
||||
if (compression.enabled) {
|
||||
archiveTransforms.push(zlib.createGzip());
|
||||
archiveTransforms.push(this.createGzip());
|
||||
}
|
||||
|
||||
if (encryption.enabled && encryption.key) {
|
||||
@ -270,76 +274,3 @@ class LocalFileDestinationProvider implements IDestinationProvider {
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a file path factory for a given path & prefix.
|
||||
* Upon being called, the factory will return a file path for a given index
|
||||
*/
|
||||
const createFilePathFactory =
|
||||
(type: string) =>
|
||||
(fileIndex: number = 0): string => {
|
||||
return path.join(
|
||||
// "{type}" directory
|
||||
type,
|
||||
// "${type}_XXXXX.jsonl" file
|
||||
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`
|
||||
);
|
||||
};
|
||||
|
||||
const createTarEntryStream = (
|
||||
archive: tar.Pack,
|
||||
pathFactory: (index?: number) => string,
|
||||
maxSize: number = 2.56e8
|
||||
) => {
|
||||
let fileIndex = 0;
|
||||
let buffer = '';
|
||||
|
||||
const flush = async () => {
|
||||
if (!buffer) {
|
||||
return;
|
||||
}
|
||||
|
||||
const name = pathFactory(fileIndex++);
|
||||
const size = buffer.length;
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
archive.entry({ name, size }, buffer, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
buffer = '';
|
||||
};
|
||||
|
||||
const push = (chunk: string | Buffer) => {
|
||||
buffer += chunk;
|
||||
};
|
||||
|
||||
return new Writable({
|
||||
async destroy(err, callback) {
|
||||
await flush();
|
||||
callback(err);
|
||||
},
|
||||
|
||||
async write(chunk, _encoding, callback) {
|
||||
const size = chunk.length;
|
||||
|
||||
if (chunk.length > maxSize) {
|
||||
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
|
||||
return;
|
||||
}
|
||||
|
||||
if (buffer.length + size > maxSize) {
|
||||
await flush();
|
||||
}
|
||||
|
||||
push(chunk);
|
||||
|
||||
callback(null);
|
||||
},
|
||||
});
|
||||
};
|
||||
@ -0,0 +1,76 @@
|
||||
import { Writable } from 'stream';
|
||||
import path from 'path';
|
||||
import tar from 'tar-stream';
|
||||
|
||||
/**
|
||||
* Create a file path factory for a given path & prefix.
|
||||
* Upon being called, the factory will return a file path for a given index
|
||||
*/
|
||||
export const createFilePathFactory =
|
||||
(type: string) =>
|
||||
(fileIndex: number = 0): string => {
|
||||
return path.join(
|
||||
// "{type}" directory
|
||||
type,
|
||||
// "${type}_XXXXX.jsonl" file
|
||||
`${type}_${String(fileIndex).padStart(5, '0')}.jsonl`
|
||||
);
|
||||
};
|
||||
|
||||
export const createTarEntryStream = (
|
||||
archive: tar.Pack,
|
||||
pathFactory: (index?: number) => string,
|
||||
maxSize: number = 2.56e8
|
||||
) => {
|
||||
let fileIndex = 0;
|
||||
let buffer = '';
|
||||
|
||||
const flush = async () => {
|
||||
if (!buffer) {
|
||||
return;
|
||||
}
|
||||
|
||||
const name = pathFactory(fileIndex++);
|
||||
const size = buffer.length;
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
archive.entry({ name, size }, buffer, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
|
||||
buffer = '';
|
||||
};
|
||||
|
||||
const push = (chunk: string | Buffer) => {
|
||||
buffer += chunk;
|
||||
};
|
||||
|
||||
return new Writable({
|
||||
async destroy(err, callback) {
|
||||
await flush();
|
||||
callback(err);
|
||||
},
|
||||
|
||||
async write(chunk, _encoding, callback) {
|
||||
const size = chunk.length;
|
||||
|
||||
if (chunk.length > maxSize) {
|
||||
callback(new Error(`payload too large: ${chunk.length}>${maxSize}`));
|
||||
return;
|
||||
}
|
||||
|
||||
if (buffer.length + size > maxSize) {
|
||||
await flush();
|
||||
}
|
||||
|
||||
push(chunk);
|
||||
|
||||
callback(null);
|
||||
},
|
||||
});
|
||||
};
|
||||
Loading…
x
Reference in New Issue
Block a user